1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instructions such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split the thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instructions. Currently, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
45 ;;---------------------------------------------------------------------------
48 ;; Register numbers -- All machine registers should be defined here
50 [(R0_REGNUM 0) ; First CORE register
51 (R1_REGNUM 1) ; Second CORE register
52 (IP_REGNUM 12) ; Scratch register
53 (SP_REGNUM 13) ; Stack pointer
54 (LR_REGNUM 14) ; Return address register
55 (PC_REGNUM 15) ; Program counter
56 (LAST_ARM_REGNUM 15) ;
57 (CC_REGNUM 100) ; Condition code pseudo register
58 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
61 ;; 3rd operand to select_dominance_cc_mode
68 ;; conditional compare combination
79 ;;---------------------------------------------------------------------------
82 ;; Processor type. This is created automatically from arm-cores.def.
83 (include "arm-tune.md")
85 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
86 ; generating ARM code. This is used to control the length of some insn
87 ; patterns that share the same RTL in both ARM and Thumb code.
88 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
90 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
91 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
93 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
94 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
96 ; We use this attribute to disable alternatives that can produce 32-bit
97 ; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
98 ; that contain 32-bit instructions.
99 (define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
101 ; This attribute is used to disable a predicated alternative when we have
103 (define_attr "predicable_short_it" "no,yes" (const_string "yes"))
105 ;; Operand number of an input operand that is shifted. Zero if the
106 ;; given instruction does not shift one of its input operands.
107 (define_attr "shift" "" (const_int 0))
109 ; Floating Point Unit. If we only have floating point emulation, then there
110 ; is no point in scheduling the floating point insns. (Well, for best
111 ; performance we should try to group them together).
112 (define_attr "fpu" "none,vfp"
113 (const (symbol_ref "arm_fpu_attr")))
115 (define_attr "predicated" "yes,no" (const_string "no"))
117 ; LENGTH of an instruction (in bytes)
118 (define_attr "length" ""
121 ; The architecture which supports the instruction (or alternative).
122 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
123 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode, "v6"
124 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
125 ; arm_arch6. This attribute is used to compute attribute "enabled";
126 ; use value "any" to enable an alternative in all cases.
127 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
128 (const_string "any"))
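; A usage sketch (the alternatives here are invented for illustration): a
; pattern whose second alternative is only available in Thumb-2 state can
; mark it with
;   (set_attr "arch" "*,t2")
; and "arch_enabled" below then folds that test into "enabled".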
130 (define_attr "arch_enabled" "no,yes"
131 (cond [(eq_attr "arch" "any")
134 (and (eq_attr "arch" "a")
135 (match_test "TARGET_ARM"))
138 (and (eq_attr "arch" "t")
139 (match_test "TARGET_THUMB"))
142 (and (eq_attr "arch" "t1")
143 (match_test "TARGET_THUMB1"))
146 (and (eq_attr "arch" "t2")
147 (match_test "TARGET_THUMB2"))
150 (and (eq_attr "arch" "32")
151 (match_test "TARGET_32BIT"))
154 (and (eq_attr "arch" "v6")
155 (match_test "TARGET_32BIT && arm_arch6"))
158 (and (eq_attr "arch" "nov6")
159 (match_test "TARGET_32BIT && !arm_arch6"))
162 (and (eq_attr "arch" "avoid_neon_for_64bits")
163 (match_test "TARGET_NEON")
164 (not (match_test "TARGET_PREFER_NEON_64BITS")))
167 (and (eq_attr "arch" "neon_for_64bits")
168 (match_test "TARGET_NEON")
169 (match_test "TARGET_PREFER_NEON_64BITS"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")]
176 (const_string "no")))
178 (define_attr "opt" "any,speed,size"
179 (const_string "any"))
181 (define_attr "opt_enabled" "no,yes"
182 (cond [(eq_attr "opt" "any")
185 (and (eq_attr "opt" "speed")
186 (match_test "optimize_function_for_speed_p (cfun)"))
189 (and (eq_attr "opt" "size")
190 (match_test "optimize_function_for_size_p (cfun)"))
191 (const_string "yes")]
192 (const_string "no")))
194 ; Allows an insn to disable certain alternatives for reasons other than
196 (define_attr "insn_enabled" "no,yes"
197 (const_string "yes"))
199 ; Enable all alternatives that are both arch_enabled and insn_enabled.
200 (define_attr "enabled" "no,yes"
201 (cond [(eq_attr "insn_enabled" "no")
204 (and (eq_attr "predicable_short_it" "no")
205 (and (eq_attr "predicated" "yes")
206 (match_test "arm_restrict_it")))
209 (and (eq_attr "enabled_for_depr_it" "no")
210 (match_test "arm_restrict_it"))
213 (eq_attr "arch_enabled" "no")
216 (eq_attr "opt_enabled" "no")
218 (const_string "yes")))
220 ; POOL_RANGE is how far away from a constant pool entry that this insn
221 ; can be placed. If the distance is zero, then this insn will never
222 ; reference the pool.
223 ; Note that for Thumb constant pools the PC value is rounded down to the
224 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
225 ; Thumb insns) should be set to <max_range> - 2.
226 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
227 ; before its address. It is set to <max_range> - (8 + <data_size>).
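; Worked example of the formulas above (the numbers are illustrative, not
; taken from a real pattern): for an insn with <max_range> = 4096 and 4
; bytes of data, the Thumb-2 pool range would be 4096 - 2 = 4094 and the
; negative pool range 4096 - (8 + 4) = 4084.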
228 (define_attr "arm_pool_range" "" (const_int 0))
229 (define_attr "thumb2_pool_range" "" (const_int 0))
230 (define_attr "arm_neg_pool_range" "" (const_int 0))
231 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
233 (define_attr "pool_range" ""
234 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
235 (attr "arm_pool_range")))
236 (define_attr "neg_pool_range" ""
237 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
238 (attr "arm_neg_pool_range")))
240 ; An assembler sequence may clobber the condition codes without us knowing.
241 ; If such an insn references the pool, then we have no way of knowing how,
242 ; so use the most conservative value for pool_range.
243 (define_asm_attributes
244 [(set_attr "conds" "clob")
245 (set_attr "length" "4")
246 (set_attr "pool_range" "250")])
248 ;; The instruction used to implement a particular pattern. This
249 ;; information is used by pipeline descriptions to provide accurate
250 ;; scheduling information.
253 "mov,mvn,clz,mrs,msr,xtab,sat,other"
254 (const_string "other"))
256 ; TYPE attribute is used to classify instructions for use in scheduling.
258 ; Instruction classification:
260 ; alu_reg any alu instruction that doesn't hit memory or fp
261 ; regs or have a shifted source operand and does not have
262 ; an immediate operand. This is also the default.
263 ; alu_shift any data instruction that doesn't hit memory or fp
264 ; regs, but has a source operand shifted by a constant.
265 ; alu_shift_reg as alu_shift, but with the source operand shifted by a register.
266 ; block blockage insn, this blocks all functional units.
268 ; call subroutine call.
269 ; f_2_r transfer from float to core (no memory needed).
270 ; f_cvt conversion between float and integral.
271 ; f_flag transfer of co-processor flags to the CPSR.
272 ; f_load[d,s] double/single load from memory. Used for VFP unit.
273 ; f_minmax[d,s] double/single floating point minimum/maximum.
274 ; f_rint[d,s] double/single floating point round to integral.
275 ; f_sel[d,s] double/single floating point select.
276 ; f_store[d,s] double/single store to memory. Used for VFP unit.
277 ; fadd[d,s] double/single floating-point scalar addition.
278 ; fcmp[d,s] double/single floating-point compare.
279 ; fconst[d,s] double/single load immediate.
280 ; fcpys single precision floating point cpy.
281 ; fdiv[d,s] double/single precision floating point division.
282 ; ffarith[d,s] double/single floating point abs/neg/cpy.
283 ; ffma[d,s] double/single floating point fused multiply-accumulate.
284 ; float floating point arithmetic operation.
285 ; fmac[d,s] double/single floating point multiply-accumulate.
286 ; fmul[d,s] double/single floating point multiply.
287 ; load_byte load byte(s) from memory to arm registers.
288 ; load1 load 1 word from memory to arm registers.
289 ; load2 load 2 words from memory to arm registers.
290 ; load3 load 3 words from memory to arm registers.
291 ; load4 load 4 words from memory to arm registers.
292 ; mla integer multiply accumulate.
293 ; mlas integer multiply accumulate, flag setting.
295 ; mul integer multiply.
296 ; muls integer multiply, flag setting.
297 ; r_2_f transfer from core to float.
298 ; sdiv signed division.
299 ; simple_alu_imm simple alu instruction that doesn't hit memory or fp
300 ; regs or have a shifted source operand and has an
301 ; immediate operand. This currently only tracks very basic
302 ; immediate alu operations.
303 ; simple_alu_shift simple alu instruction with a shifted source operand.
304 ; smlad signed multiply accumulate dual.
305 ; smladx signed multiply accumulate dual reverse.
306 ; smlal signed multiply accumulate long.
307 ; smlald signed multiply accumulate long dual.
308 ; smlals signed multiply accumulate long, flag setting.
309 ; smlalxy signed multiply accumulate, 16x16-bit, 64-bit accumulate.
310 ; smlawx signed multiply accumulate, 32x16-bit, 32-bit accumulate.
311 ; smlawy signed multiply accumulate wide, 32x16-bit,
313 ; smlaxy signed multiply accumulate, 16x16-bit, 32-bit accumulate.
314 ; smlsd signed multiply subtract dual.
315 ; smlsdx signed multiply subtract dual reverse.
316 ; smlsld signed multiply subtract long dual.
317 ; smmla signed most significant word multiply accumulate.
318 ; smmul signed most significant word multiply.
319 ; smmulr signed most significant word multiply, rounded.
320 ; smuad signed dual multiply add.
321 ; smuadx signed dual multiply add reverse.
322 ; smull signed multiply long.
323 ; smulls signed multiply long, flag setting.
324 ; smulwy signed multiply wide, 32x16-bit, 32-bit result.
325 ; smulxy signed multiply, 16x16-bit, 32-bit result.
326 ; smusd signed dual multiply subtract.
327 ; smusdx signed dual multiply subtract reverse.
328 ; store1 store 1 word to memory from arm registers.
329 ; store2 store 2 words to memory from arm registers.
330 ; store3 store 3 words to memory from arm registers.
331 ; store4 store 4 (or more) words to memory from arm registers.
332 ; udiv unsigned division.
333 ; umaal unsigned multiply accumulate accumulate long.
334 ; umlal unsigned multiply accumulate long.
335 ; umlals unsigned multiply accumulate long, flag setting.
336 ; umull unsigned multiply long.
337 ; umulls unsigned multiply long, flag setting.
423 (const_string "alu_reg"))
425 ; Is this an (integer side) multiply with a 32-bit (or smaller) result?
426 (define_attr "mul32" "no,yes"
429 "smulxy,smlaxy,smulwy,smlawx,mul,muls,mla,mlas,smlawy,smuad,smuadx,\
430 smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,smlald,smlsld")
432 (const_string "no")))
434 ; Is this an (integer side) multiply with a 64-bit result?
435 (define_attr "mul64" "no,yes"
438 "smlalxy,umull,umulls,umaal,umlal,umlals,smull,smulls,smlal,smlals")
440 (const_string "no")))
442 ; wtype for WMMX insn scheduling purposes.
444 "none,wor,wxor,wand,wandn,wmov,tmcrr,tmrrc,wldr,wstr,tmcr,tmrc,wadd,wsub,wmul,wmac,wavg2,tinsr,textrm,wshufh,wcmpeq,wcmpgt,wmax,wmin,wpack,wunpckih,wunpckil,wunpckeh,wunpckel,wror,wsra,wsrl,wsll,wmadd,tmia,tmiaph,tmiaxy,tbcst,tmovmsk,wacc,waligni,walignr,tandc,textrc,torc,torvsc,wsad,wabs,wabsdiff,waddsubhx,wsubaddhx,wavg4,wmulw,wqmulm,wqmulwm,waddbhus,wqmiaxy,wmiaxy,wmiawxy,wmerge" (const_string "none"))
446 ; Load scheduling, set from the arm_ld_sched variable
447 ; initialized by arm_option_override()
448 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
450 ;; Classification of NEON instructions for scheduling purposes.
451 (define_attr "neon_type"
462 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
463 neon_mul_qqq_8_16_32_ddd_32,\
464 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
465 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
467 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
468 neon_mla_qqq_32_qqd_32_scalar,\
469 neon_mul_ddd_16_scalar_32_16_long_scalar,\
470 neon_mul_qqd_32_scalar,\
471 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
476 neon_vqshl_vrshl_vqrshl_qqq,\
478 neon_fp_vadd_ddd_vabs_dd,\
479 neon_fp_vadd_qqq_vabs_qq,\
485 neon_fp_vmla_ddd_scalar,\
486 neon_fp_vmla_qqq_scalar,\
487 neon_fp_vrecps_vrsqrts_ddd,\
488 neon_fp_vrecps_vrsqrts_qqq,\
496 neon_vld2_2_regs_vld1_vld2_all_lanes,\
499 neon_vst1_1_2_regs_vst2_2_regs,\
501 neon_vst2_4_regs_vst3_vst4,\
503 neon_vld1_vld2_lane,\
504 neon_vld3_vld4_lane,\
505 neon_vst1_vst2_lane,\
506 neon_vst3_vst4_lane,\
507 neon_vld3_vld4_all_lanes,\
515 (const_string "none"))
517 ; condition codes: this one is used by final_prescan_insn to speed up
518 ; conditionalizing instructions. It saves having to scan the rtl to see if
519 ; it uses or alters the condition codes.
521 ; USE means that the condition codes are used by the insn in the process of
522 ; outputting code; this means (at present) that we can't use the insn in
525 ; SET means that the purpose of the insn is to set the condition codes in a
526 ; well defined manner.
528 ; CLOB means that the condition codes are altered in an undefined manner, if
529 ; they are altered at all.
531 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
532 ; that the instruction does not use or alter the condition codes.
534 ; NOCOND means that the instruction does not use or alter the condition
535 ; codes but can be converted into a conditionally executed instruction.
537 (define_attr "conds" "use,set,clob,unconditional,nocond"
539 (ior (eq_attr "is_thumb1" "yes")
540 (eq_attr "type" "call"))
541 (const_string "clob")
542 (if_then_else (eq_attr "neon_type" "none")
543 (const_string "nocond")
544 (const_string "unconditional"))))
546 ; Predicable means that the insn can be conditionally executed based on
547 ; an automatically added predicate (additional patterns are generated by
548 ; gen...). We default to 'no' because no Thumb patterns match this rule
549 ; and not all ARM patterns do.
550 (define_attr "predicable" "no,yes" (const_string "no"))
552 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
553 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
554 ; suffer blockages enough to warrant modelling this (and it can adversely
555 ; affect the schedule).
556 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
558 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
559 ; to stall the processor. Used with model_wbuf above.
560 (define_attr "write_conflict" "no,yes"
561 (if_then_else (eq_attr "type"
564 (const_string "no")))
566 ; Classify the insns into those that take one cycle and those that take more
567 ; than one on the main cpu execution unit.
568 (define_attr "core_cycles" "single,multi"
569 (if_then_else (eq_attr "type"
570 "simple_alu_imm,alu_reg,\
571 simple_alu_shift,alu_shift,\
573 (const_string "single")
574 (const_string "multi")))
576 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
577 ;; distant label. Only applicable to Thumb code.
578 (define_attr "far_jump" "yes,no" (const_string "no"))
581 ;; The number of machine instructions this pattern expands to.
582 ;; Used for Thumb-2 conditional execution.
583 (define_attr "ce_count" "" (const_int 1))
585 ;;---------------------------------------------------------------------------
588 (include "unspecs.md")
590 ;;---------------------------------------------------------------------------
593 (include "iterators.md")
595 ;;---------------------------------------------------------------------------
598 (include "predicates.md")
599 (include "constraints.md")
601 ;;---------------------------------------------------------------------------
602 ;; Pipeline descriptions
604 (define_attr "tune_cortexr4" "yes,no"
606 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
608 (const_string "no"))))
610 ;; True if the generic scheduling description should be used.
612 (define_attr "generic_sched" "yes,no"
614 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexa53,cortexm4,marvell_pj4")
615 (eq_attr "tune_cortexr4" "yes"))
617 (const_string "yes"))))
619 (define_attr "generic_vfp" "yes,no"
621 (and (eq_attr "fpu" "vfp")
622 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexa53,cortexm4,marvell_pj4")
623 (eq_attr "tune_cortexr4" "no"))
625 (const_string "no"))))
627 (include "marvell-f-iwmmxt.md")
628 (include "arm-generic.md")
629 (include "arm926ejs.md")
630 (include "arm1020e.md")
631 (include "arm1026ejs.md")
632 (include "arm1136jfs.md")
634 (include "fa606te.md")
635 (include "fa626te.md")
636 (include "fmp626.md")
637 (include "fa726te.md")
638 (include "cortex-a5.md")
639 (include "cortex-a7.md")
640 (include "cortex-a8.md")
641 (include "cortex-a9.md")
642 (include "cortex-a15.md")
643 (include "cortex-a53.md")
644 (include "cortex-r4.md")
645 (include "cortex-r4f.md")
646 (include "cortex-m4.md")
647 (include "cortex-m4-fpu.md")
649 (include "marvell-pj4.md")
652 ;;---------------------------------------------------------------------------
657 ;; Note: For DImode insns, there is normally no reason why operands should
658 ;; not be in the same register; what we don't want is for something being
659 ;; written to partially overlap something that is an input.
661 (define_expand "adddi3"
663 [(set (match_operand:DI 0 "s_register_operand" "")
664 (plus:DI (match_operand:DI 1 "s_register_operand" "")
665 (match_operand:DI 2 "arm_adddi_operand" "")))
666 (clobber (reg:CC CC_REGNUM))])]
671 if (!REG_P (operands[1]))
672 operands[1] = force_reg (DImode, operands[1]);
673 if (!REG_P (operands[2]))
674 operands[2] = force_reg (DImode, operands[2]);
679 (define_insn "*thumb1_adddi3"
680 [(set (match_operand:DI 0 "register_operand" "=l")
681 (plus:DI (match_operand:DI 1 "register_operand" "%0")
682 (match_operand:DI 2 "register_operand" "l")))
683 (clobber (reg:CC CC_REGNUM))
686 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
687 [(set_attr "length" "4")]
690 (define_insn_and_split "*arm_adddi3"
691 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
692 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
693 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
694 (clobber (reg:CC CC_REGNUM))]
695 "TARGET_32BIT && !TARGET_NEON"
697 "TARGET_32BIT && reload_completed
698 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
699 [(parallel [(set (reg:CC_C CC_REGNUM)
700 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
702 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
703 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
704 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
707 operands[3] = gen_highpart (SImode, operands[0]);
708 operands[0] = gen_lowpart (SImode, operands[0]);
709 operands[4] = gen_highpart (SImode, operands[1]);
710 operands[1] = gen_lowpart (SImode, operands[1]);
711 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
712 operands[2] = gen_lowpart (SImode, operands[2]);
714 [(set_attr "conds" "clob")
715 (set_attr "length" "8")]
718 (define_insn_and_split "*adddi_sesidi_di"
719 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
720 (plus:DI (sign_extend:DI
721 (match_operand:SI 2 "s_register_operand" "r,r"))
722 (match_operand:DI 1 "s_register_operand" "0,r")))
723 (clobber (reg:CC CC_REGNUM))]
726 "TARGET_32BIT && reload_completed"
727 [(parallel [(set (reg:CC_C CC_REGNUM)
728 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
730 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
731 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
734 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
737 operands[3] = gen_highpart (SImode, operands[0]);
738 operands[0] = gen_lowpart (SImode, operands[0]);
739 operands[4] = gen_highpart (SImode, operands[1]);
740 operands[1] = gen_lowpart (SImode, operands[1]);
741 operands[2] = gen_lowpart (SImode, operands[2]);
743 [(set_attr "conds" "clob")
744 (set_attr "length" "8")]
747 (define_insn_and_split "*adddi_zesidi_di"
748 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
749 (plus:DI (zero_extend:DI
750 (match_operand:SI 2 "s_register_operand" "r,r"))
751 (match_operand:DI 1 "s_register_operand" "0,r")))
752 (clobber (reg:CC CC_REGNUM))]
755 "TARGET_32BIT && reload_completed"
756 [(parallel [(set (reg:CC_C CC_REGNUM)
757 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
759 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
760 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
761 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
764 operands[3] = gen_highpart (SImode, operands[0]);
765 operands[0] = gen_lowpart (SImode, operands[0]);
766 operands[4] = gen_highpart (SImode, operands[1]);
767 operands[1] = gen_lowpart (SImode, operands[1]);
768 operands[2] = gen_lowpart (SImode, operands[2]);
770 [(set_attr "conds" "clob")
771 (set_attr "length" "8")]
774 (define_expand "addsi3"
775 [(set (match_operand:SI 0 "s_register_operand" "")
776 (plus:SI (match_operand:SI 1 "s_register_operand" "")
777 (match_operand:SI 2 "reg_or_int_operand" "")))]
780 if (TARGET_32BIT && CONST_INT_P (operands[2]))
782 arm_split_constant (PLUS, SImode, NULL_RTX,
783 INTVAL (operands[2]), operands[0], operands[1],
784 optimize && can_create_pseudo_p ());
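;; A sketch of the effect (registers and constant invented): adding
;; #0x10001, which is not a valid ARM immediate, is split by
;; arm_split_constant into two additions whose immediates are valid, e.g.
;;   add   r0, r1, #65536
;;   add   r0, r0, #1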
790 ; If there is a scratch available, this will be faster than synthesizing the constant.
793 [(match_scratch:SI 3 "r")
794 (set (match_operand:SI 0 "arm_general_register_operand" "")
795 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
796 (match_operand:SI 2 "const_int_operand" "")))]
798 !(const_ok_for_arm (INTVAL (operands[2]))
799 || const_ok_for_arm (-INTVAL (operands[2])))
800 && const_ok_for_arm (~INTVAL (operands[2]))"
801 [(set (match_dup 3) (match_dup 2))
802 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
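;; Illustration of the peephole above (registers and constant invented):
;; #0x00ffffff is not a valid immediate, nor is its negation, but its
;; complement #0xff000000 is, so with a free scratch r3 the addition can be
;;   mvn   r3, #0xff000000   @ r3 = 0x00ffffff
;;   add   r0, r1, r3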
806 ;; The r/r/k alternative is required when reloading the address
807 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
808 ;; put the duplicated register first, and not try the commutative version.
809 (define_insn_and_split "*arm_addsi3"
810 [(set (match_operand:SI 0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
811 (plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
812 (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
824 subw%?\\t%0, %1, #%n2
825 subw%?\\t%0, %1, #%n2
828 && CONST_INT_P (operands[2])
829 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
830 && (reload_completed || !arm_eliminable_register (operands[1]))"
831 [(clobber (const_int 0))]
833 arm_split_constant (PLUS, SImode, curr_insn,
834 INTVAL (operands[2]), operands[0],
838 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
839 (set_attr "predicable" "yes")
840 (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
841 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
842 (const_string "simple_alu_imm")
843 (const_string "alu_reg")))
847 (define_insn_and_split "*thumb1_addsi3"
848 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
849 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
850 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
853 static const char * const asms[] =
855 \"add\\t%0, %0, %2\",
856 \"sub\\t%0, %0, #%n2\",
857 \"add\\t%0, %1, %2\",
858 \"add\\t%0, %0, %2\",
859 \"add\\t%0, %0, %2\",
860 \"add\\t%0, %1, %2\",
861 \"add\\t%0, %1, %2\",
866 if ((which_alternative == 2 || which_alternative == 6)
867 && CONST_INT_P (operands[2])
868 && INTVAL (operands[2]) < 0)
869 return \"sub\\t%0, %1, #%n2\";
870 return asms[which_alternative];
872 "&& reload_completed && CONST_INT_P (operands[2])
873 && ((operands[1] != stack_pointer_rtx
874 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
875 || (operands[1] == stack_pointer_rtx
876 && INTVAL (operands[2]) > 1020))"
877 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
878 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
880 HOST_WIDE_INT offset = INTVAL (operands[2]);
881 if (operands[1] == stack_pointer_rtx)
887 else if (offset < -255)
890 operands[3] = GEN_INT (offset);
891 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
893 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
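;; For instance (values invented), adding #300 to a low register other than
;; SP is outside the 8-bit immediate range, so the splitter above rewrites
;; it as two consecutive additions with smaller constants, e.g. #255
;; followed by #45.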
896 ;; Reloading and elimination of the frame pointer can
897 ;; sometimes cause this optimization to be missed.
899 [(set (match_operand:SI 0 "arm_general_register_operand" "")
900 (match_operand:SI 1 "const_int_operand" ""))
902 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
904 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
905 && (INTVAL (operands[1]) & 3) == 0"
906 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
910 (define_insn "addsi3_compare0"
911 [(set (reg:CC_NOOV CC_REGNUM)
913 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
914 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
916 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
917 (plus:SI (match_dup 1) (match_dup 2)))]
923 [(set_attr "conds" "set")
924 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")]
927 (define_insn "*addsi3_compare0_scratch"
928 [(set (reg:CC_NOOV CC_REGNUM)
930 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
931 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
938 [(set_attr "conds" "set")
939 (set_attr "predicable" "yes")
940 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")
944 (define_insn "*compare_negsi_si"
945 [(set (reg:CC_Z CC_REGNUM)
947 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
948 (match_operand:SI 1 "s_register_operand" "r")))]
951 [(set_attr "conds" "set")
952 (set_attr "predicable" "yes")]
955 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
956 ;; addend is a constant.
957 (define_insn "cmpsi2_addneg"
958 [(set (reg:CC CC_REGNUM)
960 (match_operand:SI 1 "s_register_operand" "r,r")
961 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
962 (set (match_operand:SI 0 "s_register_operand" "=r,r")
963 (plus:SI (match_dup 1)
964 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
965 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
968 sub%.\\t%0, %1, #%n3"
969 [(set_attr "conds" "set")]
972 ;; Convert the sequence
974 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
978 ;; bcs dest ((unsigned)rn >= 1)
979 ;; similarly for the beq variant using bcc.
980 ;; This is a common looping idiom (while (n--))
982 [(set (match_operand:SI 0 "arm_general_register_operand" "")
983 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
985 (set (match_operand 2 "cc_register" "")
986 (compare (match_dup 0) (const_int -1)))
988 (if_then_else (match_operator 3 "equality_operator"
989 [(match_dup 2) (const_int 0)])
990 (match_operand 4 "" "")
991 (match_operand 5 "" "")))]
992 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
996 (match_dup 1) (const_int 1)))
997 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
999 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1002 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1003 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1006 operands[2], const0_rtx);"
1009 ;; The next four insns work because they compare the result with one of
1010 ;; the operands, and we know that the use of the condition code is
1011 ;; either GEU or LTU, so we can use the carry flag from the addition
1012 ;; instead of doing the compare a second time.
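;; Sketch (hypothetical registers): after
;;   adds  r0, r1, r2
;; a subsequent unsigned >=/< test of r0 against r1 is answered by the
;; carry flag alone, so no separate cmp needs to be emitted.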
1013 (define_insn "*addsi3_compare_op1"
1014 [(set (reg:CC_C CC_REGNUM)
1016 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1017 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1019 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1020 (plus:SI (match_dup 1) (match_dup 2)))]
1024 sub%.\\t%0, %1, #%n2
1026 [(set_attr "conds" "set")
1027 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1030 (define_insn "*addsi3_compare_op2"
1031 [(set (reg:CC_C CC_REGNUM)
1033 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1034 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1036 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1037 (plus:SI (match_dup 1) (match_dup 2)))]
1042 sub%.\\t%0, %1, #%n2"
1043 [(set_attr "conds" "set")
1044 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1047 (define_insn "*compare_addsi2_op0"
1048 [(set (reg:CC_C CC_REGNUM)
1050 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1051 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
1058 [(set_attr "conds" "set")
1059 (set_attr "predicable" "yes")
1060 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1063 (define_insn "*compare_addsi2_op1"
1064 [(set (reg:CC_C CC_REGNUM)
1066 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1067 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
1074 [(set_attr "conds" "set")
1075 (set_attr "predicable" "yes")
1076 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1079 (define_insn "*addsi3_carryin_<optab>"
1080 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1081 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r")
1082 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1083 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1087 sbc%?\\t%0, %1, #%B2"
1088 [(set_attr "conds" "use")
1089 (set_attr "predicable" "yes")]
1092 (define_insn "*addsi3_carryin_alt2_<optab>"
1093 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1094 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1095 (match_operand:SI 1 "s_register_operand" "%r,r"))
1096 (match_operand:SI 2 "arm_rhs_operand" "rI,K")))]
1100 sbc%?\\t%0, %1, #%B2"
1101 [(set_attr "conds" "use")
1102 (set_attr "predicable" "yes")]
1105 (define_insn "*addsi3_carryin_shift_<optab>"
1106 [(set (match_operand:SI 0 "s_register_operand" "=r")
1108 (match_operator:SI 2 "shift_operator"
1109 [(match_operand:SI 3 "s_register_operand" "r")
1110 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1111 (match_operand:SI 1 "s_register_operand" "r"))
1112 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1114 "adc%?\\t%0, %1, %3%S2"
1115 [(set_attr "conds" "use")
1116 (set_attr "predicable" "yes")
1117 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1118 (const_string "alu_shift")
1119 (const_string "alu_shift_reg")))]
1122 (define_insn "*addsi3_carryin_clobercc_<optab>"
1123 [(set (match_operand:SI 0 "s_register_operand" "=r")
1124 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1125 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1126 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1127 (clobber (reg:CC CC_REGNUM))]
1129 "adc%.\\t%0, %1, %2"
1130 [(set_attr "conds" "set")]
1133 (define_insn "*subsi3_carryin"
1134 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1135 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
1136 (match_operand:SI 2 "s_register_operand" "r,r"))
1137 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1142 [(set_attr "conds" "use")
1143 (set_attr "arch" "*,a")
1144 (set_attr "predicable" "yes")]
1147 (define_insn "*subsi3_carryin_const"
1148 [(set (match_operand:SI 0 "s_register_operand" "=r")
1149 (minus:SI (plus:SI (match_operand:SI 1 "reg_or_int_operand" "r")
1150 (match_operand:SI 2 "arm_not_operand" "K"))
1151 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1153 "sbc\\t%0, %1, #%B2"
1154 [(set_attr "conds" "use")]
1157 (define_insn "*subsi3_carryin_compare"
1158 [(set (reg:CC CC_REGNUM)
1159 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1160 (match_operand:SI 2 "s_register_operand" "r")))
1161 (set (match_operand:SI 0 "s_register_operand" "=r")
1162 (minus:SI (minus:SI (match_dup 1)
1164 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1167 [(set_attr "conds" "set")]
1170 (define_insn "*subsi3_carryin_compare_const"
1171 [(set (reg:CC CC_REGNUM)
1172 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1173 (match_operand:SI 2 "arm_not_operand" "K")))
1174 (set (match_operand:SI 0 "s_register_operand" "=r")
1175 (minus:SI (plus:SI (match_dup 1)
1177 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1179 "sbcs\\t%0, %1, #%B2"
1180 [(set_attr "conds" "set")]
1183 (define_insn "*subsi3_carryin_shift"
1184 [(set (match_operand:SI 0 "s_register_operand" "=r")
1186 (match_operand:SI 1 "s_register_operand" "r")
1187 (match_operator:SI 2 "shift_operator"
1188 [(match_operand:SI 3 "s_register_operand" "r")
1189 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1190 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1192 "sbc%?\\t%0, %1, %3%S2"
1193 [(set_attr "conds" "use")
1194 (set_attr "predicable" "yes")
1195 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1196 (const_string "alu_shift")
1197 (const_string "alu_shift_reg")))]
1200 (define_insn "*rsbsi3_carryin_shift"
1201 [(set (match_operand:SI 0 "s_register_operand" "=r")
1203 (match_operator:SI 2 "shift_operator"
1204 [(match_operand:SI 3 "s_register_operand" "r")
1205 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1206 (match_operand:SI 1 "s_register_operand" "r"))
1207 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1209 "rsc%?\\t%0, %1, %3%S2"
1210 [(set_attr "conds" "use")
1211 (set_attr "predicable" "yes")
1212 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1213 (const_string "alu_shift")
1214 (const_string "alu_shift_reg")))]
1218 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
1219 [(set (match_operand:SI 0 "s_register_operand" "")
1220 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1221 (match_operand:SI 2 "s_register_operand" ""))
1223 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1225 [(set (match_dup 3) (match_dup 1))
1226 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1228 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
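;; For example (a sketch; registers invented): with x == 1 the mask
;; (1 << y) - 1 becomes
;;   mvn   r3, #0          @ r3 = ~(1 - 1) = 0xffffffff
;;   mvn   r0, r3, lsl r2  @ r0 = ~(~0 << y) = (1 << y) - 1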
1231 (define_expand "addsf3"
1232 [(set (match_operand:SF 0 "s_register_operand" "")
1233 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1234 (match_operand:SF 2 "s_register_operand" "")))]
1235 "TARGET_32BIT && TARGET_HARD_FLOAT"
1239 (define_expand "adddf3"
1240 [(set (match_operand:DF 0 "s_register_operand" "")
1241 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1242 (match_operand:DF 2 "s_register_operand" "")))]
1243 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1247 (define_expand "subdi3"
1249 [(set (match_operand:DI 0 "s_register_operand" "")
1250 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1251 (match_operand:DI 2 "s_register_operand" "")))
1252 (clobber (reg:CC CC_REGNUM))])]
1257 if (!REG_P (operands[1]))
1258 operands[1] = force_reg (DImode, operands[1]);
1259 if (!REG_P (operands[2]))
1260 operands[2] = force_reg (DImode, operands[2]);
1265 (define_insn_and_split "*arm_subdi3"
1266 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1267 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1268 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1269 (clobber (reg:CC CC_REGNUM))]
1270 "TARGET_32BIT && !TARGET_NEON"
1271 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1272 "&& reload_completed"
1273 [(parallel [(set (reg:CC CC_REGNUM)
1274 (compare:CC (match_dup 1) (match_dup 2)))
1275 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1276 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1277 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1279 operands[3] = gen_highpart (SImode, operands[0]);
1280 operands[0] = gen_lowpart (SImode, operands[0]);
1281 operands[4] = gen_highpart (SImode, operands[1]);
1282 operands[1] = gen_lowpart (SImode, operands[1]);
1283 operands[5] = gen_highpart (SImode, operands[2]);
1284 operands[2] = gen_lowpart (SImode, operands[2]);
1286 [(set_attr "conds" "clob")
1287 (set_attr "length" "8")]
1290 (define_insn "*thumb_subdi3"
1291 [(set (match_operand:DI 0 "register_operand" "=l")
1292 (minus:DI (match_operand:DI 1 "register_operand" "0")
1293 (match_operand:DI 2 "register_operand" "l")))
1294 (clobber (reg:CC CC_REGNUM))]
1296 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1297 [(set_attr "length" "4")]
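;; As the note near the top of this file explains, it would be unsafe to
;; split the pattern above: the SUB assembles to SUBS and sets the Carry
;; flag that the following SBC consumes.  Sketch of the emitted pair, with
;; r0/r1 standing for the low/high halves of operand 0 and r2/r3 for
;; operand 2:
;;   sub   r0, r0, r2   @ SUBS: sets C to the borrow
;;   sbc   r1, r1, r3   @ uses C; nothing may be scheduled in between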
1300 (define_insn_and_split "*subdi_di_zesidi"
1301 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1302 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1304 (match_operand:SI 2 "s_register_operand" "r,r"))))
1305 (clobber (reg:CC CC_REGNUM))]
1307 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1308 "&& reload_completed"
1309 [(parallel [(set (reg:CC CC_REGNUM)
1310 (compare:CC (match_dup 1) (match_dup 2)))
1311 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1312 (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
1313 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1315 operands[3] = gen_highpart (SImode, operands[0]);
1316 operands[0] = gen_lowpart (SImode, operands[0]);
1317 operands[4] = gen_highpart (SImode, operands[1]);
1318 operands[1] = gen_lowpart (SImode, operands[1]);
1319 operands[5] = GEN_INT (~0);
1321 [(set_attr "conds" "clob")
1322 (set_attr "length" "8")]
1325 (define_insn_and_split "*subdi_di_sesidi"
1326 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1327 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1329 (match_operand:SI 2 "s_register_operand" "r,r"))))
1330 (clobber (reg:CC CC_REGNUM))]
1332 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1333 "&& reload_completed"
1334 [(parallel [(set (reg:CC CC_REGNUM)
1335 (compare:CC (match_dup 1) (match_dup 2)))
1336 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1337 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1338 (ashiftrt:SI (match_dup 2)
1340 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1342 operands[3] = gen_highpart (SImode, operands[0]);
1343 operands[0] = gen_lowpart (SImode, operands[0]);
1344 operands[4] = gen_highpart (SImode, operands[1]);
1345 operands[1] = gen_lowpart (SImode, operands[1]);
1347 [(set_attr "conds" "clob")
1348 (set_attr "length" "8")]
1351 (define_insn_and_split "*subdi_zesidi_di"
1352 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1353 (minus:DI (zero_extend:DI
1354 (match_operand:SI 2 "s_register_operand" "r,r"))
1355 (match_operand:DI 1 "s_register_operand" "0,r")))
1356 (clobber (reg:CC CC_REGNUM))]
1358 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1360 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1361 "&& reload_completed"
1362 [(parallel [(set (reg:CC CC_REGNUM)
1363 (compare:CC (match_dup 2) (match_dup 1)))
1364 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1365 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1366 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1368 operands[3] = gen_highpart (SImode, operands[0]);
1369 operands[0] = gen_lowpart (SImode, operands[0]);
1370 operands[4] = gen_highpart (SImode, operands[1]);
1371 operands[1] = gen_lowpart (SImode, operands[1]);
1373 [(set_attr "conds" "clob")
1374 (set_attr "length" "8")]
1377 (define_insn_and_split "*subdi_sesidi_di"
1378 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1379 (minus:DI (sign_extend:DI
1380 (match_operand:SI 2 "s_register_operand" "r,r"))
1381 (match_operand:DI 1 "s_register_operand" "0,r")))
1382 (clobber (reg:CC CC_REGNUM))]
1384 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1386 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1387 "&& reload_completed"
1388 [(parallel [(set (reg:CC CC_REGNUM)
1389 (compare:CC (match_dup 2) (match_dup 1)))
1390 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1391 (set (match_dup 3) (minus:SI (minus:SI
1392 (ashiftrt:SI (match_dup 2)
1395 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1397 operands[3] = gen_highpart (SImode, operands[0]);
1398 operands[0] = gen_lowpart (SImode, operands[0]);
1399 operands[4] = gen_highpart (SImode, operands[1]);
1400 operands[1] = gen_lowpart (SImode, operands[1]);
1402 [(set_attr "conds" "clob")
1403 (set_attr "length" "8")]
1406 (define_insn_and_split "*subdi_zesidi_zesidi"
1407 [(set (match_operand:DI 0 "s_register_operand" "=r")
1408 (minus:DI (zero_extend:DI
1409 (match_operand:SI 1 "s_register_operand" "r"))
1411 (match_operand:SI 2 "s_register_operand" "r"))))
1412 (clobber (reg:CC CC_REGNUM))]
1414 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1415 "&& reload_completed"
1416 [(parallel [(set (reg:CC CC_REGNUM)
1417 (compare:CC (match_dup 1) (match_dup 2)))
1418 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1419 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1420 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1422 operands[3] = gen_highpart (SImode, operands[0]);
1423 operands[0] = gen_lowpart (SImode, operands[0]);
1425 [(set_attr "conds" "clob")
1426 (set_attr "length" "8")]
1429 (define_expand "subsi3"
1430 [(set (match_operand:SI 0 "s_register_operand" "")
1431 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1432 (match_operand:SI 2 "s_register_operand" "")))]
1435 if (CONST_INT_P (operands[1]))
1439 arm_split_constant (MINUS, SImode, NULL_RTX,
1440 INTVAL (operands[1]), operands[0],
1441 operands[2], optimize && can_create_pseudo_p ());
1444 else /* TARGET_THUMB1 */
1445 operands[1] = force_reg (SImode, operands[1]);
1450 (define_insn "thumb1_subsi3_insn"
1451 [(set (match_operand:SI 0 "register_operand" "=l")
1452 (minus:SI (match_operand:SI 1 "register_operand" "l")
1453 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1456 [(set_attr "length" "2")
1457 (set_attr "conds" "set")])
1459 ; ??? Check Thumb-2 split length
1460 (define_insn_and_split "*arm_subsi3_insn"
1461 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,rk,r")
1462 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,r,k,?n")
1463 (match_operand:SI 2 "reg_or_int_operand" "r,I,r,r, r")))]
1471 "&& (CONST_INT_P (operands[1])
1472 && !const_ok_for_arm (INTVAL (operands[1])))"
1473 [(clobber (const_int 0))]
1475 arm_split_constant (MINUS, SImode, curr_insn,
1476 INTVAL (operands[1]), operands[0], operands[2], 0);
1479 [(set_attr "length" "4,4,4,4,16")
1480 (set_attr "predicable" "yes")
1481 (set_attr "type" "*,simple_alu_imm,*,*,*")]
1485 [(match_scratch:SI 3 "r")
1486 (set (match_operand:SI 0 "arm_general_register_operand" "")
1487 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1488 (match_operand:SI 2 "arm_general_register_operand" "")))]
1490 && !const_ok_for_arm (INTVAL (operands[1]))
1491 && const_ok_for_arm (~INTVAL (operands[1]))"
1492 [(set (match_dup 3) (match_dup 1))
1493 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1497 (define_insn "*subsi3_compare0"
1498 [(set (reg:CC_NOOV CC_REGNUM)
1500 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1501 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1503 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1504 (minus:SI (match_dup 1) (match_dup 2)))]
1510 [(set_attr "conds" "set")
1511 (set_attr "type" "simple_alu_imm,*,*")]
1514 (define_insn "subsi3_compare"
1515 [(set (reg:CC CC_REGNUM)
1516 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1517 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1518 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1519 (minus:SI (match_dup 1) (match_dup 2)))]
1525 [(set_attr "conds" "set")
1526 (set_attr "type" "simple_alu_imm,*,*")]
1529 (define_expand "subsf3"
1530 [(set (match_operand:SF 0 "s_register_operand" "")
1531 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1532 (match_operand:SF 2 "s_register_operand" "")))]
1533 "TARGET_32BIT && TARGET_HARD_FLOAT"
1537 (define_expand "subdf3"
1538 [(set (match_operand:DF 0 "s_register_operand" "")
1539 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1540 (match_operand:DF 2 "s_register_operand" "")))]
1541 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1546 ;; Multiplication insns
1548 (define_expand "mulsi3"
1549 [(set (match_operand:SI 0 "s_register_operand" "")
1550 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1551 (match_operand:SI 1 "s_register_operand" "")))]
1556 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1557 (define_insn "*arm_mulsi3"
1558 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1559 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1560 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1561 "TARGET_32BIT && !arm_arch6"
1562 "mul%?\\t%0, %2, %1"
1563 [(set_attr "type" "mul")
1564 (set_attr "predicable" "yes")]
1567 (define_insn "*arm_mulsi3_v6"
1568 [(set (match_operand:SI 0 "s_register_operand" "=r")
1569 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1570 (match_operand:SI 2 "s_register_operand" "r")))]
1571 "TARGET_32BIT && arm_arch6"
1572 "mul%?\\t%0, %1, %2"
1573 [(set_attr "type" "mul")
1574 (set_attr "predicable" "yes")]
1577 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1578 ; 1 and 2 are the same, because reload will make operand 0 match
1579 ; operand 1 without realizing that this conflicts with operand 2. We fix
1580 ; this by adding another alternative to match this case, and then `reload'
1581 ; it ourselves. This alternative must come first.
1582 (define_insn "*thumb_mulsi3"
1583 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1584 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1585 (match_operand:SI 2 "register_operand" "l,l,l")))]
1586 "TARGET_THUMB1 && !arm_arch6"
1588 if (which_alternative < 2)
1589 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1591 return \"mul\\t%0, %2\";
1593 [(set_attr "length" "4,4,2")
1594 (set_attr "type" "muls")]
1597 (define_insn "*thumb_mulsi3_v6"
1598 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1599 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1600 (match_operand:SI 2 "register_operand" "l,0,0")))]
1601 "TARGET_THUMB1 && arm_arch6"
1606 [(set_attr "length" "2")
1607 (set_attr "type" "muls")]
1610 (define_insn "*mulsi3_compare0"
1611 [(set (reg:CC_NOOV CC_REGNUM)
1612 (compare:CC_NOOV (mult:SI
1613 (match_operand:SI 2 "s_register_operand" "r,r")
1614 (match_operand:SI 1 "s_register_operand" "%0,r"))
1616 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1617 (mult:SI (match_dup 2) (match_dup 1)))]
1618 "TARGET_ARM && !arm_arch6"
1619 "mul%.\\t%0, %2, %1"
1620 [(set_attr "conds" "set")
1621 (set_attr "type" "muls")]
1624 (define_insn "*mulsi3_compare0_v6"
1625 [(set (reg:CC_NOOV CC_REGNUM)
1626 (compare:CC_NOOV (mult:SI
1627 (match_operand:SI 2 "s_register_operand" "r")
1628 (match_operand:SI 1 "s_register_operand" "r"))
1630 (set (match_operand:SI 0 "s_register_operand" "=r")
1631 (mult:SI (match_dup 2) (match_dup 1)))]
1632 "TARGET_ARM && arm_arch6 && optimize_size"
1633 "mul%.\\t%0, %2, %1"
1634 [(set_attr "conds" "set")
1635 (set_attr "type" "muls")]
1638 (define_insn "*mulsi_compare0_scratch"
1639 [(set (reg:CC_NOOV CC_REGNUM)
1640 (compare:CC_NOOV (mult:SI
1641 (match_operand:SI 2 "s_register_operand" "r,r")
1642 (match_operand:SI 1 "s_register_operand" "%0,r"))
1644 (clobber (match_scratch:SI 0 "=&r,&r"))]
1645 "TARGET_ARM && !arm_arch6"
1646 "mul%.\\t%0, %2, %1"
1647 [(set_attr "conds" "set")
1648 (set_attr "type" "muls")]
1651 (define_insn "*mulsi_compare0_scratch_v6"
1652 [(set (reg:CC_NOOV CC_REGNUM)
1653 (compare:CC_NOOV (mult:SI
1654 (match_operand:SI 2 "s_register_operand" "r")
1655 (match_operand:SI 1 "s_register_operand" "r"))
1657 (clobber (match_scratch:SI 0 "=r"))]
1658 "TARGET_ARM && arm_arch6 && optimize_size"
1659 "mul%.\\t%0, %2, %1"
1660 [(set_attr "conds" "set")
1661 (set_attr "type" "muls")]
1664 ;; Unnamed templates to match the MLA instruction.
1666 (define_insn "*mulsi3addsi"
1667 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1669 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1670 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1671 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1672 "TARGET_32BIT && !arm_arch6"
1673 "mla%?\\t%0, %2, %1, %3"
1674 [(set_attr "type" "mla")
1675 (set_attr "predicable" "yes")]
1678 (define_insn "*mulsi3addsi_v6"
1679 [(set (match_operand:SI 0 "s_register_operand" "=r")
1681 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1682 (match_operand:SI 1 "s_register_operand" "r"))
1683 (match_operand:SI 3 "s_register_operand" "r")))]
1684 "TARGET_32BIT && arm_arch6"
1685 "mla%?\\t%0, %2, %1, %3"
1686 [(set_attr "type" "mla")
1687 (set_attr "predicable" "yes")]
1690 (define_insn "*mulsi3addsi_compare0"
1691 [(set (reg:CC_NOOV CC_REGNUM)
1694 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1695 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1696 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1698 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1699 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1701 "TARGET_ARM && arm_arch6"
1702 "mla%.\\t%0, %2, %1, %3"
1703 [(set_attr "conds" "set")
1704 (set_attr "type" "mlas")]
1707 (define_insn "*mulsi3addsi_compare0_v6"
1708 [(set (reg:CC_NOOV CC_REGNUM)
1711 (match_operand:SI 2 "s_register_operand" "r")
1712 (match_operand:SI 1 "s_register_operand" "r"))
1713 (match_operand:SI 3 "s_register_operand" "r"))
1715 (set (match_operand:SI 0 "s_register_operand" "=r")
1716 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1718 "TARGET_ARM && arm_arch6 && optimize_size"
1719 "mla%.\\t%0, %2, %1, %3"
1720 [(set_attr "conds" "set")
1721 (set_attr "type" "mlas")]
1724 (define_insn "*mulsi3addsi_compare0_scratch"
1725 [(set (reg:CC_NOOV CC_REGNUM)
1728 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1729 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1730 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1732 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1733 "TARGET_ARM && !arm_arch6"
1734 "mla%.\\t%0, %2, %1, %3"
1735 [(set_attr "conds" "set")
1736 (set_attr "type" "mlas")]
1739 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1740 [(set (reg:CC_NOOV CC_REGNUM)
1743 (match_operand:SI 2 "s_register_operand" "r")
1744 (match_operand:SI 1 "s_register_operand" "r"))
1745 (match_operand:SI 3 "s_register_operand" "r"))
1747 (clobber (match_scratch:SI 0 "=r"))]
1748 "TARGET_ARM && arm_arch6 && optimize_size"
1749 "mla%.\\t%0, %2, %1, %3"
1750 [(set_attr "conds" "set")
1751 (set_attr "type" "mlas")]
1754 (define_insn "*mulsi3subsi"
1755 [(set (match_operand:SI 0 "s_register_operand" "=r")
1757 (match_operand:SI 3 "s_register_operand" "r")
1758 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1759 (match_operand:SI 1 "s_register_operand" "r"))))]
1760 "TARGET_32BIT && arm_arch_thumb2"
1761 "mls%?\\t%0, %2, %1, %3"
1762 [(set_attr "type" "mla")
1763 (set_attr "predicable" "yes")]
1766 (define_expand "maddsidi4"
1767 [(set (match_operand:DI 0 "s_register_operand" "")
1770 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1771 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1772 (match_operand:DI 3 "s_register_operand" "")))]
1773 "TARGET_32BIT && arm_arch3m"
1776 (define_insn "*mulsidi3adddi"
1777 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1780 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1781 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1782 (match_operand:DI 1 "s_register_operand" "0")))]
1783 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1784 "smlal%?\\t%Q0, %R0, %3, %2"
1785 [(set_attr "type" "smlal")
1786 (set_attr "predicable" "yes")]
1789 (define_insn "*mulsidi3adddi_v6"
1790 [(set (match_operand:DI 0 "s_register_operand" "=r")
1793 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1794 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1795 (match_operand:DI 1 "s_register_operand" "0")))]
1796 "TARGET_32BIT && arm_arch6"
1797 "smlal%?\\t%Q0, %R0, %3, %2"
1798 [(set_attr "type" "smlal")
1799 (set_attr "predicable" "yes")]
1802 ;; 32x32->64 widening multiply.
1803 ;; As with mulsi3, the only difference between the v3-5 and v6+
1804 ;; versions of these patterns is the requirement that the output not
1805 ;; overlap the inputs, but that still means we have to have a named
1806 ;; expander and two different starred insns.
1808 (define_expand "mulsidi3"
1809 [(set (match_operand:DI 0 "s_register_operand" "")
1811 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1812 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1813 "TARGET_32BIT && arm_arch3m"
1817 (define_insn "*mulsidi3_nov6"
1818 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1820 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1821 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1822 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1823 "smull%?\\t%Q0, %R0, %1, %2"
1824 [(set_attr "type" "smull")
1825 (set_attr "predicable" "yes")]
1828 (define_insn "*mulsidi3_v6"
1829 [(set (match_operand:DI 0 "s_register_operand" "=r")
1831 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1832 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1833 "TARGET_32BIT && arm_arch6"
1834 "smull%?\\t%Q0, %R0, %1, %2"
1835 [(set_attr "type" "smull")
1836 (set_attr "predicable" "yes")]
1839 (define_expand "umulsidi3"
1840 [(set (match_operand:DI 0 "s_register_operand" "")
1842 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1843 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1844 "TARGET_32BIT && arm_arch3m"
1848 (define_insn "*umulsidi3_nov6"
1849 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1851 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1852 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1853 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1854 "umull%?\\t%Q0, %R0, %1, %2"
1855 [(set_attr "type" "umull")
1856 (set_attr "predicable" "yes")]
1859 (define_insn "*umulsidi3_v6"
1860 [(set (match_operand:DI 0 "s_register_operand" "=r")
1862 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1863 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1864 "TARGET_32BIT && arm_arch6"
1865 "umull%?\\t%Q0, %R0, %1, %2"
1866 [(set_attr "type" "umull")
1867 (set_attr "predicable" "yes")]
1870 (define_expand "umaddsidi4"
1871 [(set (match_operand:DI 0 "s_register_operand" "")
1874 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1875 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1876 (match_operand:DI 3 "s_register_operand" "")))]
1877 "TARGET_32BIT && arm_arch3m"
1880 (define_insn "*umulsidi3adddi"
1881 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1884 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1885 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1886 (match_operand:DI 1 "s_register_operand" "0")))]
1887 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1888 "umlal%?\\t%Q0, %R0, %3, %2"
1889 [(set_attr "type" "umlal")
1890 (set_attr "predicable" "yes")]
1893 (define_insn "*umulsidi3adddi_v6"
1894 [(set (match_operand:DI 0 "s_register_operand" "=r")
1897 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1898 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1899 (match_operand:DI 1 "s_register_operand" "0")))]
1900 "TARGET_32BIT && arm_arch6"
1901 "umlal%?\\t%Q0, %R0, %3, %2"
1902 [(set_attr "type" "umlal")
1903 (set_attr "predicable" "yes")]
1906 (define_expand "smulsi3_highpart"
1908 [(set (match_operand:SI 0 "s_register_operand" "")
1912 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1913 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1915 (clobber (match_scratch:SI 3 ""))])]
1916 "TARGET_32BIT && arm_arch3m"
1920 (define_insn "*smulsi3_highpart_nov6"
1921 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1925 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1926 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1928 (clobber (match_scratch:SI 3 "=&r,&r"))]
1929 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1930 "smull%?\\t%3, %0, %2, %1"
1931 [(set_attr "type" "smull")
1932 (set_attr "predicable" "yes")]
1935 (define_insn "*smulsi3_highpart_v6"
1936 [(set (match_operand:SI 0 "s_register_operand" "=r")
1940 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1941 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1943 (clobber (match_scratch:SI 3 "=r"))]
1944 "TARGET_32BIT && arm_arch6"
1945 "smull%?\\t%3, %0, %2, %1"
1946 [(set_attr "type" "smull")
1947 (set_attr "predicable" "yes")]
1950 (define_expand "umulsi3_highpart"
1952 [(set (match_operand:SI 0 "s_register_operand" "")
1956 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1957 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1959 (clobber (match_scratch:SI 3 ""))])]
1960 "TARGET_32BIT && arm_arch3m"
1964 (define_insn "*umulsi3_highpart_nov6"
1965 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1969 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1970 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1972 (clobber (match_scratch:SI 3 "=&r,&r"))]
1973 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1974 "umull%?\\t%3, %0, %2, %1"
1975 [(set_attr "type" "umull")
1976 (set_attr "predicable" "yes")]
1979 (define_insn "*umulsi3_highpart_v6"
1980 [(set (match_operand:SI 0 "s_register_operand" "=r")
1984 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1985 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1987 (clobber (match_scratch:SI 3 "=r"))]
1988 "TARGET_32BIT && arm_arch6"
1989 "umull%?\\t%3, %0, %2, %1"
1990 [(set_attr "type" "umull")
1991 (set_attr "predicable" "yes")]
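;; For reference, a hedged C sketch of what the *_highpart patterns
;; compute (illustrative only): the upper 32 bits of the full 64-bit
;; product; the low half lands in the scratch register and is discarded.
;;
;;   #include <stdint.h>
;;
;;   int32_t mulhs (int32_t a, int32_t b)
;;   {
;;     return (int32_t) (((int64_t) a * b) >> 32);	/* SMULL, keep high word */
;;   }
;;
;;   uint32_t mulhu (uint32_t a, uint32_t b)
;;   {
;;     return (uint32_t) (((uint64_t) a * b) >> 32);	/* UMULL, keep high word */
;;   }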
1994 (define_insn "mulhisi3"
1995 [(set (match_operand:SI 0 "s_register_operand" "=r")
1996 (mult:SI (sign_extend:SI
1997 (match_operand:HI 1 "s_register_operand" "%r"))
1999 (match_operand:HI 2 "s_register_operand" "r"))))]
2000 "TARGET_DSP_MULTIPLY"
2001 "smulbb%?\\t%0, %1, %2"
2002 [(set_attr "type" "smulxy")
2003 (set_attr "predicable" "yes")]
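;; The DSP multiply B/T suffixes select the bottom or top 16-bit half of
;; each source register; smulbb multiplies the two bottom halves.  A
;; hedged C sketch of the equivalent computation (illustrative only):
;;
;;   #include <stdint.h>
;;
;;   int32_t smulbb_equiv (int16_t a_low, int16_t b_low)
;;   {
;;     return (int32_t) a_low * (int32_t) b_low;
;;   }
;;
;; The tb/bt/tt variants below pick a top half via an arithmetic shift
;; right by 16 rather than a sign_extend of the low half.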
2006 (define_insn "*mulhisi3tb"
2007 [(set (match_operand:SI 0 "s_register_operand" "=r")
2008 (mult:SI (ashiftrt:SI
2009 (match_operand:SI 1 "s_register_operand" "r")
2012 (match_operand:HI 2 "s_register_operand" "r"))))]
2013 "TARGET_DSP_MULTIPLY"
2014 "smultb%?\\t%0, %1, %2"
2015 [(set_attr "type" "smulxy")
2016 (set_attr "predicable" "yes")]
2019 (define_insn "*mulhisi3bt"
2020 [(set (match_operand:SI 0 "s_register_operand" "=r")
2021 (mult:SI (sign_extend:SI
2022 (match_operand:HI 1 "s_register_operand" "r"))
2024 (match_operand:SI 2 "s_register_operand" "r")
2026 "TARGET_DSP_MULTIPLY"
2027 "smulbt%?\\t%0, %1, %2"
2028 [(set_attr "type" "smulxy")
2029 (set_attr "predicable" "yes")]
2032 (define_insn "*mulhisi3tt"
2033 [(set (match_operand:SI 0 "s_register_operand" "=r")
2034 (mult:SI (ashiftrt:SI
2035 (match_operand:SI 1 "s_register_operand" "r")
2038 (match_operand:SI 2 "s_register_operand" "r")
2040 "TARGET_DSP_MULTIPLY"
2041 "smultt%?\\t%0, %1, %2"
2042 [(set_attr "type" "smulxy")
2043 (set_attr "predicable" "yes")]
2046 (define_insn "maddhisi4"
2047 [(set (match_operand:SI 0 "s_register_operand" "=r")
2048 (plus:SI (mult:SI (sign_extend:SI
2049 (match_operand:HI 1 "s_register_operand" "r"))
2051 (match_operand:HI 2 "s_register_operand" "r")))
2052 (match_operand:SI 3 "s_register_operand" "r")))]
2053 "TARGET_DSP_MULTIPLY"
2054 "smlabb%?\\t%0, %1, %2, %3"
2055 [(set_attr "type" "smlaxy")
2056 (set_attr "predicable" "yes")]
2059 ;; Note: there is no maddhisi4bt because this one is the canonical form.
2060 (define_insn "*maddhisi4tb"
2061 [(set (match_operand:SI 0 "s_register_operand" "=r")
2062 (plus:SI (mult:SI (ashiftrt:SI
2063 (match_operand:SI 1 "s_register_operand" "r")
2066 (match_operand:HI 2 "s_register_operand" "r")))
2067 (match_operand:SI 3 "s_register_operand" "r")))]
2068 "TARGET_DSP_MULTIPLY"
2069 "smlatb%?\\t%0, %1, %2, %3"
2070 [(set_attr "type" "smlaxy")
2071 (set_attr "predicable" "yes")]
2074 (define_insn "*maddhisi4tt"
2075 [(set (match_operand:SI 0 "s_register_operand" "=r")
2076 (plus:SI (mult:SI (ashiftrt:SI
2077 (match_operand:SI 1 "s_register_operand" "r")
2080 (match_operand:SI 2 "s_register_operand" "r")
2082 (match_operand:SI 3 "s_register_operand" "r")))]
2083 "TARGET_DSP_MULTIPLY"
2084 "smlatt%?\\t%0, %1, %2, %3"
2085 [(set_attr "type" "smlaxy")
2086 (set_attr "predicable" "yes")]
2089 (define_insn "maddhidi4"
2090 [(set (match_operand:DI 0 "s_register_operand" "=r")
2092 (mult:DI (sign_extend:DI
2093 (match_operand:HI 1 "s_register_operand" "r"))
2095 (match_operand:HI 2 "s_register_operand" "r")))
2096 (match_operand:DI 3 "s_register_operand" "0")))]
2097 "TARGET_DSP_MULTIPLY"
2098 "smlalbb%?\\t%Q0, %R0, %1, %2"
2099 [(set_attr "type" "smlalxy")
2100 (set_attr "predicable" "yes")])
2102 ;; Note: there is no maddhidi4bt because this one is the canonical form.
2103 (define_insn "*maddhidi4tb"
2104 [(set (match_operand:DI 0 "s_register_operand" "=r")
2106 (mult:DI (sign_extend:DI
2108 (match_operand:SI 1 "s_register_operand" "r")
2111 (match_operand:HI 2 "s_register_operand" "r")))
2112 (match_operand:DI 3 "s_register_operand" "0")))]
2113 "TARGET_DSP_MULTIPLY"
2114 "smlaltb%?\\t%Q0, %R0, %1, %2"
2115 [(set_attr "type" "smlalxy")
2116 (set_attr "predicable" "yes")])
2118 (define_insn "*maddhidi4tt"
2119 [(set (match_operand:DI 0 "s_register_operand" "=r")
2121 (mult:DI (sign_extend:DI
2123 (match_operand:SI 1 "s_register_operand" "r")
2127 (match_operand:SI 2 "s_register_operand" "r")
2129 (match_operand:DI 3 "s_register_operand" "0")))]
2130 "TARGET_DSP_MULTIPLY"
2131 "smlaltt%?\\t%Q0, %R0, %1, %2"
2132 [(set_attr "type" "smlalxy")
2133 (set_attr "predicable" "yes")])
2135 (define_expand "mulsf3"
2136 [(set (match_operand:SF 0 "s_register_operand" "")
2137 (mult:SF (match_operand:SF 1 "s_register_operand" "")
2138 (match_operand:SF 2 "s_register_operand" "")))]
2139 "TARGET_32BIT && TARGET_HARD_FLOAT"
2143 (define_expand "muldf3"
2144 [(set (match_operand:DF 0 "s_register_operand" "")
2145 (mult:DF (match_operand:DF 1 "s_register_operand" "")
2146 (match_operand:DF 2 "s_register_operand" "")))]
2147 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2153 (define_expand "divsf3"
2154 [(set (match_operand:SF 0 "s_register_operand" "")
2155 (div:SF (match_operand:SF 1 "s_register_operand" "")
2156 (match_operand:SF 2 "s_register_operand" "")))]
2157 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
2160 (define_expand "divdf3"
2161 [(set (match_operand:DF 0 "s_register_operand" "")
2162 (div:DF (match_operand:DF 1 "s_register_operand" "")
2163 (match_operand:DF 2 "s_register_operand" "")))]
2164 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2167 ;; Boolean and,ior,xor insns
2169 ;; Split up double word logical operations
2171 ;; Split up simple DImode logical operations. Simply perform the logical
2172 ;; operation on the upper and lower halves of the registers.
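;; A hedged C sketch of the transformation (illustrative only): the
;; DImode operation is simply the SImode operation applied to each word,
;; so no flags or carries are involved.
;;
;;   #include <stdint.h>
;;
;;   uint64_t and64 (uint64_t a, uint64_t b)
;;   {
;;     uint32_t lo = (uint32_t) a & (uint32_t) b;
;;     uint32_t hi = (uint32_t) (a >> 32) & (uint32_t) (b >> 32);
;;     return ((uint64_t) hi << 32) | lo;	/* two independent ANDs */
;;   }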
2174 [(set (match_operand:DI 0 "s_register_operand" "")
2175 (match_operator:DI 6 "logical_binary_operator"
2176 [(match_operand:DI 1 "s_register_operand" "")
2177 (match_operand:DI 2 "s_register_operand" "")]))]
2178 "TARGET_32BIT && reload_completed
2179 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2180 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2181 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2182 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2185 operands[3] = gen_highpart (SImode, operands[0]);
2186 operands[0] = gen_lowpart (SImode, operands[0]);
2187 operands[4] = gen_highpart (SImode, operands[1]);
2188 operands[1] = gen_lowpart (SImode, operands[1]);
2189 operands[5] = gen_highpart (SImode, operands[2]);
2190 operands[2] = gen_lowpart (SImode, operands[2]);
2195 [(set (match_operand:DI 0 "s_register_operand" "")
2196 (match_operator:DI 6 "logical_binary_operator"
2197 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2198 (match_operand:DI 1 "s_register_operand" "")]))]
2199 "TARGET_32BIT && reload_completed"
2200 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2201 (set (match_dup 3) (match_op_dup:SI 6
2202 [(ashiftrt:SI (match_dup 2) (const_int 31))
2206 operands[3] = gen_highpart (SImode, operands[0]);
2207 operands[0] = gen_lowpart (SImode, operands[0]);
2208 operands[4] = gen_highpart (SImode, operands[1]);
2209 operands[1] = gen_lowpart (SImode, operands[1]);
2210 operands[5] = gen_highpart (SImode, operands[2]);
2211 operands[2] = gen_lowpart (SImode, operands[2]);
2215 ;; The zero extend of operand 2 means we can just copy the high part of
2216 ;; operand1 into operand0.
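;; A hedged C sketch of the resulting code (illustrative only); the same
;; idea applies to the XOR variant of this split below.
;;
;;   #include <stdint.h>
;;
;;   uint64_t ior_zext (uint64_t a, uint32_t b)
;;   {
;;     uint32_t lo = (uint32_t) a | b;		/* one ORR */
;;     uint32_t hi = (uint32_t) (a >> 32);	/* plain register copy */
;;     return ((uint64_t) hi << 32) | lo;
;;   }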
2218 [(set (match_operand:DI 0 "s_register_operand" "")
2220 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2221 (match_operand:DI 1 "s_register_operand" "")))]
2222 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2223 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2224 (set (match_dup 3) (match_dup 4))]
2227 operands[4] = gen_highpart (SImode, operands[1]);
2228 operands[3] = gen_highpart (SImode, operands[0]);
2229 operands[0] = gen_lowpart (SImode, operands[0]);
2230 operands[1] = gen_lowpart (SImode, operands[1]);
2234 ;; The zero extend of operand 2 means we can just copy the high part of
2235 ;; operand1 into operand0.
2237 [(set (match_operand:DI 0 "s_register_operand" "")
2239 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2240 (match_operand:DI 1 "s_register_operand" "")))]
2241 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2242 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2243 (set (match_dup 3) (match_dup 4))]
2246 operands[4] = gen_highpart (SImode, operands[1]);
2247 operands[3] = gen_highpart (SImode, operands[0]);
2248 operands[0] = gen_lowpart (SImode, operands[0]);
2249 operands[1] = gen_lowpart (SImode, operands[1]);
2253 (define_expand "anddi3"
2254 [(set (match_operand:DI 0 "s_register_operand" "")
2255 (and:DI (match_operand:DI 1 "s_register_operand" "")
2256 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2261 (define_insn_and_split "*anddi3_insn"
2262 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
2263 (and:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
2264 (match_operand:DI 2 "arm_anddi_operand_neon" "w ,DL,r ,r ,De,De,w ,DL")))]
2265 "TARGET_32BIT && !TARGET_IWMMXT"
2267 switch (which_alternative)
2269 case 0: /* fall through */
2270 case 6: return "vand\t%P0, %P1, %P2";
2271 case 1: /* fall through */
2272 case 7: return neon_output_logic_immediate ("vand", &operands[2],
2273 DImode, 1, VALID_NEON_QREG_MODE (DImode));
2274 case 2: /* fall through */
2275 case 3: /* fall through */
2276 case 4: /* fall through */
2277 case 5: /* fall through */
2278 return "#";
2279 default: gcc_unreachable ();
2282 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
2283 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
2284 [(set (match_dup 3) (match_dup 4))
2285 (set (match_dup 5) (match_dup 6))]
2288 operands[3] = gen_lowpart (SImode, operands[0]);
2289 operands[5] = gen_highpart (SImode, operands[0]);
2291 operands[4] = simplify_gen_binary (AND, SImode,
2292 gen_lowpart (SImode, operands[1]),
2293 gen_lowpart (SImode, operands[2]));
2294 operands[6] = simplify_gen_binary (AND, SImode,
2295 gen_highpart (SImode, operands[1]),
2296 gen_highpart_mode (SImode, DImode, operands[2]));
2299 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
2300 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,
2301 avoid_neon_for_64bits,avoid_neon_for_64bits")
2302 (set_attr "length" "*,*,8,8,8,8,*,*")
2306 (define_insn_and_split "*anddi_zesidi_di"
2307 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2308 (and:DI (zero_extend:DI
2309 (match_operand:SI 2 "s_register_operand" "r,r"))
2310 (match_operand:DI 1 "s_register_operand" "0,r")))]
2313 "TARGET_32BIT && reload_completed"
2314 ; The zero extend of operand 2 clears the high word of the output
2316 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2317 (set (match_dup 3) (const_int 0))]
2320 operands[3] = gen_highpart (SImode, operands[0]);
2321 operands[0] = gen_lowpart (SImode, operands[0]);
2322 operands[1] = gen_lowpart (SImode, operands[1]);
2324 [(set_attr "length" "8")]
2327 (define_insn "*anddi_sesdi_di"
2328 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2329 (and:DI (sign_extend:DI
2330 (match_operand:SI 2 "s_register_operand" "r,r"))
2331 (match_operand:DI 1 "s_register_operand" "0,r")))]
2334 [(set_attr "length" "8")]
2337 (define_expand "andsi3"
2338 [(set (match_operand:SI 0 "s_register_operand" "")
2339 (and:SI (match_operand:SI 1 "s_register_operand" "")
2340 (match_operand:SI 2 "reg_or_int_operand" "")))]
2345 if (CONST_INT_P (operands[2]))
2347 if (INTVAL (operands[2]) == 255 && arm_arch6)
2349 operands[1] = convert_to_mode (QImode, operands[1], 1);
2350 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2354 arm_split_constant (AND, SImode, NULL_RTX,
2355 INTVAL (operands[2]), operands[0],
2357 optimize && can_create_pseudo_p ());
2362 else /* TARGET_THUMB1 */
2364 if (!CONST_INT_P (operands[2]))
2366 rtx tmp = force_reg (SImode, operands[2]);
2367 if (rtx_equal_p (operands[0], operands[1]))
2371 operands[2] = operands[1];
2379 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2381 operands[2] = force_reg (SImode,
2382 GEN_INT (~INTVAL (operands[2])));
2384 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2389 for (i = 9; i <= 31; i++)
2391 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2393 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2397 else if ((((HOST_WIDE_INT) 1) << i) - 1
2398 == ~INTVAL (operands[2]))
2400 rtx shift = GEN_INT (i);
2401 rtx reg = gen_reg_rtx (SImode);
2403 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2404 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
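/* Illustrative example (values not from the original sources): with
   operands[2] == 0xfffffe00, ~c is 0x1ff and i is 9, so the AND just
   clears the low nine bits and is done as (x >> 9) << 9, avoiding a
   constant that Thumb-1 cannot load in a single instruction.  */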
2410 operands[2] = force_reg (SImode, operands[2]);
2416 ; ??? Check split length for Thumb-2
2417 (define_insn_and_split "*arm_andsi3_insn"
2418 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2419 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r,r")
2420 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2424 bic%?\\t%0, %1, #%B2
2428 && CONST_INT_P (operands[2])
2429 && !(const_ok_for_arm (INTVAL (operands[2]))
2430 || const_ok_for_arm (~INTVAL (operands[2])))"
2431 [(clobber (const_int 0))]
2433 arm_split_constant (AND, SImode, curr_insn,
2434 INTVAL (operands[2]), operands[0], operands[1], 0);
2437 [(set_attr "length" "4,4,4,16")
2438 (set_attr "predicable" "yes")
2439 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,simple_alu_imm")]
2442 (define_insn "*thumb1_andsi3_insn"
2443 [(set (match_operand:SI 0 "register_operand" "=l")
2444 (and:SI (match_operand:SI 1 "register_operand" "%0")
2445 (match_operand:SI 2 "register_operand" "l")))]
2448 [(set_attr "length" "2")
2449 (set_attr "type" "simple_alu_imm")
2450 (set_attr "conds" "set")])
2452 (define_insn "*andsi3_compare0"
2453 [(set (reg:CC_NOOV CC_REGNUM)
2455 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2456 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2458 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2459 (and:SI (match_dup 1) (match_dup 2)))]
2463 bic%.\\t%0, %1, #%B2
2465 [(set_attr "conds" "set")
2466 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2469 (define_insn "*andsi3_compare0_scratch"
2470 [(set (reg:CC_NOOV CC_REGNUM)
2472 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2473 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2475 (clobber (match_scratch:SI 2 "=X,r,X"))]
2479 bic%.\\t%2, %0, #%B1
2481 [(set_attr "conds" "set")
2482 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2485 (define_insn "*zeroextractsi_compare0_scratch"
2486 [(set (reg:CC_NOOV CC_REGNUM)
2487 (compare:CC_NOOV (zero_extract:SI
2488 (match_operand:SI 0 "s_register_operand" "r")
2489 (match_operand 1 "const_int_operand" "n")
2490 (match_operand 2 "const_int_operand" "n"))
2493 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2494 && INTVAL (operands[1]) > 0
2495 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2496 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2498 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2499 << INTVAL (operands[2]));
2500 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2503 [(set_attr "conds" "set")
2504 (set_attr "predicable" "yes")
2505 (set_attr "type" "simple_alu_imm")]
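;; In other words, testing an N-bit field at bit P for zero is a single
;; TST against the mask ((1 << N) - 1) << P.  A hedged C sketch
;; (illustrative only; N and P small enough to satisfy the condition
;; above):
;;
;;   #include <stdint.h>
;;
;;   int field_is_zero (uint32_t x, unsigned n, unsigned p)
;;   {
;;     uint32_t mask = ((1u << n) - 1) << p;
;;     return (x & mask) == 0;	/* same as ((x >> p) & ((1u << n) - 1)) == 0 */
;;   }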
2508 (define_insn_and_split "*ne_zeroextractsi"
2509 [(set (match_operand:SI 0 "s_register_operand" "=r")
2510 (ne:SI (zero_extract:SI
2511 (match_operand:SI 1 "s_register_operand" "r")
2512 (match_operand:SI 2 "const_int_operand" "n")
2513 (match_operand:SI 3 "const_int_operand" "n"))
2515 (clobber (reg:CC CC_REGNUM))]
2517 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2518 && INTVAL (operands[2]) > 0
2519 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2520 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2523 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2524 && INTVAL (operands[2]) > 0
2525 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2526 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2527 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2528 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2530 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2532 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2533 (match_dup 0) (const_int 1)))]
2535 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2536 << INTVAL (operands[3]));
2538 [(set_attr "conds" "clob")
2539 (set (attr "length")
2540 (if_then_else (eq_attr "is_thumb" "yes")
2545 (define_insn_and_split "*ne_zeroextractsi_shifted"
2546 [(set (match_operand:SI 0 "s_register_operand" "=r")
2547 (ne:SI (zero_extract:SI
2548 (match_operand:SI 1 "s_register_operand" "r")
2549 (match_operand:SI 2 "const_int_operand" "n")
2552 (clobber (reg:CC CC_REGNUM))]
2556 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2557 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2559 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2561 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2562 (match_dup 0) (const_int 1)))]
2564 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2566 [(set_attr "conds" "clob")
2567 (set_attr "length" "8")]
2570 (define_insn_and_split "*ite_ne_zeroextractsi"
2571 [(set (match_operand:SI 0 "s_register_operand" "=r")
2572 (if_then_else:SI (ne (zero_extract:SI
2573 (match_operand:SI 1 "s_register_operand" "r")
2574 (match_operand:SI 2 "const_int_operand" "n")
2575 (match_operand:SI 3 "const_int_operand" "n"))
2577 (match_operand:SI 4 "arm_not_operand" "rIK")
2579 (clobber (reg:CC CC_REGNUM))]
2581 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2582 && INTVAL (operands[2]) > 0
2583 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2584 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2585 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2588 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2589 && INTVAL (operands[2]) > 0
2590 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2591 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2592 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2593 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2594 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2596 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2598 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2599 (match_dup 0) (match_dup 4)))]
2601 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2602 << INTVAL (operands[3]));
2604 [(set_attr "conds" "clob")
2605 (set_attr "length" "8")]
2608 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2609 [(set (match_operand:SI 0 "s_register_operand" "=r")
2610 (if_then_else:SI (ne (zero_extract:SI
2611 (match_operand:SI 1 "s_register_operand" "r")
2612 (match_operand:SI 2 "const_int_operand" "n")
2615 (match_operand:SI 3 "arm_not_operand" "rIK")
2617 (clobber (reg:CC CC_REGNUM))]
2618 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2620 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2621 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2622 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2624 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2626 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2627 (match_dup 0) (match_dup 3)))]
2629 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2631 [(set_attr "conds" "clob")
2632 (set_attr "length" "8")]
2636 [(set (match_operand:SI 0 "s_register_operand" "")
2637 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2638 (match_operand:SI 2 "const_int_operand" "")
2639 (match_operand:SI 3 "const_int_operand" "")))
2640 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2642 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2643 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2645 HOST_WIDE_INT temp = INTVAL (operands[2]);
2647 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2648 operands[3] = GEN_INT (32 - temp);
2652 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
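;; These splitters open-code an extraction as a pair of shifts.  A
;; hedged C sketch (illustrative only): to extract an N-bit field
;; starting at bit P,
;;
;;   #include <stdint.h>
;;
;;   uint32_t extract_u (uint32_t x, unsigned n, unsigned p)
;;   {
;;     return (x << (32 - n - p)) >> (32 - n);	/* LSL then LSR */
;;   }
;;
;; and the sign_extract splitter below uses an arithmetic right shift
;; instead, which also sign-extends the field.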
2654 [(set (match_operand:SI 0 "s_register_operand" "")
2655 (match_operator:SI 1 "shiftable_operator"
2656 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2657 (match_operand:SI 3 "const_int_operand" "")
2658 (match_operand:SI 4 "const_int_operand" ""))
2659 (match_operand:SI 5 "s_register_operand" "")]))
2660 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2662 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2665 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2668 HOST_WIDE_INT temp = INTVAL (operands[3]);
2670 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2671 operands[4] = GEN_INT (32 - temp);
2676 [(set (match_operand:SI 0 "s_register_operand" "")
2677 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2678 (match_operand:SI 2 "const_int_operand" "")
2679 (match_operand:SI 3 "const_int_operand" "")))]
2681 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2682 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2684 HOST_WIDE_INT temp = INTVAL (operands[2]);
2686 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2687 operands[3] = GEN_INT (32 - temp);
2692 [(set (match_operand:SI 0 "s_register_operand" "")
2693 (match_operator:SI 1 "shiftable_operator"
2694 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2695 (match_operand:SI 3 "const_int_operand" "")
2696 (match_operand:SI 4 "const_int_operand" ""))
2697 (match_operand:SI 5 "s_register_operand" "")]))
2698 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2700 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2703 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2706 HOST_WIDE_INT temp = INTVAL (operands[3]);
2708 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2709 operands[4] = GEN_INT (32 - temp);
2713 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2714 ;;; represented by the bitfield, then this will produce incorrect results.
2715 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2716 ;;; which have a real bit-field insert instruction, the truncation happens
2717 ;;; in the bit-field insert instruction itself. Since arm does not have a
2718 ;;; bit-field insert instruction, we would have to emit code here to truncate
2719 ;;; the value before we insert. This loses some of the advantage of having
2720 ;;; this insv pattern, so this pattern needs to be reevaluated.
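;;; A small worked example of the hazard (illustrative only): inserting
;;; the value 0x13 into a 4-bit field at bit 0 of X should store 0x3,
;;; i.e. the insert must behave like
;;;
;;;   uint32_t insert4 (uint32_t x, uint32_t val)
;;;   {
;;;     uint32_t mask = (1u << 4) - 1;
;;;     return (x & ~mask) | (val & mask);	/* truncate VAL first */
;;;   }
;;;
;;; and dropping the "val & mask" step would corrupt bit 4 of X.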
2722 (define_expand "insv"
2723 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2724 (match_operand 1 "general_operand" "")
2725 (match_operand 2 "general_operand" ""))
2726 (match_operand 3 "reg_or_int_operand" ""))]
2727 "TARGET_ARM || arm_arch_thumb2"
2730 int start_bit = INTVAL (operands[2]);
2731 int width = INTVAL (operands[1]);
2732 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2733 rtx target, subtarget;
2735 if (arm_arch_thumb2)
2737 if (unaligned_access && MEM_P (operands[0])
2738 && s_register_operand (operands[3], GET_MODE (operands[3]))
2739 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2743 if (BYTES_BIG_ENDIAN)
2744 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2749 base_addr = adjust_address (operands[0], SImode,
2750 start_bit / BITS_PER_UNIT);
2751 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2755 rtx tmp = gen_reg_rtx (HImode);
2757 base_addr = adjust_address (operands[0], HImode,
2758 start_bit / BITS_PER_UNIT);
2759 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2760 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2764 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2766 bool use_bfi = TRUE;
2768 if (CONST_INT_P (operands[3]))
2770 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2774 emit_insn (gen_insv_zero (operands[0], operands[1],
2779 /* See if the set can be done with a single orr instruction. */
2780 if (val == mask && const_ok_for_arm (val << start_bit))
2786 if (!REG_P (operands[3]))
2787 operands[3] = force_reg (SImode, operands[3]);
2789 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2798 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2801 target = copy_rtx (operands[0]);
2802 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2803 subreg as the final target. */
2804 if (GET_CODE (target) == SUBREG)
2806 subtarget = gen_reg_rtx (SImode);
2807 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2808 < GET_MODE_SIZE (SImode))
2809 target = SUBREG_REG (target);
2814 if (CONST_INT_P (operands[3]))
2816 /* Since we are inserting a known constant, we may be able to
2817 reduce the number of bits that we have to clear so that
2818 the mask becomes simple. */
2819 /* ??? This code does not check to see if the new mask is actually
2820 simpler. It may not be. */
2821 rtx op1 = gen_reg_rtx (SImode);
2822 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2823 start of this pattern. */
2824 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2825 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2827 emit_insn (gen_andsi3 (op1, operands[0],
2828 gen_int_mode (~mask2, SImode)));
2829 emit_insn (gen_iorsi3 (subtarget, op1,
2830 gen_int_mode (op3_value << start_bit, SImode)));
2832 else if (start_bit == 0
2833 && !(const_ok_for_arm (mask)
2834 || const_ok_for_arm (~mask)))
2836 /* A trick: since we are setting the bottom bits in the word,
2837 we can shift operand[3] up, operand[0] down, OR them together
2838 and rotate the result back again. This takes 3 insns, and
2839 the third might be mergeable into another op. */
2840 /* The shift up copes with the possibility that operand[3] is
2841 wider than the bitfield. */
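/* Worked example (values are illustrative, not from the original
   sources): width == 8, start_bit == 0, operands[0] == 0xAABBCCDD,
   operands[3] == 0x1EE (wider than the field):
     op0 = 0x1EE << 24       = 0xEE000000  (also truncates the value)
     op1 = 0xAABBCCDD >> 8   = 0x00AABBCC
     op1 |= op0              = 0xEEAABBCC
     rotate op1 left by 8    = 0xAABBCCEE  (field replaced, rest intact).  */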
2842 rtx op0 = gen_reg_rtx (SImode);
2843 rtx op1 = gen_reg_rtx (SImode);
2845 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2846 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2847 emit_insn (gen_iorsi3 (op1, op1, op0));
2848 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2850 else if ((width + start_bit == 32)
2851 && !(const_ok_for_arm (mask)
2852 || const_ok_for_arm (~mask)))
2854 /* Similar trick, but slightly less efficient. */
2856 rtx op0 = gen_reg_rtx (SImode);
2857 rtx op1 = gen_reg_rtx (SImode);
2859 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2860 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2861 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2862 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2866 rtx op0 = gen_int_mode (mask, SImode);
2867 rtx op1 = gen_reg_rtx (SImode);
2868 rtx op2 = gen_reg_rtx (SImode);
2870 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2872 rtx tmp = gen_reg_rtx (SImode);
2874 emit_insn (gen_movsi (tmp, op0));
2878 /* Mask out any bits in operand[3] that are not needed. */
2879 emit_insn (gen_andsi3 (op1, operands[3], op0));
2881 if (CONST_INT_P (op0)
2882 && (const_ok_for_arm (mask << start_bit)
2883 || const_ok_for_arm (~(mask << start_bit))))
2885 op0 = gen_int_mode (~(mask << start_bit), SImode);
2886 emit_insn (gen_andsi3 (op2, operands[0], op0));
2890 if (CONST_INT_P (op0))
2892 rtx tmp = gen_reg_rtx (SImode);
2894 emit_insn (gen_movsi (tmp, op0));
2899 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2901 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2905 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2907 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2910 if (subtarget != target)
2912 /* If TARGET is still a SUBREG, then it must be wider than a word,
2913 so we must be careful only to set the subword we were asked to. */
2914 if (GET_CODE (target) == SUBREG)
2915 emit_move_insn (target, subtarget);
2917 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2924 (define_insn "insv_zero"
2925 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2926 (match_operand:SI 1 "const_int_operand" "M")
2927 (match_operand:SI 2 "const_int_operand" "M"))
2931 [(set_attr "length" "4")
2932 (set_attr "predicable" "yes")]
2935 (define_insn "insv_t2"
2936 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2937 (match_operand:SI 1 "const_int_operand" "M")
2938 (match_operand:SI 2 "const_int_operand" "M"))
2939 (match_operand:SI 3 "s_register_operand" "r"))]
2941 "bfi%?\t%0, %3, %2, %1"
2942 [(set_attr "length" "4")
2943 (set_attr "predicable" "yes")]
2946 ; constants for op 2 will never be given to these patterns.
2947 (define_insn_and_split "*anddi_notdi_di"
2948 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2949 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2950 (match_operand:DI 2 "s_register_operand" "r,0")))]
2953 "TARGET_32BIT && reload_completed
2954 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2955 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2956 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2957 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2960 operands[3] = gen_highpart (SImode, operands[0]);
2961 operands[0] = gen_lowpart (SImode, operands[0]);
2962 operands[4] = gen_highpart (SImode, operands[1]);
2963 operands[1] = gen_lowpart (SImode, operands[1]);
2964 operands[5] = gen_highpart (SImode, operands[2]);
2965 operands[2] = gen_lowpart (SImode, operands[2]);
2967 [(set_attr "length" "8")
2968 (set_attr "predicable" "yes")]
2971 (define_insn_and_split "*anddi_notzesidi_di"
2972 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2973 (and:DI (not:DI (zero_extend:DI
2974 (match_operand:SI 2 "s_register_operand" "r,r")))
2975 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2978 bic%?\\t%Q0, %Q1, %2
2980 ; (not (zero_extend ...)) allows us to just copy the high word from
2981 ; operand1 to operand0.
2984 && operands[0] != operands[1]"
2985 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2986 (set (match_dup 3) (match_dup 4))]
2989 operands[3] = gen_highpart (SImode, operands[0]);
2990 operands[0] = gen_lowpart (SImode, operands[0]);
2991 operands[4] = gen_highpart (SImode, operands[1]);
2992 operands[1] = gen_lowpart (SImode, operands[1]);
2994 [(set_attr "length" "4,8")
2995 (set_attr "predicable" "yes")]
2998 (define_insn_and_split "*anddi_notsesidi_di"
2999 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3000 (and:DI (not:DI (sign_extend:DI
3001 (match_operand:SI 2 "s_register_operand" "r,r")))
3002 (match_operand:DI 1 "s_register_operand" "0,r")))]
3005 "TARGET_32BIT && reload_completed"
3006 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3007 (set (match_dup 3) (and:SI (not:SI
3008 (ashiftrt:SI (match_dup 2) (const_int 31)))
3012 operands[3] = gen_highpart (SImode, operands[0]);
3013 operands[0] = gen_lowpart (SImode, operands[0]);
3014 operands[4] = gen_highpart (SImode, operands[1]);
3015 operands[1] = gen_lowpart (SImode, operands[1]);
3017 [(set_attr "length" "8")
3018 (set_attr "predicable" "yes")]
3021 (define_insn "andsi_notsi_si"
3022 [(set (match_operand:SI 0 "s_register_operand" "=r")
3023 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3024 (match_operand:SI 1 "s_register_operand" "r")))]
3026 "bic%?\\t%0, %1, %2"
3027 [(set_attr "predicable" "yes")]
3030 (define_insn "thumb1_bicsi3"
3031 [(set (match_operand:SI 0 "register_operand" "=l")
3032 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
3033 (match_operand:SI 2 "register_operand" "0")))]
3036 [(set_attr "length" "2")
3037 (set_attr "conds" "set")])
3039 (define_insn "andsi_not_shiftsi_si"
3040 [(set (match_operand:SI 0 "s_register_operand" "=r")
3041 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3042 [(match_operand:SI 2 "s_register_operand" "r")
3043 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3044 (match_operand:SI 1 "s_register_operand" "r")))]
3046 "bic%?\\t%0, %1, %2%S4"
3047 [(set_attr "predicable" "yes")
3048 (set_attr "shift" "2")
3049 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3050 (const_string "alu_shift")
3051 (const_string "alu_shift_reg")))]
3054 (define_insn "*andsi_notsi_si_compare0"
3055 [(set (reg:CC_NOOV CC_REGNUM)
3057 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3058 (match_operand:SI 1 "s_register_operand" "r"))
3060 (set (match_operand:SI 0 "s_register_operand" "=r")
3061 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3063 "bic%.\\t%0, %1, %2"
3064 [(set_attr "conds" "set")]
3067 (define_insn "*andsi_notsi_si_compare0_scratch"
3068 [(set (reg:CC_NOOV CC_REGNUM)
3070 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3071 (match_operand:SI 1 "s_register_operand" "r"))
3073 (clobber (match_scratch:SI 0 "=r"))]
3075 "bic%.\\t%0, %1, %2"
3076 [(set_attr "conds" "set")]
3079 (define_expand "iordi3"
3080 [(set (match_operand:DI 0 "s_register_operand" "")
3081 (ior:DI (match_operand:DI 1 "s_register_operand" "")
3082 (match_operand:DI 2 "neon_logic_op2" "")))]
3087 (define_insn_and_split "*iordi3_insn"
3088 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
3089 (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
3090 (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
3091 "TARGET_32BIT && !TARGET_IWMMXT"
3093 switch (which_alternative)
3095 case 0: /* fall through */
3096 case 6: return "vorr\t%P0, %P1, %P2";
3097 case 1: /* fall through */
3098 case 7: return neon_output_logic_immediate ("vorr", &operands[2],
3099 DImode, 0, VALID_NEON_QREG_MODE (DImode));
3105 default: gcc_unreachable ();
3108 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3109 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3110 [(set (match_dup 3) (match_dup 4))
3111 (set (match_dup 5) (match_dup 6))]
3114 operands[3] = gen_lowpart (SImode, operands[0]);
3115 operands[5] = gen_highpart (SImode, operands[0]);
3117 operands[4] = simplify_gen_binary (IOR, SImode,
3118 gen_lowpart (SImode, operands[1]),
3119 gen_lowpart (SImode, operands[2]));
3120 operands[6] = simplify_gen_binary (IOR, SImode,
3121 gen_highpart (SImode, operands[1]),
3122 gen_highpart_mode (SImode, DImode, operands[2]));
3125 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
3126 (set_attr "length" "*,*,8,8,8,8,*,*")
3127 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
3130 (define_insn "*iordi_zesidi_di"
3131 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3132 (ior:DI (zero_extend:DI
3133 (match_operand:SI 2 "s_register_operand" "r,r"))
3134 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3137 orr%?\\t%Q0, %Q1, %2
3139 [(set_attr "length" "4,8")
3140 (set_attr "predicable" "yes")]
3143 (define_insn "*iordi_sesidi_di"
3144 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3145 (ior:DI (sign_extend:DI
3146 (match_operand:SI 2 "s_register_operand" "r,r"))
3147 (match_operand:DI 1 "s_register_operand" "0,r")))]
3150 [(set_attr "length" "8")
3151 (set_attr "predicable" "yes")]
3154 (define_expand "iorsi3"
3155 [(set (match_operand:SI 0 "s_register_operand" "")
3156 (ior:SI (match_operand:SI 1 "s_register_operand" "")
3157 (match_operand:SI 2 "reg_or_int_operand" "")))]
3160 if (CONST_INT_P (operands[2]))
3164 arm_split_constant (IOR, SImode, NULL_RTX,
3165 INTVAL (operands[2]), operands[0], operands[1],
3166 optimize && can_create_pseudo_p ());
3169 else /* TARGET_THUMB1 */
3171 rtx tmp = force_reg (SImode, operands[2]);
3172 if (rtx_equal_p (operands[0], operands[1]))
3176 operands[2] = operands[1];
3184 (define_insn_and_split "*iorsi3_insn"
3185 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
3186 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r,r")
3187 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
3191 orn%?\\t%0, %1, #%B2
3195 && CONST_INT_P (operands[2])
3196 && !(const_ok_for_arm (INTVAL (operands[2]))
3197 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3198 [(clobber (const_int 0))]
3200 arm_split_constant (IOR, SImode, curr_insn,
3201 INTVAL (operands[2]), operands[0], operands[1], 0);
3204 [(set_attr "length" "4,4,4,16")
3205 (set_attr "arch" "32,t2,32,32")
3206 (set_attr "predicable" "yes")
3207 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,*")]
3210 (define_insn "*thumb1_iorsi3_insn"
3211 [(set (match_operand:SI 0 "register_operand" "=l")
3212 (ior:SI (match_operand:SI 1 "register_operand" "%0")
3213 (match_operand:SI 2 "register_operand" "l")))]
3216 [(set_attr "length" "2")
3217 (set_attr "conds" "set")])
3220 [(match_scratch:SI 3 "r")
3221 (set (match_operand:SI 0 "arm_general_register_operand" "")
3222 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3223 (match_operand:SI 2 "const_int_operand" "")))]
3225 && !const_ok_for_arm (INTVAL (operands[2]))
3226 && const_ok_for_arm (~INTVAL (operands[2]))"
3227 [(set (match_dup 3) (match_dup 2))
3228 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3232 (define_insn "*iorsi3_compare0"
3233 [(set (reg:CC_NOOV CC_REGNUM)
3234 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3235 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3237 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3238 (ior:SI (match_dup 1) (match_dup 2)))]
3240 "orr%.\\t%0, %1, %2"
3241 [(set_attr "conds" "set")
3242 (set_attr "type" "simple_alu_imm,*")]
3245 (define_insn "*iorsi3_compare0_scratch"
3246 [(set (reg:CC_NOOV CC_REGNUM)
3247 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3248 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3250 (clobber (match_scratch:SI 0 "=r,r"))]
3252 "orr%.\\t%0, %1, %2"
3253 [(set_attr "conds" "set")
3254 (set_attr "type" "simple_alu_imm, *")]
3257 (define_expand "xordi3"
3258 [(set (match_operand:DI 0 "s_register_operand" "")
3259 (xor:DI (match_operand:DI 1 "s_register_operand" "")
3260 (match_operand:DI 2 "arm_xordi_operand" "")))]
3265 (define_insn_and_split "*xordi3_insn"
3266 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,&r,&r,?w")
3267 (xor:DI (match_operand:DI 1 "s_register_operand" "w ,%0,r ,0 ,r ,w")
3268 (match_operand:DI 2 "arm_xordi_operand" "w ,r ,r ,Dg,Dg,w")))]
3269 "TARGET_32BIT && !TARGET_IWMMXT"
3271 switch (which_alternative)
3276 case 4: /* fall through */
3277 return "#";
3278 case 0: /* fall through */
3279 case 5: return "veor\t%P0, %P1, %P2";
3280 default: gcc_unreachable ();
3283 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3284 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3285 [(set (match_dup 3) (match_dup 4))
3286 (set (match_dup 5) (match_dup 6))]
3289 operands[3] = gen_lowpart (SImode, operands[0]);
3290 operands[5] = gen_highpart (SImode, operands[0]);
3292 operands[4] = simplify_gen_binary (XOR, SImode,
3293 gen_lowpart (SImode, operands[1]),
3294 gen_lowpart (SImode, operands[2]));
3295 operands[6] = simplify_gen_binary (XOR, SImode,
3296 gen_highpart (SImode, operands[1]),
3297 gen_highpart_mode (SImode, DImode, operands[2]));
3300 [(set_attr "length" "*,8,8,8,8,*")
3301 (set_attr "neon_type" "neon_int_1,*,*,*,*,neon_int_1")
3302 (set_attr "arch" "neon_for_64bits,*,*,*,*,avoid_neon_for_64bits")]
3305 (define_insn "*xordi_zesidi_di"
3306 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3307 (xor:DI (zero_extend:DI
3308 (match_operand:SI 2 "s_register_operand" "r,r"))
3309 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3312 eor%?\\t%Q0, %Q1, %2
3314 [(set_attr "length" "4,8")
3315 (set_attr "predicable" "yes")]
3318 (define_insn "*xordi_sesidi_di"
3319 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3320 (xor:DI (sign_extend:DI
3321 (match_operand:SI 2 "s_register_operand" "r,r"))
3322 (match_operand:DI 1 "s_register_operand" "0,r")))]
3325 [(set_attr "length" "8")
3326 (set_attr "predicable" "yes")]
3329 (define_expand "xorsi3"
3330 [(set (match_operand:SI 0 "s_register_operand" "")
3331 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3332 (match_operand:SI 2 "reg_or_int_operand" "")))]
3334 "if (CONST_INT_P (operands[2]))
3338 arm_split_constant (XOR, SImode, NULL_RTX,
3339 INTVAL (operands[2]), operands[0], operands[1],
3340 optimize && can_create_pseudo_p ());
3343 else /* TARGET_THUMB1 */
3345 rtx tmp = force_reg (SImode, operands[2]);
3346 if (rtx_equal_p (operands[0], operands[1]))
3350 operands[2] = operands[1];
3357 (define_insn_and_split "*arm_xorsi3"
3358 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3359 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
3360 (match_operand:SI 2 "reg_or_int_operand" "I,r,?n")))]
3367 && CONST_INT_P (operands[2])
3368 && !const_ok_for_arm (INTVAL (operands[2]))"
3369 [(clobber (const_int 0))]
3371 arm_split_constant (XOR, SImode, curr_insn,
3372 INTVAL (operands[2]), operands[0], operands[1], 0);
3375 [(set_attr "length" "4,4,16")
3376 (set_attr "predicable" "yes")
3377 (set_attr "type" "simple_alu_imm,*,*")]
3380 (define_insn "*thumb1_xorsi3_insn"
3381 [(set (match_operand:SI 0 "register_operand" "=l")
3382 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3383 (match_operand:SI 2 "register_operand" "l")))]
3386 [(set_attr "length" "2")
3387 (set_attr "conds" "set")
3388 (set_attr "type" "simple_alu_imm")]
3391 (define_insn "*xorsi3_compare0"
3392 [(set (reg:CC_NOOV CC_REGNUM)
3393 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3394 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3396 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3397 (xor:SI (match_dup 1) (match_dup 2)))]
3399 "eor%.\\t%0, %1, %2"
3400 [(set_attr "conds" "set")
3401 (set_attr "type" "simple_alu_imm,*")]
3404 (define_insn "*xorsi3_compare0_scratch"
3405 [(set (reg:CC_NOOV CC_REGNUM)
3406 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3407 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3411 [(set_attr "conds" "set")
3412 (set_attr "type" "simple_alu_imm, *")]
3415 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3416 ; (NOT D) we can sometimes merge the final NOT into one of the following insns.
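; The identity used is De Morgan's law: (~A & ~B) | C == ~((A | B) & ~C).
; A hedged C sketch stating the equivalence (illustrative only):
;
;   uint32_t lhs (uint32_t a, uint32_t b, uint32_t c) { return (~a & ~b) | c; }
;   uint32_t rhs (uint32_t a, uint32_t b, uint32_t c) { return ~((a | b) & ~c); }
;
; lhs and rhs agree for all inputs; the final inversion in rhs is the
; NOT that can sometimes be folded into a later instruction.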
3420 [(set (match_operand:SI 0 "s_register_operand" "")
3421 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3422 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3423 (match_operand:SI 3 "arm_rhs_operand" "")))
3424 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3426 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3427 (not:SI (match_dup 3))))
3428 (set (match_dup 0) (not:SI (match_dup 4)))]
3432 (define_insn_and_split "*andsi_iorsi3_notsi"
3433 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3434 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3435 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3436 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3438 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3439 "&& reload_completed"
3440 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3441 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 0)))]
3443 [(set_attr "length" "8")
3444 (set_attr "ce_count" "2")
3445 (set_attr "predicable" "yes")]
3448 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3449 ; insns are available?
3451 [(set (match_operand:SI 0 "s_register_operand" "")
3452 (match_operator:SI 1 "logical_binary_operator"
3453 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3454 (match_operand:SI 3 "const_int_operand" "")
3455 (match_operand:SI 4 "const_int_operand" ""))
3456 (match_operator:SI 9 "logical_binary_operator"
3457 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3458 (match_operand:SI 6 "const_int_operand" ""))
3459 (match_operand:SI 7 "s_register_operand" "")])]))
3460 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3462 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3463 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3466 [(ashift:SI (match_dup 2) (match_dup 4))
3470 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3473 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3477 [(set (match_operand:SI 0 "s_register_operand" "")
3478 (match_operator:SI 1 "logical_binary_operator"
3479 [(match_operator:SI 9 "logical_binary_operator"
3480 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3481 (match_operand:SI 6 "const_int_operand" ""))
3482 (match_operand:SI 7 "s_register_operand" "")])
3483 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3484 (match_operand:SI 3 "const_int_operand" "")
3485 (match_operand:SI 4 "const_int_operand" ""))]))
3486 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3488 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3489 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3492 [(ashift:SI (match_dup 2) (match_dup 4))
3496 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3499 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3503 [(set (match_operand:SI 0 "s_register_operand" "")
3504 (match_operator:SI 1 "logical_binary_operator"
3505 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3506 (match_operand:SI 3 "const_int_operand" "")
3507 (match_operand:SI 4 "const_int_operand" ""))
3508 (match_operator:SI 9 "logical_binary_operator"
3509 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3510 (match_operand:SI 6 "const_int_operand" ""))
3511 (match_operand:SI 7 "s_register_operand" "")])]))
3512 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3514 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3515 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3518 [(ashift:SI (match_dup 2) (match_dup 4))
3522 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3525 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3529 [(set (match_operand:SI 0 "s_register_operand" "")
3530 (match_operator:SI 1 "logical_binary_operator"
3531 [(match_operator:SI 9 "logical_binary_operator"
3532 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3533 (match_operand:SI 6 "const_int_operand" ""))
3534 (match_operand:SI 7 "s_register_operand" "")])
3535 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3536 (match_operand:SI 3 "const_int_operand" "")
3537 (match_operand:SI 4 "const_int_operand" ""))]))
3538 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3540 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3541 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3544 [(ashift:SI (match_dup 2) (match_dup 4))
3548 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3551 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3555 ;; Minimum and maximum insns
3557 (define_expand "smaxsi3"
3559 (set (match_operand:SI 0 "s_register_operand" "")
3560 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3561 (match_operand:SI 2 "arm_rhs_operand" "")))
3562 (clobber (reg:CC CC_REGNUM))])]
3565 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3567 /* No need for a clobber of the condition code register here. */
3568 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3569 gen_rtx_SMAX (SImode, operands[1],
3575 (define_insn "*smax_0"
3576 [(set (match_operand:SI 0 "s_register_operand" "=r")
3577 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3580 "bic%?\\t%0, %1, %1, asr #31"
3581 [(set_attr "predicable" "yes")]
3584 (define_insn "*smax_m1"
3585 [(set (match_operand:SI 0 "s_register_operand" "=r")
3586 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3589 "orr%?\\t%0, %1, %1, asr #31"
3590 [(set_attr "predicable" "yes")]
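;; Both patterns above use the sign-mask idiom: "%1, asr #31" is zero
;; when %1 is non-negative and all ones when %1 is negative.  A hedged C
;; sketch (illustrative only; assumes arithmetic right shift of signed
;; values):
;;
;;   #include <stdint.h>
;;
;;   int32_t smax0 (int32_t x)  { return x & ~(x >> 31); }	/* max (x, 0), via BIC */
;;   int32_t smaxm1 (int32_t x) { return x | (x >> 31); }	/* max (x, -1), via ORR */
;;
;; The *smin_0 pattern further down applies the same mask with AND.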
3593 (define_insn_and_split "*arm_smax_insn"
3594 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3595 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3596 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3597 (clobber (reg:CC CC_REGNUM))]
3600 ; cmp\\t%1, %2\;movlt\\t%0, %2
3601 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3603 [(set (reg:CC CC_REGNUM)
3604 (compare:CC (match_dup 1) (match_dup 2)))
3606 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3610 [(set_attr "conds" "clob")
3611 (set_attr "length" "8,12")]
3614 (define_expand "sminsi3"
3616 (set (match_operand:SI 0 "s_register_operand" "")
3617 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3618 (match_operand:SI 2 "arm_rhs_operand" "")))
3619 (clobber (reg:CC CC_REGNUM))])]
3622 if (operands[2] == const0_rtx)
3624 /* No need for a clobber of the condition code register here. */
3625 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3626 gen_rtx_SMIN (SImode, operands[1],
3632 (define_insn "*smin_0"
3633 [(set (match_operand:SI 0 "s_register_operand" "=r")
3634 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3637 "and%?\\t%0, %1, %1, asr #31"
3638 [(set_attr "predicable" "yes")]
3641 (define_insn_and_split "*arm_smin_insn"
3642 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3643 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3644 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3645 (clobber (reg:CC CC_REGNUM))]
3648 ; cmp\\t%1, %2\;movge\\t%0, %2
3649 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3651 [(set (reg:CC CC_REGNUM)
3652 (compare:CC (match_dup 1) (match_dup 2)))
3654 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3658 [(set_attr "conds" "clob")
3659 (set_attr "length" "8,12")]
3662 (define_expand "umaxsi3"
3664 (set (match_operand:SI 0 "s_register_operand" "")
3665 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3666 (match_operand:SI 2 "arm_rhs_operand" "")))
3667 (clobber (reg:CC CC_REGNUM))])]
3672 (define_insn_and_split "*arm_umaxsi3"
3673 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3674 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3675 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3676 (clobber (reg:CC CC_REGNUM))]
3679 ; cmp\\t%1, %2\;movcc\\t%0, %2
3680 ; cmp\\t%1, %2\;movcs\\t%0, %1
3681 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3683 [(set (reg:CC CC_REGNUM)
3684 (compare:CC (match_dup 1) (match_dup 2)))
3686 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3690 [(set_attr "conds" "clob")
3691 (set_attr "length" "8,8,12")]
3694 (define_expand "uminsi3"
3696 (set (match_operand:SI 0 "s_register_operand" "")
3697 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3698 (match_operand:SI 2 "arm_rhs_operand" "")))
3699 (clobber (reg:CC CC_REGNUM))])]
3704 (define_insn_and_split "*arm_uminsi3"
3705 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3706 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3707 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3708 (clobber (reg:CC CC_REGNUM))]
3711 ; cmp\\t%1, %2\;movcs\\t%0, %2
3712 ; cmp\\t%1, %2\;movcc\\t%0, %1
3713 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3715 [(set (reg:CC CC_REGNUM)
3716 (compare:CC (match_dup 1) (match_dup 2)))
3718 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3722 [(set_attr "conds" "clob")
3723 (set_attr "length" "8,8,12")]
3726 (define_insn "*store_minmaxsi"
3727 [(set (match_operand:SI 0 "memory_operand" "=m")
3728 (match_operator:SI 3 "minmax_operator"
3729 [(match_operand:SI 1 "s_register_operand" "r")
3730 (match_operand:SI 2 "s_register_operand" "r")]))
3731 (clobber (reg:CC CC_REGNUM))]
3732 "TARGET_32BIT && optimize_insn_for_size_p()"
3734 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3735 operands[1], operands[2]);
3736 output_asm_insn (\"cmp\\t%1, %2\", operands);
3738 output_asm_insn (\"ite\t%d3\", operands);
3739 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3740 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3743 [(set_attr "conds" "clob")
3744 (set (attr "length")
3745 (if_then_else (eq_attr "is_thumb" "yes")
3748 (set_attr "type" "store1")]
3751 ; Reject the frame pointer in operand[1], since reloading this after
3752 ; it has been eliminated can cause carnage.
3753 (define_insn "*minmax_arithsi"
3754 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3755 (match_operator:SI 4 "shiftable_operator"
3756 [(match_operator:SI 5 "minmax_operator"
3757 [(match_operand:SI 2 "s_register_operand" "r,r")
3758 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3759 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3760 (clobber (reg:CC CC_REGNUM))]
3761 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3764 enum rtx_code code = GET_CODE (operands[4]);
3767 if (which_alternative != 0 || operands[3] != const0_rtx
3768 || (code != PLUS && code != IOR && code != XOR))
3773 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3774 operands[2], operands[3]);
3775 output_asm_insn (\"cmp\\t%2, %3\", operands);
3779 output_asm_insn (\"ite\\t%d5\", operands);
3781 output_asm_insn (\"it\\t%d5\", operands);
3783 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3785 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3788 [(set_attr "conds" "clob")
3789 (set (attr "length")
3790 (if_then_else (eq_attr "is_thumb" "yes")
3795 ; Reject the frame pointer in operand[1], since reloading this after
3796 ; it has been eliminated can cause carnage.
3797 (define_insn_and_split "*minmax_arithsi_non_canon"
3798 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3800 (match_operand:SI 1 "s_register_operand" "0,?r")
3801 (match_operator:SI 4 "minmax_operator"
3802 [(match_operand:SI 2 "s_register_operand" "r,r")
3803 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
3804 (clobber (reg:CC CC_REGNUM))]
3805 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3807 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3808 [(set (reg:CC CC_REGNUM)
3809 (compare:CC (match_dup 2) (match_dup 3)))
3811 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3813 (minus:SI (match_dup 1)
3815 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3817 (minus:SI (match_dup 1)
3820 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3821 operands[2], operands[3]);
3822 enum rtx_code rc = minmax_code (operands[4]);
3823 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3824 operands[2], operands[3]);
3826 if (mode == CCFPmode || mode == CCFPEmode)
3827 rc = reverse_condition_maybe_unordered (rc);
3829 rc = reverse_condition (rc);
3830 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3832 [(set_attr "conds" "clob")
3833 (set (attr "length")
3834 (if_then_else (eq_attr "is_thumb" "yes")
3839 (define_code_iterator SAT [smin smax])
3840 (define_code_iterator SATrev [smin smax])
3841 (define_code_attr SATlo [(smin "1") (smax "2")])
3842 (define_code_attr SAThi [(smin "2") (smax "1")])
3844 (define_insn "*satsi_<SAT:code>"
3845 [(set (match_operand:SI 0 "s_register_operand" "=r")
3846 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
3847 (match_operand:SI 1 "const_int_operand" "i"))
3848 (match_operand:SI 2 "const_int_operand" "i")))]
3849 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3850 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3854 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3855 &mask, &signed_sat))
3858 operands[1] = GEN_INT (mask);
3860 return "ssat%?\t%0, %1, %3";
3862 return "usat%?\t%0, %1, %3";
3864 [(set_attr "predicable" "yes")
3865 (set_attr "insn" "sat")])
3867 (define_insn "*satsi_<SAT:code>_shift"
3868 [(set (match_operand:SI 0 "s_register_operand" "=r")
3869 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
3870 [(match_operand:SI 4 "s_register_operand" "r")
3871 (match_operand:SI 5 "const_int_operand" "i")])
3872 (match_operand:SI 1 "const_int_operand" "i"))
3873 (match_operand:SI 2 "const_int_operand" "i")))]
3874 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3875 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3879 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3880 &mask, &signed_sat))
3883 operands[1] = GEN_INT (mask);
3885 return "ssat%?\t%0, %1, %4%S3";
3887 return "usat%?\t%0, %1, %4%S3";
3889 [(set_attr "predicable" "yes")
3890 (set_attr "insn" "sat")
3891 (set_attr "shift" "3")
3892 (set_attr "type" "alu_shift")])
3894 ;; Shift and rotation insns
3896 (define_expand "ashldi3"
3897 [(set (match_operand:DI 0 "s_register_operand" "")
3898 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3899 (match_operand:SI 2 "general_operand" "")))]
3904 /* Delay the decision whether to use NEON or core-regs until
3905 register allocation. */
3906 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
3911 /* Only the NEON case can handle in-memory shift counts. */
3912 if (!reg_or_int_operand (operands[2], SImode))
3913 operands[2] = force_reg (SImode, operands[2]);
3916 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3917 ; /* No special preparation statements; expand pattern as above. */
3920 rtx scratch1, scratch2;
3922 if (CONST_INT_P (operands[2])
3923 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3925 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3929 /* Ideally we should use iwmmxt here if we could know that operands[1]
3930 ends up already living in an iwmmxt register. Otherwise it's
3931 cheaper to generate the alternative core-register code than to move
3932 the values into iwmmxt registers and back. */
3934 /* If we're optimizing for size, we prefer the libgcc calls. */
3935 if (optimize_function_for_size_p (cfun))
3938 /* Expand operation using core-registers.
3939 'FAIL' would achieve the same thing, but this is a bit smarter. */
3940 scratch1 = gen_reg_rtx (SImode);
3941 scratch2 = gen_reg_rtx (SImode);
3942 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3943 operands[2], scratch1, scratch2);
3949 (define_insn_and_split "arm_ashldi3_1bit"
3950 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3951 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3953 (clobber (reg:CC CC_REGNUM))]
3955 "#" ; "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3956 "&& reload_completed"
3957 [(parallel [(set (reg:CC CC_REGNUM)
3958 (compare:CC (ashift:SI (match_dup 1) (const_int 1))
3960 (set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])
3961 (set (match_dup 2) (plus:SI (plus:SI (match_dup 3) (match_dup 3))
3962 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
3964 operands[2] = gen_highpart (SImode, operands[0]);
3965 operands[0] = gen_lowpart (SImode, operands[0]);
3966 operands[3] = gen_highpart (SImode, operands[1]);
3967 operands[1] = gen_lowpart (SImode, operands[1]);
3969 [(set_attr "conds" "clob")
3970 (set_attr "length" "8")]
3973 (define_expand "ashlsi3"
3974 [(set (match_operand:SI 0 "s_register_operand" "")
3975 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3976 (match_operand:SI 2 "arm_rhs_operand" "")))]
3979 if (CONST_INT_P (operands[2])
3980 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3982 emit_insn (gen_movsi (operands[0], const0_rtx));
3988 (define_insn "*thumb1_ashlsi3"
3989 [(set (match_operand:SI 0 "register_operand" "=l,l")
3990 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3991 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3994 [(set_attr "length" "2")
3995 (set_attr "conds" "set")])
3997 (define_expand "ashrdi3"
3998 [(set (match_operand:DI 0 "s_register_operand" "")
3999 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4000 (match_operand:SI 2 "reg_or_int_operand" "")))]
4005 /* Delay the decision whether to use NEON or core-regs until
4006 register allocation. */
4007 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
4011 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4012 ; /* No special preparation statements; expand pattern as above. */
4015 rtx scratch1, scratch2;
4017 if (CONST_INT_P (operands[2])
4018 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4020 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
4024 /* Ideally we should use iwmmxt here if we could know that operands[1]
4025 ends up already living in an iwmmxt register. Otherwise it's
4026 cheaper to generate the alternative core-register code than to move
4027 the values into iwmmxt registers and back. */
4029 /* If we're optimizing for size, we prefer the libgcc calls. */
4030 if (optimize_function_for_size_p (cfun))
4033 /* Expand operation using core-registers.
4034 'FAIL' would achieve the same thing, but this is a bit smarter. */
4035 scratch1 = gen_reg_rtx (SImode);
4036 scratch2 = gen_reg_rtx (SImode);
4037 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4038 operands[2], scratch1, scratch2);
4044 (define_insn_and_split "arm_ashrdi3_1bit"
4045 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4046 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4048 (clobber (reg:CC CC_REGNUM))]
4050 "#" ; "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
4051 "&& reload_completed"
4052 [(parallel [(set (reg:CC CC_REGNUM)
4053 (compare:CC (ashiftrt:SI (match_dup 3) (const_int 1))
4055 (set (match_dup 2) (ashiftrt:SI (match_dup 3) (const_int 1)))])
4056 (set (match_dup 0) (unspec:SI [(match_dup 1)
4057 (reg:CC_C CC_REGNUM)]
4060 operands[2] = gen_highpart (SImode, operands[0]);
4061 operands[0] = gen_lowpart (SImode, operands[0]);
4062 operands[3] = gen_highpart (SImode, operands[1]);
4063 operands[1] = gen_lowpart (SImode, operands[1]);
4065 [(set_attr "conds" "clob")
4066 (set_attr "length" "8")]
4070 [(set (match_operand:SI 0 "s_register_operand" "=r")
4071 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
4072 (reg:CC_C CC_REGNUM)]
4076 [(set_attr "conds" "use")
4077 (set_attr "insn" "mov")
4078 (set_attr "type" "alu_shift")]
4081 (define_expand "ashrsi3"
4082 [(set (match_operand:SI 0 "s_register_operand" "")
4083 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4084 (match_operand:SI 2 "arm_rhs_operand" "")))]
4087 if (CONST_INT_P (operands[2])
4088 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4089 operands[2] = GEN_INT (31);
4093 (define_insn "*thumb1_ashrsi3"
4094 [(set (match_operand:SI 0 "register_operand" "=l,l")
4095 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4096 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4099 [(set_attr "length" "2")
4100 (set_attr "conds" "set")])
4102 (define_expand "lshrdi3"
4103 [(set (match_operand:DI 0 "s_register_operand" "")
4104 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4105 (match_operand:SI 2 "reg_or_int_operand" "")))]
4110 /* Delay the decision whether to use NEON or core-regs until
4111 register allocation. */
4112 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
4116 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4117 ; /* No special preparation statements; expand pattern as above. */
4120 rtx scratch1, scratch2;
4122 if (CONST_INT_P (operands[2])
4123 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4125 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
4129 /* Ideally we should use iwmmxt here if we could know that operands[1]
4130 ends up already living in an iwmmxt register. Otherwise it's
4131 cheaper to generate the alternative core-register code than to move
4132 the values into iwmmxt registers and back. */
4134 /* If we're optimizing for size, we prefer the libgcc calls. */
4135 if (optimize_function_for_size_p (cfun))
4138 /* Expand operation using core-registers.
4139 'FAIL' would achieve the same thing, but this is a bit smarter. */
4140 scratch1 = gen_reg_rtx (SImode);
4141 scratch2 = gen_reg_rtx (SImode);
4142 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4143 operands[2], scratch1, scratch2);
4149 (define_insn_and_split "arm_lshrdi3_1bit"
4150 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4151 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4153 (clobber (reg:CC CC_REGNUM))]
4155 "#" ; "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
4156 "&& reload_completed"
4157 [(parallel [(set (reg:CC CC_REGNUM)
4158 (compare:CC (lshiftrt:SI (match_dup 3) (const_int 1))
4160 (set (match_dup 2) (lshiftrt:SI (match_dup 3) (const_int 1)))])
4161 (set (match_dup 0) (unspec:SI [(match_dup 1)
4162 (reg:CC_C CC_REGNUM)]
4165 operands[2] = gen_highpart (SImode, operands[0]);
4166 operands[0] = gen_lowpart (SImode, operands[0]);
4167 operands[3] = gen_highpart (SImode, operands[1]);
4168 operands[1] = gen_lowpart (SImode, operands[1]);
4170 [(set_attr "conds" "clob")
4171 (set_attr "length" "8")]
4174 (define_expand "lshrsi3"
4175 [(set (match_operand:SI 0 "s_register_operand" "")
4176 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4177 (match_operand:SI 2 "arm_rhs_operand" "")))]
4180 if (CONST_INT_P (operands[2])
4181 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4183 emit_insn (gen_movsi (operands[0], const0_rtx));
4189 (define_insn "*thumb1_lshrsi3"
4190 [(set (match_operand:SI 0 "register_operand" "=l,l")
4191 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4192 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4195 [(set_attr "length" "2")
4196 (set_attr "conds" "set")])
4198 (define_expand "rotlsi3"
4199 [(set (match_operand:SI 0 "s_register_operand" "")
4200 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4201 (match_operand:SI 2 "reg_or_int_operand" "")))]
4204 if (CONST_INT_P (operands[2]))
4205 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4208 rtx reg = gen_reg_rtx (SImode);
4209 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
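;; A short worked example of the rewrite above (register numbers and exact
;; instruction selection are illustrative only; later passes make the final
;; choice).  ARM has a rotate-right but no rotate-left, so a left-rotate by N
;; becomes a right-rotate by (32 - N) % 32:
;;   rotate left by 8    =>  mov r0, r1, ror #24    @ (32 - 8) % 32 = 24
;;   rotate left by r2   =>  rsb r3, r2, #32        @ 32 - count
;;                           mov r0, r1, ror r3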
4215 (define_expand "rotrsi3"
4216 [(set (match_operand:SI 0 "s_register_operand" "")
4217 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4218 (match_operand:SI 2 "arm_rhs_operand" "")))]
4223 if (CONST_INT_P (operands[2])
4224 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4225 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4227 else /* TARGET_THUMB1 */
4229 if (CONST_INT_P (operands [2]))
4230 operands [2] = force_reg (SImode, operands[2]);
4235 (define_insn "*thumb1_rotrsi3"
4236 [(set (match_operand:SI 0 "register_operand" "=l")
4237 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
4238 (match_operand:SI 2 "register_operand" "l")))]
4241 [(set_attr "length" "2")]
4244 (define_insn "*arm_shiftsi3"
4245 [(set (match_operand:SI 0 "s_register_operand" "=r")
4246 (match_operator:SI 3 "shift_operator"
4247 [(match_operand:SI 1 "s_register_operand" "r")
4248 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
4250 "* return arm_output_shift(operands, 0);"
4251 [(set_attr "predicable" "yes")
4252 (set_attr "shift" "1")
4253 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4254 (const_string "alu_shift")
4255 (const_string "alu_shift_reg")))]
4258 (define_insn "*shiftsi3_compare"
4259 [(set (reg:CC CC_REGNUM)
4260 (compare:CC (match_operator:SI 3 "shift_operator"
4261 [(match_operand:SI 1 "s_register_operand" "r")
4262 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4264 (set (match_operand:SI 0 "s_register_operand" "=r")
4265 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4267 "* return arm_output_shift(operands, 1);"
4268 [(set_attr "conds" "set")
4269 (set_attr "shift" "1")
4270 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4271 (const_string "alu_shift")
4272 (const_string "alu_shift_reg")))]
4275 (define_insn "*shiftsi3_compare0"
4276 [(set (reg:CC_NOOV CC_REGNUM)
4277 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4278 [(match_operand:SI 1 "s_register_operand" "r")
4279 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4281 (set (match_operand:SI 0 "s_register_operand" "=r")
4282 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4284 "* return arm_output_shift(operands, 1);"
4285 [(set_attr "conds" "set")
4286 (set_attr "shift" "1")
4287 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4288 (const_string "alu_shift")
4289 (const_string "alu_shift_reg")))]
4292 (define_insn "*shiftsi3_compare0_scratch"
4293 [(set (reg:CC_NOOV CC_REGNUM)
4294 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4295 [(match_operand:SI 1 "s_register_operand" "r")
4296 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4298 (clobber (match_scratch:SI 0 "=r"))]
4300 "* return arm_output_shift(operands, 1);"
4301 [(set_attr "conds" "set")
4302 (set_attr "shift" "1")]
4305 (define_insn "*not_shiftsi"
4306 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4307 (not:SI (match_operator:SI 3 "shift_operator"
4308 [(match_operand:SI 1 "s_register_operand" "r,r")
4309 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4312 [(set_attr "predicable" "yes")
4313 (set_attr "shift" "1")
4314 (set_attr "insn" "mvn")
4315 (set_attr "arch" "32,a")
4316 (set_attr "type" "alu_shift,alu_shift_reg")])
4318 (define_insn "*not_shiftsi_compare0"
4319 [(set (reg:CC_NOOV CC_REGNUM)
4321 (not:SI (match_operator:SI 3 "shift_operator"
4322 [(match_operand:SI 1 "s_register_operand" "r,r")
4323 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4325 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4326 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4329 [(set_attr "conds" "set")
4330 (set_attr "shift" "1")
4331 (set_attr "insn" "mvn")
4332 (set_attr "arch" "32,a")
4333 (set_attr "type" "alu_shift,alu_shift_reg")])
4335 (define_insn "*not_shiftsi_compare0_scratch"
4336 [(set (reg:CC_NOOV CC_REGNUM)
4338 (not:SI (match_operator:SI 3 "shift_operator"
4339 [(match_operand:SI 1 "s_register_operand" "r,r")
4340 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4342 (clobber (match_scratch:SI 0 "=r,r"))]
4345 [(set_attr "conds" "set")
4346 (set_attr "shift" "1")
4347 (set_attr "insn" "mvn")
4348 (set_attr "arch" "32,a")
4349 (set_attr "type" "alu_shift,alu_shift_reg")])
4351 ;; We don't really have extzv, but defining this using shifts helps
4352 ;; to reduce register pressure later on.
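;; As a minimal sketch of what this expands to (operand registers and field
;; positions are illustrative only): extracting an 8-bit field starting at
;; bit 4 gives lshift = 32 - 8 - 4 = 20 and rshift = 32 - 8 = 24, i.e.
;; roughly
;;   lsl  r0, r1, #20     @ move the field to the top of the word
;;   lsr  r0, r0, #24     @ shift it back down, zero-filling from the left
;; whereas on Thumb-2 (arm_arch_thumb2) a register source can instead use the
;; single instruction matched by extzv_t2 below:
;;   ubfx r0, r1, #4, #8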
4354 (define_expand "extzv"
4355 [(set (match_operand 0 "s_register_operand" "")
4356 (zero_extract (match_operand 1 "nonimmediate_operand" "")
4357 (match_operand 2 "const_int_operand" "")
4358 (match_operand 3 "const_int_operand" "")))]
4359 "TARGET_THUMB1 || arm_arch_thumb2"
4362 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4363 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4365 if (arm_arch_thumb2)
4367 HOST_WIDE_INT width = INTVAL (operands[2]);
4368 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4370 if (unaligned_access && MEM_P (operands[1])
4371 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4375 if (BYTES_BIG_ENDIAN)
4376 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4381 base_addr = adjust_address (operands[1], SImode,
4382 bitpos / BITS_PER_UNIT);
4383 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4387 rtx dest = operands[0];
4388 rtx tmp = gen_reg_rtx (SImode);
4390 /* We may get a paradoxical subreg here. Strip it off. */
4391 if (GET_CODE (dest) == SUBREG
4392 && GET_MODE (dest) == SImode
4393 && GET_MODE (SUBREG_REG (dest)) == HImode)
4394 dest = SUBREG_REG (dest);
4396 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4399 base_addr = adjust_address (operands[1], HImode,
4400 bitpos / BITS_PER_UNIT);
4401 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4402 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4406 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4408 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4416 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4419 operands[3] = GEN_INT (rshift);
4423 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4427 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4428 operands[3], gen_reg_rtx (SImode)));
4433 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4435 (define_expand "extzv_t1"
4436 [(set (match_operand:SI 4 "s_register_operand" "")
4437 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
4438 (match_operand:SI 2 "const_int_operand" "")))
4439 (set (match_operand:SI 0 "s_register_operand" "")
4440 (lshiftrt:SI (match_dup 4)
4441 (match_operand:SI 3 "const_int_operand" "")))]
4445 (define_expand "extv"
4446 [(set (match_operand 0 "s_register_operand" "")
4447 (sign_extract (match_operand 1 "nonimmediate_operand" "")
4448 (match_operand 2 "const_int_operand" "")
4449 (match_operand 3 "const_int_operand" "")))]
4452 HOST_WIDE_INT width = INTVAL (operands[2]);
4453 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4455 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4456 && (bitpos % BITS_PER_UNIT) == 0)
4460 if (BYTES_BIG_ENDIAN)
4461 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4465 base_addr = adjust_address (operands[1], SImode,
4466 bitpos / BITS_PER_UNIT);
4467 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4471 rtx dest = operands[0];
4472 rtx tmp = gen_reg_rtx (SImode);
4474 /* We may get a paradoxical subreg here. Strip it off. */
4475 if (GET_CODE (dest) == SUBREG
4476 && GET_MODE (dest) == SImode
4477 && GET_MODE (SUBREG_REG (dest)) == HImode)
4478 dest = SUBREG_REG (dest);
4480 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4483 base_addr = adjust_address (operands[1], HImode,
4484 bitpos / BITS_PER_UNIT);
4485 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4486 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4491 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4493 else if (GET_MODE (operands[0]) == SImode
4494 && GET_MODE (operands[1]) == SImode)
4496 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4504 ; Helper to expand register forms of extv with the proper modes.
4506 (define_expand "extv_regsi"
4507 [(set (match_operand:SI 0 "s_register_operand" "")
4508 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4509 (match_operand 2 "const_int_operand" "")
4510 (match_operand 3 "const_int_operand" "")))]
4515 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
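; For example (a sketch, not tied to any particular operand choice): loading
; a 32-bit member of a packed structure whose address is only byte-aligned
; goes through unaligned_loadsi below and comes out as a single
;   ldr  r0, [r1]        @ unaligned
; rather than a sequence of byte loads, provided unaligned_access is enabled.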
4517 (define_insn "unaligned_loadsi"
4518 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4519 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4520 UNSPEC_UNALIGNED_LOAD))]
4521 "unaligned_access && TARGET_32BIT"
4522 "ldr%?\t%0, %1\t@ unaligned"
4523 [(set_attr "arch" "t2,any")
4524 (set_attr "length" "2,4")
4525 (set_attr "predicable" "yes")
4526 (set_attr "type" "load1")])
4528 (define_insn "unaligned_loadhis"
4529 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4531 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4532 UNSPEC_UNALIGNED_LOAD)))]
4533 "unaligned_access && TARGET_32BIT"
4534 "ldr%(sh%)\t%0, %1\t@ unaligned"
4535 [(set_attr "arch" "t2,any")
4536 (set_attr "length" "2,4")
4537 (set_attr "predicable" "yes")
4538 (set_attr "type" "load_byte")])
4540 (define_insn "unaligned_loadhiu"
4541 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4543 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4544 UNSPEC_UNALIGNED_LOAD)))]
4545 "unaligned_access && TARGET_32BIT"
4546 "ldr%(h%)\t%0, %1\t@ unaligned"
4547 [(set_attr "arch" "t2,any")
4548 (set_attr "length" "2,4")
4549 (set_attr "predicable" "yes")
4550 (set_attr "type" "load_byte")])
4552 (define_insn "unaligned_storesi"
4553 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4554 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4555 UNSPEC_UNALIGNED_STORE))]
4556 "unaligned_access && TARGET_32BIT"
4557 "str%?\t%1, %0\t@ unaligned"
4558 [(set_attr "arch" "t2,any")
4559 (set_attr "length" "2,4")
4560 (set_attr "predicable" "yes")
4561 (set_attr "type" "store1")])
4563 (define_insn "unaligned_storehi"
4564 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4565 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4566 UNSPEC_UNALIGNED_STORE))]
4567 "unaligned_access && TARGET_32BIT"
4568 "str%(h%)\t%1, %0\t@ unaligned"
4569 [(set_attr "arch" "t2,any")
4570 (set_attr "length" "2,4")
4571 (set_attr "predicable" "yes")
4572 (set_attr "type" "store1")])
4574 ;; Unaligned double-word load and store.
4575 ;; Split after reload into two unaligned single-word accesses.
4576 ;; Deferring the split until then also prevents lower_subreg from splitting
4577 ;; some other aligned double-word accesses too early.  Used for internal memcpy.
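;; As an illustration (the registers and the little-endian layout are only an
;; assumption about what reload might produce), a DImode unaligned load of
;; r0:r1 from the address held in r2 is split after reload into two SImode
;; unaligned loads of the low and high words:
;;   ldr  r0, [r2]        @ unaligned
;;   ldr  r1, [r2, #4]    @ unaligned
;; If the low destination register overlapped the base address, the split
;; below emits the two loads in the opposite order instead.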
4579 (define_insn_and_split "unaligned_loaddi"
4580 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4581 (unspec:DI [(match_operand:DI 1 "memory_operand" "o,o")]
4582 UNSPEC_UNALIGNED_LOAD))]
4583 "unaligned_access && TARGET_32BIT"
4585 "&& reload_completed"
4586 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_LOAD))
4587 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_LOAD))]
4589 operands[2] = gen_highpart (SImode, operands[0]);
4590 operands[0] = gen_lowpart (SImode, operands[0]);
4591 operands[3] = gen_highpart (SImode, operands[1]);
4592 operands[1] = gen_lowpart (SImode, operands[1]);
4594 /* If the first destination register overlaps with the base address,
4595 swap the order in which the loads are emitted. */
4596 if (reg_overlap_mentioned_p (operands[0], operands[1]))
4598 rtx tmp = operands[1];
4599 operands[1] = operands[3];
4602 operands[0] = operands[2];
4606 [(set_attr "arch" "t2,any")
4607 (set_attr "length" "4,8")
4608 (set_attr "predicable" "yes")
4609 (set_attr "type" "load2")])
4611 (define_insn_and_split "unaligned_storedi"
4612 [(set (match_operand:DI 0 "memory_operand" "=o,o")
4613 (unspec:DI [(match_operand:DI 1 "s_register_operand" "l,r")]
4614 UNSPEC_UNALIGNED_STORE))]
4615 "unaligned_access && TARGET_32BIT"
4617 "&& reload_completed"
4618 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_STORE))
4619 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_STORE))]
4621 operands[2] = gen_highpart (SImode, operands[0]);
4622 operands[0] = gen_lowpart (SImode, operands[0]);
4623 operands[3] = gen_highpart (SImode, operands[1]);
4624 operands[1] = gen_lowpart (SImode, operands[1]);
4626 [(set_attr "arch" "t2,any")
4627 (set_attr "length" "4,8")
4628 (set_attr "predicable" "yes")
4629 (set_attr "type" "store2")])
4632 (define_insn "*extv_reg"
4633 [(set (match_operand:SI 0 "s_register_operand" "=r")
4634 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4635 (match_operand:SI 2 "const_int_operand" "M")
4636 (match_operand:SI 3 "const_int_operand" "M")))]
4638 "sbfx%?\t%0, %1, %3, %2"
4639 [(set_attr "length" "4")
4640 (set_attr "predicable" "yes")]
4643 (define_insn "extzv_t2"
4644 [(set (match_operand:SI 0 "s_register_operand" "=r")
4645 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4646 (match_operand:SI 2 "const_int_operand" "M")
4647 (match_operand:SI 3 "const_int_operand" "M")))]
4649 "ubfx%?\t%0, %1, %3, %2"
4650 [(set_attr "length" "4")
4651 (set_attr "predicable" "yes")]
4655 ;; Division instructions
4656 (define_insn "divsi3"
4657 [(set (match_operand:SI 0 "s_register_operand" "=r")
4658 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4659 (match_operand:SI 2 "s_register_operand" "r")))]
4661 "sdiv%?\t%0, %1, %2"
4662 [(set_attr "predicable" "yes")
4663 (set_attr "type" "sdiv")]
4666 (define_insn "udivsi3"
4667 [(set (match_operand:SI 0 "s_register_operand" "=r")
4668 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4669 (match_operand:SI 2 "s_register_operand" "r")))]
4671 "udiv%?\t%0, %1, %2"
4672 [(set_attr "predicable" "yes")
4673 (set_attr "type" "udiv")]
4677 ;; Unary arithmetic insns
4679 (define_expand "negdi2"
4681 [(set (match_operand:DI 0 "s_register_operand" "")
4682 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4683 (clobber (reg:CC CC_REGNUM))])]
4688 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4694 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4695 ;; The first alternative allows the common case of a *full* overlap.
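;; For instance (register numbers purely illustrative), negating the pair
;; r0:r1 in place matches the first alternative ("0") and is split into
;;   rsbs r0, r0, #0
;;   rsc  r1, r1, #0
;; A destination that only partially overlapped the source (say r1:r2 from
;; r0:r1) would let the RSBS clobber r1 before the RSC reads it as %R1; the
;; "&r" early-clobber in the second alternative rules that case out.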
4696 (define_insn_and_split "*arm_negdi2"
4697 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4698 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4699 (clobber (reg:CC CC_REGNUM))]
4701 "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4702 "&& reload_completed"
4703 [(parallel [(set (reg:CC CC_REGNUM)
4704 (compare:CC (const_int 0) (match_dup 1)))
4705 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4706 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4707 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4709 operands[2] = gen_highpart (SImode, operands[0]);
4710 operands[0] = gen_lowpart (SImode, operands[0]);
4711 operands[3] = gen_highpart (SImode, operands[1]);
4712 operands[1] = gen_lowpart (SImode, operands[1]);
4714 [(set_attr "conds" "clob")
4715 (set_attr "length" "8")]
4718 (define_insn "*thumb1_negdi2"
4719 [(set (match_operand:DI 0 "register_operand" "=&l")
4720 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4721 (clobber (reg:CC CC_REGNUM))]
4723 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4724 [(set_attr "length" "6")]
4727 (define_expand "negsi2"
4728 [(set (match_operand:SI 0 "s_register_operand" "")
4729 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4734 (define_insn "*arm_negsi2"
4735 [(set (match_operand:SI 0 "s_register_operand" "=r")
4736 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4738 "rsb%?\\t%0, %1, #0"
4739 [(set_attr "predicable" "yes")]
4742 (define_insn "*thumb1_negsi2"
4743 [(set (match_operand:SI 0 "register_operand" "=l")
4744 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4747 [(set_attr "length" "2")]
4750 (define_expand "negsf2"
4751 [(set (match_operand:SF 0 "s_register_operand" "")
4752 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4753 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4757 (define_expand "negdf2"
4758 [(set (match_operand:DF 0 "s_register_operand" "")
4759 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4760 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4763 ;; Negate an extended 32-bit value.
4764 (define_insn_and_split "*negdi_extendsidi"
4765 [(set (match_operand:DI 0 "s_register_operand" "=r,&r,l,&l")
4766 (neg:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r,0,l"))))
4767 (clobber (reg:CC CC_REGNUM))]
4769 "#" ; rsb\\t%Q0, %1, #0\;asr\\t%R0, %Q0, #31
4770 "&& reload_completed"
4773 operands[2] = gen_highpart (SImode, operands[0]);
4774 operands[0] = gen_lowpart (SImode, operands[0]);
4775 rtx tmp = gen_rtx_SET (VOIDmode,
4777 gen_rtx_MINUS (SImode,
4786 /* Set the flags, to emit the short encoding in Thumb2. */
4787 rtx flags = gen_rtx_SET (VOIDmode,
4788 gen_rtx_REG (CCmode, CC_REGNUM),
4789 gen_rtx_COMPARE (CCmode,
4792 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4797 emit_insn (gen_rtx_SET (VOIDmode,
4799 gen_rtx_ASHIFTRT (SImode,
4804 [(set_attr "length" "8,8,4,4")
4805 (set_attr "arch" "a,a,t2,t2")]
4808 (define_insn_and_split "*negdi_zero_extendsidi"
4809 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4810 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
4811 (clobber (reg:CC CC_REGNUM))]
4813 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
4814 ;; Don't care what register is input to sbc,
4815 ;; since we just need to propagate the carry (see the note after this pattern).
4816 "&& reload_completed"
4817 [(parallel [(set (reg:CC CC_REGNUM)
4818 (compare:CC (const_int 0) (match_dup 1)))
4819 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4820 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
4821 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4823 operands[2] = gen_highpart (SImode, operands[0]);
4824 operands[0] = gen_lowpart (SImode, operands[0]);
4826 [(set_attr "conds" "clob")
4827 (set_attr "length" "8")] ;; length in thumb is 4
4830 ;; abssi2 doesn't really clobber the condition codes if a different register
4831 ;; is being set. To keep things simple, assume during rtl manipulations that
4832 ;; it does, but tell the final scan operator the truth.  Similarly for (neg (abs ...)).
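;; As a brief sketch of the branchless form used when the source and
;; destination registers differ (the second alternative of *arm_abssi2
;; below; register numbers are illustrative only):
;;   eor  r0, r1, r1, asr #31    @ r0 = x ^ (x >> 31)
;;   sub  r0, r0, r1, asr #31    @ r0 = r0 - (x >> 31)
;; For negative x the arithmetic shift yields -1, so this computes
;; (~x) - (-1) = -x; for non-negative x it yields 0 and leaves x unchanged.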
4835 (define_expand "abssi2"
4837 [(set (match_operand:SI 0 "s_register_operand" "")
4838 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4839 (clobber (match_dup 2))])]
4843 operands[2] = gen_rtx_SCRATCH (SImode);
4845 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4848 (define_insn_and_split "*arm_abssi2"
4849 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4850 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4851 (clobber (reg:CC CC_REGNUM))]
4854 "&& reload_completed"
4857 /* if (which_alternative == 0) */
4858 if (REGNO(operands[0]) == REGNO(operands[1]))
4860 /* Emit the pattern:
4861 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4862 [(set (reg:CC CC_REGNUM)
4863 (compare:CC (match_dup 0) (const_int 0)))
4864 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4865 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4867 emit_insn (gen_rtx_SET (VOIDmode,
4868 gen_rtx_REG (CCmode, CC_REGNUM),
4869 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4870 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4871 (gen_rtx_LT (SImode,
4872 gen_rtx_REG (CCmode, CC_REGNUM),
4874 (gen_rtx_SET (VOIDmode,
4876 (gen_rtx_MINUS (SImode,
4883 /* Emit the pattern:
4884 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4886 (xor:SI (match_dup 1)
4887 (ashiftrt:SI (match_dup 1) (const_int 31))))
4889 (minus:SI (match_dup 0)
4890 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4892 emit_insn (gen_rtx_SET (VOIDmode,
4894 gen_rtx_XOR (SImode,
4895 gen_rtx_ASHIFTRT (SImode,
4899 emit_insn (gen_rtx_SET (VOIDmode,
4901 gen_rtx_MINUS (SImode,
4903 gen_rtx_ASHIFTRT (SImode,
4909 [(set_attr "conds" "clob,*")
4910 (set_attr "shift" "1")
4911 (set_attr "predicable" "no, yes")
4912 (set_attr "length" "8")]
4915 (define_insn_and_split "*thumb1_abssi2"
4916 [(set (match_operand:SI 0 "s_register_operand" "=l")
4917 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4918 (clobber (match_scratch:SI 2 "=&l"))]
4921 "TARGET_THUMB1 && reload_completed"
4922 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4923 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4924 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4926 [(set_attr "length" "6")]
4929 (define_insn_and_split "*arm_neg_abssi2"
4930 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4931 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4932 (clobber (reg:CC CC_REGNUM))]
4935 "&& reload_completed"
4938 /* if (which_alternative == 0) */
4939 if (REGNO (operands[0]) == REGNO (operands[1]))
4941 /* Emit the pattern:
4942 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4944 emit_insn (gen_rtx_SET (VOIDmode,
4945 gen_rtx_REG (CCmode, CC_REGNUM),
4946 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4947 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4949 gen_rtx_REG (CCmode, CC_REGNUM),
4951 gen_rtx_SET (VOIDmode,
4953 (gen_rtx_MINUS (SImode,
4959 /* Emit the pattern:
4960 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4962 emit_insn (gen_rtx_SET (VOIDmode,
4964 gen_rtx_XOR (SImode,
4965 gen_rtx_ASHIFTRT (SImode,
4969 emit_insn (gen_rtx_SET (VOIDmode,
4971 gen_rtx_MINUS (SImode,
4972 gen_rtx_ASHIFTRT (SImode,
4979 [(set_attr "conds" "clob,*")
4980 (set_attr "shift" "1")
4981 (set_attr "predicable" "no, yes")
4982 (set_attr "length" "8")]
4985 (define_insn_and_split "*thumb1_neg_abssi2"
4986 [(set (match_operand:SI 0 "s_register_operand" "=l")
4987 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4988 (clobber (match_scratch:SI 2 "=&l"))]
4991 "TARGET_THUMB1 && reload_completed"
4992 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4993 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4994 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4996 [(set_attr "length" "6")]
4999 (define_expand "abssf2"
5000 [(set (match_operand:SF 0 "s_register_operand" "")
5001 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
5002 "TARGET_32BIT && TARGET_HARD_FLOAT"
5005 (define_expand "absdf2"
5006 [(set (match_operand:DF 0 "s_register_operand" "")
5007 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
5008 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5011 (define_expand "sqrtsf2"
5012 [(set (match_operand:SF 0 "s_register_operand" "")
5013 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
5014 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
5017 (define_expand "sqrtdf2"
5018 [(set (match_operand:DF 0 "s_register_operand" "")
5019 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
5020 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5023 (define_insn_and_split "one_cmpldi2"
5024 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
5025 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
5032 "TARGET_32BIT && reload_completed
5033 && arm_general_register_operand (operands[0], DImode)"
5034 [(set (match_dup 0) (not:SI (match_dup 1)))
5035 (set (match_dup 2) (not:SI (match_dup 3)))]
5038 operands[2] = gen_highpart (SImode, operands[0]);
5039 operands[0] = gen_lowpart (SImode, operands[0]);
5040 operands[3] = gen_highpart (SImode, operands[1]);
5041 operands[1] = gen_lowpart (SImode, operands[1]);
5043 [(set_attr "length" "*,8,8,*")
5044 (set_attr "predicable" "no,yes,yes,no")
5045 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
5046 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
5049 (define_expand "one_cmplsi2"
5050 [(set (match_operand:SI 0 "s_register_operand" "")
5051 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
5056 (define_insn "*arm_one_cmplsi2"
5057 [(set (match_operand:SI 0 "s_register_operand" "=r")
5058 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
5061 [(set_attr "predicable" "yes")
5062 (set_attr "insn" "mvn")]
5065 (define_insn "*thumb1_one_cmplsi2"
5066 [(set (match_operand:SI 0 "register_operand" "=l")
5067 (not:SI (match_operand:SI 1 "register_operand" "l")))]
5070 [(set_attr "length" "2")
5071 (set_attr "insn" "mvn")]
5074 (define_insn "*notsi_compare0"
5075 [(set (reg:CC_NOOV CC_REGNUM)
5076 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5078 (set (match_operand:SI 0 "s_register_operand" "=r")
5079 (not:SI (match_dup 1)))]
5082 [(set_attr "conds" "set")
5083 (set_attr "insn" "mvn")]
5086 (define_insn "*notsi_compare0_scratch"
5087 [(set (reg:CC_NOOV CC_REGNUM)
5088 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5090 (clobber (match_scratch:SI 0 "=r"))]
5093 [(set_attr "conds" "set")
5094 (set_attr "insn" "mvn")]
5097 ;; Fixed <--> Floating conversion insns
5099 (define_expand "floatsihf2"
5100 [(set (match_operand:HF 0 "general_operand" "")
5101 (float:HF (match_operand:SI 1 "general_operand" "")))]
5105 rtx op1 = gen_reg_rtx (SFmode);
5106 expand_float (op1, operands[1], 0);
5107 op1 = convert_to_mode (HFmode, op1, 0);
5108 emit_move_insn (operands[0], op1);
5113 (define_expand "floatdihf2"
5114 [(set (match_operand:HF 0 "general_operand" "")
5115 (float:HF (match_operand:DI 1 "general_operand" "")))]
5119 rtx op1 = gen_reg_rtx (SFmode);
5120 expand_float (op1, operands[1], 0);
5121 op1 = convert_to_mode (HFmode, op1, 0);
5122 emit_move_insn (operands[0], op1);
5127 (define_expand "floatsisf2"
5128 [(set (match_operand:SF 0 "s_register_operand" "")
5129 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
5130 "TARGET_32BIT && TARGET_HARD_FLOAT"
5134 (define_expand "floatsidf2"
5135 [(set (match_operand:DF 0 "s_register_operand" "")
5136 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
5137 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5141 (define_expand "fix_trunchfsi2"
5142 [(set (match_operand:SI 0 "general_operand" "")
5143 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5147 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5148 expand_fix (operands[0], op1, 0);
5153 (define_expand "fix_trunchfdi2"
5154 [(set (match_operand:DI 0 "general_operand" "")
5155 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5159 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5160 expand_fix (operands[0], op1, 0);
5165 (define_expand "fix_truncsfsi2"
5166 [(set (match_operand:SI 0 "s_register_operand" "")
5167 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
5168 "TARGET_32BIT && TARGET_HARD_FLOAT"
5172 (define_expand "fix_truncdfsi2"
5173 [(set (match_operand:SI 0 "s_register_operand" "")
5174 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
5175 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5181 (define_expand "truncdfsf2"
5182 [(set (match_operand:SF 0 "s_register_operand" "")
5184 (match_operand:DF 1 "s_register_operand" "")))]
5185 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5189 /* DFmode -> HFmode conversions have to go through SFmode. */
5190 (define_expand "truncdfhf2"
5191 [(set (match_operand:HF 0 "general_operand" "")
5193 (match_operand:DF 1 "general_operand" "")))]
5198 op1 = convert_to_mode (SFmode, operands[1], 0);
5199 op1 = convert_to_mode (HFmode, op1, 0);
5200 emit_move_insn (operands[0], op1);
5205 ;; Zero and sign extension instructions.
5207 (define_insn "zero_extend<mode>di2"
5208 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
5209 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
5210 "<qhs_zextenddi_cstr>")))]
5211 "TARGET_32BIT <qhs_zextenddi_cond>"
5213 [(set_attr "length" "8,4,8,8")
5214 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
5215 (set_attr "ce_count" "2")
5216 (set_attr "predicable" "yes")]
5219 (define_insn "extend<mode>di2"
5220 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
5221 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
5222 "<qhs_extenddi_cstr>")))]
5223 "TARGET_32BIT <qhs_sextenddi_cond>"
5225 [(set_attr "length" "8,4,8,8,8")
5226 (set_attr "ce_count" "2")
5227 (set_attr "shift" "1")
5228 (set_attr "predicable" "yes")
5229 (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")]
5232 ;; Splits for all extensions to DImode
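;; As a rough illustration of the two splits that follow (assuming the value
;; already sits in the core-register pair r0:r1 after reload):
;;   zero extension:  mov r1, #0             @ clear the high word
;;   sign extension:  asr r1, r0, #31        @ high word = sign of the low word
;; Narrower source modes are first extended into the low word (e.g. via
;; uxth/sxth), as the preparation code below arranges.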
5234 [(set (match_operand:DI 0 "s_register_operand" "")
5235 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5236 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5237 [(set (match_dup 0) (match_dup 1))]
5239 rtx lo_part = gen_lowpart (SImode, operands[0]);
5240 enum machine_mode src_mode = GET_MODE (operands[1]);
5242 if (REG_P (operands[0])
5243 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5244 emit_clobber (operands[0]);
5245 if (!REG_P (lo_part) || src_mode != SImode
5246 || !rtx_equal_p (lo_part, operands[1]))
5248 if (src_mode == SImode)
5249 emit_move_insn (lo_part, operands[1]);
5251 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5252 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5253 operands[1] = lo_part;
5255 operands[0] = gen_highpart (SImode, operands[0]);
5256 operands[1] = const0_rtx;
5260 [(set (match_operand:DI 0 "s_register_operand" "")
5261 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5262 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5263 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5265 rtx lo_part = gen_lowpart (SImode, operands[0]);
5266 enum machine_mode src_mode = GET_MODE (operands[1]);
5268 if (REG_P (operands[0])
5269 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5270 emit_clobber (operands[0]);
5272 if (!REG_P (lo_part) || src_mode != SImode
5273 || !rtx_equal_p (lo_part, operands[1]))
5275 if (src_mode == SImode)
5276 emit_move_insn (lo_part, operands[1]);
5278 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5279 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5280 operands[1] = lo_part;
5282 operands[0] = gen_highpart (SImode, operands[0]);
5285 (define_expand "zero_extendhisi2"
5286 [(set (match_operand:SI 0 "s_register_operand" "")
5287 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5290 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5292 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5295 if (!arm_arch6 && !MEM_P (operands[1]))
5297 rtx t = gen_lowpart (SImode, operands[1]);
5298 rtx tmp = gen_reg_rtx (SImode);
5299 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5300 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5306 [(set (match_operand:SI 0 "s_register_operand" "")
5307 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5308 "!TARGET_THUMB2 && !arm_arch6"
5309 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5310 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5312 operands[2] = gen_lowpart (SImode, operands[1]);
5315 (define_insn "*thumb1_zero_extendhisi2"
5316 [(set (match_operand:SI 0 "register_operand" "=l,l")
5317 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
5322 if (which_alternative == 0 && arm_arch6)
5323 return "uxth\t%0, %1";
5324 if (which_alternative == 0)
5327 mem = XEXP (operands[1], 0);
5329 if (GET_CODE (mem) == CONST)
5330 mem = XEXP (mem, 0);
5332 if (GET_CODE (mem) == PLUS)
5334 rtx a = XEXP (mem, 0);
5336 /* This can happen due to bugs in reload. */
5337 if (REG_P (a) && REGNO (a) == SP_REGNUM)
5340 ops[0] = operands[0];
5343 output_asm_insn ("mov\t%0, %1", ops);
5345 XEXP (mem, 0) = operands[0];
5349 return "ldrh\t%0, %1";
5351 [(set_attr_alternative "length"
5352 [(if_then_else (eq_attr "is_arch6" "yes")
5353 (const_int 2) (const_int 4))
5355 (set_attr "type" "simple_alu_shift, load_byte")]
5358 (define_insn "*arm_zero_extendhisi2"
5359 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5360 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5361 "TARGET_ARM && arm_arch4 && !arm_arch6"
5365 [(set_attr "type" "alu_shift,load_byte")
5366 (set_attr "predicable" "yes")]
5369 (define_insn "*arm_zero_extendhisi2_v6"
5370 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5371 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5372 "TARGET_ARM && arm_arch6"
5376 [(set_attr "predicable" "yes")
5377 (set_attr "type" "simple_alu_shift,load_byte")]
5380 (define_insn "*arm_zero_extendhisi2addsi"
5381 [(set (match_operand:SI 0 "s_register_operand" "=r")
5382 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5383 (match_operand:SI 2 "s_register_operand" "r")))]
5385 "uxtah%?\\t%0, %2, %1"
5386 [(set_attr "type" "alu_shift")
5387 (set_attr "predicable" "yes")]
5390 (define_expand "zero_extendqisi2"
5391 [(set (match_operand:SI 0 "s_register_operand" "")
5392 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
5395 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5397 emit_insn (gen_andsi3 (operands[0],
5398 gen_lowpart (SImode, operands[1]),
5402 if (!arm_arch6 && !MEM_P (operands[1]))
5404 rtx t = gen_lowpart (SImode, operands[1]);
5405 rtx tmp = gen_reg_rtx (SImode);
5406 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5407 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5413 [(set (match_operand:SI 0 "s_register_operand" "")
5414 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5416 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5417 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5419 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5422 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5427 (define_insn "*thumb1_zero_extendqisi2"
5428 [(set (match_operand:SI 0 "register_operand" "=l,l")
5429 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5430 "TARGET_THUMB1 && !arm_arch6"
5434 [(set_attr "length" "4,2")
5435 (set_attr "type" "alu_shift,load_byte")
5436 (set_attr "pool_range" "*,32")]
5439 (define_insn "*thumb1_zero_extendqisi2_v6"
5440 [(set (match_operand:SI 0 "register_operand" "=l,l")
5441 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5442 "TARGET_THUMB1 && arm_arch6"
5446 [(set_attr "length" "2")
5447 (set_attr "type" "simple_alu_shift,load_byte")]
5450 (define_insn "*arm_zero_extendqisi2"
5451 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5452 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5453 "TARGET_ARM && !arm_arch6"
5456 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5457 [(set_attr "length" "8,4")
5458 (set_attr "type" "alu_shift,load_byte")
5459 (set_attr "predicable" "yes")]
5462 (define_insn "*arm_zero_extendqisi2_v6"
5463 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5464 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5465 "TARGET_ARM && arm_arch6"
5468 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5469 [(set_attr "type" "simple_alu_shift,load_byte")
5470 (set_attr "predicable" "yes")]
5473 (define_insn "*arm_zero_extendqisi2addsi"
5474 [(set (match_operand:SI 0 "s_register_operand" "=r")
5475 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5476 (match_operand:SI 2 "s_register_operand" "r")))]
5478 "uxtab%?\\t%0, %2, %1"
5479 [(set_attr "predicable" "yes")
5480 (set_attr "insn" "xtab")
5481 (set_attr "type" "alu_shift")]
5485 [(set (match_operand:SI 0 "s_register_operand" "")
5486 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5487 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5488 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5489 [(set (match_dup 2) (match_dup 1))
5490 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5495 [(set (match_operand:SI 0 "s_register_operand" "")
5496 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5497 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5498 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5499 [(set (match_dup 2) (match_dup 1))
5500 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5506 [(set (match_operand:SI 0 "s_register_operand" "")
5507 (ior_xor:SI (and:SI (ashift:SI
5508 (match_operand:SI 1 "s_register_operand" "")
5509 (match_operand:SI 2 "const_int_operand" ""))
5510 (match_operand:SI 3 "const_int_operand" ""))
5512 (match_operator 5 "subreg_lowpart_operator"
5513 [(match_operand:SI 4 "s_register_operand" "")]))))]
5515 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
5516 == (GET_MODE_MASK (GET_MODE (operands[5]))
5517 & (GET_MODE_MASK (GET_MODE (operands[5]))
5518 << (INTVAL (operands[2])))))"
5519 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
5521 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5522 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5525 (define_insn "*compareqi_eq0"
5526 [(set (reg:CC_Z CC_REGNUM)
5527 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5531 [(set_attr "conds" "set")
5532 (set_attr "predicable" "yes")]
5535 (define_expand "extendhisi2"
5536 [(set (match_operand:SI 0 "s_register_operand" "")
5537 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5542 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5545 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5547 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5551 if (!arm_arch6 && !MEM_P (operands[1]))
5553 rtx t = gen_lowpart (SImode, operands[1]);
5554 rtx tmp = gen_reg_rtx (SImode);
5555 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5556 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5563 [(set (match_operand:SI 0 "register_operand" "")
5564 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5565 (clobber (match_scratch:SI 2 ""))])]
5567 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5568 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5570 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5573 ;; We used to have an early-clobber on the scratch register here.
5574 ;; However, there's a bug somewhere in reload which means that this
5575 ;; can be partially ignored during spill allocation if the memory
5576 ;; address also needs reloading; this causes us to die later on when
5577 ;; we try to verify the operands. Fortunately, we don't really need
5578 ;; the early-clobber: we can always use operand 0 if operand 2
5579 ;; overlaps the address.
5580 (define_insn "thumb1_extendhisi2"
5581 [(set (match_operand:SI 0 "register_operand" "=l,l")
5582 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
5583 (clobber (match_scratch:SI 2 "=X,l"))]
5590 if (which_alternative == 0 && !arm_arch6)
5592 if (which_alternative == 0)
5593 return \"sxth\\t%0, %1\";
5595 mem = XEXP (operands[1], 0);
5597 /* This code used to try to use 'V', and fix the address only if it was
5598 offsettable, but this fails for e.g. REG+48 because 48 is outside the
5599 range of QImode offsets, and offsettable_address_p does a QImode address check.  */
5602 if (GET_CODE (mem) == CONST)
5603 mem = XEXP (mem, 0);
5605 if (GET_CODE (mem) == LABEL_REF)
5606 return \"ldr\\t%0, %1\";
5608 if (GET_CODE (mem) == PLUS)
5610 rtx a = XEXP (mem, 0);
5611 rtx b = XEXP (mem, 1);
5613 if (GET_CODE (a) == LABEL_REF
5615 return \"ldr\\t%0, %1\";
5618 return \"ldrsh\\t%0, %1\";
5626 ops[2] = const0_rtx;
5629 gcc_assert (REG_P (ops[1]));
5631 ops[0] = operands[0];
5632 if (reg_mentioned_p (operands[2], ops[1]))
5635 ops[3] = operands[2];
5636 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
5639 [(set_attr_alternative "length"
5640 [(if_then_else (eq_attr "is_arch6" "yes")
5641 (const_int 2) (const_int 4))
5643 (set_attr "type" "simple_alu_shift,load_byte")
5644 (set_attr "pool_range" "*,1018")]
5647 ;; This pattern will only be used when ldsh is not available
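;; Roughly speaking (little-endian byte order and placeholder registers
;; assumed), the halfword at [r2] is sign-extended without ldrsh by loading
;; its two bytes separately and recombining them, mirroring the RTL below:
;;   ldrb r0, [r2]              @ less significant byte, zero-extended
;;   ldrb r1, [r2, #1]          @ more significant byte, zero-extended
;;   mov  r1, r1, lsl #24       @ move it to the top of the word
;;   orr  r0, r0, r1, asr #16   @ sign-extend it into bits 8-31 and merge
;; On big-endian targets the two byte roles are swapped, as the preparation
;; code below arranges.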
5648 (define_expand "extendhisi2_mem"
5649 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5651 (zero_extend:SI (match_dup 7)))
5652 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5653 (set (match_operand:SI 0 "" "")
5654 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5659 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5661 mem1 = change_address (operands[1], QImode, addr);
5662 mem2 = change_address (operands[1], QImode,
5663 plus_constant (Pmode, addr, 1));
5664 operands[0] = gen_lowpart (SImode, operands[0]);
5666 operands[2] = gen_reg_rtx (SImode);
5667 operands[3] = gen_reg_rtx (SImode);
5668 operands[6] = gen_reg_rtx (SImode);
5671 if (BYTES_BIG_ENDIAN)
5673 operands[4] = operands[2];
5674 operands[5] = operands[3];
5678 operands[4] = operands[3];
5679 operands[5] = operands[2];
5685 [(set (match_operand:SI 0 "register_operand" "")
5686 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5688 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5689 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5691 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5694 (define_insn "*arm_extendhisi2"
5695 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5696 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5697 "TARGET_ARM && arm_arch4 && !arm_arch6"
5701 [(set_attr "length" "8,4")
5702 (set_attr "type" "alu_shift,load_byte")
5703 (set_attr "predicable" "yes")
5704 (set_attr "pool_range" "*,256")
5705 (set_attr "neg_pool_range" "*,244")]
5708 ;; ??? Check Thumb-2 pool range
5709 (define_insn "*arm_extendhisi2_v6"
5710 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5711 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5712 "TARGET_32BIT && arm_arch6"
5716 [(set_attr "type" "simple_alu_shift,load_byte")
5717 (set_attr "predicable" "yes")
5718 (set_attr "pool_range" "*,256")
5719 (set_attr "neg_pool_range" "*,244")]
5722 (define_insn "*arm_extendhisi2addsi"
5723 [(set (match_operand:SI 0 "s_register_operand" "=r")
5724 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5725 (match_operand:SI 2 "s_register_operand" "r")))]
5727 "sxtah%?\\t%0, %2, %1"
5730 (define_expand "extendqihi2"
5732 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5734 (set (match_operand:HI 0 "s_register_operand" "")
5735 (ashiftrt:SI (match_dup 2)
5740 if (arm_arch4 && MEM_P (operands[1]))
5742 emit_insn (gen_rtx_SET (VOIDmode,
5744 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5747 if (!s_register_operand (operands[1], QImode))
5748 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5749 operands[0] = gen_lowpart (SImode, operands[0]);
5750 operands[1] = gen_lowpart (SImode, operands[1]);
5751 operands[2] = gen_reg_rtx (SImode);
5755 (define_insn "*arm_extendqihi_insn"
5756 [(set (match_operand:HI 0 "s_register_operand" "=r")
5757 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5758 "TARGET_ARM && arm_arch4"
5759 "ldr%(sb%)\\t%0, %1"
5760 [(set_attr "type" "load_byte")
5761 (set_attr "predicable" "yes")
5762 (set_attr "pool_range" "256")
5763 (set_attr "neg_pool_range" "244")]
5766 (define_expand "extendqisi2"
5767 [(set (match_operand:SI 0 "s_register_operand" "")
5768 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5771 if (!arm_arch4 && MEM_P (operands[1]))
5772 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5774 if (!arm_arch6 && !MEM_P (operands[1]))
5776 rtx t = gen_lowpart (SImode, operands[1]);
5777 rtx tmp = gen_reg_rtx (SImode);
5778 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5779 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5785 [(set (match_operand:SI 0 "register_operand" "")
5786 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5788 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5789 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5791 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5794 (define_insn "*arm_extendqisi"
5795 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5796 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5797 "TARGET_ARM && arm_arch4 && !arm_arch6"
5801 [(set_attr "length" "8,4")
5802 (set_attr "type" "alu_shift,load_byte")
5803 (set_attr "predicable" "yes")
5804 (set_attr "pool_range" "*,256")
5805 (set_attr "neg_pool_range" "*,244")]
5808 (define_insn "*arm_extendqisi_v6"
5809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5811 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5812 "TARGET_ARM && arm_arch6"
5816 [(set_attr "type" "simple_alu_shift,load_byte")
5817 (set_attr "predicable" "yes")
5818 (set_attr "pool_range" "*,256")
5819 (set_attr "neg_pool_range" "*,244")]
5822 (define_insn "*arm_extendqisi2addsi"
5823 [(set (match_operand:SI 0 "s_register_operand" "=r")
5824 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5825 (match_operand:SI 2 "s_register_operand" "r")))]
5827 "sxtab%?\\t%0, %2, %1"
5828 [(set_attr "type" "alu_shift")
5829 (set_attr "insn" "xtab")
5830 (set_attr "predicable" "yes")]
5834 [(set (match_operand:SI 0 "register_operand" "")
5835 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5836 "TARGET_THUMB1 && reload_completed"
5837 [(set (match_dup 0) (match_dup 2))
5838 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5840 rtx addr = XEXP (operands[1], 0);
5842 if (GET_CODE (addr) == CONST)
5843 addr = XEXP (addr, 0);
5845 if (GET_CODE (addr) == PLUS
5846 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5847 /* No split necessary. */
5850 if (GET_CODE (addr) == PLUS
5851 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5854 if (reg_overlap_mentioned_p (operands[0], addr))
5856 rtx t = gen_lowpart (QImode, operands[0]);
5857 emit_move_insn (t, operands[1]);
5858 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5864 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5865 operands[2] = const0_rtx;
5867 else if (GET_CODE (addr) != PLUS)
5869 else if (REG_P (XEXP (addr, 0)))
5871 operands[2] = XEXP (addr, 1);
5872 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5876 operands[2] = XEXP (addr, 0);
5877 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5880 operands[3] = change_address (operands[1], QImode, addr);
5884 [(set (match_operand:SI 0 "register_operand" "")
5885 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5886 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5887 (set (match_operand:SI 3 "register_operand" "")
5888 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5890 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5891 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5892 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5893 && (peep2_reg_dead_p (3, operands[0])
5894 || rtx_equal_p (operands[0], operands[3]))
5895 && (peep2_reg_dead_p (3, operands[2])
5896 || rtx_equal_p (operands[2], operands[3]))"
5897 [(set (match_dup 2) (match_dup 1))
5898 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5900 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5901 operands[4] = change_address (operands[4], QImode, addr);
5904 (define_insn "thumb1_extendqisi2"
5905 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5906 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5911 if (which_alternative == 0 && arm_arch6)
5912 return "sxtb\\t%0, %1";
5913 if (which_alternative == 0)
5916 addr = XEXP (operands[1], 0);
5917 if (GET_CODE (addr) == PLUS
5918 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5919 return "ldrsb\\t%0, %1";
5923 [(set_attr_alternative "length"
5924 [(if_then_else (eq_attr "is_arch6" "yes")
5925 (const_int 2) (const_int 4))
5927 (if_then_else (eq_attr "is_arch6" "yes")
5928 (const_int 4) (const_int 6))])
5929 (set_attr "type" "simple_alu_shift,load_byte,load_byte")]
5932 (define_expand "extendsfdf2"
5933 [(set (match_operand:DF 0 "s_register_operand" "")
5934 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5935 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5939 /* HFmode -> DFmode conversions have to go through SFmode. */
5940 (define_expand "extendhfdf2"
5941 [(set (match_operand:DF 0 "general_operand" "")
5942 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5947 op1 = convert_to_mode (SFmode, operands[1], 0);
5948 op1 = convert_to_mode (DFmode, op1, 0);
5949 emit_insn (gen_movdf (operands[0], op1));
5954 ;; Move insns (including loads and stores)
5956 ;; XXX Just some ideas about movti.
5957 ;; I don't think these are a good idea on the ARM; there just aren't enough registers.
5959 ;;(define_expand "loadti"
5960 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5961 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5964 ;;(define_expand "storeti"
5965 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5966 ;; (match_operand:TI 1 "s_register_operand" ""))]
5969 ;;(define_expand "movti"
5970 ;; [(set (match_operand:TI 0 "general_operand" "")
5971 ;; (match_operand:TI 1 "general_operand" ""))]
5977 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5978 ;; operands[1] = copy_to_reg (operands[1]);
5979 ;; if (MEM_P (operands[0]))
5980 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5981 ;; else if (MEM_P (operands[1]))
5982 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5986 ;; emit_insn (insn);
5990 ;; Recognize garbage generated above.
5993 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5994 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5998 ;; register mem = (which_alternative < 3);
5999 ;; register const char *template;
6001 ;; operands[mem] = XEXP (operands[mem], 0);
6002 ;; switch (which_alternative)
6004 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6005 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6006 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6007 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6008 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6009 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6011 ;; output_asm_insn (template, operands);
6015 (define_expand "movdi"
6016 [(set (match_operand:DI 0 "general_operand" "")
6017 (match_operand:DI 1 "general_operand" ""))]
6020 if (can_create_pseudo_p ())
6022 if (!REG_P (operands[0]))
6023 operands[1] = force_reg (DImode, operands[1]);
6028 (define_insn "*arm_movdi"
6029 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
6030 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
6032 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6034 && ( register_operand (operands[0], DImode)
6035 || register_operand (operands[1], DImode))"
6037 switch (which_alternative)
6044 return output_move_double (operands, true, NULL);
6047 [(set_attr "length" "8,12,16,8,8")
6048 (set_attr "type" "*,*,*,load2,store2")
6049 (set_attr "arm_pool_range" "*,*,*,1020,*")
6050 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6051 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6052 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6056 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6057 (match_operand:ANY64 1 "const_double_operand" ""))]
6060 && (arm_const_double_inline_cost (operands[1])
6061 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
6064 arm_split_constant (SET, SImode, curr_insn,
6065 INTVAL (gen_lowpart (SImode, operands[1])),
6066 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6067 arm_split_constant (SET, SImode, curr_insn,
6068 INTVAL (gen_highpart_mode (SImode,
6069 GET_MODE (operands[0]),
6071 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6076 ; If optimizing for size, or if we have load delay slots, then
6077 ; we want to split the constant into two separate operations.
6078 ; In both cases this may split a trivial part into a single data op
6079 ; leaving a single complex constant to load. We can also get longer
6080 ; offsets in an LDR, which gives us better chances of sharing the pool
6081 ; entries. Finally, we can normally do a better job of scheduling
6082 ; LDR instructions than we can with LDM.
6083 ; This pattern will only match if the one above did not.
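; As a rough illustration (not necessarily the exact code emitted), a DImode
; constant such as 0x12345678000000ff can then be handled as a single
; "mov rLO, #255" for the low word plus one LDR from the literal pool for the
; high word 0x12345678, instead of pulling a full two-word pool entry with LDM.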
6085 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6086 (match_operand:ANY64 1 "const_double_operand" ""))]
6087 "TARGET_ARM && reload_completed
6088 && arm_const_double_by_parts (operands[1])"
6089 [(set (match_dup 0) (match_dup 1))
6090 (set (match_dup 2) (match_dup 3))]
6092 operands[2] = gen_highpart (SImode, operands[0]);
6093 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6095 operands[0] = gen_lowpart (SImode, operands[0]);
6096 operands[1] = gen_lowpart (SImode, operands[1]);
6101 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6102 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6103 "TARGET_EITHER && reload_completed"
6104 [(set (match_dup 0) (match_dup 1))
6105 (set (match_dup 2) (match_dup 3))]
6107 operands[2] = gen_highpart (SImode, operands[0]);
6108 operands[3] = gen_highpart (SImode, operands[1]);
6109 operands[0] = gen_lowpart (SImode, operands[0]);
6110 operands[1] = gen_lowpart (SImode, operands[1]);
6112 /* Handle a partial overlap. */
6113 if (rtx_equal_p (operands[0], operands[3]))
6115 rtx tmp0 = operands[0];
6116 rtx tmp1 = operands[1];
6118 operands[0] = operands[2];
6119 operands[1] = operands[3];
6126 ;; We can't actually do base+index doubleword loads if the index and
6127 ;; destination overlap. Split here so that we at least have a chance to schedule.
6130 [(set (match_operand:DI 0 "s_register_operand" "")
6131 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6132 (match_operand:SI 2 "s_register_operand" ""))))]
6134 && reg_overlap_mentioned_p (operands[0], operands[1])
6135 && reg_overlap_mentioned_p (operands[0], operands[2])"
6137 (plus:SI (match_dup 1)
6140 (mem:DI (match_dup 4)))]
6142 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6146 ;;; ??? This should have alternatives for constants.
6147 ;;; ??? This was originally identical to the movdf_insn pattern.
6148 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
6149 ;;; thumb_reorg with a memory reference.
6150 (define_insn "*thumb1_movdi_insn"
6151 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
6152 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
6154 && ( register_operand (operands[0], DImode)
6155 || register_operand (operands[1], DImode))"
6158 switch (which_alternative)
6162 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6163 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6164 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6166 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
6168 operands[1] = GEN_INT (- INTVAL (operands[1]));
6169 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
6171 return \"ldmia\\t%1, {%0, %H0}\";
6173 return \"stmia\\t%0, {%1, %H1}\";
6175 return thumb_load_double_from_address (operands);
6177 operands[2] = gen_rtx_MEM (SImode,
6178 plus_constant (Pmode, XEXP (operands[0], 0), 4));
6179 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6182 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6183 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6184 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6187 [(set_attr "length" "4,4,6,2,2,6,4,4")
6188 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
6189 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
6190 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
6193 (define_expand "movsi"
6194 [(set (match_operand:SI 0 "general_operand" "")
6195 (match_operand:SI 1 "general_operand" ""))]
6199 rtx base, offset, tmp;
6203 /* Everything except mem = const or mem = mem can be done easily. */
6204 if (MEM_P (operands[0]))
6205 operands[1] = force_reg (SImode, operands[1]);
6206 if (arm_general_register_operand (operands[0], SImode)
6207 && CONST_INT_P (operands[1])
6208 && !(const_ok_for_arm (INTVAL (operands[1]))
6209 || const_ok_for_arm (~INTVAL (operands[1]))))
6211 arm_split_constant (SET, SImode, NULL_RTX,
6212 INTVAL (operands[1]), operands[0], NULL_RTX,
6213 optimize && can_create_pseudo_p ());
6217 else /* TARGET_THUMB1... */
6219 if (can_create_pseudo_p ())
6221 if (!REG_P (operands[0]))
6222 operands[1] = force_reg (SImode, operands[1]);
6226 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
6228 split_const (operands[1], &base, &offset);
6229 if (GET_CODE (base) == SYMBOL_REF
6230 && !offset_within_block_p (base, INTVAL (offset)))
6232 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6233 emit_move_insn (tmp, base);
6234 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6239 /* Recognize the case where operand[1] is a reference to thread-local
6240 data and load its address to a register. */
6241 if (arm_tls_referenced_p (operands[1]))
6243 rtx tmp = operands[1];
6246 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
6248 addend = XEXP (XEXP (tmp, 0), 1);
6249 tmp = XEXP (XEXP (tmp, 0), 0);
6252 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
6253 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
6255 tmp = legitimize_tls_address (tmp,
6256 !can_create_pseudo_p () ? operands[0] : 0);
6259 tmp = gen_rtx_PLUS (SImode, tmp, addend);
6260 tmp = force_operand (tmp, operands[0]);
6265 && (CONSTANT_P (operands[1])
6266 || symbol_mentioned_p (operands[1])
6267 || label_mentioned_p (operands[1])))
6268 operands[1] = legitimize_pic_address (operands[1], SImode,
6269 (!can_create_pseudo_p ()
6276 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6277 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6278 ;; so this does not matter.
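;; As an illustration (assuming a target with MOVW/MOVT), the address of a
;; symbol "foo" is built as "movw rD, #:lower16:foo" (the HIGH half of the
;; pair) followed by "movt rD, #:upper16:foo" (this LO_SUM pattern), which
;; fills in the upper 16 bits while preserving the lower 16.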
6279 (define_insn "*arm_movt"
6280 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
6281 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
6282 (match_operand:SI 2 "general_operand" "i")))]
6284 "movt%?\t%0, #:upper16:%c2"
6285 [(set_attr "predicable" "yes")
6286 (set_attr "length" "4")]
6289 (define_insn "*arm_movsi_insn"
6290 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6291 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6292 "TARGET_ARM && ! TARGET_IWMMXT
6293 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6294 && ( register_operand (operands[0], SImode)
6295 || register_operand (operands[1], SImode))"
6303 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1,store1")
6304 (set_attr "insn" "mov,mov,mvn,mov,*,*")
6305 (set_attr "predicable" "yes")
6306 (set_attr "pool_range" "*,*,*,*,4096,*")
6307 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6311 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6312 (match_operand:SI 1 "const_int_operand" ""))]
6314 && (!(const_ok_for_arm (INTVAL (operands[1]))
6315 || const_ok_for_arm (~INTVAL (operands[1]))))"
6316 [(clobber (const_int 0))]
6318 arm_split_constant (SET, SImode, NULL_RTX,
6319 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6324 ;; Split symbol_refs at a later stage (after cprop), instead of generating the
6325 ;; movt/movw pair directly at expand time. Otherwise the corresponding HIGH
6326 ;; and LO_SUM would be merged back into a memory load at cprop, even though
6327 ;; performance is better with the movt/movw pair when the default is to
6328 ;; prefer it over a load from the constant pool.
6330 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6331 (match_operand:SI 1 "general_operand" ""))]
6333 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6334 && !flag_pic && !target_word_relocations
6335 && !arm_tls_referenced_p (operands[1])"
6336 [(clobber (const_int 0))]
6338 arm_emit_movpair (operands[0], operands[1]);
6342 (define_insn "*thumb1_movsi_insn"
6343 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
6344 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
6346 && ( register_operand (operands[0], SImode)
6347 || register_operand (operands[1], SImode))"
6358 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
6359 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
6360 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
6361 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
6364 [(set (match_operand:SI 0 "register_operand" "")
6365 (match_operand:SI 1 "const_int_operand" ""))]
6366 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
6367 [(set (match_dup 2) (match_dup 1))
6368 (set (match_dup 0) (neg:SI (match_dup 2)))]
6371 operands[1] = GEN_INT (- INTVAL (operands[1]));
6372 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6377 [(set (match_operand:SI 0 "register_operand" "")
6378 (match_operand:SI 1 "const_int_operand" ""))]
6379 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
6380 [(set (match_dup 2) (match_dup 1))
6381 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
6384 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
6385 unsigned HOST_WIDE_INT mask = 0xff;
6388 for (i = 0; i < 25; i++)
6389 if ((val & (mask << i)) == val)
6392 /* Don't split if the shift is zero. */
6396 operands[1] = GEN_INT (val >> i);
6397 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6398 operands[3] = GEN_INT (i);
6402 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
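;; For example (illustrative), the constant 300 is split as 300 = 45 + 255,
;; i.e. a move of #45 followed by an add of #255, both of which fit the
;; Thumb-1 8-bit immediate range.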
6404 [(set (match_operand:SI 0 "register_operand" "")
6405 (match_operand:SI 1 "const_int_operand" ""))]
6406 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
6407 [(set (match_dup 2) (match_dup 1))
6408 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
6411 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
6412 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6413 operands[3] = GEN_INT (255);
6417 ;; When generating pic, we need to load the symbol offset into a register.
6418 ;; So that the optimizer does not confuse this with a normal symbol load
6419 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6420 ;; since that is the only type of relocation we can use.
6422 ;; Wrap calculation of the whole PIC address in a single pattern for the
6423 ;; benefit of optimizers, particularly PRE and HOIST. Calculation of
6424 ;; a PIC address involves two loads from memory, so we want to CSE it
6425 ;; as often as possible.
6426 ;; This pattern will be split into one of the pic_load_addr_* patterns
6427 ;; and a move after GCSE optimizations.
6429 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
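;; As a sketch of the split below (register names are illustrative), the
;; combined pattern becomes a pic_load_addr_* insn that loads the
;; UNSPEC_PIC_SYM offset from the literal pool into a temporary rT, followed
;; by the load "ldr rD, [rPIC, rT]" of the actual GOT entry.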
6430 (define_expand "calculate_pic_address"
6431 [(set (match_operand:SI 0 "register_operand" "")
6432 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6433 (unspec:SI [(match_operand:SI 2 "" "")]
6438 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6440 [(set (match_operand:SI 0 "register_operand" "")
6441 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6442 (unspec:SI [(match_operand:SI 2 "" "")]
6445 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6446 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6447 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6450 ;; operand1 is the memory address to go into
6451 ;; pic_load_addr_32bit.
6452 ;; operand2 is the PIC label to be emitted
6453 ;; from pic_add_dot_plus_eight.
6454 ;; We do this to allow hoisting of the entire insn.
6455 (define_insn_and_split "pic_load_addr_unified"
6456 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6457 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6458 (match_operand:SI 2 "" "")]
6459 UNSPEC_PIC_UNIFIED))]
6462 "&& reload_completed"
6463 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6464 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6465 (match_dup 2)] UNSPEC_PIC_BASE))]
6466 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6467 [(set_attr "type" "load1,load1,load1")
6468 (set_attr "pool_range" "4096,4094,1022")
6469 (set_attr "neg_pool_range" "4084,0,0")
6470 (set_attr "arch" "a,t2,t1")
6471 (set_attr "length" "8,6,4")]
6474 ;; The rather odd constraints on the following are to force reload to leave
6475 ;; the insn alone, and to force the minipool generation pass to then move
6476 ;; the GOT symbol to memory.
6478 (define_insn "pic_load_addr_32bit"
6479 [(set (match_operand:SI 0 "s_register_operand" "=r")
6480 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6481 "TARGET_32BIT && flag_pic"
6483 [(set_attr "type" "load1")
6484 (set (attr "pool_range")
6485 (if_then_else (eq_attr "is_thumb" "no")
6488 (set (attr "neg_pool_range")
6489 (if_then_else (eq_attr "is_thumb" "no")
6494 (define_insn "pic_load_addr_thumb1"
6495 [(set (match_operand:SI 0 "s_register_operand" "=l")
6496 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6497 "TARGET_THUMB1 && flag_pic"
6499 [(set_attr "type" "load1")
6500 (set (attr "pool_range") (const_int 1018))]
6503 (define_insn "pic_add_dot_plus_four"
6504 [(set (match_operand:SI 0 "register_operand" "=r")
6505 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6507 (match_operand 2 "" "")]
6511 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6512 INTVAL (operands[2]));
6513 return \"add\\t%0, %|pc\";
6515 [(set_attr "length" "2")]
6518 (define_insn "pic_add_dot_plus_eight"
6519 [(set (match_operand:SI 0 "register_operand" "=r")
6520 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6522 (match_operand 2 "" "")]
6526 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6527 INTVAL (operands[2]));
6528 return \"add%?\\t%0, %|pc, %1\";
6530 [(set_attr "predicable" "yes")]
6533 (define_insn "tls_load_dot_plus_eight"
6534 [(set (match_operand:SI 0 "register_operand" "=r")
6535 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6537 (match_operand 2 "" "")]
6541 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6542 INTVAL (operands[2]));
6543 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6545 [(set_attr "predicable" "yes")]
6548 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6549 ;; followed by a load. These sequences can be crunched down to
6550 ;; tls_load_dot_plus_eight by a peephole.
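;; As an illustration (register names are arbitrary), the peephole turns
;;   add rT, pc, rS
;;   ldr rD, [rT]
;; into the single
;;   ldr rD, [pc, rS]
;; when rT is dead afterwards.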
6553 [(set (match_operand:SI 0 "register_operand" "")
6554 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6556 (match_operand 1 "" "")]
6558 (set (match_operand:SI 2 "arm_general_register_operand" "")
6559 (mem:SI (match_dup 0)))]
6560 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6562 (mem:SI (unspec:SI [(match_dup 3)
6569 (define_insn "pic_offset_arm"
6570 [(set (match_operand:SI 0 "register_operand" "=r")
6571 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6572 (unspec:SI [(match_operand:SI 2 "" "X")]
6573 UNSPEC_PIC_OFFSET))))]
6574 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6575 "ldr%?\\t%0, [%1,%2]"
6576 [(set_attr "type" "load1")]
6579 (define_expand "builtin_setjmp_receiver"
6580 [(label_ref (match_operand 0 "" ""))]
6584 /* r3 is clobbered by set/longjmp, so we can use it as a scratch register. */
6586 if (arm_pic_register != INVALID_REGNUM)
6587 arm_load_pic_register (1UL << 3);
6591 ;; If copying one reg to another we can set the condition codes according to
6592 ;; its value. Such a move is common after a return from a subroutine when
6593 ;; the result is being tested against zero.
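;; For example (illustrative), the sequence "mov r0, r1" followed by
;; "cmp r0, #0" can be collapsed into the single flag-setting move
;; "subs r0, r1, #0".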
6595 (define_insn "*movsi_compare0"
6596 [(set (reg:CC CC_REGNUM)
6597 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6599 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6605 [(set_attr "conds" "set")
6606 (set_attr "type" "simple_alu_imm,simple_alu_imm")]
6609 ;; Subroutine to store a half word from a register into memory.
6610 ;; Operand 0 is the source register (HImode)
6611 ;; Operand 1 is the destination address in a register (SImode)
6613 ;; In both this routine and the next, we must be careful not to spill
6614 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6615 ;; can generate unrecognizable rtl.
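;; As a little-endian sketch (register names are illustrative, not necessarily
;; the exact output), with the value in rV and the address in rA:
;;   strb rV, [rA]          @ low byte
;;   mov  rT, rV, asr #8
;;   strb rT, [rA, #1]      @ high byte
;; storehi_bigend below stores the bytes in the opposite order.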
6617 (define_expand "storehi"
6618 [;; store the low byte
6619 (set (match_operand 1 "" "") (match_dup 3))
6620 ;; extract the high byte
6622 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6623 ;; store the high byte
6624 (set (match_dup 4) (match_dup 5))]
6628 rtx op1 = operands[1];
6629 rtx addr = XEXP (op1, 0);
6630 enum rtx_code code = GET_CODE (addr);
6632 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6634 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6636 operands[4] = adjust_address (op1, QImode, 1);
6637 operands[1] = adjust_address (operands[1], QImode, 0);
6638 operands[3] = gen_lowpart (QImode, operands[0]);
6639 operands[0] = gen_lowpart (SImode, operands[0]);
6640 operands[2] = gen_reg_rtx (SImode);
6641 operands[5] = gen_lowpart (QImode, operands[2]);
6645 (define_expand "storehi_bigend"
6646 [(set (match_dup 4) (match_dup 3))
6648 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6649 (set (match_operand 1 "" "") (match_dup 5))]
6653 rtx op1 = operands[1];
6654 rtx addr = XEXP (op1, 0);
6655 enum rtx_code code = GET_CODE (addr);
6657 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6659 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6661 operands[4] = adjust_address (op1, QImode, 1);
6662 operands[1] = adjust_address (operands[1], QImode, 0);
6663 operands[3] = gen_lowpart (QImode, operands[0]);
6664 operands[0] = gen_lowpart (SImode, operands[0]);
6665 operands[2] = gen_reg_rtx (SImode);
6666 operands[5] = gen_lowpart (QImode, operands[2]);
6670 ;; Subroutine to store a half word integer constant into memory.
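;; For example (illustrative, little-endian), storing the constant 0x1234
;; expands to two byte stores: #0x34 to the original address and #0x12 to the
;; address plus one; when both bytes are equal only one temporary register is
;; used.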
6671 (define_expand "storeinthi"
6672 [(set (match_operand 0 "" "")
6673 (match_operand 1 "" ""))
6674 (set (match_dup 3) (match_dup 2))]
6678 HOST_WIDE_INT value = INTVAL (operands[1]);
6679 rtx addr = XEXP (operands[0], 0);
6680 rtx op0 = operands[0];
6681 enum rtx_code code = GET_CODE (addr);
6683 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6685 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6687 operands[1] = gen_reg_rtx (SImode);
6688 if (BYTES_BIG_ENDIAN)
6690 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6691 if ((value & 255) == ((value >> 8) & 255))
6692 operands[2] = operands[1];
6695 operands[2] = gen_reg_rtx (SImode);
6696 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6701 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6702 if ((value & 255) == ((value >> 8) & 255))
6703 operands[2] = operands[1];
6706 operands[2] = gen_reg_rtx (SImode);
6707 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6711 operands[3] = adjust_address (op0, QImode, 1);
6712 operands[0] = adjust_address (operands[0], QImode, 0);
6713 operands[2] = gen_lowpart (QImode, operands[2]);
6714 operands[1] = gen_lowpart (QImode, operands[1]);
6718 (define_expand "storehi_single_op"
6719 [(set (match_operand:HI 0 "memory_operand" "")
6720 (match_operand:HI 1 "general_operand" ""))]
6721 "TARGET_32BIT && arm_arch4"
6723 if (!s_register_operand (operands[1], HImode))
6724 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6728 (define_expand "movhi"
6729 [(set (match_operand:HI 0 "general_operand" "")
6730 (match_operand:HI 1 "general_operand" ""))]
6735 if (can_create_pseudo_p ())
6737 if (MEM_P (operands[0]))
6741 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6744 if (CONST_INT_P (operands[1]))
6745 emit_insn (gen_storeinthi (operands[0], operands[1]));
6748 if (MEM_P (operands[1]))
6749 operands[1] = force_reg (HImode, operands[1]);
6750 if (BYTES_BIG_ENDIAN)
6751 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6753 emit_insn (gen_storehi (operands[1], operands[0]));
6757 /* Sign extend a constant, and keep it in an SImode reg. */
6758 else if (CONST_INT_P (operands[1]))
6760 rtx reg = gen_reg_rtx (SImode);
6761 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6763 /* If the constant is already valid, leave it alone. */
6764 if (!const_ok_for_arm (val))
6766 /* If setting all the top bits will make the constant
6767 loadable in a single instruction, then set them.
6768 Otherwise, sign extend the number. */
6770 if (const_ok_for_arm (~(val | ~0xffff)))
6772 else if (val & 0x8000)
6776 emit_insn (gen_movsi (reg, GEN_INT (val)));
6777 operands[1] = gen_lowpart (HImode, reg);
6779 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6780 && MEM_P (operands[1]))
6782 rtx reg = gen_reg_rtx (SImode);
6784 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6785 operands[1] = gen_lowpart (HImode, reg);
6787 else if (!arm_arch4)
6789 if (MEM_P (operands[1]))
6792 rtx offset = const0_rtx;
6793 rtx reg = gen_reg_rtx (SImode);
6795 if ((REG_P (base = XEXP (operands[1], 0))
6796 || (GET_CODE (base) == PLUS
6797 && (CONST_INT_P (offset = XEXP (base, 1)))
6798 && ((INTVAL(offset) & 1) != 1)
6799 && REG_P (base = XEXP (base, 0))))
6800 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6804 new_rtx = widen_memory_access (operands[1], SImode,
6805 ((INTVAL (offset) & ~3)
6806 - INTVAL (offset)));
6807 emit_insn (gen_movsi (reg, new_rtx));
6808 if (((INTVAL (offset) & 2) != 0)
6809 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6811 rtx reg2 = gen_reg_rtx (SImode);
6813 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6818 emit_insn (gen_movhi_bytes (reg, operands[1]));
6820 operands[1] = gen_lowpart (HImode, reg);
6824 /* Handle loading a large integer during reload. */
6825 else if (CONST_INT_P (operands[1])
6826 && !const_ok_for_arm (INTVAL (operands[1]))
6827 && !const_ok_for_arm (~INTVAL (operands[1])))
6829 /* Writing a constant to memory needs a scratch, which should
6830 be handled with SECONDARY_RELOADs. */
6831 gcc_assert (REG_P (operands[0]));
6833 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6834 emit_insn (gen_movsi (operands[0], operands[1]));
6838 else if (TARGET_THUMB2)
6840 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6841 if (can_create_pseudo_p ())
6843 if (!REG_P (operands[0]))
6844 operands[1] = force_reg (HImode, operands[1]);
6845 /* Zero extend a constant, and keep it in an SImode reg. */
6846 else if (CONST_INT_P (operands[1]))
6848 rtx reg = gen_reg_rtx (SImode);
6849 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6851 emit_insn (gen_movsi (reg, GEN_INT (val)));
6852 operands[1] = gen_lowpart (HImode, reg);
6856 else /* TARGET_THUMB1 */
6858 if (can_create_pseudo_p ())
6860 if (CONST_INT_P (operands[1]))
6862 rtx reg = gen_reg_rtx (SImode);
6864 emit_insn (gen_movsi (reg, operands[1]));
6865 operands[1] = gen_lowpart (HImode, reg);
6868 /* ??? We shouldn't really get invalid addresses here, but this can
6869 happen if we are passed an SP-relative (never OK for HImode/QImode)
6870 or virtual-register-relative (also rejected as illegitimate for
6871 HImode/QImode) address. */
6872 /* ??? This should perhaps be fixed elsewhere, for instance, in
6873 fixup_stack_1, by checking for other kinds of invalid addresses,
6874 e.g. a bare reference to a virtual register. This may confuse the
6875 alpha though, which must handle this case differently. */
6876 if (MEM_P (operands[0])
6877 && !memory_address_p (GET_MODE (operands[0]),
6878 XEXP (operands[0], 0)))
6880 = replace_equiv_address (operands[0],
6881 copy_to_reg (XEXP (operands[0], 0)));
6883 if (MEM_P (operands[1])
6884 && !memory_address_p (GET_MODE (operands[1]),
6885 XEXP (operands[1], 0)))
6887 = replace_equiv_address (operands[1],
6888 copy_to_reg (XEXP (operands[1], 0)));
6890 if (MEM_P (operands[1]) && optimize > 0)
6892 rtx reg = gen_reg_rtx (SImode);
6894 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6895 operands[1] = gen_lowpart (HImode, reg);
6898 if (MEM_P (operands[0]))
6899 operands[1] = force_reg (HImode, operands[1]);
6901 else if (CONST_INT_P (operands[1])
6902 && !satisfies_constraint_I (operands[1]))
6904 /* Handle loading a large integer during reload. */
6906 /* Writing a constant to memory needs a scratch, which should
6907 be handled with SECONDARY_RELOADs. */
6908 gcc_assert (REG_P (operands[0]));
6910 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6911 emit_insn (gen_movsi (operands[0], operands[1]));
6918 (define_insn "*thumb1_movhi_insn"
6919 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6920 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6922 && ( register_operand (operands[0], HImode)
6923 || register_operand (operands[1], HImode))"
6925 switch (which_alternative)
6927 case 0: return \"add %0, %1, #0\";
6928 case 2: return \"strh %1, %0\";
6929 case 3: return \"mov %0, %1\";
6930 case 4: return \"mov %0, %1\";
6931 case 5: return \"mov %0, %1\";
6932 default: gcc_unreachable ();
6934 /* The stack pointer can end up being taken as an index register.
6935 Catch this case here and deal with it. */
6936 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6937 && REG_P (XEXP (XEXP (operands[1], 0), 0))
6938 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6941 ops[0] = operands[0];
6942 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6944 output_asm_insn (\"mov %0, %1\", ops);
6946 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6949 return \"ldrh %0, %1\";
6951 [(set_attr "length" "2,4,2,2,2,2")
6952 (set_attr "type" "*,load1,store1,*,*,*")
6953 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6956 (define_expand "movhi_bytes"
6957 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6959 (zero_extend:SI (match_dup 6)))
6960 (set (match_operand:SI 0 "" "")
6961 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6966 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6968 mem1 = change_address (operands[1], QImode, addr);
6969 mem2 = change_address (operands[1], QImode,
6970 plus_constant (Pmode, addr, 1));
6971 operands[0] = gen_lowpart (SImode, operands[0]);
6973 operands[2] = gen_reg_rtx (SImode);
6974 operands[3] = gen_reg_rtx (SImode);
6977 if (BYTES_BIG_ENDIAN)
6979 operands[4] = operands[2];
6980 operands[5] = operands[3];
6984 operands[4] = operands[3];
6985 operands[5] = operands[2];
6990 (define_expand "movhi_bigend"
6992 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6995 (ashiftrt:SI (match_dup 2) (const_int 16)))
6996 (set (match_operand:HI 0 "s_register_operand" "")
7000 operands[2] = gen_reg_rtx (SImode);
7001 operands[3] = gen_reg_rtx (SImode);
7002 operands[4] = gen_lowpart (HImode, operands[3]);
7006 ;; Pattern to recognize the insn generated by the default case above.
7007 (define_insn "*movhi_insn_arch4"
7008 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
7009 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
7012 && (register_operand (operands[0], HImode)
7013 || register_operand (operands[1], HImode))"
7015 mov%?\\t%0, %1\\t%@ movhi
7016 mvn%?\\t%0, #%B1\\t%@ movhi
7017 str%(h%)\\t%1, %0\\t%@ movhi
7018 ldr%(h%)\\t%0, %1\\t%@ movhi"
7019 [(set_attr "predicable" "yes")
7020 (set_attr "insn" "mov,mvn,*,*")
7021 (set_attr "pool_range" "*,*,*,256")
7022 (set_attr "neg_pool_range" "*,*,*,244")
7023 (set_attr_alternative "type"
7024 [(if_then_else (match_operand 1 "const_int_operand" "")
7025 (const_string "simple_alu_imm" )
7027 (const_string "simple_alu_imm")
7028 (const_string "store1")
7029 (const_string "load1")])]
7032 (define_insn "*movhi_bytes"
7033 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7034 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
7037 mov%?\\t%0, %1\\t%@ movhi
7038 mov%?\\t%0, %1\\t%@ movhi
7039 mvn%?\\t%0, #%B1\\t%@ movhi"
7040 [(set_attr "predicable" "yes")
7041 (set_attr "insn" "mov, mov,mvn")
7042 (set_attr "type" "simple_alu_imm,*,simple_alu_imm")]
7045 (define_expand "thumb_movhi_clobber"
7046 [(set (match_operand:HI 0 "memory_operand" "")
7047 (match_operand:HI 1 "register_operand" ""))
7048 (clobber (match_operand:DI 2 "register_operand" ""))]
7051 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
7052 && REGNO (operands[1]) <= LAST_LO_REGNUM)
7054 emit_insn (gen_movhi (operands[0], operands[1]));
7057 /* XXX Fixme, need to handle other cases here as well. */
7062 ;; We use a DImode scratch because we may occasionally need an additional
7063 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7064 ;; to take any notice of the "o" constraints on the reload_memory_operand operand.
7065 (define_expand "reload_outhi"
7066 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7067 (match_operand:HI 1 "s_register_operand" "r")
7068 (match_operand:DI 2 "s_register_operand" "=&l")])]
7071 arm_reload_out_hi (operands);
7073 thumb_reload_out_hi (operands);
7078 (define_expand "reload_inhi"
7079 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7080 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7081 (match_operand:DI 2 "s_register_operand" "=&r")])]
7085 arm_reload_in_hi (operands);
7087 thumb_reload_out_hi (operands);
7091 (define_expand "movqi"
7092 [(set (match_operand:QI 0 "general_operand" "")
7093 (match_operand:QI 1 "general_operand" ""))]
7096 /* Everything except mem = const or mem = mem can be done easily */
7098 if (can_create_pseudo_p ())
7100 if (CONST_INT_P (operands[1]))
7102 rtx reg = gen_reg_rtx (SImode);
7104 /* For thumb we want an unsigned immediate, so that we are more likely
7105 to be able to use a movs insn. */
7107 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7109 emit_insn (gen_movsi (reg, operands[1]));
7110 operands[1] = gen_lowpart (QImode, reg);
7115 /* ??? We shouldn't really get invalid addresses here, but this can
7116 happen if we are passed an SP-relative (never OK for HImode/QImode)
7117 or virtual-register-relative (also rejected as illegitimate for
7118 HImode/QImode) address. */
7119 /* ??? This should perhaps be fixed elsewhere, for instance, in
7120 fixup_stack_1, by checking for other kinds of invalid addresses,
7121 e.g. a bare reference to a virtual register. This may confuse the
7122 alpha though, which must handle this case differently. */
7123 if (MEM_P (operands[0])
7124 && !memory_address_p (GET_MODE (operands[0]),
7125 XEXP (operands[0], 0)))
7127 = replace_equiv_address (operands[0],
7128 copy_to_reg (XEXP (operands[0], 0)));
7129 if (MEM_P (operands[1])
7130 && !memory_address_p (GET_MODE (operands[1]),
7131 XEXP (operands[1], 0)))
7133 = replace_equiv_address (operands[1],
7134 copy_to_reg (XEXP (operands[1], 0)));
7137 if (MEM_P (operands[1]) && optimize > 0)
7139 rtx reg = gen_reg_rtx (SImode);
7141 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7142 operands[1] = gen_lowpart (QImode, reg);
7145 if (MEM_P (operands[0]))
7146 operands[1] = force_reg (QImode, operands[1]);
7148 else if (TARGET_THUMB
7149 && CONST_INT_P (operands[1])
7150 && !satisfies_constraint_I (operands[1]))
7152 /* Handle loading a large integer during reload. */
7154 /* Writing a constant to memory needs a scratch, which should
7155 be handled with SECONDARY_RELOADs. */
7156 gcc_assert (REG_P (operands[0]));
7158 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7159 emit_insn (gen_movsi (operands[0], operands[1]));
7166 (define_insn "*arm_movqi_insn"
7167 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,Uu,r,m")
7168 (match_operand:QI 1 "general_operand" "r,I,K,Uu,l,m,r"))]
7170 && ( register_operand (operands[0], QImode)
7171 || register_operand (operands[1], QImode))"
7180 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,load1, store1, load1, store1")
7181 (set_attr "insn" "mov,mov,mvn,*,*,*,*")
7182 (set_attr "predicable" "yes")
7183 (set_attr "arch" "any,any,any,t2,t2,any,any")
7184 (set_attr "length" "4,4,4,2,2,4,4")]
7187 (define_insn "*thumb1_movqi_insn"
7188 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7189 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
7191 && ( register_operand (operands[0], QImode)
7192 || register_operand (operands[1], QImode))"
7200 [(set_attr "length" "2")
7201 (set_attr "type" "simple_alu_imm,load1,store1,*,*,simple_alu_imm")
7202 (set_attr "insn" "*,*,*,mov,mov,mov")
7203 (set_attr "pool_range" "*,32,*,*,*,*")
7204 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7207 (define_expand "movhf"
7208 [(set (match_operand:HF 0 "general_operand" "")
7209 (match_operand:HF 1 "general_operand" ""))]
7214 if (MEM_P (operands[0]))
7215 operands[1] = force_reg (HFmode, operands[1]);
7217 else /* TARGET_THUMB1 */
7219 if (can_create_pseudo_p ())
7221 if (!REG_P (operands[0]))
7222 operands[1] = force_reg (HFmode, operands[1]);
7228 (define_insn "*arm32_movhf"
7229 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7230 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7231 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
7232 && ( s_register_operand (operands[0], HFmode)
7233 || s_register_operand (operands[1], HFmode))"
7235 switch (which_alternative)
7237 case 0: /* ARM register from memory */
7238 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
7239 case 1: /* memory from ARM register */
7240 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
7241 case 2: /* ARM register from ARM register */
7242 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7243 case 3: /* ARM register from constant */
7249 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7250 bits = real_to_target (NULL, &r, HFmode);
7251 ops[0] = operands[0];
7252 ops[1] = GEN_INT (bits);
7253 ops[2] = GEN_INT (bits & 0xff00);
7254 ops[3] = GEN_INT (bits & 0x00ff);
7256 if (arm_arch_thumb2)
7257 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7259 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7266 [(set_attr "conds" "unconditional")
7267 (set_attr "type" "load1,store1,*,*")
7268 (set_attr "insn" "*,*,mov,mov")
7269 (set_attr "length" "4,4,4,8")
7270 (set_attr "predicable" "yes")]
7273 (define_insn "*thumb1_movhf"
7274 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
7275 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
7277 && ( s_register_operand (operands[0], HFmode)
7278 || s_register_operand (operands[1], HFmode))"
7280 switch (which_alternative)
7285 gcc_assert (MEM_P (operands[1]));
7286 addr = XEXP (operands[1], 0);
7287 if (GET_CODE (addr) == LABEL_REF
7288 || (GET_CODE (addr) == CONST
7289 && GET_CODE (XEXP (addr, 0)) == PLUS
7290 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
7291 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
7293 /* Constant pool entry. */
7294 return \"ldr\\t%0, %1\";
7296 return \"ldrh\\t%0, %1\";
7298 case 2: return \"strh\\t%1, %0\";
7299 default: return \"mov\\t%0, %1\";
7302 [(set_attr "length" "2")
7303 (set_attr "type" "*,load1,store1,*,*")
7304 (set_attr "insn" "mov,*,*,mov,mov")
7305 (set_attr "pool_range" "*,1018,*,*,*")
7306 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
7308 (define_expand "movsf"
7309 [(set (match_operand:SF 0 "general_operand" "")
7310 (match_operand:SF 1 "general_operand" ""))]
7315 if (MEM_P (operands[0]))
7316 operands[1] = force_reg (SFmode, operands[1]);
7318 else /* TARGET_THUMB1 */
7320 if (can_create_pseudo_p ())
7322 if (!REG_P (operands[0]))
7323 operands[1] = force_reg (SFmode, operands[1]);
7329 ;; Transform a floating-point move of a constant into a core register into
7330 ;; an SImode operation.
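;; For example (illustrative), moving the SFmode constant 1.0 into a core
;; register is rewritten as an SImode move of its bit pattern 0x3f800000,
;; which the normal movsi machinery can then handle.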
7332 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7333 (match_operand:SF 1 "immediate_operand" ""))]
7336 && CONST_DOUBLE_P (operands[1])"
7337 [(set (match_dup 2) (match_dup 3))]
7339 operands[2] = gen_lowpart (SImode, operands[0]);
7340 operands[3] = gen_lowpart (SImode, operands[1]);
7341 if (operands[2] == 0 || operands[3] == 0)
7346 (define_insn "*arm_movsf_soft_insn"
7347 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7348 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7350 && TARGET_SOFT_FLOAT
7351 && (!MEM_P (operands[0])
7352 || register_operand (operands[1], SFmode))"
7355 ldr%?\\t%0, %1\\t%@ float
7356 str%?\\t%1, %0\\t%@ float"
7357 [(set_attr "predicable" "yes")
7358 (set_attr "type" "*,load1,store1")
7359 (set_attr "insn" "mov,*,*")
7360 (set_attr "arm_pool_range" "*,4096,*")
7361 (set_attr "thumb2_pool_range" "*,4094,*")
7362 (set_attr "arm_neg_pool_range" "*,4084,*")
7363 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7366 ;;; ??? This should have alternatives for constants.
7367 (define_insn "*thumb1_movsf_insn"
7368 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
7369 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
7371 && ( register_operand (operands[0], SFmode)
7372 || register_operand (operands[1], SFmode))"
7381 [(set_attr "length" "2")
7382 (set_attr "type" "*,load1,store1,load1,store1,*,*")
7383 (set_attr "pool_range" "*,*,*,1018,*,*,*")
7384 (set_attr "insn" "*,*,*,*,*,mov,mov")
7385 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
7388 (define_expand "movdf"
7389 [(set (match_operand:DF 0 "general_operand" "")
7390 (match_operand:DF 1 "general_operand" ""))]
7395 if (MEM_P (operands[0]))
7396 operands[1] = force_reg (DFmode, operands[1]);
7398 else /* TARGET_THUMB */
7400 if (can_create_pseudo_p ())
7402 if (!REG_P (operands[0]))
7403 operands[1] = force_reg (DFmode, operands[1]);
7409 ;; Reloading a df mode value stored in integer regs to memory can require a scratch register.
7411 (define_expand "reload_outdf"
7412 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7413 (match_operand:DF 1 "s_register_operand" "r")
7414 (match_operand:SI 2 "s_register_operand" "=&r")]
7418 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7421 operands[2] = XEXP (operands[0], 0);
7422 else if (code == POST_INC || code == PRE_DEC)
7424 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7425 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7426 emit_insn (gen_movdi (operands[0], operands[1]));
7429 else if (code == PRE_INC)
7431 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7433 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7436 else if (code == POST_DEC)
7437 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7439 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7440 XEXP (XEXP (operands[0], 0), 1)));
7442 emit_insn (gen_rtx_SET (VOIDmode,
7443 replace_equiv_address (operands[0], operands[2]),
7446 if (code == POST_DEC)
7447 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7453 (define_insn "*movdf_soft_insn"
7454 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
7455 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
7456 "TARGET_32BIT && TARGET_SOFT_FLOAT
7457 && ( register_operand (operands[0], DFmode)
7458 || register_operand (operands[1], DFmode))"
7460 switch (which_alternative)
7467 return output_move_double (operands, true, NULL);
7470 [(set_attr "length" "8,12,16,8,8")
7471 (set_attr "type" "*,*,*,load2,store2")
7472 (set_attr "arm_pool_range" "*,*,*,1020,*")
7473 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7474 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7475 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7478 ;;; ??? This should have alternatives for constants.
7479 ;;; ??? This was originally identical to the movdi_insn pattern.
7480 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
7481 ;;; thumb_reorg with a memory reference.
7482 (define_insn "*thumb_movdf_insn"
7483 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
7484 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
7486 && ( register_operand (operands[0], DFmode)
7487 || register_operand (operands[1], DFmode))"
7489 switch (which_alternative)
7493 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7494 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
7495 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
7497 return \"ldmia\\t%1, {%0, %H0}\";
7499 return \"stmia\\t%0, {%1, %H1}\";
7501 return thumb_load_double_from_address (operands);
7503 operands[2] = gen_rtx_MEM (SImode,
7504 plus_constant (Pmode,
7505 XEXP (operands[0], 0), 4));
7506 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
7509 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7510 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
7511 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
7514 [(set_attr "length" "4,2,2,6,4,4")
7515 (set_attr "type" "*,load2,store2,load2,store2,*")
7516 (set_attr "insn" "*,*,*,*,*,mov")
7517 (set_attr "pool_range" "*,*,*,1018,*,*")]
7521 ;; load- and store-multiple insns
7522 ;; The ARM can load/store any set of registers, provided that they are in
7523 ;; ascending order, but these expanders assume a contiguous set.
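;; For example (illustrative), a four-word load into r4..r7 from the address
;; in rB becomes a single "ldmia rB, {r4-r7}"; the registers must form a
;; contiguous, ascending block for these expanders to apply.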
7525 (define_expand "load_multiple"
7526 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7527 (match_operand:SI 1 "" ""))
7528 (use (match_operand:SI 2 "" ""))])]
7531 HOST_WIDE_INT offset = 0;
7533 /* Support only fixed point registers. */
7534 if (!CONST_INT_P (operands[2])
7535 || INTVAL (operands[2]) > 14
7536 || INTVAL (operands[2]) < 2
7537 || !MEM_P (operands[1])
7538 || !REG_P (operands[0])
7539 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7540 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7544 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7545 INTVAL (operands[2]),
7546 force_reg (SImode, XEXP (operands[1], 0)),
7547 FALSE, operands[1], &offset);
7550 (define_expand "store_multiple"
7551 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7552 (match_operand:SI 1 "" ""))
7553 (use (match_operand:SI 2 "" ""))])]
7556 HOST_WIDE_INT offset = 0;
7558 /* Support only fixed point registers. */
7559 if (!CONST_INT_P (operands[2])
7560 || INTVAL (operands[2]) > 14
7561 || INTVAL (operands[2]) < 2
7562 || !REG_P (operands[1])
7563 || !MEM_P (operands[0])
7564 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7565 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7569 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7570 INTVAL (operands[2]),
7571 force_reg (SImode, XEXP (operands[0], 0)),
7572 FALSE, operands[0], &offset);
7576 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7577 ;; We could let this apply for blocks of less than this, but it clobbers so
7578 ;; many registers that there is then probably a better way.
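;; For instance, a word-aligned 12-byte copy on Thumb-1 can be handled by the
;; movmem12b pattern below (three word loads and stores, with both pointers
;; advanced by 12); this is an illustrative sketch rather than a description
;; of every case the expanders handle.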
7580 (define_expand "movmemqi"
7581 [(match_operand:BLK 0 "general_operand" "")
7582 (match_operand:BLK 1 "general_operand" "")
7583 (match_operand:SI 2 "const_int_operand" "")
7584 (match_operand:SI 3 "const_int_operand" "")]
7589 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7590 && !optimize_function_for_size_p (cfun))
7592 if (gen_movmem_ldrd_strd (operands))
7597 if (arm_gen_movmemqi (operands))
7601 else /* TARGET_THUMB1 */
7603 if ( INTVAL (operands[3]) != 4
7604 || INTVAL (operands[2]) > 48)
7607 thumb_expand_movmemqi (operands);
7613 ;; Thumb block-move insns
7615 (define_insn "movmem12b"
7616 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7617 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7618 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7619 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7620 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
7621 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
7622 (set (match_operand:SI 0 "register_operand" "=l")
7623 (plus:SI (match_dup 2) (const_int 12)))
7624 (set (match_operand:SI 1 "register_operand" "=l")
7625 (plus:SI (match_dup 3) (const_int 12)))
7626 (clobber (match_scratch:SI 4 "=&l"))
7627 (clobber (match_scratch:SI 5 "=&l"))
7628 (clobber (match_scratch:SI 6 "=&l"))]
7630 "* return thumb_output_move_mem_multiple (3, operands);"
7631 [(set_attr "length" "4")
7632 ; This isn't entirely accurate... It loads as well, but in terms of
7633 ; scheduling the following insn it is better to consider it as a store
7634 (set_attr "type" "store3")]
7637 (define_insn "movmem8b"
7638 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7639 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7640 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7641 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7642 (set (match_operand:SI 0 "register_operand" "=l")
7643 (plus:SI (match_dup 2) (const_int 8)))
7644 (set (match_operand:SI 1 "register_operand" "=l")
7645 (plus:SI (match_dup 3) (const_int 8)))
7646 (clobber (match_scratch:SI 4 "=&l"))
7647 (clobber (match_scratch:SI 5 "=&l"))]
7649 "* return thumb_output_move_mem_multiple (2, operands);"
7650 [(set_attr "length" "4")
7651 ; This isn't entirely accurate... It loads as well, but in terms of
7652 ; scheduling the following insn it is better to consider it as a store
7653 (set_attr "type" "store2")]
7658 ;; Compare & branch insns
7659 ;; The range calculations are derived as follows:
7660 ;; For forward branches, the address calculation returns the address of
7661 ;; the next instruction. This is 2 beyond the branch instruction.
7662 ;; For backward branches, the address calculation returns the address of
7663 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7664 ;; instruction for the shortest sequence, and 4 before the branch instruction
7665 ;; if we have to jump around an unconditional branch.
7666 ;; To the basic branch range the PC offset must be added (this is +4).
7667 ;; So for forward branches we have
7668 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7669 ;; And for backward branches we have
7670 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7672 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7673 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
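;; Working the formulas through (illustrative check of the figures above):
;; for 'b', forward 2046 - 2 + 4 = 2048 and backward -2048 - (-4) + 4 = -2040
;; (the unconditional branch sits 4 bytes after the cmp in the long sequence);
;; for 'b<cond>', forward 254 - 2 + 4 = 256 and backward -256 - (-2) + 4 = -250
;; (the conditional branch is 2 bytes after the cmp).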
7675 (define_expand "cbranchsi4"
7676 [(set (pc) (if_then_else
7677 (match_operator 0 "expandable_comparison_operator"
7678 [(match_operand:SI 1 "s_register_operand" "")
7679 (match_operand:SI 2 "nonmemory_operand" "")])
7680 (label_ref (match_operand 3 "" ""))
7686 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7688 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7692 if (thumb1_cmpneg_operand (operands[2], SImode))
7694 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7695 operands[3], operands[0]));
7698 if (!thumb1_cmp_operand (operands[2], SImode))
7699 operands[2] = force_reg (SImode, operands[2]);
7702 ;; A pattern to recognize a special situation and optimize for it.
7703 ;; On the thumb, zero-extension from memory is preferable to sign-extension
7704 ;; due to the available addressing modes. Hence, convert a signed comparison
7705 ;; with zero into an unsigned comparison with 127 if possible.
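;; For example (an illustration of the expander below), a QImode "x >= 0"
;; becomes "(unsigned) zero_extend (x) <= 127" (GE -> LEU) and "x < 0"
;; becomes "zero_extend (x) > 127" (LT -> GTU), so a zero-extending load can
;; be used instead of a sign-extending one.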
7706 (define_expand "cbranchqi4"
7707 [(set (pc) (if_then_else
7708 (match_operator 0 "lt_ge_comparison_operator"
7709 [(match_operand:QI 1 "memory_operand" "")
7710 (match_operand:QI 2 "const0_operand" "")])
7711 (label_ref (match_operand 3 "" ""))
7716 xops[1] = gen_reg_rtx (SImode);
7717 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
7718 xops[2] = GEN_INT (127);
7719 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
7720 VOIDmode, xops[1], xops[2]);
7721 xops[3] = operands[3];
7722 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
7726 (define_expand "cbranchsf4"
7727 [(set (pc) (if_then_else
7728 (match_operator 0 "expandable_comparison_operator"
7729 [(match_operand:SF 1 "s_register_operand" "")
7730 (match_operand:SF 2 "arm_float_compare_operand" "")])
7731 (label_ref (match_operand 3 "" ""))
7733 "TARGET_32BIT && TARGET_HARD_FLOAT"
7734 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7735 operands[3])); DONE;"
7738 (define_expand "cbranchdf4"
7739 [(set (pc) (if_then_else
7740 (match_operator 0 "expandable_comparison_operator"
7741 [(match_operand:DF 1 "s_register_operand" "")
7742 (match_operand:DF 2 "arm_float_compare_operand" "")])
7743 (label_ref (match_operand 3 "" ""))
7745 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7746 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7747 operands[3])); DONE;"
7750 (define_expand "cbranchdi4"
7751 [(set (pc) (if_then_else
7752 (match_operator 0 "expandable_comparison_operator"
7753 [(match_operand:DI 1 "s_register_operand" "")
7754 (match_operand:DI 2 "cmpdi_operand" "")])
7755 (label_ref (match_operand 3 "" ""))
7759 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7761 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7767 (define_insn "cbranchsi4_insn"
7768 [(set (pc) (if_then_else
7769 (match_operator 0 "arm_comparison_operator"
7770 [(match_operand:SI 1 "s_register_operand" "l,l*h")
7771 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7772 (label_ref (match_operand 3 "" ""))
7776 rtx t = cfun->machine->thumb1_cc_insn;
7779 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7780 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7782 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7784 if (!noov_comparison_operator (operands[0], VOIDmode))
7787 else if (cfun->machine->thumb1_cc_mode != CCmode)
7792 output_asm_insn ("cmp\t%1, %2", operands);
7793 cfun->machine->thumb1_cc_insn = insn;
7794 cfun->machine->thumb1_cc_op0 = operands[1];
7795 cfun->machine->thumb1_cc_op1 = operands[2];
7796 cfun->machine->thumb1_cc_mode = CCmode;
7799 /* Ensure we emit the right type of condition code on the jump. */
7800 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7803 switch (get_attr_length (insn))
7805 case 4: return \"b%d0\\t%l3\";
7806 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7807 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7810 [(set (attr "far_jump")
7812 (eq_attr "length" "8")
7813 (const_string "yes")
7814 (const_string "no")))
7815 (set (attr "length")
7817 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7818 (le (minus (match_dup 3) (pc)) (const_int 256)))
7821 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7822 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7827 (define_insn "cbranchsi4_scratch"
7828 [(set (pc) (if_then_else
7829 (match_operator 4 "arm_comparison_operator"
7830 [(match_operand:SI 1 "s_register_operand" "l,0")
7831 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7832 (label_ref (match_operand 3 "" ""))
7834 (clobber (match_scratch:SI 0 "=l,l"))]
7837 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7839 switch (get_attr_length (insn))
7841 case 4: return \"b%d4\\t%l3\";
7842 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7843 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7846 [(set (attr "far_jump")
7848 (eq_attr "length" "8")
7849 (const_string "yes")
7850 (const_string "no")))
7851 (set (attr "length")
7853 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7854 (le (minus (match_dup 3) (pc)) (const_int 256)))
7857 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7858 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7863 (define_insn "*negated_cbranchsi4"
7866 (match_operator 0 "equality_operator"
7867 [(match_operand:SI 1 "s_register_operand" "l")
7868 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7869 (label_ref (match_operand 3 "" ""))
7873 output_asm_insn (\"cmn\\t%1, %2\", operands);
7874 switch (get_attr_length (insn))
7876 case 4: return \"b%d0\\t%l3\";
7877 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7878 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7881 [(set (attr "far_jump")
7883 (eq_attr "length" "8")
7884 (const_string "yes")
7885 (const_string "no")))
7886 (set (attr "length")
7888 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7889 (le (minus (match_dup 3) (pc)) (const_int 256)))
7892 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7893 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7898 (define_insn "*tbit_cbranch"
7901 (match_operator 0 "equality_operator"
7902 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7904 (match_operand:SI 2 "const_int_operand" "i"))
7906 (label_ref (match_operand 3 "" ""))
7908 (clobber (match_scratch:SI 4 "=l"))]
7913 op[0] = operands[4];
7914 op[1] = operands[1];
7915 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7917 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7918 switch (get_attr_length (insn))
7920 case 4: return \"b%d0\\t%l3\";
7921 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7922 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7925 [(set (attr "far_jump")
7927 (eq_attr "length" "8")
7928 (const_string "yes")
7929 (const_string "no")))
7930 (set (attr "length")
7932 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7933 (le (minus (match_dup 3) (pc)) (const_int 256)))
7936 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7937 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7942 (define_insn "*tlobits_cbranch"
7945 (match_operator 0 "equality_operator"
7946 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7947 (match_operand:SI 2 "const_int_operand" "i")
7950 (label_ref (match_operand 3 "" ""))
7952 (clobber (match_scratch:SI 4 "=l"))]
7957 op[0] = operands[4];
7958 op[1] = operands[1];
7959 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7961 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7962 switch (get_attr_length (insn))
7964 case 4: return \"b%d0\\t%l3\";
7965 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7966 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7969 [(set (attr "far_jump")
7971 (eq_attr "length" "8")
7972 (const_string "yes")
7973 (const_string "no")))
7974 (set (attr "length")
7976 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7977 (le (minus (match_dup 3) (pc)) (const_int 256)))
7980 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7981 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7986 (define_insn "*tstsi3_cbranch"
7989 (match_operator 3 "equality_operator"
7990 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7991 (match_operand:SI 1 "s_register_operand" "l"))
7993 (label_ref (match_operand 2 "" ""))
7998 output_asm_insn (\"tst\\t%0, %1\", operands);
7999 switch (get_attr_length (insn))
8001 case 4: return \"b%d3\\t%l2\";
8002 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
8003 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
8006 [(set (attr "far_jump")
8008 (eq_attr "length" "8")
8009 (const_string "yes")
8010 (const_string "no")))
8011 (set (attr "length")
8013 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
8014 (le (minus (match_dup 2) (pc)) (const_int 256)))
8017 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
8018 (le (minus (match_dup 2) (pc)) (const_int 2048)))
8023 (define_insn "*cbranchne_decr1"
8025 (if_then_else (match_operator 3 "equality_operator"
8026 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
8028 (label_ref (match_operand 4 "" ""))
8030 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
8031 (plus:SI (match_dup 2) (const_int -1)))
8032 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
8037 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
8039 VOIDmode, operands[2], const1_rtx);
8040 cond[1] = operands[4];
8042 if (which_alternative == 0)
8043 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
8044 else if (which_alternative == 1)
8046 /* We must provide an alternative for a hi reg because reload
8047 cannot handle output reloads on a jump instruction, but we
8048 can't subtract into that. Fortunately a mov from lo to hi
8049 does not clobber the condition codes. */
8050 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8051 output_asm_insn (\"mov\\t%0, %1\", operands);
8055 /* Similarly, but the target is memory. */
8056 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8057 output_asm_insn (\"str\\t%1, %0\", operands);
8060 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
8063 output_asm_insn (\"b%d0\\t%l1\", cond);
8066 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8067 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
8069 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8070 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8074 [(set (attr "far_jump")
8076 (ior (and (eq (symbol_ref ("which_alternative"))
8078 (eq_attr "length" "8"))
8079 (eq_attr "length" "10"))
8080 (const_string "yes")
8081 (const_string "no")))
8082 (set_attr_alternative "length"
8086 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8087 (le (minus (match_dup 4) (pc)) (const_int 256)))
8090 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8091 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8096 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8097 (le (minus (match_dup 4) (pc)) (const_int 256)))
8100 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8101 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8106 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8107 (le (minus (match_dup 4) (pc)) (const_int 256)))
8110 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8111 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8116 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8117 (le (minus (match_dup 4) (pc)) (const_int 256)))
8120 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8121 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8126 (define_insn "*addsi3_cbranch"
8129 (match_operator 4 "arm_comparison_operator"
8131 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
8132 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
8134 (label_ref (match_operand 5 "" ""))
8137 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
8138 (plus:SI (match_dup 2) (match_dup 3)))
8139 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
8141 && (GET_CODE (operands[4]) == EQ
8142 || GET_CODE (operands[4]) == NE
8143 || GET_CODE (operands[4]) == GE
8144 || GET_CODE (operands[4]) == LT)"
8149 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
8150 cond[1] = operands[2];
8151 cond[2] = operands[3];
8153 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
8154 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
8156 output_asm_insn (\"add\\t%0, %1, %2\", cond);
8158 if (which_alternative >= 2
8159 && which_alternative < 4)
8160 output_asm_insn (\"mov\\t%0, %1\", operands);
8161 else if (which_alternative >= 4)
8162 output_asm_insn (\"str\\t%1, %0\", operands);
8164 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
8167 return \"b%d4\\t%l5\";
8169 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
8171 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
8175 [(set (attr "far_jump")
8177 (ior (and (lt (symbol_ref ("which_alternative"))
8179 (eq_attr "length" "8"))
8180 (eq_attr "length" "10"))
8181 (const_string "yes")
8182 (const_string "no")))
8183 (set (attr "length")
8185 (lt (symbol_ref ("which_alternative"))
8188 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
8189 (le (minus (match_dup 5) (pc)) (const_int 256)))
8192 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
8193 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8197 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
8198 (le (minus (match_dup 5) (pc)) (const_int 256)))
8201 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
8202 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8207 (define_insn "*addsi3_cbranch_scratch"
8210 (match_operator 3 "arm_comparison_operator"
8212 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
8213 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
8215 (label_ref (match_operand 4 "" ""))
8217 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
8219 && (GET_CODE (operands[3]) == EQ
8220 || GET_CODE (operands[3]) == NE
8221 || GET_CODE (operands[3]) == GE
8222 || GET_CODE (operands[3]) == LT)"
8225 switch (which_alternative)
8228 output_asm_insn (\"cmp\t%1, #%n2\", operands);
8231 output_asm_insn (\"cmn\t%1, %2\", operands);
8234 if (INTVAL (operands[2]) < 0)
8235 output_asm_insn (\"sub\t%0, %1, %2\", operands);
8237 output_asm_insn (\"add\t%0, %1, %2\", operands);
8240 if (INTVAL (operands[2]) < 0)
8241 output_asm_insn (\"sub\t%0, %0, %2\", operands);
8243 output_asm_insn (\"add\t%0, %0, %2\", operands);
8247 switch (get_attr_length (insn))
8250 return \"b%d3\\t%l4\";
8252 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
8254 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8258 [(set (attr "far_jump")
8260 (eq_attr "length" "8")
8261 (const_string "yes")
8262 (const_string "no")))
8263 (set (attr "length")
8265 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8266 (le (minus (match_dup 4) (pc)) (const_int 256)))
8269 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8270 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8276 ;; Comparison and test insns
8278 (define_insn "*arm_cmpsi_insn"
8279 [(set (reg:CC CC_REGNUM)
8280 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
8281 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
8288 [(set_attr "conds" "set")
8289 (set_attr "arch" "t2,t2,any,any")
8290 (set_attr "length" "2,2,4,4")
8291 (set_attr "predicable" "yes")
8292 (set_attr "type" "*,*,*,simple_alu_imm")]
8295 (define_insn "*cmpsi_shiftsi"
8296 [(set (reg:CC CC_REGNUM)
8297 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
8298 (match_operator:SI 3 "shift_operator"
8299 [(match_operand:SI 1 "s_register_operand" "r,r")
8300 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
8303 [(set_attr "conds" "set")
8304 (set_attr "shift" "1")
8305 (set_attr "arch" "32,a")
8306 (set_attr "type" "alu_shift,alu_shift_reg")])
8308 (define_insn "*cmpsi_shiftsi_swp"
8309 [(set (reg:CC_SWP CC_REGNUM)
8310 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
8311 [(match_operand:SI 1 "s_register_operand" "r,r")
8312 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
8313 (match_operand:SI 0 "s_register_operand" "r,r")))]
8316 [(set_attr "conds" "set")
8317 (set_attr "shift" "1")
8318 (set_attr "arch" "32,a")
8319 (set_attr "type" "alu_shift,alu_shift_reg")])
8321 (define_insn "*arm_cmpsi_negshiftsi_si"
8322 [(set (reg:CC_Z CC_REGNUM)
8324 (neg:SI (match_operator:SI 1 "shift_operator"
8325 [(match_operand:SI 2 "s_register_operand" "r")
8326 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
8327 (match_operand:SI 0 "s_register_operand" "r")))]
8330 [(set_attr "conds" "set")
8331 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
8332 (const_string "alu_shift")
8333 (const_string "alu_shift_reg")))
8334 (set_attr "predicable" "yes")]
8337 ;; DImode comparisons. The generic code generates branches that
8338 ;; if-conversion cannot reduce to a conditional compare, so we do that here.
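;; Illustrative example (hypothetical, not from the original sources): a
;; comparison such as
;;
;;   int lt64 (long long a, long long b) { return a < b; }
;;
;; is typically expanded as a CMP on the low words followed by an SBCS on the
;; high words (see *arm_cmpdi_insn below), leaving flags that a single
;; conditional instruction can consume.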
8341 (define_insn_and_split "*arm_cmpdi_insn"
8342 [(set (reg:CC_NCV CC_REGNUM)
8343 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
8344 (match_operand:DI 1 "arm_di_operand" "rDi")))
8345 (clobber (match_scratch:SI 2 "=r"))]
8347 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
8348 "&& reload_completed"
8349 [(set (reg:CC CC_REGNUM)
8350 (compare:CC (match_dup 0) (match_dup 1)))
8351 (parallel [(set (reg:CC CC_REGNUM)
8352 (compare:CC (match_dup 3) (match_dup 4)))
8354 (minus:SI (match_dup 5)
8355 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
8357 operands[3] = gen_highpart (SImode, operands[0]);
8358 operands[0] = gen_lowpart (SImode, operands[0]);
8359 if (CONST_INT_P (operands[1]))
8361 operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
8364 operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
8368 operands[4] = gen_highpart (SImode, operands[1]);
8369 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
8371 operands[1] = gen_lowpart (SImode, operands[1]);
8372 operands[2] = gen_lowpart (SImode, operands[2]);
8374 [(set_attr "conds" "set")
8375 (set_attr "length" "8")]
8378 (define_insn_and_split "*arm_cmpdi_unsigned"
8379 [(set (reg:CC_CZ CC_REGNUM)
8380 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
8381 (match_operand:DI 1 "arm_di_operand" "rDi")))]
8383 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
8384 "&& reload_completed"
8385 [(set (reg:CC CC_REGNUM)
8386 (compare:CC (match_dup 2) (match_dup 3)))
8387 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
8388 (set (reg:CC CC_REGNUM)
8389 (compare:CC (match_dup 0) (match_dup 1))))]
8391 operands[2] = gen_highpart (SImode, operands[0]);
8392 operands[0] = gen_lowpart (SImode, operands[0]);
8393 if (CONST_INT_P (operands[1]))
8394 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
8396 operands[3] = gen_highpart (SImode, operands[1]);
8397 operands[1] = gen_lowpart (SImode, operands[1]);
8399 [(set_attr "conds" "set")
8400 (set_attr "length" "8")]
8403 (define_insn "*arm_cmpdi_zero"
8404 [(set (reg:CC_Z CC_REGNUM)
8405 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
8407 (clobber (match_scratch:SI 1 "=r"))]
8409 "orr%.\\t%1, %Q0, %R0"
8410 [(set_attr "conds" "set")]
8413 (define_insn "*thumb_cmpdi_zero"
8414 [(set (reg:CC_Z CC_REGNUM)
8415 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
8417 (clobber (match_scratch:SI 1 "=l"))]
8419 "orr\\t%1, %Q0, %R0"
8420 [(set_attr "conds" "set")
8421 (set_attr "length" "2")]
8424 ; This insn allows redundant compares to be removed by cse; nothing should
8425 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
8426 ; is deleted later on. The match_dup will match the mode here, so that
8427 ; mode changes of the condition codes aren't lost by this even though we don't
8428 ; specify what they are.
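; Illustrative example (hypothetical): in code such as
;
;   extern void f (void), g (void);
;   void h (int a, int b)
;   {
;     if (a == b) f ();
;     if (a == b) g ();
;   }
;
; cse may prove the second compare redundant; it then degenerates into the
; (set (reg) (reg)) form matched below and is eventually deleted.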
8430 (define_insn "*deleted_compare"
8431 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
8433 "\\t%@ deleted compare"
8434 [(set_attr "conds" "set")
8435 (set_attr "length" "0")]
8439 ;; Conditional branch insns
8441 (define_expand "cbranch_cc"
8443 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
8444 (match_operand 2 "" "")])
8445 (label_ref (match_operand 3 "" ""))
8448 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
8449 operands[1], operands[2], NULL_RTX);
8450 operands[2] = const0_rtx;"
8454 ;; Patterns to match conditional branch insns.
8457 (define_insn "arm_cond_branch"
8459 (if_then_else (match_operator 1 "arm_comparison_operator"
8460 [(match_operand 2 "cc_register" "") (const_int 0)])
8461 (label_ref (match_operand 0 "" ""))
8465 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8467 arm_ccfsm_state += 2;
8470 return \"b%d1\\t%l0\";
8472 [(set_attr "conds" "use")
8473 (set_attr "type" "branch")
8474 (set (attr "length")
8476 (and (match_test "TARGET_THUMB2")
8477 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8478 (le (minus (match_dup 0) (pc)) (const_int 256))))
8483 (define_insn "*arm_cond_branch_reversed"
8485 (if_then_else (match_operator 1 "arm_comparison_operator"
8486 [(match_operand 2 "cc_register" "") (const_int 0)])
8488 (label_ref (match_operand 0 "" ""))))]
8491 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8493 arm_ccfsm_state += 2;
8496 return \"b%D1\\t%l0\";
8498 [(set_attr "conds" "use")
8499 (set_attr "type" "branch")
8500 (set (attr "length")
8502 (and (match_test "TARGET_THUMB2")
8503 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8504 (le (minus (match_dup 0) (pc)) (const_int 256))))
8513 (define_expand "cstore_cc"
8514 [(set (match_operand:SI 0 "s_register_operand" "")
8515 (match_operator:SI 1 "" [(match_operand 2 "" "")
8516 (match_operand 3 "" "")]))]
8518 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8519 operands[2], operands[3], NULL_RTX);
8520 operands[3] = const0_rtx;"
8523 (define_insn_and_split "*mov_scc"
8524 [(set (match_operand:SI 0 "s_register_operand" "=r")
8525 (match_operator:SI 1 "arm_comparison_operator"
8526 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8528 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8531 (if_then_else:SI (match_dup 1)
8535 [(set_attr "conds" "use")
8536 (set_attr "length" "8")]
8539 (define_insn_and_split "*mov_negscc"
8540 [(set (match_operand:SI 0 "s_register_operand" "=r")
8541 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8542 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8544 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8547 (if_then_else:SI (match_dup 1)
8551 operands[3] = GEN_INT (~0);
8553 [(set_attr "conds" "use")
8554 (set_attr "length" "8")]
8557 (define_insn_and_split "*mov_notscc"
8558 [(set (match_operand:SI 0 "s_register_operand" "=r")
8559 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8560 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8562 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8565 (if_then_else:SI (match_dup 1)
8569 operands[3] = GEN_INT (~1);
8570 operands[4] = GEN_INT (~0);
8572 [(set_attr "conds" "use")
8573 (set_attr "length" "8")]
8576 (define_expand "cstoresi4"
8577 [(set (match_operand:SI 0 "s_register_operand" "")
8578 (match_operator:SI 1 "expandable_comparison_operator"
8579 [(match_operand:SI 2 "s_register_operand" "")
8580 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8581 "TARGET_32BIT || TARGET_THUMB1"
8583 rtx op3, scratch, scratch2;
8587 if (!arm_add_operand (operands[3], SImode))
8588 operands[3] = force_reg (SImode, operands[3]);
8589 emit_insn (gen_cstore_cc (operands[0], operands[1],
8590 operands[2], operands[3]));
8594 if (operands[3] == const0_rtx)
8596 switch (GET_CODE (operands[1]))
8599 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8603 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8607 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8608 NULL_RTX, 0, OPTAB_WIDEN);
8609 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8610 NULL_RTX, 0, OPTAB_WIDEN);
8611 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8612 operands[0], 1, OPTAB_WIDEN);
8616 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8618 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8619 NULL_RTX, 1, OPTAB_WIDEN);
8623 scratch = expand_binop (SImode, ashr_optab, operands[2],
8624 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8625 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8626 NULL_RTX, 0, OPTAB_WIDEN);
8627 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8631 /* LT is handled by generic code. No need for unsigned with 0. */
8638 switch (GET_CODE (operands[1]))
8641 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8642 NULL_RTX, 0, OPTAB_WIDEN);
8643 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8647 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8648 NULL_RTX, 0, OPTAB_WIDEN);
8649 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8653 op3 = force_reg (SImode, operands[3]);
8655 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8656 NULL_RTX, 1, OPTAB_WIDEN);
8657 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8658 NULL_RTX, 0, OPTAB_WIDEN);
8659 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8665 if (!thumb1_cmp_operand (op3, SImode))
8666 op3 = force_reg (SImode, op3);
8667 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8668 NULL_RTX, 0, OPTAB_WIDEN);
8669 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8670 NULL_RTX, 1, OPTAB_WIDEN);
8671 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8676 op3 = force_reg (SImode, operands[3]);
8677 scratch = force_reg (SImode, const0_rtx);
8678 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8684 if (!thumb1_cmp_operand (op3, SImode))
8685 op3 = force_reg (SImode, op3);
8686 scratch = force_reg (SImode, const0_rtx);
8687 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8693 if (!thumb1_cmp_operand (op3, SImode))
8694 op3 = force_reg (SImode, op3);
8695 scratch = gen_reg_rtx (SImode);
8696 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8700 op3 = force_reg (SImode, operands[3]);
8701 scratch = gen_reg_rtx (SImode);
8702 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8705 /* No good sequences for GT, LT. */
8712 (define_expand "cstoresf4"
8713 [(set (match_operand:SI 0 "s_register_operand" "")
8714 (match_operator:SI 1 "expandable_comparison_operator"
8715 [(match_operand:SF 2 "s_register_operand" "")
8716 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8717 "TARGET_32BIT && TARGET_HARD_FLOAT"
8718 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8719 operands[2], operands[3])); DONE;"
8722 (define_expand "cstoredf4"
8723 [(set (match_operand:SI 0 "s_register_operand" "")
8724 (match_operator:SI 1 "expandable_comparison_operator"
8725 [(match_operand:DF 2 "s_register_operand" "")
8726 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8727 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8728 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8729 operands[2], operands[3])); DONE;"
8732 (define_expand "cstoredi4"
8733 [(set (match_operand:SI 0 "s_register_operand" "")
8734 (match_operator:SI 1 "expandable_comparison_operator"
8735 [(match_operand:DI 2 "s_register_operand" "")
8736 (match_operand:DI 3 "cmpdi_operand" "")]))]
8739 if (!arm_validize_comparison (&operands[1],
8743 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8749 (define_expand "cstoresi_eq0_thumb1"
8751 [(set (match_operand:SI 0 "s_register_operand" "")
8752 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8754 (clobber (match_dup:SI 2))])]
8756 "operands[2] = gen_reg_rtx (SImode);"
8759 (define_expand "cstoresi_ne0_thumb1"
8761 [(set (match_operand:SI 0 "s_register_operand" "")
8762 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8764 (clobber (match_dup:SI 2))])]
8766 "operands[2] = gen_reg_rtx (SImode);"
8769 (define_insn "*cstoresi_eq0_thumb1_insn"
8770 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8771 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8773 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8776 neg\\t%0, %1\;adc\\t%0, %0, %1
8777 neg\\t%2, %1\;adc\\t%0, %1, %2"
8778 [(set_attr "length" "4")]
8781 (define_insn "*cstoresi_ne0_thumb1_insn"
8782 [(set (match_operand:SI 0 "s_register_operand" "=l")
8783 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8785 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8787 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8788 [(set_attr "length" "4")]
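;; Illustrative note (not from the original sources): in the EQ pattern above,
;; negating a register sets the carry flag only when that register is zero,
;; and the following ADC folds the carry into a 0/1 result; the NE pattern
;; relies on SUB #1 / SBC in the same way.  A hypothetical test case:
;;
;;   int is_zero (int x) { return x == 0; }   /* e.g. neg r3, r0 ; adc r0, r0, r3 */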
8791 ;; Used as part of the expansion of thumb ltu and gtu sequences
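;; Illustrative note (not from the original sources): after "cmp %1, %2" the
;; carry flag is clear exactly when the unsigned comparison borrowed
;; (%1 < %2), so the "sbc %0, %0, %0" below yields -1 in that case and 0
;; otherwise; cstoresi_ltu_thumb1 then negates that value to obtain 0/1.
;; A hypothetical test case:
;;
;;   unsigned int ltu (unsigned int a, unsigned int b) { return a < b; }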
8792 (define_insn "cstoresi_nltu_thumb1"
8793 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8794 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8795 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8797 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8798 [(set_attr "length" "4")]
8801 (define_insn_and_split "cstoresi_ltu_thumb1"
8802 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8803 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8804 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8809 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8810 (set (match_dup 0) (neg:SI (match_dup 3)))]
8811 "operands[3] = gen_reg_rtx (SImode);"
8812 [(set_attr "length" "4")]
8815 ;; Used as part of the expansion of thumb les sequence.
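;; Illustrative note (not from the original sources): "cmp %3, %4" sets the
;; carry flag exactly when %3 >= %4 unsigned, so the "adc %0, %1, %2" below
;; adds an extra one in that case.  With both addends forced to zero, as in
;; some of the cstoresi4 cases above, the result is the 0/1 value of the
;; comparison itself, e.g. for the hypothetical test case:
;;
;;   unsigned int geu (unsigned int a, unsigned int b) { return a >= b; }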
8816 (define_insn "thumb1_addsi3_addgeu"
8817 [(set (match_operand:SI 0 "s_register_operand" "=l")
8818 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8819 (match_operand:SI 2 "s_register_operand" "l"))
8820 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8821 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8823 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8824 [(set_attr "length" "4")]
8828 ;; Conditional move insns
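;; Illustrative example (hypothetical): with the patterns below, a conditional
;; expression such as
;;
;;   int sel (int c, int a, int b) { return c ? a : b; }
;;
;; can be compiled to a compare followed by a pair of conditionally executed
;; moves, e.g. "cmp r0, #0 ; movne r0, r1 ; moveq r0, r2" on ARM, instead of
;; a branch.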
8830 (define_expand "movsicc"
8831 [(set (match_operand:SI 0 "s_register_operand" "")
8832 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8833 (match_operand:SI 2 "arm_not_operand" "")
8834 (match_operand:SI 3 "arm_not_operand" "")))]
8841 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8842 &XEXP (operands[1], 1)))
8845 code = GET_CODE (operands[1]);
8846 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8847 XEXP (operands[1], 1), NULL_RTX);
8848 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8852 (define_expand "movsfcc"
8853 [(set (match_operand:SF 0 "s_register_operand" "")
8854 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8855 (match_operand:SF 2 "s_register_operand" "")
8856 (match_operand:SF 3 "s_register_operand" "")))]
8857 "TARGET_32BIT && TARGET_HARD_FLOAT"
8860 enum rtx_code code = GET_CODE (operands[1]);
8863 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8864 &XEXP (operands[1], 1)))
8867 code = GET_CODE (operands[1]);
8868 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8869 XEXP (operands[1], 1), NULL_RTX);
8870 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8874 (define_expand "movdfcc"
8875 [(set (match_operand:DF 0 "s_register_operand" "")
8876 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8877 (match_operand:DF 2 "s_register_operand" "")
8878 (match_operand:DF 3 "s_register_operand" "")))]
8879 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8882 enum rtx_code code = GET_CODE (operands[1]);
8885 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8886 &XEXP (operands[1], 1)))
8888 code = GET_CODE (operands[1]);
8889 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8890 XEXP (operands[1], 1), NULL_RTX);
8891 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8895 (define_insn "*cmov<mode>"
8896 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8897 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8898 [(match_operand 2 "cc_register" "") (const_int 0)])
8899 (match_operand:SDF 3 "s_register_operand"
8901 (match_operand:SDF 4 "s_register_operand"
8902 "<F_constraint>")))]
8903 "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
8906 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8913 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8918 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8924 [(set_attr "conds" "use")
8925 (set_attr "type" "f_sel<vfp_type>")]
8928 (define_insn_and_split "*movsicc_insn"
8929 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8931 (match_operator 3 "arm_comparison_operator"
8932 [(match_operand 4 "cc_register" "") (const_int 0)])
8933 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8934 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8945 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8946 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8947 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8948 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8949 "&& reload_completed"
8952 enum rtx_code rev_code;
8953 enum machine_mode mode;
8956 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8958 gen_rtx_SET (VOIDmode,
8962 rev_code = GET_CODE (operands[3]);
8963 mode = GET_MODE (operands[4]);
8964 if (mode == CCFPmode || mode == CCFPEmode)
8965 rev_code = reverse_condition_maybe_unordered (rev_code);
8967 rev_code = reverse_condition (rev_code);
8969 rev_cond = gen_rtx_fmt_ee (rev_code,
8973 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8975 gen_rtx_SET (VOIDmode,
8980 [(set_attr "length" "4,4,4,4,8,8,8,8")
8981 (set_attr "conds" "use")
8982 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")
8983 (set_attr_alternative "type"
8984 [(if_then_else (match_operand 2 "const_int_operand" "")
8985 (const_string "simple_alu_imm")
8987 (const_string "simple_alu_imm")
8988 (if_then_else (match_operand 1 "const_int_operand" "")
8989 (const_string "simple_alu_imm")
8991 (const_string "simple_alu_imm")
8995 (const_string "*")])]
8998 (define_insn "*movsfcc_soft_insn"
8999 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
9000 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
9001 [(match_operand 4 "cc_register" "") (const_int 0)])
9002 (match_operand:SF 1 "s_register_operand" "0,r")
9003 (match_operand:SF 2 "s_register_operand" "r,0")))]
9004 "TARGET_ARM && TARGET_SOFT_FLOAT"
9008 [(set_attr "conds" "use")
9009 (set_attr "insn" "mov")]
9013 ;; Jump and linkage insns
9015 (define_expand "jump"
9017 (label_ref (match_operand 0 "" "")))]
9022 (define_insn "*arm_jump"
9024 (label_ref (match_operand 0 "" "")))]
9028 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
9030 arm_ccfsm_state += 2;
9033 return \"b%?\\t%l0\";
9036 [(set_attr "predicable" "yes")
9037 (set (attr "length")
9039 (and (match_test "TARGET_THUMB2")
9040 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9041 (le (minus (match_dup 0) (pc)) (const_int 2048))))
9046 (define_insn "*thumb_jump"
9048 (label_ref (match_operand 0 "" "")))]
9051 if (get_attr_length (insn) == 2)
9053 return \"bl\\t%l0\\t%@ far jump\";
9055 [(set (attr "far_jump")
9057 (eq_attr "length" "4")
9058 (const_string "yes")
9059 (const_string "no")))
9060 (set (attr "length")
9062 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9063 (le (minus (match_dup 0) (pc)) (const_int 2048)))
9068 (define_expand "call"
9069 [(parallel [(call (match_operand 0 "memory_operand" "")
9070 (match_operand 1 "general_operand" ""))
9071 (use (match_operand 2 "" ""))
9072 (clobber (reg:SI LR_REGNUM))])]
9078 /* In an untyped call, we can get NULL for operand 2. */
9079 if (operands[2] == NULL_RTX)
9080 operands[2] = const0_rtx;
9082 /* Decide if we should generate indirect calls by loading the
9083 32-bit address of the callee into a register before performing the
9085 callee = XEXP (operands[0], 0);
9086 if (GET_CODE (callee) == SYMBOL_REF
9087 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9089 XEXP (operands[0], 0) = force_reg (Pmode, callee);
9091 pat = gen_call_internal (operands[0], operands[1], operands[2]);
9092 arm_emit_call_insn (pat, XEXP (operands[0], 0));
9097 (define_expand "call_internal"
9098 [(parallel [(call (match_operand 0 "memory_operand" "")
9099 (match_operand 1 "general_operand" ""))
9100 (use (match_operand 2 "" ""))
9101 (clobber (reg:SI LR_REGNUM))])])
9103 (define_insn "*call_reg_armv5"
9104 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9105 (match_operand 1 "" ""))
9106 (use (match_operand 2 "" ""))
9107 (clobber (reg:SI LR_REGNUM))]
9108 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9110 [(set_attr "type" "call")]
9113 (define_insn "*call_reg_arm"
9114 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9115 (match_operand 1 "" ""))
9116 (use (match_operand 2 "" ""))
9117 (clobber (reg:SI LR_REGNUM))]
9118 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9120 return output_call (operands);
9122 ;; The length is the worst case; normally only two instructions are emitted.
9123 [(set_attr "length" "12")
9124 (set_attr "type" "call")]
9128 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
9129 ;; considered a function call by the branch predictor of some cores (PR40887).
9130 ;; Falls back to blx rN (*call_reg_armv5).
9132 (define_insn "*call_mem"
9133 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
9134 (match_operand 1 "" ""))
9135 (use (match_operand 2 "" ""))
9136 (clobber (reg:SI LR_REGNUM))]
9137 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9139 return output_call_mem (operands);
9141 [(set_attr "length" "12")
9142 (set_attr "type" "call")]
9145 (define_insn "*call_reg_thumb1_v5"
9146 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9147 (match_operand 1 "" ""))
9148 (use (match_operand 2 "" ""))
9149 (clobber (reg:SI LR_REGNUM))]
9150 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
9152 [(set_attr "length" "2")
9153 (set_attr "type" "call")]
9156 (define_insn "*call_reg_thumb1"
9157 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9158 (match_operand 1 "" ""))
9159 (use (match_operand 2 "" ""))
9160 (clobber (reg:SI LR_REGNUM))]
9161 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
9164 if (!TARGET_CALLER_INTERWORKING)
9165 return thumb_call_via_reg (operands[0]);
9166 else if (operands[1] == const0_rtx)
9167 return \"bl\\t%__interwork_call_via_%0\";
9168 else if (frame_pointer_needed)
9169 return \"bl\\t%__interwork_r7_call_via_%0\";
9171 return \"bl\\t%__interwork_r11_call_via_%0\";
9173 [(set_attr "type" "call")]
9176 (define_expand "call_value"
9177 [(parallel [(set (match_operand 0 "" "")
9178 (call (match_operand 1 "memory_operand" "")
9179 (match_operand 2 "general_operand" "")))
9180 (use (match_operand 3 "" ""))
9181 (clobber (reg:SI LR_REGNUM))])]
9187 /* In an untyped call, we can get NULL for operand 3. */
9188 if (operands[3] == 0)
9189 operands[3] = const0_rtx;
9191 /* Decide if we should generate indirect calls by loading the
9192 32-bit address of the callee into a register before performing the
9194 callee = XEXP (operands[1], 0);
9195 if (GET_CODE (callee) == SYMBOL_REF
9196 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9198 XEXP (operands[1], 0) = force_reg (Pmode, callee);
9200 pat = gen_call_value_internal (operands[0], operands[1],
9201 operands[2], operands[3]);
9202 arm_emit_call_insn (pat, XEXP (operands[1], 0));
9207 (define_expand "call_value_internal"
9208 [(parallel [(set (match_operand 0 "" "")
9209 (call (match_operand 1 "memory_operand" "")
9210 (match_operand 2 "general_operand" "")))
9211 (use (match_operand 3 "" ""))
9212 (clobber (reg:SI LR_REGNUM))])])
9214 (define_insn "*call_value_reg_armv5"
9215 [(set (match_operand 0 "" "")
9216 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9217 (match_operand 2 "" "")))
9218 (use (match_operand 3 "" ""))
9219 (clobber (reg:SI LR_REGNUM))]
9220 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9222 [(set_attr "type" "call")]
9225 (define_insn "*call_value_reg_arm"
9226 [(set (match_operand 0 "" "")
9227 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9228 (match_operand 2 "" "")))
9229 (use (match_operand 3 "" ""))
9230 (clobber (reg:SI LR_REGNUM))]
9231 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9233 return output_call (&operands[1]);
9235 [(set_attr "length" "12")
9236 (set_attr "type" "call")]
9239 ;; Note: see *call_mem
9241 (define_insn "*call_value_mem"
9242 [(set (match_operand 0 "" "")
9243 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
9244 (match_operand 2 "" "")))
9245 (use (match_operand 3 "" ""))
9246 (clobber (reg:SI LR_REGNUM))]
9247 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))
9248 && !SIBLING_CALL_P (insn)"
9250 return output_call_mem (&operands[1]);
9252 [(set_attr "length" "12")
9253 (set_attr "type" "call")]
9256 (define_insn "*call_value_reg_thumb1_v5"
9257 [(set (match_operand 0 "" "")
9258 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9259 (match_operand 2 "" "")))
9260 (use (match_operand 3 "" ""))
9261 (clobber (reg:SI LR_REGNUM))]
9262 "TARGET_THUMB1 && arm_arch5"
9264 [(set_attr "length" "2")
9265 (set_attr "type" "call")]
9268 (define_insn "*call_value_reg_thumb1"
9269 [(set (match_operand 0 "" "")
9270 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9271 (match_operand 2 "" "")))
9272 (use (match_operand 3 "" ""))
9273 (clobber (reg:SI LR_REGNUM))]
9274 "TARGET_THUMB1 && !arm_arch5"
9277 if (!TARGET_CALLER_INTERWORKING)
9278 return thumb_call_via_reg (operands[1]);
9279 else if (operands[2] == const0_rtx)
9280 return \"bl\\t%__interwork_call_via_%1\";
9281 else if (frame_pointer_needed)
9282 return \"bl\\t%__interwork_r7_call_via_%1\";
9284 return \"bl\\t%__interwork_r11_call_via_%1\";
9286 [(set_attr "type" "call")]
9289 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
9290 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
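;; Illustratively, a direct call to a function "foo" therefore prints as
;; "bl foo" (or "bl foo(PLT)" when a PLT relocation is needed), not as an
;; immediate operand with a leading '#'.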
9292 (define_insn "*call_symbol"
9293 [(call (mem:SI (match_operand:SI 0 "" ""))
9294 (match_operand 1 "" ""))
9295 (use (match_operand 2 "" ""))
9296 (clobber (reg:SI LR_REGNUM))]
9298 && !SIBLING_CALL_P (insn)
9299 && (GET_CODE (operands[0]) == SYMBOL_REF)
9300 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9303 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
9305 [(set_attr "type" "call")]
9308 (define_insn "*call_value_symbol"
9309 [(set (match_operand 0 "" "")
9310 (call (mem:SI (match_operand:SI 1 "" ""))
9311 (match_operand:SI 2 "" "")))
9312 (use (match_operand 3 "" ""))
9313 (clobber (reg:SI LR_REGNUM))]
9315 && !SIBLING_CALL_P (insn)
9316 && (GET_CODE (operands[1]) == SYMBOL_REF)
9317 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9320 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
9322 [(set_attr "type" "call")]
9325 (define_insn "*call_insn"
9326 [(call (mem:SI (match_operand:SI 0 "" ""))
9327 (match_operand:SI 1 "" ""))
9328 (use (match_operand 2 "" ""))
9329 (clobber (reg:SI LR_REGNUM))]
9331 && GET_CODE (operands[0]) == SYMBOL_REF
9332 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9334 [(set_attr "length" "4")
9335 (set_attr "type" "call")]
9338 (define_insn "*call_value_insn"
9339 [(set (match_operand 0 "" "")
9340 (call (mem:SI (match_operand 1 "" ""))
9341 (match_operand 2 "" "")))
9342 (use (match_operand 3 "" ""))
9343 (clobber (reg:SI LR_REGNUM))]
9345 && GET_CODE (operands[1]) == SYMBOL_REF
9346 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9348 [(set_attr "length" "4")
9349 (set_attr "type" "call")]
9352 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
9353 (define_expand "sibcall"
9354 [(parallel [(call (match_operand 0 "memory_operand" "")
9355 (match_operand 1 "general_operand" ""))
9357 (use (match_operand 2 "" ""))])]
9361 if (!REG_P (XEXP (operands[0], 0))
9362 && (GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF))
9363 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
9365 if (operands[2] == NULL_RTX)
9366 operands[2] = const0_rtx;
9370 (define_expand "sibcall_value"
9371 [(parallel [(set (match_operand 0 "" "")
9372 (call (match_operand 1 "memory_operand" "")
9373 (match_operand 2 "general_operand" "")))
9375 (use (match_operand 3 "" ""))])]
9379 if (!REG_P (XEXP (operands[1], 0)) &&
9380 (GET_CODE (XEXP (operands[1],0)) != SYMBOL_REF))
9381 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
9383 if (operands[3] == NULL_RTX)
9384 operands[3] = const0_rtx;
9388 (define_insn "*sibcall_insn"
9389 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs,Ss"))
9390 (match_operand 1 "" ""))
9392 (use (match_operand 2 "" ""))]
9393 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9395 if (which_alternative == 1)
9396 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
9399 if (arm_arch5 || arm_arch4t)
9400 return \" bx\\t%0\\t%@ indirect register sibling call\";
9402 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
9405 [(set_attr "type" "call")]
9408 (define_insn "*sibcall_value_insn"
9409 [(set (match_operand 0 "s_register_operand" "")
9410 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,Ss"))
9411 (match_operand 2 "" "")))
9413 (use (match_operand 3 "" ""))]
9414 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9416 if (which_alternative == 1)
9417 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
9420 if (arm_arch5 || arm_arch4t)
9423 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
9426 [(set_attr "type" "call")]
9429 (define_expand "<return_str>return"
9431 "(TARGET_ARM || (TARGET_THUMB2
9432 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9433 && !IS_STACKALIGN (arm_current_func_type ())))
9434 <return_cond_false>"
9439 thumb2_expand_return (<return_simple_p>);
9446 ;; Often the return insn will be the same as loading from memory, so set the "type" attribute accordingly.
9447 (define_insn "*arm_return"
9449 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9452 if (arm_ccfsm_state == 2)
9454 arm_ccfsm_state += 2;
9457 return output_return_instruction (const_true_rtx, true, false, false);
9459 [(set_attr "type" "load1")
9460 (set_attr "length" "12")
9461 (set_attr "predicable" "yes")]
9464 (define_insn "*cond_<return_str>return"
9466 (if_then_else (match_operator 0 "arm_comparison_operator"
9467 [(match_operand 1 "cc_register" "") (const_int 0)])
9470 "TARGET_ARM <return_cond_true>"
9473 if (arm_ccfsm_state == 2)
9475 arm_ccfsm_state += 2;
9478 return output_return_instruction (operands[0], true, false,
9481 [(set_attr "conds" "use")
9482 (set_attr "length" "12")
9483 (set_attr "type" "load1")]
9486 (define_insn "*cond_<return_str>return_inverted"
9488 (if_then_else (match_operator 0 "arm_comparison_operator"
9489 [(match_operand 1 "cc_register" "") (const_int 0)])
9492 "TARGET_ARM <return_cond_true>"
9495 if (arm_ccfsm_state == 2)
9497 arm_ccfsm_state += 2;
9500 return output_return_instruction (operands[0], true, true,
9503 [(set_attr "conds" "use")
9504 (set_attr "length" "12")
9505 (set_attr "type" "load1")]
9508 (define_insn "*arm_simple_return"
9513 if (arm_ccfsm_state == 2)
9515 arm_ccfsm_state += 2;
9518 return output_return_instruction (const_true_rtx, true, false, true);
9520 [(set_attr "type" "branch")
9521 (set_attr "length" "4")
9522 (set_attr "predicable" "yes")]
9525 ;; Generate a sequence of instructions to determine if the processor is
9526 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9529 (define_expand "return_addr_mask"
9531 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9533 (set (match_operand:SI 0 "s_register_operand" "")
9534 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9536 (const_int 67108860)))] ; 0x03fffffc
9539 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
9542 (define_insn "*check_arch2"
9543 [(set (match_operand:CC_NOOV 0 "cc_register" "")
9544 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9547 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9548 [(set_attr "length" "8")
9549 (set_attr "conds" "set")]
9552 ;; Call subroutine returning any type.
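;; Illustrative (hypothetical) use of the builtins that normally expand to
;; untyped_call/untyped_return:
;;
;;   extern double target (int, double);
;;   double forward (int i, double d)
;;   {
;;     void *args = __builtin_apply_args ();
;;     void *ret  = __builtin_apply ((void (*) ()) target, args, 64);
;;     __builtin_return (ret);
;;   }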
9554 (define_expand "untyped_call"
9555 [(parallel [(call (match_operand 0 "" "")
9557 (match_operand 1 "" "")
9558 (match_operand 2 "" "")])]
9563 rtx par = gen_rtx_PARALLEL (VOIDmode,
9564 rtvec_alloc (XVECLEN (operands[2], 0)));
9565 rtx addr = gen_reg_rtx (Pmode);
9569 emit_move_insn (addr, XEXP (operands[1], 0));
9570 mem = change_address (operands[1], BLKmode, addr);
9572 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9574 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9576 /* Default code only uses r0 as a return value, but we could
9577 be using anything up to 4 registers. */
9578 if (REGNO (src) == R0_REGNUM)
9579 src = gen_rtx_REG (TImode, R0_REGNUM);
9581 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9583 size += GET_MODE_SIZE (GET_MODE (src));
9586 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
9591 for (i = 0; i < XVECLEN (par, 0); i++)
9593 HOST_WIDE_INT offset = 0;
9594 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9597 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9599 mem = change_address (mem, GET_MODE (reg), NULL);
9600 if (REGNO (reg) == R0_REGNUM)
9602 /* On thumb we have to use a write-back instruction. */
9603 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9604 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9605 size = TARGET_ARM ? 16 : 0;
9609 emit_move_insn (mem, reg);
9610 size = GET_MODE_SIZE (GET_MODE (reg));
9614 /* The optimizer does not know that the call sets the function value
9615 registers we stored in the result block. We avoid problems by
9616 claiming that all hard registers are used and clobbered at this point.  */
9618 emit_insn (gen_blockage ());
9624 (define_expand "untyped_return"
9625 [(match_operand:BLK 0 "memory_operand" "")
9626 (match_operand 1 "" "")]
9631 rtx addr = gen_reg_rtx (Pmode);
9635 emit_move_insn (addr, XEXP (operands[0], 0));
9636 mem = change_address (operands[0], BLKmode, addr);
9638 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9640 HOST_WIDE_INT offset = 0;
9641 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9644 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9646 mem = change_address (mem, GET_MODE (reg), NULL);
9647 if (REGNO (reg) == R0_REGNUM)
9649 /* On thumb we have to use a write-back instruction. */
9650 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9651 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9652 size = TARGET_ARM ? 16 : 0;
9656 emit_move_insn (reg, mem);
9657 size = GET_MODE_SIZE (GET_MODE (reg));
9661 /* Emit USE insns before the return. */
9662 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9663 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9665 /* Construct the return. */
9666 expand_naked_return ();
9672 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9673 ;; all of memory. This blocks insns from being moved across this point.
9675 (define_insn "blockage"
9676 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9679 [(set_attr "length" "0")
9680 (set_attr "type" "block")]
9683 (define_expand "casesi"
9684 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
9685 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9686 (match_operand:SI 2 "const_int_operand" "") ; total range
9687 (match_operand:SI 3 "" "") ; table label
9688 (match_operand:SI 4 "" "")] ; Out of range label
9689 "TARGET_32BIT || optimize_size || flag_pic"
9692 enum insn_code code;
9693 if (operands[1] != const0_rtx)
9695 rtx reg = gen_reg_rtx (SImode);
9697 emit_insn (gen_addsi3 (reg, operands[0],
9698 gen_int_mode (-INTVAL (operands[1]),
9704 code = CODE_FOR_arm_casesi_internal;
9705 else if (TARGET_THUMB1)
9706 code = CODE_FOR_thumb1_casesi_internal_pic;
9708 code = CODE_FOR_thumb2_casesi_internal_pic;
9710 code = CODE_FOR_thumb2_casesi_internal;
9712 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9713 operands[2] = force_reg (SImode, operands[2]);
9715 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9716 operands[3], operands[4]));
9721 ;; The USE in this pattern is needed to tell flow analysis that this is
9722 ;; a CASESI insn. It has no other purpose.
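;; Illustrative example (hypothetical): a dense C switch such as
;;
;;   int dispatch (int i)
;;   {
;;     switch (i)
;;       {
;;       case 0: return 10;
;;       case 1: return 11;
;;       case 2: return 12;
;;       case 3: return 13;
;;       default: return -1;
;;       }
;;   }
;;
;; may be compiled through this pattern: a bounds check followed by a
;; PC-relative table dispatch, with the out-of-range label as the fallback,
;; as in the templates below.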
9723 (define_insn "arm_casesi_internal"
9724 [(parallel [(set (pc)
9726 (leu (match_operand:SI 0 "s_register_operand" "r")
9727 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9728 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9729 (label_ref (match_operand 2 "" ""))))
9730 (label_ref (match_operand 3 "" ""))))
9731 (clobber (reg:CC CC_REGNUM))
9732 (use (label_ref (match_dup 2)))])]
9736 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9737 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9739 [(set_attr "conds" "clob")
9740 (set_attr "length" "12")]
9743 (define_expand "thumb1_casesi_internal_pic"
9744 [(match_operand:SI 0 "s_register_operand" "")
9745 (match_operand:SI 1 "thumb1_cmp_operand" "")
9746 (match_operand 2 "" "")
9747 (match_operand 3 "" "")]
9751 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9752 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9754 reg0 = gen_rtx_REG (SImode, 0);
9755 emit_move_insn (reg0, operands[0]);
9756 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9761 (define_insn "thumb1_casesi_dispatch"
9762 [(parallel [(set (pc) (unspec [(reg:SI 0)
9763 (label_ref (match_operand 0 "" ""))
9764 ;; (label_ref (match_operand 1 "" ""))
9766 UNSPEC_THUMB1_CASESI))
9767 (clobber (reg:SI IP_REGNUM))
9768 (clobber (reg:SI LR_REGNUM))])]
9770 "* return thumb1_output_casesi(operands);"
9771 [(set_attr "length" "4")]
9774 (define_expand "indirect_jump"
9776 (match_operand:SI 0 "s_register_operand" ""))]
9779 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9780 address and use bx. */
9784 tmp = gen_reg_rtx (SImode);
9785 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
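    /* Bit 0 of a BX target selects Thumb state, so setting it here keeps
       execution in Thumb code after the indirect jump.  */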
9791 ;; NB Never uses BX.
9792 (define_insn "*arm_indirect_jump"
9794 (match_operand:SI 0 "s_register_operand" "r"))]
9796 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9797 [(set_attr "predicable" "yes")]
9800 (define_insn "*load_indirect_jump"
9802 (match_operand:SI 0 "memory_operand" "m"))]
9804 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9805 [(set_attr "type" "load1")
9806 (set_attr "pool_range" "4096")
9807 (set_attr "neg_pool_range" "4084")
9808 (set_attr "predicable" "yes")]
9811 ;; NB Never uses BX.
9812 (define_insn "*thumb1_indirect_jump"
9814 (match_operand:SI 0 "register_operand" "l*r"))]
9817 [(set_attr "conds" "clob")
9818 (set_attr "length" "2")]
9828 if (TARGET_UNIFIED_ASM)
9831 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9832 return \"mov\\tr8, r8\";
9834 [(set (attr "length")
9835 (if_then_else (eq_attr "is_thumb" "yes")
9841 ;; Patterns to allow combination of arithmetic, cond code and shifts
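;; Illustrative example (hypothetical): these patterns let an arithmetic
;; operation absorb a shifted operand, so
;;
;;   int f (int a, int b) { return a + (b << 2); }
;;
;; can be emitted as a single "add r0, r0, r1, lsl #2" rather than a separate
;; shift followed by an add.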
9843 (define_insn "*arith_shiftsi"
9844 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9845 (match_operator:SI 1 "shiftable_operator"
9846 [(match_operator:SI 3 "shift_operator"
9847 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
9848 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
9849 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
9851 "%i1%?\\t%0, %2, %4%S3"
9852 [(set_attr "predicable" "yes")
9853 (set_attr "shift" "4")
9854 (set_attr "arch" "a,t2,t2,a")
9855 ;; Thumb2 doesn't allow the stack pointer to be used for
9856 ;; operand1 for all operations other than add and sub. In this case
9857 ;; the minus operation is a candidate for an rsub and hence needs to be disabled.
9859 ;; We have to make sure to disable the fourth alternative if
9860 ;; the shift_operator is MULT, since otherwise the insn will
9861 ;; also match a multiply_accumulate pattern and validate_change
9862 ;; will allow a replacement of the constant with a register
9863 ;; despite the checks done in shift_operator.
9864 (set_attr_alternative "insn_enabled"
9865 [(const_string "yes")
9867 (match_operand:SI 1 "add_operator" "")
9868 (const_string "yes") (const_string "no"))
9869 (const_string "yes")
9871 (match_operand:SI 3 "mult_operator" "")
9872 (const_string "no") (const_string "yes"))])
9873 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
9876 [(set (match_operand:SI 0 "s_register_operand" "")
9877 (match_operator:SI 1 "shiftable_operator"
9878 [(match_operator:SI 2 "shiftable_operator"
9879 [(match_operator:SI 3 "shift_operator"
9880 [(match_operand:SI 4 "s_register_operand" "")
9881 (match_operand:SI 5 "reg_or_int_operand" "")])
9882 (match_operand:SI 6 "s_register_operand" "")])
9883 (match_operand:SI 7 "arm_rhs_operand" "")]))
9884 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9887 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9890 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9893 (define_insn "*arith_shiftsi_compare0"
9894 [(set (reg:CC_NOOV CC_REGNUM)
9896 (match_operator:SI 1 "shiftable_operator"
9897 [(match_operator:SI 3 "shift_operator"
9898 [(match_operand:SI 4 "s_register_operand" "r,r")
9899 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9900 (match_operand:SI 2 "s_register_operand" "r,r")])
9902 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9903 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9906 "%i1%.\\t%0, %2, %4%S3"
9907 [(set_attr "conds" "set")
9908 (set_attr "shift" "4")
9909 (set_attr "arch" "32,a")
9910 (set_attr "type" "alu_shift,alu_shift_reg")])
9912 (define_insn "*arith_shiftsi_compare0_scratch"
9913 [(set (reg:CC_NOOV CC_REGNUM)
9915 (match_operator:SI 1 "shiftable_operator"
9916 [(match_operator:SI 3 "shift_operator"
9917 [(match_operand:SI 4 "s_register_operand" "r,r")
9918 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9919 (match_operand:SI 2 "s_register_operand" "r,r")])
9921 (clobber (match_scratch:SI 0 "=r,r"))]
9923 "%i1%.\\t%0, %2, %4%S3"
9924 [(set_attr "conds" "set")
9925 (set_attr "shift" "4")
9926 (set_attr "arch" "32,a")
9927 (set_attr "type" "alu_shift,alu_shift_reg")])
9929 (define_insn "*sub_shiftsi"
9930 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9931 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9932 (match_operator:SI 2 "shift_operator"
9933 [(match_operand:SI 3 "s_register_operand" "r,r")
9934 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9936 "sub%?\\t%0, %1, %3%S2"
9937 [(set_attr "predicable" "yes")
9938 (set_attr "shift" "3")
9939 (set_attr "arch" "32,a")
9940 (set_attr "type" "alu_shift,alu_shift_reg")])
9942 (define_insn "*sub_shiftsi_compare0"
9943 [(set (reg:CC_NOOV CC_REGNUM)
9945 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9946 (match_operator:SI 2 "shift_operator"
9947 [(match_operand:SI 3 "s_register_operand" "r,r")
9948 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9950 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9951 (minus:SI (match_dup 1)
9952 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9954 "sub%.\\t%0, %1, %3%S2"
9955 [(set_attr "conds" "set")
9956 (set_attr "shift" "3")
9957 (set_attr "arch" "32,a")
9958 (set_attr "type" "alu_shift,alu_shift_reg")])
9960 (define_insn "*sub_shiftsi_compare0_scratch"
9961 [(set (reg:CC_NOOV CC_REGNUM)
9963 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9964 (match_operator:SI 2 "shift_operator"
9965 [(match_operand:SI 3 "s_register_operand" "r,r")
9966 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9968 (clobber (match_scratch:SI 0 "=r,r"))]
9970 "sub%.\\t%0, %1, %3%S2"
9971 [(set_attr "conds" "set")
9972 (set_attr "shift" "3")
9973 (set_attr "arch" "32,a")
9974 (set_attr "type" "alu_shift,alu_shift_reg")])
9977 (define_insn_and_split "*and_scc"
9978 [(set (match_operand:SI 0 "s_register_operand" "=r")
9979 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9980 [(match_operand 2 "cc_register" "") (const_int 0)])
9981 (match_operand:SI 3 "s_register_operand" "r")))]
9983 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9984 "&& reload_completed"
9985 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9986 (cond_exec (match_dup 4) (set (match_dup 0)
9987 (and:SI (match_dup 3) (const_int 1))))]
9989 enum machine_mode mode = GET_MODE (operands[2]);
9990 enum rtx_code rc = GET_CODE (operands[1]);
9992 /* Note that operands[4] is the same as operands[1],
9993 but with VOIDmode as the result. */
9994 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9995 if (mode == CCFPmode || mode == CCFPEmode)
9996 rc = reverse_condition_maybe_unordered (rc);
9998 rc = reverse_condition (rc);
9999 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10001 [(set_attr "conds" "use")
10002 (set_attr "insn" "mov")
10003 (set_attr "length" "8")]
10006 (define_insn_and_split "*ior_scc"
10007 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10008 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
10009 [(match_operand 2 "cc_register" "") (const_int 0)])
10010 (match_operand:SI 3 "s_register_operand" "0,?r")))]
10013 orr%d1\\t%0, %3, #1
10015 "&& reload_completed
10016 && REGNO (operands [0]) != REGNO (operands[3])"
10017 ;; && which_alternative == 1
10018 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
10019 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
10020 (cond_exec (match_dup 4) (set (match_dup 0)
10021 (ior:SI (match_dup 3) (const_int 1))))]
10023 enum machine_mode mode = GET_MODE (operands[2]);
10024 enum rtx_code rc = GET_CODE (operands[1]);
10026 /* Note that operands[4] is the same as operands[1],
10027 but with VOIDmode as the result. */
10028 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10029 if (mode == CCFPmode || mode == CCFPEmode)
10030 rc = reverse_condition_maybe_unordered (rc);
10032 rc = reverse_condition (rc);
10033 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10035 [(set_attr "conds" "use")
10036 (set_attr "length" "4,8")]
10039 ; A series of splitters for the compare_scc pattern below. Note that
10040 ; order is important.
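; For illustration only (Rd and Rn are placeholder register names), the
; first two splitters below turn
;   Rd = (Rn < 0)    into   lsr Rd, Rn, #31
;   Rd = (Rn >= 0)   into   mvn Rd, Rn ; lsr Rd, Rd, #31
; since the sign bit of Rn is exactly the result wanted.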
10042 [(set (match_operand:SI 0 "s_register_operand" "")
10043 (lt:SI (match_operand:SI 1 "s_register_operand" "")
10045 (clobber (reg:CC CC_REGNUM))]
10046 "TARGET_32BIT && reload_completed"
10047 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
10050 [(set (match_operand:SI 0 "s_register_operand" "")
10051 (ge:SI (match_operand:SI 1 "s_register_operand" "")
10053 (clobber (reg:CC CC_REGNUM))]
10054 "TARGET_32BIT && reload_completed"
10055 [(set (match_dup 0) (not:SI (match_dup 1)))
10056 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
10059 [(set (match_operand:SI 0 "s_register_operand" "")
10060 (eq:SI (match_operand:SI 1 "s_register_operand" "")
10062 (clobber (reg:CC CC_REGNUM))]
10063 "arm_arch5 && TARGET_32BIT"
10064 [(set (match_dup 0) (clz:SI (match_dup 1)))
10065 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10069 [(set (match_operand:SI 0 "s_register_operand" "")
10070 (eq:SI (match_operand:SI 1 "s_register_operand" "")
10072 (clobber (reg:CC CC_REGNUM))]
10073 "TARGET_32BIT && reload_completed"
10075 [(set (reg:CC CC_REGNUM)
10076 (compare:CC (const_int 1) (match_dup 1)))
10078 (minus:SI (const_int 1) (match_dup 1)))])
10079 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
10080 (set (match_dup 0) (const_int 0)))])
10083 [(set (match_operand:SI 0 "s_register_operand" "")
10084 (ne:SI (match_operand:SI 1 "s_register_operand" "")
10085 (match_operand:SI 2 "const_int_operand" "")))
10086 (clobber (reg:CC CC_REGNUM))]
10087 "TARGET_32BIT && reload_completed"
10089 [(set (reg:CC CC_REGNUM)
10090 (compare:CC (match_dup 1) (match_dup 2)))
10091 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
10092 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
10093 (set (match_dup 0) (const_int 1)))]
10095 operands[3] = GEN_INT (-INTVAL (operands[2]));
10099 [(set (match_operand:SI 0 "s_register_operand" "")
10100 (ne:SI (match_operand:SI 1 "s_register_operand" "")
10101 (match_operand:SI 2 "arm_add_operand" "")))
10102 (clobber (reg:CC CC_REGNUM))]
10103 "TARGET_32BIT && reload_completed"
10105 [(set (reg:CC_NOOV CC_REGNUM)
10106 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
10108 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
10109 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
10110 (set (match_dup 0) (const_int 1)))])
10112 (define_insn_and_split "*compare_scc"
10113 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10114 (match_operator:SI 1 "arm_comparison_operator"
10115 [(match_operand:SI 2 "s_register_operand" "r,r")
10116 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
10117 (clobber (reg:CC CC_REGNUM))]
10120 "&& reload_completed"
10121 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
10122 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
10123 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
10126 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10127 operands[2], operands[3]);
10128 enum rtx_code rc = GET_CODE (operands[1]);
10130 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
10132 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10133 if (mode == CCFPmode || mode == CCFPEmode)
10134 rc = reverse_condition_maybe_unordered (rc);
10136 rc = reverse_condition (rc);
10137 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; Attempt to improve the sequence generated by the compare_scc splitters
;; so that it does not use conditional execution.
;; Rd = (eq (reg1) (const_int 0))	// ARMv5
;;	clz Rd, reg1
;;	lsr Rd, Rd, #5
10147 [(set (reg:CC CC_REGNUM)
10148 (compare:CC (match_operand:SI 1 "register_operand" "")
10150 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10151 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10152 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10153 (set (match_dup 0) (const_int 1)))]
10154 "arm_arch5 && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10155 [(set (match_dup 0) (clz:SI (match_dup 1)))
10156 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (eq (reg1) (const_int 0))	// !ARMv5
;;	negs Rd, reg1
;;	adc  Rd, Rd, reg1
10163 [(set (reg:CC CC_REGNUM)
10164 (compare:CC (match_operand:SI 1 "register_operand" "")
10166 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10167 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10168 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10169 (set (match_dup 0) (const_int 1)))
10170 (match_scratch:SI 2 "r")]
10171 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10173 [(set (reg:CC CC_REGNUM)
10174 (compare:CC (const_int 0) (match_dup 1)))
10175 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
10177 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
10178 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
;; Rd = (eq (reg1) (reg2/imm))	// ARMv5
;;	sub  Rd, Reg1, reg2
;;	clz  Rd, Rd
;;	lsr  Rd, Rd, #5
10186 [(set (reg:CC CC_REGNUM)
10187 (compare:CC (match_operand:SI 1 "register_operand" "")
10188 (match_operand:SI 2 "arm_rhs_operand" "")))
10189 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10190 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10191 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10192 (set (match_dup 0) (const_int 1)))]
10193 "arm_arch5 && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10194 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
10195 (set (match_dup 0) (clz:SI (match_dup 0)))
10196 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (eq (reg1) (reg2/imm))	// !ARMv5
;;	sub  T1, Reg1, reg2
;;	negs Rd, T1
;;	adc  Rd, Rd, T1
10205 [(set (reg:CC CC_REGNUM)
10206 (compare:CC (match_operand:SI 1 "register_operand" "")
10207 (match_operand:SI 2 "arm_rhs_operand" "")))
10208 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10209 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10210 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10211 (set (match_dup 0) (const_int 1)))
10212 (match_scratch:SI 3 "r")]
10213 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10214 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
10216 [(set (reg:CC CC_REGNUM)
10217 (compare:CC (const_int 0) (match_dup 3)))
10218 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10220 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10221 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
10224 (define_insn "*cond_move"
10225 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10226 (if_then_else:SI (match_operator 3 "equality_operator"
10227 [(match_operator 4 "arm_comparison_operator"
10228 [(match_operand 5 "cc_register" "") (const_int 0)])
10230 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10231 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10234 if (GET_CODE (operands[3]) == NE)
10236 if (which_alternative != 1)
10237 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10238 if (which_alternative != 0)
10239 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10242 if (which_alternative != 0)
10243 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10244 if (which_alternative != 1)
10245 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10248 [(set_attr "conds" "use")
10249 (set_attr "insn" "mov")
10250 (set_attr "length" "4,4,8")]
10253 (define_insn "*cond_arith"
10254 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10255 (match_operator:SI 5 "shiftable_operator"
10256 [(match_operator:SI 4 "arm_comparison_operator"
10257 [(match_operand:SI 2 "s_register_operand" "r,r")
10258 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10259 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10260 (clobber (reg:CC CC_REGNUM))]
10263 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10264 return \"%i5\\t%0, %1, %2, lsr #31\";
10266 output_asm_insn (\"cmp\\t%2, %3\", operands);
10267 if (GET_CODE (operands[5]) == AND)
10268 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10269 else if (GET_CODE (operands[5]) == MINUS)
10270 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10271 else if (which_alternative != 0)
10272 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10273 return \"%i5%d4\\t%0, %1, #1\";
10275 [(set_attr "conds" "clob")
10276 (set_attr "length" "12")]
10279 (define_insn "*cond_sub"
10280 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10281 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10282 (match_operator:SI 4 "arm_comparison_operator"
10283 [(match_operand:SI 2 "s_register_operand" "r,r")
10284 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10285 (clobber (reg:CC CC_REGNUM))]
10288 output_asm_insn (\"cmp\\t%2, %3\", operands);
10289 if (which_alternative != 0)
10290 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10291 return \"sub%d4\\t%0, %1, #1\";
10293 [(set_attr "conds" "clob")
10294 (set_attr "length" "8,12")]
10297 (define_insn "*cmp_ite0"
10298 [(set (match_operand 6 "dominant_cc_register" "")
10301 (match_operator 4 "arm_comparison_operator"
10302 [(match_operand:SI 0 "s_register_operand"
10303 "l,l,l,r,r,r,r,r,r")
10304 (match_operand:SI 1 "arm_add_operand"
10305 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10306 (match_operator:SI 5 "arm_comparison_operator"
10307 [(match_operand:SI 2 "s_register_operand"
10308 "l,r,r,l,l,r,r,r,r")
10309 (match_operand:SI 3 "arm_add_operand"
10310 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10316 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10318 {\"cmp%d5\\t%0, %1\",
10319 \"cmp%d4\\t%2, %3\"},
10320 {\"cmn%d5\\t%0, #%n1\",
10321 \"cmp%d4\\t%2, %3\"},
10322 {\"cmp%d5\\t%0, %1\",
10323 \"cmn%d4\\t%2, #%n3\"},
10324 {\"cmn%d5\\t%0, #%n1\",
10325 \"cmn%d4\\t%2, #%n3\"}
10327 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10332 \"cmn\\t%0, #%n1\"},
10333 {\"cmn\\t%2, #%n3\",
10335 {\"cmn\\t%2, #%n3\",
10336 \"cmn\\t%0, #%n1\"}
10338 static const char * const ite[2] =
10343 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10344 CMP_CMP, CMN_CMP, CMP_CMP,
10345 CMN_CMP, CMP_CMN, CMN_CMN};
10347 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10349 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10350 if (TARGET_THUMB2) {
10351 output_asm_insn (ite[swap], operands);
10353 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10356 [(set_attr "conds" "set")
10357 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10358 (set_attr_alternative "length"
10364 (if_then_else (eq_attr "is_thumb" "no")
10367 (if_then_else (eq_attr "is_thumb" "no")
10370 (if_then_else (eq_attr "is_thumb" "no")
10373 (if_then_else (eq_attr "is_thumb" "no")
10378 (define_insn "*cmp_ite1"
10379 [(set (match_operand 6 "dominant_cc_register" "")
10382 (match_operator 4 "arm_comparison_operator"
10383 [(match_operand:SI 0 "s_register_operand"
10384 "l,l,l,r,r,r,r,r,r")
10385 (match_operand:SI 1 "arm_add_operand"
10386 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10387 (match_operator:SI 5 "arm_comparison_operator"
10388 [(match_operand:SI 2 "s_register_operand"
10389 "l,r,r,l,l,r,r,r,r")
10390 (match_operand:SI 3 "arm_add_operand"
10391 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10397 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10401 {\"cmn\\t%0, #%n1\",
10404 \"cmn\\t%2, #%n3\"},
10405 {\"cmn\\t%0, #%n1\",
10406 \"cmn\\t%2, #%n3\"}
10408 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10410 {\"cmp%d4\\t%2, %3\",
10411 \"cmp%D5\\t%0, %1\"},
10412 {\"cmp%d4\\t%2, %3\",
10413 \"cmn%D5\\t%0, #%n1\"},
10414 {\"cmn%d4\\t%2, #%n3\",
10415 \"cmp%D5\\t%0, %1\"},
10416 {\"cmn%d4\\t%2, #%n3\",
10417 \"cmn%D5\\t%0, #%n1\"}
10419 static const char * const ite[2] =
10424 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10425 CMP_CMP, CMN_CMP, CMP_CMP,
10426 CMN_CMP, CMP_CMN, CMN_CMN};
10428 comparison_dominates_p (GET_CODE (operands[5]),
10429 reverse_condition (GET_CODE (operands[4])));
10431 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10432 if (TARGET_THUMB2) {
10433 output_asm_insn (ite[swap], operands);
10435 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10438 [(set_attr "conds" "set")
10439 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10440 (set_attr_alternative "length"
10446 (if_then_else (eq_attr "is_thumb" "no")
10449 (if_then_else (eq_attr "is_thumb" "no")
10452 (if_then_else (eq_attr "is_thumb" "no")
10455 (if_then_else (eq_attr "is_thumb" "no")
10460 (define_insn "*cmp_and"
10461 [(set (match_operand 6 "dominant_cc_register" "")
10464 (match_operator 4 "arm_comparison_operator"
10465 [(match_operand:SI 0 "s_register_operand"
10466 "l,l,l,r,r,r,r,r,r")
10467 (match_operand:SI 1 "arm_add_operand"
10468 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10469 (match_operator:SI 5 "arm_comparison_operator"
10470 [(match_operand:SI 2 "s_register_operand"
10471 "l,r,r,l,l,r,r,r,r")
10472 (match_operand:SI 3 "arm_add_operand"
10473 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10478 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10480 {\"cmp%d5\\t%0, %1\",
10481 \"cmp%d4\\t%2, %3\"},
10482 {\"cmn%d5\\t%0, #%n1\",
10483 \"cmp%d4\\t%2, %3\"},
10484 {\"cmp%d5\\t%0, %1\",
10485 \"cmn%d4\\t%2, #%n3\"},
10486 {\"cmn%d5\\t%0, #%n1\",
10487 \"cmn%d4\\t%2, #%n3\"}
10489 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10494 \"cmn\\t%0, #%n1\"},
10495 {\"cmn\\t%2, #%n3\",
10497 {\"cmn\\t%2, #%n3\",
10498 \"cmn\\t%0, #%n1\"}
10500 static const char *const ite[2] =
10505 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10506 CMP_CMP, CMN_CMP, CMP_CMP,
10507 CMN_CMP, CMP_CMN, CMN_CMN};
10509 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10511 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10512 if (TARGET_THUMB2) {
10513 output_asm_insn (ite[swap], operands);
10515 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10518 [(set_attr "conds" "set")
10519 (set_attr "predicable" "no")
10520 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10521 (set_attr_alternative "length"
10527 (if_then_else (eq_attr "is_thumb" "no")
10530 (if_then_else (eq_attr "is_thumb" "no")
10533 (if_then_else (eq_attr "is_thumb" "no")
10536 (if_then_else (eq_attr "is_thumb" "no")
10541 (define_insn "*cmp_ior"
10542 [(set (match_operand 6 "dominant_cc_register" "")
10545 (match_operator 4 "arm_comparison_operator"
10546 [(match_operand:SI 0 "s_register_operand"
10547 "l,l,l,r,r,r,r,r,r")
10548 (match_operand:SI 1 "arm_add_operand"
10549 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10550 (match_operator:SI 5 "arm_comparison_operator"
10551 [(match_operand:SI 2 "s_register_operand"
10552 "l,r,r,l,l,r,r,r,r")
10553 (match_operand:SI 3 "arm_add_operand"
10554 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10559 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10563 {\"cmn\\t%0, #%n1\",
10566 \"cmn\\t%2, #%n3\"},
10567 {\"cmn\\t%0, #%n1\",
10568 \"cmn\\t%2, #%n3\"}
10570 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10572 {\"cmp%D4\\t%2, %3\",
10573 \"cmp%D5\\t%0, %1\"},
10574 {\"cmp%D4\\t%2, %3\",
10575 \"cmn%D5\\t%0, #%n1\"},
10576 {\"cmn%D4\\t%2, #%n3\",
10577 \"cmp%D5\\t%0, %1\"},
10578 {\"cmn%D4\\t%2, #%n3\",
10579 \"cmn%D5\\t%0, #%n1\"}
10581 static const char *const ite[2] =
10586 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10587 CMP_CMP, CMN_CMP, CMP_CMP,
10588 CMN_CMP, CMP_CMN, CMN_CMN};
10590 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10592 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10593 if (TARGET_THUMB2) {
10594 output_asm_insn (ite[swap], operands);
10596 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10600 [(set_attr "conds" "set")
10601 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10602 (set_attr_alternative "length"
10608 (if_then_else (eq_attr "is_thumb" "no")
10611 (if_then_else (eq_attr "is_thumb" "no")
10614 (if_then_else (eq_attr "is_thumb" "no")
10617 (if_then_else (eq_attr "is_thumb" "no")
10622 (define_insn_and_split "*ior_scc_scc"
10623 [(set (match_operand:SI 0 "s_register_operand" "=r")
10624 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10625 [(match_operand:SI 1 "s_register_operand" "r")
10626 (match_operand:SI 2 "arm_add_operand" "rIL")])
10627 (match_operator:SI 6 "arm_comparison_operator"
10628 [(match_operand:SI 4 "s_register_operand" "r")
10629 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10630 (clobber (reg:CC CC_REGNUM))]
10632 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10635 "TARGET_32BIT && reload_completed"
10636 [(set (match_dup 7)
10639 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10640 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10642 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10644 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10647 [(set_attr "conds" "clob")
10648 (set_attr "length" "16")])
10650 ; If the above pattern is followed by a CMP insn, then the compare is
10651 ; redundant, since we can rework the conditional instruction that follows.
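; For illustration (operand names are hypothetical): if Rd = (a == b) | (c == d)
; is followed by a test of Rd against zero, the split keeps the combined result
; in a dominance CC mode, so a following conditional branch can use a sequence
; such as
;   cmp    a, b
;   cmpne  c, d
;   beq    <target>	@ taken when either comparison holds
; and the explicit re-test of Rd disappears.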
10652 (define_insn_and_split "*ior_scc_scc_cmp"
10653 [(set (match_operand 0 "dominant_cc_register" "")
10654 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10655 [(match_operand:SI 1 "s_register_operand" "r")
10656 (match_operand:SI 2 "arm_add_operand" "rIL")])
10657 (match_operator:SI 6 "arm_comparison_operator"
10658 [(match_operand:SI 4 "s_register_operand" "r")
10659 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10661 (set (match_operand:SI 7 "s_register_operand" "=r")
10662 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10663 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10666 "TARGET_32BIT && reload_completed"
10667 [(set (match_dup 0)
10670 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10671 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10673 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10675 [(set_attr "conds" "set")
10676 (set_attr "length" "16")])
10678 (define_insn_and_split "*and_scc_scc"
10679 [(set (match_operand:SI 0 "s_register_operand" "=r")
10680 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10681 [(match_operand:SI 1 "s_register_operand" "r")
10682 (match_operand:SI 2 "arm_add_operand" "rIL")])
10683 (match_operator:SI 6 "arm_comparison_operator"
10684 [(match_operand:SI 4 "s_register_operand" "r")
10685 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10686 (clobber (reg:CC CC_REGNUM))]
10688 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10691 "TARGET_32BIT && reload_completed
10692 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10694 [(set (match_dup 7)
10697 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10698 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10700 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10702 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10705 [(set_attr "conds" "clob")
10706 (set_attr "length" "16")])
10708 ; If the above pattern is followed by a CMP insn, then the compare is
10709 ; redundant, since we can rework the conditional instruction that follows.
10710 (define_insn_and_split "*and_scc_scc_cmp"
10711 [(set (match_operand 0 "dominant_cc_register" "")
10712 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10713 [(match_operand:SI 1 "s_register_operand" "r")
10714 (match_operand:SI 2 "arm_add_operand" "rIL")])
10715 (match_operator:SI 6 "arm_comparison_operator"
10716 [(match_operand:SI 4 "s_register_operand" "r")
10717 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10719 (set (match_operand:SI 7 "s_register_operand" "=r")
10720 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10721 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10724 "TARGET_32BIT && reload_completed"
10725 [(set (match_dup 0)
10728 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10729 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10731 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10733 [(set_attr "conds" "set")
10734 (set_attr "length" "16")])
;; If there is no dominance in the comparison, then we can still save an
;; instruction in the AND case, since we know that the second compare
;; need only zero the value if false (if true, then the value is already
;; correct).
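;; For illustration (operand names are hypothetical), Rd = (a < b) && (c > d)
;; has no dominating condition and can then be compiled as
;;	cmp    a, b
;;	movge  Rd, #0
;;	movlt  Rd, #1
;;	cmp    c, d
;;	movle  Rd, #0
;; where the final conditional move only has to clear Rd.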
10740 (define_insn_and_split "*and_scc_scc_nodom"
10741 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
10742 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10743 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10744 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10745 (match_operator:SI 6 "arm_comparison_operator"
10746 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10747 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10748 (clobber (reg:CC CC_REGNUM))]
10750 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10753 "TARGET_32BIT && reload_completed"
10754 [(parallel [(set (match_dup 0)
10755 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10756 (clobber (reg:CC CC_REGNUM))])
10757 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10759 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10762 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10763 operands[4], operands[5]),
10765 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10767 [(set_attr "conds" "clob")
10768 (set_attr "length" "20")])
10771 [(set (reg:CC_NOOV CC_REGNUM)
10772 (compare:CC_NOOV (ior:SI
10773 (and:SI (match_operand:SI 0 "s_register_operand" "")
10775 (match_operator:SI 1 "arm_comparison_operator"
10776 [(match_operand:SI 2 "s_register_operand" "")
10777 (match_operand:SI 3 "arm_add_operand" "")]))
10779 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10781 [(set (match_dup 4)
10782 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10784 (set (reg:CC_NOOV CC_REGNUM)
10785 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10790 [(set (reg:CC_NOOV CC_REGNUM)
10791 (compare:CC_NOOV (ior:SI
10792 (match_operator:SI 1 "arm_comparison_operator"
10793 [(match_operand:SI 2 "s_register_operand" "")
10794 (match_operand:SI 3 "arm_add_operand" "")])
10795 (and:SI (match_operand:SI 0 "s_register_operand" "")
10798 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10800 [(set (match_dup 4)
10801 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10803 (set (reg:CC_NOOV CC_REGNUM)
10804 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10807 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10809 (define_insn_and_split "*negscc"
10810 [(set (match_operand:SI 0 "s_register_operand" "=r")
10811 (neg:SI (match_operator 3 "arm_comparison_operator"
10812 [(match_operand:SI 1 "s_register_operand" "r")
10813 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10814 (clobber (reg:CC CC_REGNUM))]
10817 "&& reload_completed"
10820 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10822 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10824 /* Emit mov\\t%0, %1, asr #31 */
10825 emit_insn (gen_rtx_SET (VOIDmode,
10827 gen_rtx_ASHIFTRT (SImode,
10832 else if (GET_CODE (operands[3]) == NE)
10834 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10835 if (CONST_INT_P (operands[2]))
10836 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10837 GEN_INT (- INTVAL (operands[2]))));
10839 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10841 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10842 gen_rtx_NE (SImode,
10845 gen_rtx_SET (SImode,
10852 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10853 emit_insn (gen_rtx_SET (VOIDmode,
10855 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10856 enum rtx_code rc = GET_CODE (operands[3]);
10858 rc = reverse_condition (rc);
10859 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10860 gen_rtx_fmt_ee (rc,
10864 gen_rtx_SET (VOIDmode, operands[0], const0_rtx)));
10865 rc = GET_CODE (operands[3]);
10866 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10867 gen_rtx_fmt_ee (rc,
10871 gen_rtx_SET (VOIDmode,
10878 [(set_attr "conds" "clob")
10879 (set_attr "length" "12")]
10882 (define_insn "movcond"
10883 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10885 (match_operator 5 "arm_comparison_operator"
10886 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10887 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10888 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10889 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10890 (clobber (reg:CC CC_REGNUM))]
10893 if (GET_CODE (operands[5]) == LT
10894 && (operands[4] == const0_rtx))
10896 if (which_alternative != 1 && REG_P (operands[1]))
10898 if (operands[2] == const0_rtx)
10899 return \"and\\t%0, %1, %3, asr #31\";
10900 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10902 else if (which_alternative != 0 && REG_P (operands[2]))
10904 if (operands[1] == const0_rtx)
10905 return \"bic\\t%0, %2, %3, asr #31\";
10906 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
/* The only case that falls through to here is when both ops 1 & 2
   are constants.  */
10912 if (GET_CODE (operands[5]) == GE
10913 && (operands[4] == const0_rtx))
10915 if (which_alternative != 1 && REG_P (operands[1]))
10917 if (operands[2] == const0_rtx)
10918 return \"bic\\t%0, %1, %3, asr #31\";
10919 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10921 else if (which_alternative != 0 && REG_P (operands[2]))
10923 if (operands[1] == const0_rtx)
10924 return \"and\\t%0, %2, %3, asr #31\";
10925 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
/* The only case that falls through to here is when both ops 1 & 2
   are constants.  */
10930 if (CONST_INT_P (operands[4])
10931 && !const_ok_for_arm (INTVAL (operands[4])))
10932 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10934 output_asm_insn (\"cmp\\t%3, %4\", operands);
10935 if (which_alternative != 0)
10936 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10937 if (which_alternative != 1)
10938 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10941 [(set_attr "conds" "clob")
10942 (set_attr "length" "8,8,12")]
10945 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10947 (define_insn "*ifcompare_plus_move"
10948 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10949 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10950 [(match_operand:SI 4 "s_register_operand" "r,r")
10951 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10953 (match_operand:SI 2 "s_register_operand" "r,r")
10954 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10955 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10956 (clobber (reg:CC CC_REGNUM))]
10959 [(set_attr "conds" "clob")
10960 (set_attr "length" "8,12")]
10963 (define_insn "*if_plus_move"
10964 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10966 (match_operator 4 "arm_comparison_operator"
10967 [(match_operand 5 "cc_register" "") (const_int 0)])
10969 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10970 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10971 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10974 add%d4\\t%0, %2, %3
10975 sub%d4\\t%0, %2, #%n3
10976 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10977 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10978 [(set_attr "conds" "use")
10979 (set_attr "length" "4,4,8,8")
10980 (set_attr_alternative "type"
10981 [(if_then_else (match_operand 3 "const_int_operand" "")
10982 (const_string "simple_alu_imm" )
10983 (const_string "*"))
10984 (const_string "simple_alu_imm")
10986 (const_string "*")])]
10989 (define_insn "*ifcompare_move_plus"
10990 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10991 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10992 [(match_operand:SI 4 "s_register_operand" "r,r")
10993 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10994 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10996 (match_operand:SI 2 "s_register_operand" "r,r")
10997 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10998 (clobber (reg:CC CC_REGNUM))]
11001 [(set_attr "conds" "clob")
11002 (set_attr "length" "8,12")]
11005 (define_insn "*if_move_plus"
11006 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11008 (match_operator 4 "arm_comparison_operator"
11009 [(match_operand 5 "cc_register" "") (const_int 0)])
11010 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
11012 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11013 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
11016 add%D4\\t%0, %2, %3
11017 sub%D4\\t%0, %2, #%n3
11018 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
11019 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
11020 [(set_attr "conds" "use")
11021 (set_attr "length" "4,4,8,8")
11022 (set_attr_alternative "type"
11023 [(if_then_else (match_operand 3 "const_int_operand" "")
11024 (const_string "simple_alu_imm" )
11025 (const_string "*"))
11026 (const_string "simple_alu_imm")
11028 (const_string "*")])]
11031 (define_insn "*ifcompare_arith_arith"
11032 [(set (match_operand:SI 0 "s_register_operand" "=r")
11033 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
11034 [(match_operand:SI 5 "s_register_operand" "r")
11035 (match_operand:SI 6 "arm_add_operand" "rIL")])
11036 (match_operator:SI 8 "shiftable_operator"
11037 [(match_operand:SI 1 "s_register_operand" "r")
11038 (match_operand:SI 2 "arm_rhs_operand" "rI")])
11039 (match_operator:SI 7 "shiftable_operator"
11040 [(match_operand:SI 3 "s_register_operand" "r")
11041 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
11042 (clobber (reg:CC CC_REGNUM))]
11045 [(set_attr "conds" "clob")
11046 (set_attr "length" "12")]
11049 (define_insn "*if_arith_arith"
11050 [(set (match_operand:SI 0 "s_register_operand" "=r")
11051 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
11052 [(match_operand 8 "cc_register" "") (const_int 0)])
11053 (match_operator:SI 6 "shiftable_operator"
11054 [(match_operand:SI 1 "s_register_operand" "r")
11055 (match_operand:SI 2 "arm_rhs_operand" "rI")])
11056 (match_operator:SI 7 "shiftable_operator"
11057 [(match_operand:SI 3 "s_register_operand" "r")
11058 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
11060 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
11061 [(set_attr "conds" "use")
11062 (set_attr "length" "8")]
11065 (define_insn "*ifcompare_arith_move"
11066 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11067 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11068 [(match_operand:SI 2 "s_register_operand" "r,r")
11069 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
11070 (match_operator:SI 7 "shiftable_operator"
11071 [(match_operand:SI 4 "s_register_operand" "r,r")
11072 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
11073 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11074 (clobber (reg:CC CC_REGNUM))]
/* If we have an operation where (op x 0) is the identity operation, the
   conditional operator is LT or GE, we are comparing against zero, and
   everything is in registers, then we can do this in two instructions.  */
11080 if (operands[3] == const0_rtx
11081 && GET_CODE (operands[7]) != AND
11082 && REG_P (operands[5])
11083 && REG_P (operands[1])
11084 && REGNO (operands[1]) == REGNO (operands[4])
11085 && REGNO (operands[4]) != REGNO (operands[0]))
11087 if (GET_CODE (operands[6]) == LT)
11088 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11089 else if (GET_CODE (operands[6]) == GE)
11090 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11092 if (CONST_INT_P (operands[3])
11093 && !const_ok_for_arm (INTVAL (operands[3])))
11094 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
11096 output_asm_insn (\"cmp\\t%2, %3\", operands);
11097 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11098 if (which_alternative != 0)
11099 return \"mov%D6\\t%0, %1\";
11102 [(set_attr "conds" "clob")
11103 (set_attr "length" "8,12")]
11106 (define_insn "*if_arith_move"
11107 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11108 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11109 [(match_operand 6 "cc_register" "") (const_int 0)])
11110 (match_operator:SI 5 "shiftable_operator"
11111 [(match_operand:SI 2 "s_register_operand" "r,r")
11112 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11113 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11116 %I5%d4\\t%0, %2, %3
11117 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
11118 [(set_attr "conds" "use")
11119 (set_attr "length" "4,8")
11120 (set_attr "type" "*,*")]
11123 (define_insn "*ifcompare_move_arith"
11124 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11125 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11126 [(match_operand:SI 4 "s_register_operand" "r,r")
11127 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11128 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11129 (match_operator:SI 7 "shiftable_operator"
11130 [(match_operand:SI 2 "s_register_operand" "r,r")
11131 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11132 (clobber (reg:CC CC_REGNUM))]
/* If we have an operation where (op x 0) is the identity operation, the
   conditional operator is LT or GE, we are comparing against zero, and
   everything is in registers, then we can do this in two instructions.  */
11138 if (operands[5] == const0_rtx
11139 && GET_CODE (operands[7]) != AND
11140 && REG_P (operands[3])
11141 && REG_P (operands[1])
11142 && REGNO (operands[1]) == REGNO (operands[2])
11143 && REGNO (operands[2]) != REGNO (operands[0]))
11145 if (GET_CODE (operands[6]) == GE)
11146 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11147 else if (GET_CODE (operands[6]) == LT)
11148 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11151 if (CONST_INT_P (operands[5])
11152 && !const_ok_for_arm (INTVAL (operands[5])))
11153 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11155 output_asm_insn (\"cmp\\t%4, %5\", operands);
11157 if (which_alternative != 0)
11158 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11159 return \"%I7%D6\\t%0, %2, %3\";
11161 [(set_attr "conds" "clob")
11162 (set_attr "length" "8,12")]
11165 (define_insn "*if_move_arith"
11166 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11168 (match_operator 4 "arm_comparison_operator"
11169 [(match_operand 6 "cc_register" "") (const_int 0)])
11170 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11171 (match_operator:SI 5 "shiftable_operator"
11172 [(match_operand:SI 2 "s_register_operand" "r,r")
11173 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11176 %I5%D4\\t%0, %2, %3
11177 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11178 [(set_attr "conds" "use")
11179 (set_attr "length" "4,8")
11180 (set_attr "type" "*,*")]
11183 (define_insn "*ifcompare_move_not"
11184 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11186 (match_operator 5 "arm_comparison_operator"
11187 [(match_operand:SI 3 "s_register_operand" "r,r")
11188 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11189 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11191 (match_operand:SI 2 "s_register_operand" "r,r"))))
11192 (clobber (reg:CC CC_REGNUM))]
11195 [(set_attr "conds" "clob")
11196 (set_attr "length" "8,12")]
11199 (define_insn "*if_move_not"
11200 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11202 (match_operator 4 "arm_comparison_operator"
11203 [(match_operand 3 "cc_register" "") (const_int 0)])
11204 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11205 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11209 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11210 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11211 [(set_attr "conds" "use")
11212 (set_attr "insn" "mvn")
11213 (set_attr "length" "4,8,8")]
11216 (define_insn "*ifcompare_not_move"
11217 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11219 (match_operator 5 "arm_comparison_operator"
11220 [(match_operand:SI 3 "s_register_operand" "r,r")
11221 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11223 (match_operand:SI 2 "s_register_operand" "r,r"))
11224 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11225 (clobber (reg:CC CC_REGNUM))]
11228 [(set_attr "conds" "clob")
11229 (set_attr "length" "8,12")]
11232 (define_insn "*if_not_move"
11233 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11235 (match_operator 4 "arm_comparison_operator"
11236 [(match_operand 3 "cc_register" "") (const_int 0)])
11237 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11238 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11242 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11243 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11244 [(set_attr "conds" "use")
11245 (set_attr "insn" "mvn")
11246 (set_attr "length" "4,8,8")]
11249 (define_insn "*ifcompare_shift_move"
11250 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11252 (match_operator 6 "arm_comparison_operator"
11253 [(match_operand:SI 4 "s_register_operand" "r,r")
11254 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11255 (match_operator:SI 7 "shift_operator"
11256 [(match_operand:SI 2 "s_register_operand" "r,r")
11257 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11258 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11259 (clobber (reg:CC CC_REGNUM))]
11262 [(set_attr "conds" "clob")
11263 (set_attr "length" "8,12")]
11266 (define_insn "*if_shift_move"
11267 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11269 (match_operator 5 "arm_comparison_operator"
11270 [(match_operand 6 "cc_register" "") (const_int 0)])
11271 (match_operator:SI 4 "shift_operator"
11272 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11273 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11274 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11278 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11279 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11280 [(set_attr "conds" "use")
11281 (set_attr "shift" "2")
11282 (set_attr "length" "4,8,8")
11283 (set_attr "insn" "mov")
11284 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11285 (const_string "alu_shift")
11286 (const_string "alu_shift_reg")))]
11289 (define_insn "*ifcompare_move_shift"
11290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11292 (match_operator 6 "arm_comparison_operator"
11293 [(match_operand:SI 4 "s_register_operand" "r,r")
11294 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11295 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11296 (match_operator:SI 7 "shift_operator"
11297 [(match_operand:SI 2 "s_register_operand" "r,r")
11298 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11299 (clobber (reg:CC CC_REGNUM))]
11302 [(set_attr "conds" "clob")
11303 (set_attr "length" "8,12")]
11306 (define_insn "*if_move_shift"
11307 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11309 (match_operator 5 "arm_comparison_operator"
11310 [(match_operand 6 "cc_register" "") (const_int 0)])
11311 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11312 (match_operator:SI 4 "shift_operator"
11313 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11314 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11318 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11319 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11320 [(set_attr "conds" "use")
11321 (set_attr "shift" "2")
11322 (set_attr "length" "4,8,8")
11323 (set_attr "insn" "mov")
11324 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11325 (const_string "alu_shift")
11326 (const_string "alu_shift_reg")))]
11329 (define_insn "*ifcompare_shift_shift"
11330 [(set (match_operand:SI 0 "s_register_operand" "=r")
11332 (match_operator 7 "arm_comparison_operator"
11333 [(match_operand:SI 5 "s_register_operand" "r")
11334 (match_operand:SI 6 "arm_add_operand" "rIL")])
11335 (match_operator:SI 8 "shift_operator"
11336 [(match_operand:SI 1 "s_register_operand" "r")
11337 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11338 (match_operator:SI 9 "shift_operator"
11339 [(match_operand:SI 3 "s_register_operand" "r")
11340 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11341 (clobber (reg:CC CC_REGNUM))]
11344 [(set_attr "conds" "clob")
11345 (set_attr "length" "12")]
11348 (define_insn "*if_shift_shift"
11349 [(set (match_operand:SI 0 "s_register_operand" "=r")
11351 (match_operator 5 "arm_comparison_operator"
11352 [(match_operand 8 "cc_register" "") (const_int 0)])
11353 (match_operator:SI 6 "shift_operator"
11354 [(match_operand:SI 1 "s_register_operand" "r")
11355 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11356 (match_operator:SI 7 "shift_operator"
11357 [(match_operand:SI 3 "s_register_operand" "r")
11358 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11360 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11361 [(set_attr "conds" "use")
11362 (set_attr "shift" "1")
11363 (set_attr "length" "8")
11364 (set_attr "insn" "mov")
11365 (set (attr "type") (if_then_else
11366 (and (match_operand 2 "const_int_operand" "")
11367 (match_operand 4 "const_int_operand" ""))
11368 (const_string "alu_shift")
11369 (const_string "alu_shift_reg")))]
11372 (define_insn "*ifcompare_not_arith"
11373 [(set (match_operand:SI 0 "s_register_operand" "=r")
11375 (match_operator 6 "arm_comparison_operator"
11376 [(match_operand:SI 4 "s_register_operand" "r")
11377 (match_operand:SI 5 "arm_add_operand" "rIL")])
11378 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11379 (match_operator:SI 7 "shiftable_operator"
11380 [(match_operand:SI 2 "s_register_operand" "r")
11381 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11382 (clobber (reg:CC CC_REGNUM))]
11385 [(set_attr "conds" "clob")
11386 (set_attr "length" "12")]
11389 (define_insn "*if_not_arith"
11390 [(set (match_operand:SI 0 "s_register_operand" "=r")
11392 (match_operator 5 "arm_comparison_operator"
11393 [(match_operand 4 "cc_register" "") (const_int 0)])
11394 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11395 (match_operator:SI 6 "shiftable_operator"
11396 [(match_operand:SI 2 "s_register_operand" "r")
11397 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11399 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11400 [(set_attr "conds" "use")
11401 (set_attr "insn" "mvn")
11402 (set_attr "length" "8")]
11405 (define_insn "*ifcompare_arith_not"
11406 [(set (match_operand:SI 0 "s_register_operand" "=r")
11408 (match_operator 6 "arm_comparison_operator"
11409 [(match_operand:SI 4 "s_register_operand" "r")
11410 (match_operand:SI 5 "arm_add_operand" "rIL")])
11411 (match_operator:SI 7 "shiftable_operator"
11412 [(match_operand:SI 2 "s_register_operand" "r")
11413 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11414 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11415 (clobber (reg:CC CC_REGNUM))]
11418 [(set_attr "conds" "clob")
11419 (set_attr "length" "12")]
11422 (define_insn "*if_arith_not"
11423 [(set (match_operand:SI 0 "s_register_operand" "=r")
11425 (match_operator 5 "arm_comparison_operator"
11426 [(match_operand 4 "cc_register" "") (const_int 0)])
11427 (match_operator:SI 6 "shiftable_operator"
11428 [(match_operand:SI 2 "s_register_operand" "r")
11429 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11430 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11432 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11433 [(set_attr "conds" "use")
11434 (set_attr "insn" "mvn")
11435 (set_attr "length" "8")]
11438 (define_insn "*ifcompare_neg_move"
11439 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11441 (match_operator 5 "arm_comparison_operator"
11442 [(match_operand:SI 3 "s_register_operand" "r,r")
11443 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11444 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11445 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11446 (clobber (reg:CC CC_REGNUM))]
11449 [(set_attr "conds" "clob")
11450 (set_attr "length" "8,12")]
11453 (define_insn "*if_neg_move"
11454 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11456 (match_operator 4 "arm_comparison_operator"
11457 [(match_operand 3 "cc_register" "") (const_int 0)])
11458 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11459 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11462 rsb%d4\\t%0, %2, #0
11463 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
11464 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
11465 [(set_attr "conds" "use")
11466 (set_attr "length" "4,8,8")]
11469 (define_insn "*ifcompare_move_neg"
11470 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11472 (match_operator 5 "arm_comparison_operator"
11473 [(match_operand:SI 3 "s_register_operand" "r,r")
11474 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11475 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11476 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11477 (clobber (reg:CC CC_REGNUM))]
11480 [(set_attr "conds" "clob")
11481 (set_attr "length" "8,12")]
11484 (define_insn "*if_move_neg"
11485 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11487 (match_operator 4 "arm_comparison_operator"
11488 [(match_operand 3 "cc_register" "") (const_int 0)])
11489 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11490 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11493 rsb%D4\\t%0, %2, #0
11494 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
11495 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
11496 [(set_attr "conds" "use")
11497 (set_attr "length" "4,8,8")]
11500 (define_insn "*arith_adjacentmem"
11501 [(set (match_operand:SI 0 "s_register_operand" "=r")
11502 (match_operator:SI 1 "shiftable_operator"
11503 [(match_operand:SI 2 "memory_operand" "m")
11504 (match_operand:SI 3 "memory_operand" "m")]))
11505 (clobber (match_scratch:SI 4 "=r"))]
11506 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11512 HOST_WIDE_INT val1 = 0, val2 = 0;
11514 if (REGNO (operands[0]) > REGNO (operands[4]))
11516 ldm[1] = operands[4];
11517 ldm[2] = operands[0];
11521 ldm[1] = operands[0];
11522 ldm[2] = operands[4];
11525 base_reg = XEXP (operands[2], 0);
11527 if (!REG_P (base_reg))
11529 val1 = INTVAL (XEXP (base_reg, 1));
11530 base_reg = XEXP (base_reg, 0);
11533 if (!REG_P (XEXP (operands[3], 0)))
11534 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11536 arith[0] = operands[0];
11537 arith[3] = operands[1];
if (val1 != 0 && val2 != 0)
11555 if (val1 == 4 || val2 == 4)
/* Other val must be 8, since we know they are adjacent and neither
   is zero.  */
11558 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
11559 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11561 ldm[0] = ops[0] = operands[4];
11563 ops[2] = GEN_INT (val1);
11564 output_add_immediate (ops);
11566 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11568 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11572 /* Offset is out of range for a single add, so use two ldr. */
11575 ops[2] = GEN_INT (val1);
11576 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11578 ops[2] = GEN_INT (val2);
11579 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11582 else if (val1 != 0)
11585 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11587 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11592 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11594 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11596 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11599 [(set_attr "length" "12")
11600 (set_attr "predicable" "yes")
11601 (set_attr "type" "load1")]
11604 ; This pattern is never tried by combine, so do it as a peephole
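; For illustration (Rd and Rn are placeholder registers), it replaces
;   mov  Rd, Rn
;   cmp  Rn, #0
; with a single flag-setting move such as subs Rd, Rn, #0 (or an equivalent
; form), so the comparison comes for free.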
11607 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11608 (match_operand:SI 1 "arm_general_register_operand" ""))
11609 (set (reg:CC CC_REGNUM)
11610 (compare:CC (match_dup 1) (const_int 0)))]
11612 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11613 (set (match_dup 0) (match_dup 1))])]
11618 [(set (match_operand:SI 0 "s_register_operand" "")
11619 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11621 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11622 [(match_operand:SI 3 "s_register_operand" "")
11623 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11624 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11626 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11627 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11632 ;; This split can be used because CC_Z mode implies that the following
11633 ;; branch will be an equality, or an unsigned inequality, so the sign
11634 ;; extension is not needed.
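;; For illustration (register and operand names are hypothetical): comparing
;; a byte loaded from memory, shifted left by 24, against 0x2a000000 can
;; instead be done as
;;	ldrb  Rt, [Rn]
;;	cmp   Rt, #42
;; because CC_Z mode guarantees only (in)equality will be tested.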
11637 [(set (reg:CC_Z CC_REGNUM)
11639 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11641 (match_operand 1 "const_int_operand" "")))
11642 (clobber (match_scratch:SI 2 ""))]
11644 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
11645 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
11646 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11647 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11649 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11652 ;; ??? Check the patterns above for Thumb-2 usefulness
11654 (define_expand "prologue"
11655 [(clobber (const_int 0))]
11658 arm_expand_prologue ();
11660 thumb1_expand_prologue ();
11665 (define_expand "epilogue"
11666 [(clobber (const_int 0))]
11669 if (crtl->calls_eh_return)
11670 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11673 thumb1_expand_epilogue ();
11674 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11675 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11677 else if (HAVE_return)
11679 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11680 no need for explicit testing again. */
11681 emit_jump_insn (gen_return ());
11683 else if (TARGET_32BIT)
11685 arm_expand_epilogue (true);
11691 (define_insn "prologue_thumb1_interwork"
11692 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
11694 "* return thumb1_output_interwork ();"
11695 [(set_attr "length" "8")]
;; Note - although unspec_volatiles USE all hard registers,
;; USEs are ignored after reload has completed.  Thus we need
11700 ;; to add an unspec of the link register to ensure that flow
11701 ;; does not think that it is unused by the sibcall branch that
11702 ;; will replace the standard function epilogue.
11703 (define_expand "sibcall_epilogue"
11704 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11705 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11708 arm_expand_epilogue (false);
11713 (define_insn "*epilogue_insns"
11714 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
11717 return thumb1_unexpanded_epilogue ();
11719 ; Length is absolute worst case
11720 [(set_attr "length" "44")
11721 (set_attr "type" "block")
11722 ;; We don't clobber the conditions, but the potential length of this
11723 ;; operation is sufficient to make conditionalizing the sequence
11724 ;; unlikely to be profitable.
11725 (set_attr "conds" "clob")]
11728 (define_expand "eh_epilogue"
11729 [(use (match_operand:SI 0 "register_operand" ""))
11730 (use (match_operand:SI 1 "register_operand" ""))
11731 (use (match_operand:SI 2 "register_operand" ""))]
11735 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11736 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11738 rtx ra = gen_rtx_REG (Pmode, 2);
11740 emit_move_insn (ra, operands[2]);
/* This is a hack -- we may have crystallized the function type too
   early.  */
11745 cfun->machine->func_type = 0;
;; This split is only used during output to reduce the number of patterns
;; that need assembler instructions adding to them.  We allowed the setting
;; of the conditions to be implicit during rtl generation so that
;; the conditional compare patterns would work.  However, this conflicts to
;; some extent with the conditional data operations, so we have to split them
;; up again here.
11756 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11757 ;; conditional execution sufficient?
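;; For illustration (operand names are hypothetical), the general form
;; Rd = (a < b) ? x : y is split into
;;	cmp    a, b
;;	movlt  Rd, x
;;	movge  Rd, y
;; i.e. one compare followed by two conditional moves.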
11760 [(set (match_operand:SI 0 "s_register_operand" "")
11761 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11762 [(match_operand 2 "" "") (match_operand 3 "" "")])
11764 (match_operand 4 "" "")))
11765 (clobber (reg:CC CC_REGNUM))]
11766 "TARGET_ARM && reload_completed"
11767 [(set (match_dup 5) (match_dup 6))
11768 (cond_exec (match_dup 7)
11769 (set (match_dup 0) (match_dup 4)))]
11772 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11773 operands[2], operands[3]);
11774 enum rtx_code rc = GET_CODE (operands[1]);
11776 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11777 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11778 if (mode == CCFPmode || mode == CCFPEmode)
11779 rc = reverse_condition_maybe_unordered (rc);
11781 rc = reverse_condition (rc);
11783 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11788 [(set (match_operand:SI 0 "s_register_operand" "")
11789 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11790 [(match_operand 2 "" "") (match_operand 3 "" "")])
11791 (match_operand 4 "" "")
11793 (clobber (reg:CC CC_REGNUM))]
11794 "TARGET_ARM && reload_completed"
11795 [(set (match_dup 5) (match_dup 6))
11796 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11797 (set (match_dup 0) (match_dup 4)))]
11800 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11801 operands[2], operands[3]);
11803 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11804 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11809 [(set (match_operand:SI 0 "s_register_operand" "")
11810 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11811 [(match_operand 2 "" "") (match_operand 3 "" "")])
11812 (match_operand 4 "" "")
11813 (match_operand 5 "" "")))
11814 (clobber (reg:CC CC_REGNUM))]
11815 "TARGET_ARM && reload_completed"
11816 [(set (match_dup 6) (match_dup 7))
11817 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11818 (set (match_dup 0) (match_dup 4)))
11819 (cond_exec (match_dup 8)
11820 (set (match_dup 0) (match_dup 5)))]
11823 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11824 operands[2], operands[3]);
11825 enum rtx_code rc = GET_CODE (operands[1]);
11827 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11828 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11829 if (mode == CCFPmode || mode == CCFPEmode)
11830 rc = reverse_condition_maybe_unordered (rc);
11832 rc = reverse_condition (rc);
11834 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11839 [(set (match_operand:SI 0 "s_register_operand" "")
11840 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11841 [(match_operand:SI 2 "s_register_operand" "")
11842 (match_operand:SI 3 "arm_add_operand" "")])
11843 (match_operand:SI 4 "arm_rhs_operand" "")
11845 (match_operand:SI 5 "s_register_operand" ""))))
11846 (clobber (reg:CC CC_REGNUM))]
11847 "TARGET_ARM && reload_completed"
11848 [(set (match_dup 6) (match_dup 7))
11849 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11850 (set (match_dup 0) (match_dup 4)))
11851 (cond_exec (match_dup 8)
11852 (set (match_dup 0) (not:SI (match_dup 5))))]
11855 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11856 operands[2], operands[3]);
11857 enum rtx_code rc = GET_CODE (operands[1]);
11859 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11860 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11861 if (mode == CCFPmode || mode == CCFPEmode)
11862 rc = reverse_condition_maybe_unordered (rc);
11864 rc = reverse_condition (rc);
11866 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11870 (define_insn "*cond_move_not"
11871 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11872 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11873 [(match_operand 3 "cc_register" "") (const_int 0)])
11874 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11876 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11880 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11881 [(set_attr "conds" "use")
11882 (set_attr "insn" "mvn")
11883 (set_attr "length" "4,8")]
11886 ;; The next two patterns occur when an AND operation is followed by a
11887 ;; scc insn sequence
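;; For illustration (Rd and Rn are placeholder registers), extracting bit 3
;; of Rn as a one-bit signed field (yielding 0 or -1) is emitted as
;;	ands   Rd, Rn, #8
;;	mvnne  Rd, #0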
11889 (define_insn "*sign_extract_onebit"
11890 [(set (match_operand:SI 0 "s_register_operand" "=r")
11891 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11893 (match_operand:SI 2 "const_int_operand" "n")))
11894 (clobber (reg:CC CC_REGNUM))]
11897 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11898 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11899 return \"mvnne\\t%0, #0\";
11901 [(set_attr "conds" "clob")
11902 (set_attr "length" "8")]
11905 (define_insn "*not_signextract_onebit"
11906 [(set (match_operand:SI 0 "s_register_operand" "=r")
11908 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11910 (match_operand:SI 2 "const_int_operand" "n"))))
11911 (clobber (reg:CC CC_REGNUM))]
11914 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11915 output_asm_insn (\"tst\\t%1, %2\", operands);
11916 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11917 return \"movne\\t%0, #0\";
11919 [(set_attr "conds" "clob")
11920 (set_attr "length" "12")]
11922 ;; ??? The above patterns need auditing for Thumb-2
11924 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11925 ;; expressions.  For simplicity, the first register is also in the unspec
11926 ;; part.
11927 ;; To avoid the use of a GNU extension, the length attribute is computed
11928 ;; in a C function, arm_attr_length_push_multi.
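;;
;; For illustration (hypothetical example, not from this file): a non-leaf
;; function such as
;;
;;   extern void g (int);
;;   void f (int x) { g (x); g (x + 1); }
;;
;; must save at least LR in its prologue, and the whole register save is
;; expected to match *push_multi below, emitting e.g. "stmfd sp!, {r4, lr}"
;; in ARM state or "push {r4, lr}" in Thumb-2, per the output code in the
;; pattern.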
11929 (define_insn "*push_multi"
11930 [(match_parallel 2 "multi_register_push"
11931 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11932 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11933 UNSPEC_PUSH_MULT))])]
11937 int num_saves = XVECLEN (operands[2], 0);
11939 /* For the StrongARM at least it is faster to
11940 use STR to store only a single register.
11941 In Thumb mode always use push, and the assembler will pick
11942 something appropriate. */
11943 if (num_saves == 1 && TARGET_ARM)
11944 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11951 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
11952 else if (TARGET_THUMB2)
11953 strcpy (pattern, \"push%?\\t{%1\");
11955 strcpy (pattern, \"push\\t{%1\");
11957 for (i = 1; i < num_saves; i++)
11959 strcat (pattern, \", %|\");
11961 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11964 strcat (pattern, \"}\");
11965 output_asm_insn (pattern, operands);
11970 [(set_attr "type" "store4")
11971 (set (attr "length")
11972 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11975 (define_insn "stack_tie"
11976 [(set (mem:BLK (scratch))
11977 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11978 (match_operand:SI 1 "s_register_operand" "rk")]
11982 [(set_attr "length" "0")]
11985 ;; Pop (as used in epilogue RTL)
11987 (define_insn "*load_multiple_with_writeback"
11988 [(match_parallel 0 "load_multiple_operation"
11989 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11990 (plus:SI (match_dup 1)
11991 (match_operand:SI 2 "const_int_operand" "I")))
11992 (set (match_operand:SI 3 "s_register_operand" "=rk")
11993 (mem:SI (match_dup 1)))
11995 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11998 arm_output_multireg_pop (operands, /*return_pc=*/false,
11999 /*cond=*/const_true_rtx,
12005 [(set_attr "type" "load4")
12006 (set_attr "predicable" "yes")]
12009 ;; Pop with return (as used in epilogue RTL)
12011 ;; This instruction is generated when the registers are popped at the end of
12012 ;; the epilogue.  Here, instead of popping the value into LR and then generating
12013 ;; a jump to LR, the value is popped into PC directly.  Hence, the pop and the
12014 ;; return are combined into a single pattern.
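;;
;; For illustration (hypothetical example): in
;;
;;   extern int g (int);
;;   int f (int x) { return g (x) + x; }
;;
;; x must survive the call, so a call-saved register and LR are pushed on
;; entry; the epilogue can then restore and return in one instruction,
;; e.g. "pop {r4, pc}" in Thumb-2 or "ldmfd sp!, {r4, pc}" in ARM state.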
12015 (define_insn "*pop_multiple_with_writeback_and_return"
12016 [(match_parallel 0 "pop_multiple_return"
12018 (set (match_operand:SI 1 "s_register_operand" "+rk")
12019 (plus:SI (match_dup 1)
12020 (match_operand:SI 2 "const_int_operand" "I")))
12021 (set (match_operand:SI 3 "s_register_operand" "=rk")
12022 (mem:SI (match_dup 1)))
12024 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12027 arm_output_multireg_pop (operands, /*return_pc=*/true,
12028 /*cond=*/const_true_rtx,
12034 [(set_attr "type" "load4")
12035 (set_attr "predicable" "yes")]
12038 (define_insn "*pop_multiple_with_return"
12039 [(match_parallel 0 "pop_multiple_return"
12041 (set (match_operand:SI 2 "s_register_operand" "=rk")
12042 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12044 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12047 arm_output_multireg_pop (operands, /*return_pc=*/true,
12048 /*cond=*/const_true_rtx,
12054 [(set_attr "type" "load4")
12055 (set_attr "predicable" "yes")]
12058 ;; Load into PC and return
12059 (define_insn "*ldr_with_return"
12061 (set (reg:SI PC_REGNUM)
12062 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12063 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12064 "ldr%?\t%|pc, [%0], #4"
12065 [(set_attr "type" "load1")
12066 (set_attr "predicable" "yes")]
12068 ;; Pop for floating point registers (as used in epilogue RTL)
12069 (define_insn "*vfp_pop_multiple_with_writeback"
12070 [(match_parallel 0 "pop_multiple_fp"
12071 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12072 (plus:SI (match_dup 1)
12073 (match_operand:SI 2 "const_int_operand" "I")))
12074 (set (match_operand:DF 3 "arm_hard_register_operand" "")
12075 (mem:DF (match_dup 1)))])]
12076 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
12079 int num_regs = XVECLEN (operands[0], 0);
12082 strcpy (pattern, \"fldmfdd\\t\");
12083 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12084 strcat (pattern, \"!, {\");
12085 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12086 strcat (pattern, \"%P0\");
12087 if ((num_regs - 1) > 1)
12089 strcat (pattern, \"-%P1\");
12090 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12093 strcat (pattern, \"}\");
12094 output_asm_insn (pattern, op_list);
12098 [(set_attr "type" "load4")
12099 (set_attr "conds" "unconditional")
12100 (set_attr "predicable" "no")]
12103 ;; Special patterns for dealing with the constant pool
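;;
;; For illustration: these patterns only emit data into the pool, they do
;; not execute anything.  On a core without MOVW/MOVT a constant such as
;; the one in
;;
;;   int f (void) { return 0x12345678; }
;;
;; is typically loaded with a PC-relative "ldr r0, .Lpool", and the pool
;; entry itself is laid down by consttable_4 below.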
12105 (define_insn "align_4"
12106 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12109 assemble_align (32);
12114 (define_insn "align_8"
12115 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12118 assemble_align (64);
12123 (define_insn "consttable_end"
12124 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12127 making_const_table = FALSE;
12132 (define_insn "consttable_1"
12133 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12136 making_const_table = TRUE;
12137 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12138 assemble_zeros (3);
12141 [(set_attr "length" "4")]
12144 (define_insn "consttable_2"
12145 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12148 making_const_table = TRUE;
12149 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
12150 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12151 assemble_zeros (2);
12154 [(set_attr "length" "4")]
12157 (define_insn "consttable_4"
12158 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12162 rtx x = operands[0];
12163 making_const_table = TRUE;
12164 switch (GET_MODE_CLASS (GET_MODE (x)))
12167 if (GET_MODE (x) == HFmode)
12168 arm_emit_fp16_const (x);
12172 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
12173 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
12177 /* XXX: Sometimes gcc does something really dumb and ends up with
12178 a HIGH in a constant pool entry, usually because it's trying to
12179 load into a VFP register. We know this will always be used in
12180 combination with a LO_SUM which ignores the high bits, so just
12181 strip off the HIGH. */
12182 if (GET_CODE (x) == HIGH)
12184 assemble_integer (x, 4, BITS_PER_WORD, 1);
12185 mark_symbol_refs_as_used (x);
12190 [(set_attr "length" "4")]
12193 (define_insn "consttable_8"
12194 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12198 making_const_table = TRUE;
12199 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12204 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12205 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12209 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12214 [(set_attr "length" "8")]
12217 (define_insn "consttable_16"
12218 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12222 making_const_table = TRUE;
12223 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12228 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12229 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12233 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12238 [(set_attr "length" "16")]
12241 ;; Miscellaneous Thumb patterns
12243 (define_expand "tablejump"
12244 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
12245 (use (label_ref (match_operand 1 "" "")))])]
12250 /* Hopefully, CSE will eliminate this copy. */
12251 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
12252 rtx reg2 = gen_reg_rtx (SImode);
12254 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
12255 operands[0] = reg2;
12260 ;; NB never uses BX.
12261 (define_insn "*thumb1_tablejump"
12262 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
12263 (use (label_ref (match_operand 1 "" "")))]
12266 [(set_attr "length" "2")]
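;;
;; For illustration (hypothetical example): a dense switch such as
;;
;;   int f (int i)
;;   {
;;     switch (i)
;;       {
;;       case 0: return 4;
;;       case 1: return 9;
;;       case 2: return 16;
;;       case 3: return 25;
;;       default: return -1;
;;       }
;;   }
;;
;; may be lowered to a load from a jump table followed by the register
;; branch matched above.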
12269 ;; V5 instructions.
12271 (define_insn "clzsi2"
12272 [(set (match_operand:SI 0 "s_register_operand" "=r")
12273 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12274 "TARGET_32BIT && arm_arch5"
12276 [(set_attr "predicable" "yes")
12277 (set_attr "insn" "clz")])
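;;
;; For illustration: clzsi2 is the standard name that __builtin_clz
;; expands to, so
;;
;;   int f (unsigned int x) { return __builtin_clz (x); }
;;
;; should compile to a single CLZ on a 32-bit arm_arch5 target (the
;; builtin's result is undefined for x == 0).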
12279 (define_insn "rbitsi2"
12280 [(set (match_operand:SI 0 "s_register_operand" "=r")
12281 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12282 "TARGET_32BIT && arm_arch_thumb2"
12284 [(set_attr "predicable" "yes")
12285 (set_attr "insn" "clz")])
12287 (define_expand "ctzsi2"
12288 [(set (match_operand:SI 0 "s_register_operand" "")
12289 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
12290 "TARGET_32BIT && arm_arch_thumb2"
12293 rtx tmp = gen_reg_rtx (SImode);
12294 emit_insn (gen_rbitsi2 (tmp, operands[1]));
12295 emit_insn (gen_clzsi2 (operands[0], tmp));
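;;
;; For illustration: on Thumb-2 capable cores __builtin_ctz therefore
;; becomes RBIT followed by CLZ, e.g.
;;
;;   int f (unsigned int x) { return __builtin_ctz (x); }
;;
;; is expected to assemble to "rbit r0, r0" then "clz r0, r0".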
12301 ;; V5E instructions.
12303 (define_insn "prefetch"
12304 [(prefetch (match_operand:SI 0 "address_operand" "p")
12305 (match_operand:SI 1 "" "")
12306 (match_operand:SI 2 "" ""))]
12307 "TARGET_32BIT && arm_arch5e"
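;;
;; For illustration: this is the standard "prefetch" pattern behind
;; __builtin_prefetch, so
;;
;;   void f (const int *p) { __builtin_prefetch (p); }
;;
;; is expected to emit a PLD hint on ARMv5E and later 32-bit targets; the
;; rw/locality arguments of the builtin are matched but a plain PLD is
;; emitted.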
12310 ;; General predication pattern
12313 [(match_operator 0 "arm_comparison_operator"
12314 [(match_operand 1 "cc_register" "")
12318 [(set_attr "predicated" "yes")]
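;;
;; For illustration: this is what allows if-conversion to predicate
;; ordinary instructions on the current condition-code value, so
;;
;;   int f (int a, int b, int c) { return a < b ? c : c + 1; }
;;
;; can become a compare followed by a conditionally executed add
;; (e.g. "addge r2, r2, #1", or an IT block in Thumb-2) instead of a
;; branch.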
12321 (define_insn "force_register_use"
12322 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12325 [(set_attr "length" "0")]
12329 ;; Patterns for exception handling
12331 (define_expand "eh_return"
12332 [(use (match_operand 0 "general_operand" ""))]
12337 emit_insn (gen_arm_eh_return (operands[0]));
12339 emit_insn (gen_thumb_eh_return (operands[0]));
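;;
;; For illustration: eh_return is only reached from __builtin_eh_return,
;; as used by the unwinder runtime, e.g.
;;
;;   void f (long off, void *handler)
;;   {
;;     __builtin_eh_return (off, handler);
;;   }
;;
;; which makes the current function return to HANDLER instead of its
;; normal return address.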
12344 ;; We can't expand this before we know where the link register is stored.
12345 (define_insn_and_split "arm_eh_return"
12346 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12348 (clobber (match_scratch:SI 1 "=&r"))]
12351 "&& reload_completed"
12355 arm_set_return_address (operands[0], operands[1]);
12360 (define_insn_and_split "thumb_eh_return"
12361 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
12363 (clobber (match_scratch:SI 1 "=&l"))]
12366 "&& reload_completed"
12370 thumb_set_return_address (operands[0], operands[1]);
12378 (define_insn "load_tp_hard"
12379 [(set (match_operand:SI 0 "register_operand" "=r")
12380 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12382 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12383 [(set_attr "predicable" "yes")]
12386 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12387 (define_insn "load_tp_soft"
12388 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12389 (clobber (reg:SI LR_REGNUM))
12390 (clobber (reg:SI IP_REGNUM))
12391 (clobber (reg:CC CC_REGNUM))]
12393 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12394 [(set_attr "conds" "clob")]
12397 ;; tls descriptor call
12398 (define_insn "tlscall"
12399 [(set (reg:SI R0_REGNUM)
12400 (unspec:SI [(reg:SI R0_REGNUM)
12401 (match_operand:SI 0 "" "X")
12402 (match_operand 1 "" "")] UNSPEC_TLS))
12403 (clobber (reg:SI R1_REGNUM))
12404 (clobber (reg:SI LR_REGNUM))
12405 (clobber (reg:SI CC_REGNUM))]
12408 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12409 INTVAL (operands[1]));
12410 return "bl\\t%c0(tlscall)";
12412 [(set_attr "conds" "clob")
12413 (set_attr "length" "4")]
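;;
;; For illustration: the descriptor call is used by the gnu2 TLS dialect,
;; e.g. compiling
;;
;;   __thread int t;
;;   int f (void) { return t; }
;;
;; with -fpic -mtls-dialect=gnu2 is expected to go through this
;; "bl ...(tlscall)" sequence instead of a call to __tls_get_addr.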
12416 ;; For thread pointer builtin
12417 (define_expand "get_thread_pointersi"
12418 [(match_operand:SI 0 "s_register_operand" "=r")]
12422 arm_load_tp (operands[0]);
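;;
;; For illustration: __builtin_thread_pointer expands through this
;; pattern, so
;;
;;   void *f (void) { return __builtin_thread_pointer (); }
;;
;; becomes either the CP15 read in load_tp_hard above or, when there is
;; no hardware TLS register, the __aeabi_read_tp call in load_tp_soft.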
12428 ;; We only care about the lower 16 bits of the constant
12429 ;; being inserted into the upper 16 bits of the register.
12430 (define_insn "*arm_movtas_ze"
12431 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
12434 (match_operand:SI 1 "const_int_operand" ""))]
12437 [(set_attr "predicable" "yes")
12438 (set_attr "length" "4")]
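;;
;; For illustration: the operation matched above is a MOVT-style insert;
;; the low 16 bits of the constant replace the top halfword of the
;; destination while the bottom halfword is preserved.  E.g. if r0 holds
;; 0x00005678, inserting a constant whose low 16 bits are 0x1234 leaves
;; r0 holding 0x12345678.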
12441 (define_insn "*arm_rev"
12442 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12443 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12449 [(set_attr "arch" "t1,t2,32")
12450 (set_attr "length" "2,2,4")]
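;;
;; For illustration: bswapsi2 below uses this single REV whenever the
;; architecture provides it, so
;;
;;   unsigned int f (unsigned int x) { return __builtin_bswap32 (x); }
;;
;; should become one "rev r0, r0" on ARMv6 and later, while older cores
;; fall back to the arm_legacy_rev/thumb_legacy_rev sequences that
;; follow.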
12453 (define_expand "arm_legacy_rev"
12454 [(set (match_operand:SI 2 "s_register_operand" "")
12455 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
12459 (lshiftrt:SI (match_dup 2)
12461 (set (match_operand:SI 3 "s_register_operand" "")
12462 (rotatert:SI (match_dup 1)
12465 (and:SI (match_dup 2)
12466 (const_int -65281)))
12467 (set (match_operand:SI 0 "s_register_operand" "")
12468 (xor:SI (match_dup 3)
12474 ;; Reuse temporaries to keep register pressure down.
12475 (define_expand "thumb_legacy_rev"
12476 [(set (match_operand:SI 2 "s_register_operand" "")
12477 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
12479 (set (match_operand:SI 3 "s_register_operand" "")
12480 (lshiftrt:SI (match_dup 1)
12483 (ior:SI (match_dup 3)
12485 (set (match_operand:SI 4 "s_register_operand" "")
12487 (set (match_operand:SI 5 "s_register_operand" "")
12488 (rotatert:SI (match_dup 1)
12491 (ashift:SI (match_dup 5)
12494 (lshiftrt:SI (match_dup 5)
12497 (ior:SI (match_dup 5)
12500 (rotatert:SI (match_dup 5)
12502 (set (match_operand:SI 0 "s_register_operand" "")
12503 (ior:SI (match_dup 5)
12509 (define_expand "bswapsi2"
12510 [(set (match_operand:SI 0 "s_register_operand" "=r")
12511 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
12512 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12516 rtx op2 = gen_reg_rtx (SImode);
12517 rtx op3 = gen_reg_rtx (SImode);
12521 rtx op4 = gen_reg_rtx (SImode);
12522 rtx op5 = gen_reg_rtx (SImode);
12524 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12525 op2, op3, op4, op5));
12529 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12538 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12539 ;; and unsigned variants, respectively. For rev16, expose
12540 ;; byte-swapping in the lower 16 bits only.
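;;
;; For illustration (hypothetical example):
;;
;;   short f (short x) { return __builtin_bswap16 (x); }
;;
;; swaps and then sign-extends, so it may be matched by *arm_revsh as a
;; single REVSH, while a purely unsigned 16-bit swap goes through
;; bswaphi2 and REV16.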
12541 (define_insn "*arm_revsh"
12542 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12543 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12549 [(set_attr "arch" "t1,t2,32")
12550 (set_attr "length" "2,2,4")]
12553 (define_insn "*arm_rev16"
12554 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12555 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12561 [(set_attr "arch" "t1,t2,32")
12562 (set_attr "length" "2,2,4")]
12565 (define_expand "bswaphi2"
12566 [(set (match_operand:HI 0 "s_register_operand" "=r")
12567 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
12572 ;; Patterns for LDRD/STRD in Thumb2 mode
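;;
;; For illustration: these are the fixed forms that the ldrdstrd.md
;; peepholes (included near the end of this file) rewrite adjacent word
;; accesses into, e.g.
;;
;;   void f (int *d, const int *s) { d[0] = s[0]; d[1] = s[1]; }
;;
;; may end up as "ldrd r2, r3, [r1]" followed by "strd r2, r3, [r0]" on a
;; Thumb-2 core whose tuning prefers LDRD/STRD.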
12574 (define_insn "*thumb2_ldrd"
12575 [(set (match_operand:SI 0 "s_register_operand" "=r")
12576 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12577 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12578 (set (match_operand:SI 3 "s_register_operand" "=r")
12579 (mem:SI (plus:SI (match_dup 1)
12580 (match_operand:SI 4 "const_int_operand" ""))))]
12581 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12582 && current_tune->prefer_ldrd_strd
12583 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12584 && (operands_ok_ldrd_strd (operands[0], operands[3],
12585 operands[1], INTVAL (operands[2]),
12587 "ldrd%?\t%0, %3, [%1, %2]"
12588 [(set_attr "type" "load2")
12589 (set_attr "predicable" "yes")])
12591 (define_insn "*thumb2_ldrd_base"
12592 [(set (match_operand:SI 0 "s_register_operand" "=r")
12593 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12594 (set (match_operand:SI 2 "s_register_operand" "=r")
12595 (mem:SI (plus:SI (match_dup 1)
12597 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12598 && current_tune->prefer_ldrd_strd
12599 && (operands_ok_ldrd_strd (operands[0], operands[2],
12600 operands[1], 0, false, true))"
12601 "ldrd%?\t%0, %2, [%1]"
12602 [(set_attr "type" "load2")
12603 (set_attr "predicable" "yes")])
12605 (define_insn "*thumb2_ldrd_base_neg"
12606 [(set (match_operand:SI 0 "s_register_operand" "=r")
12607 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12609 (set (match_operand:SI 2 "s_register_operand" "=r")
12610 (mem:SI (match_dup 1)))]
12611 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12612 && current_tune->prefer_ldrd_strd
12613 && (operands_ok_ldrd_strd (operands[0], operands[2],
12614 operands[1], -4, false, true))"
12615 "ldrd%?\t%0, %2, [%1, #-4]"
12616 [(set_attr "type" "load2")
12617 (set_attr "predicable" "yes")])
12619 (define_insn "*thumb2_strd"
12620 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12621 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12622 (match_operand:SI 2 "s_register_operand" "r"))
12623 (set (mem:SI (plus:SI (match_dup 0)
12624 (match_operand:SI 3 "const_int_operand" "")))
12625 (match_operand:SI 4 "s_register_operand" "r"))]
12626 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12627 && current_tune->prefer_ldrd_strd
12628 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12629 && (operands_ok_ldrd_strd (operands[2], operands[4],
12630 operands[0], INTVAL (operands[1]),
12632 "strd%?\t%2, %4, [%0, %1]"
12633 [(set_attr "type" "store2")
12634 (set_attr "predicable" "yes")])
12636 (define_insn "*thumb2_strd_base"
12637 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12638 (match_operand:SI 1 "s_register_operand" "r"))
12639 (set (mem:SI (plus:SI (match_dup 0)
12641 (match_operand:SI 2 "s_register_operand" "r"))]
12642 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12643 && current_tune->prefer_ldrd_strd
12644 && (operands_ok_ldrd_strd (operands[1], operands[2],
12645 operands[0], 0, false, false))"
12646 "strd%?\t%1, %2, [%0]"
12647 [(set_attr "type" "store2")
12648 (set_attr "predicable" "yes")])
12650 (define_insn "*thumb2_strd_base_neg"
12651 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12653 (match_operand:SI 1 "s_register_operand" "r"))
12654 (set (mem:SI (match_dup 0))
12655 (match_operand:SI 2 "s_register_operand" "r"))]
12656 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12657 && current_tune->prefer_ldrd_strd
12658 && (operands_ok_ldrd_strd (operands[1], operands[2],
12659 operands[0], -4, false, false))"
12660 "strd%?\t%1, %2, [%0, #-4]"
12661 [(set_attr "type" "store2")
12662 (set_attr "predicable" "yes")])
12665 ;; Load the load/store double peephole optimizations.
12666 (include "ldrdstrd.md")
12668 ;; Load the load/store multiple patterns
12669 (include "ldmstm.md")
12671 ;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern covers
12672 ;; larger register lists without explicit writeback, as generated for the APCS_FRAME epilogue.
12673 (define_insn "*load_multiple"
12674 [(match_parallel 0 "load_multiple_operation"
12675 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12676 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12681 arm_output_multireg_pop (operands, /*return_pc=*/false,
12682 /*cond=*/const_true_rtx,
12688 [(set_attr "predicable" "yes")]
12691 ;; Vector bits common to IWMMXT and Neon
12692 (include "vec-common.md")
12693 ;; Load the Intel Wireless Multimedia Extension patterns
12694 (include "iwmmxt.md")
12695 ;; Load the VFP co-processor patterns
12696 (include "vfp.md")
12697 ;; Thumb-2 patterns
12698 (include "thumb2.md")
12699 ;; Neon patterns
12700 (include "neon.md")
12701 ;; Synchronization Primitives
12702 (include "sync.md")
12703 ;; Fixed-point patterns
12704 (include "arm-fixed.md")