1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57   [(UNSPEC_SIN       0) ; `sin' operation (MODE_FLOAT):
58                        ;   operand 0 is the result,
59                        ;   operand 1 the parameter.
60    (UNSPEC_COS       1) ; `cos' operation (MODE_FLOAT):
61                        ;   operand 0 is the result,
62                        ;   operand 1 the parameter.
63    (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64                        ;   operand 0 is the first register,
65                        ;   subsequent registers are in parallel (use ...)
67    (UNSPEC_PIC_SYM   3) ; A symbol that has been treated properly for pic
68                        ; usage, that is, we will add the pic_register
69                        ; value to it before trying to dereference it.
70    (UNSPEC_PIC_BASE  4) ; Add PC and all but the last operand together,
71                        ; The last operand is the number of a PIC_LABEL
72                        ; that points at the containing instruction.
73    (UNSPEC_PRLG_STK  5) ; A special barrier that prevents frame accesses
74                        ; being scheduled before the stack adjustment insn.
75    (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76                        ; this unspec is used to prevent the deletion of
77                        ; instructions setting registers for EH handling
78                        ; and stack frame generation.  Operand 0 is the
80    (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81    (UNSPEC_WSHUFH    8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82    (UNSPEC_WACC      9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83    (UNSPEC_TMOVMSK  10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84    (UNSPEC_WSAD     11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85    (UNSPEC_WSADZ    12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86    (UNSPEC_WMACS    13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87    (UNSPEC_WMACU    14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88    (UNSPEC_WMACSZ   15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89    (UNSPEC_WMACUZ   16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90    (UNSPEC_CLRDI    17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91    (UNSPEC_WMADDS   18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92    (UNSPEC_WMADDU   19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93    (UNSPEC_TLS      20) ; A symbol that has been treated properly for TLS usage.
94    (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96    (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer.  Used to
97                        ; generate correct unwind information.
98    (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99                        ; correctly for PIC usage.
100    (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101                        ; a given symbolic address.
102    (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103    (UNSPEC_RBIT 26)     ; rbit operation.
104    (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105                        ; another symbolic address.
106    (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
110 ;; UNSPEC_VOLATILE Usage:
113   [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
115    (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
116                        ;   instruction epilogue sequence that isn't expanded
117                        ;   into normal RTL.  Used for both normal and sibcall
119    (VUNSPEC_ALIGN    2) ; `align' insn.  Used at the head of a minipool table
120                        ;   for inlined constants.
121    (VUNSPEC_POOL_END 3) ; `end-of-table'.  Used to mark the end of a minipool
123    (VUNSPEC_POOL_1   4) ; `pool-entry(1)'.  An entry in the constant pool for
125    (VUNSPEC_POOL_2   5) ; `pool-entry(2)'.  An entry in the constant pool for
127    (VUNSPEC_POOL_4   6) ; `pool-entry(4)'.  An entry in the constant pool for
129    (VUNSPEC_POOL_8   7) ; `pool-entry(8)'.  An entry in the constant pool for
131    (VUNSPEC_POOL_16  8) ; `pool-entry(16)'.  An entry in the constant pool for
133    (VUNSPEC_TMRC     9) ; Used by the iWMMXt TMRC instruction.
134    (VUNSPEC_TMCR    10) ; Used by the iWMMXt TMCR instruction.
135    (VUNSPEC_ALIGN8  11) ; 8-byte alignment version of VUNSPEC_ALIGN
136    (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
137    (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
138    (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
139    (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
141    (VUNSPEC_SYNC_COMPARE_AND_SWAP 21) ; Represent an atomic compare swap.
142    (VUNSPEC_SYNC_LOCK 22) ; Represent a sync_lock_test_and_set.
143    (VUNSPEC_SYNC_OP 23) ; Represent a sync_<op>
144    (VUNSPEC_SYNC_NEW_OP 24) ; Represent a sync_new_<op>
145    (VUNSPEC_SYNC_OLD_OP 25) ; Represent a sync_old_<op>
149 ;;---------------------------------------------------------------------------
152 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
153 ; generating ARM code.  This is used to control the length of some insn
154 ; patterns that share the same RTL in both ARM and Thumb code.
155 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
157 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
158 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
160 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
161 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
163 ;; Operand number of an input operand that is shifted.  Zero if the
164 ;; given instruction does not shift one of its input operands.
165 (define_attr "shift" "" (const_int 0))
167 ; Floating Point Unit.  If we only have floating point emulation, then there
168 ; is no point in scheduling the floating point insns.  (Well, for best
169 ; performance we should try and group them together).
170 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
171   (const (symbol_ref "arm_fpu_attr")))
; NOTE(review): the sync_* attributes below appear to record, per pattern, the
; operand number playing each role in the atomic (sync) loop emitters -- they
; are read by arm_sync_loop_insns via the "length" attribute; confirm the
; exact consumer in the ARM backend sources.
173 (define_attr "sync_result"          "none,0,1,2,3,4,5" (const_string "none"))
174 (define_attr "sync_memory"          "none,0,1,2,3,4,5" (const_string "none"))
175 (define_attr "sync_required_value"  "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_new_value"       "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_t1"              "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_t2"              "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_release_barrier" "yes,no"           (const_string "yes"))
180 (define_attr "sync_op"              "none,add,sub,ior,xor,and,nand"
181                                     (const_string "none"))
183 ; LENGTH of an instruction (in bytes)
184 (define_attr "length" ""
185 (cond [(not (eq_attr "sync_memory" "none"))
186 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
189 ; The architecture which supports the instruction (or alternative).
190 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
191 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
192 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
193 ; arm_arch6. This attribute is used to compute attribute "enabled",
194 ; use type "any" to enable an alternative in all cases.
195 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6"
196 (const_string "any"))
198 (define_attr "arch_enabled" "no,yes"
199 (cond [(eq_attr "arch" "any")
202 (and (eq_attr "arch" "a")
203 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
206 (and (eq_attr "arch" "t")
207 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
210 (and (eq_attr "arch" "t1")
211 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
214 (and (eq_attr "arch" "t2")
215 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
218 (and (eq_attr "arch" "32")
219 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
222 (and (eq_attr "arch" "v6")
223 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
226 (and (eq_attr "arch" "nov6")
227 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
228 (const_string "yes")]
229 (const_string "no")))
231 ; Allows an insn to disable certain alternatives for reasons other than
233 (define_attr "insn_enabled" "no,yes"
234 (const_string "yes"))
236 ; Enable all alternatives that are both arch_enabled and insn_enabled.
237 (define_attr "enabled" "no,yes"
238 (if_then_else (eq_attr "insn_enabled" "yes")
239 (if_then_else (eq_attr "arch_enabled" "yes")
242 (const_string "no")))
244 ; POOL_RANGE is how far away from a constant pool entry that this insn
245 ; can be placed. If the distance is zero, then this insn will never
246 ; reference the pool.
247 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
248 ; before its address.
249 (define_attr "arm_pool_range" "" (const_int 0))
250 (define_attr "thumb2_pool_range" "" (const_int 0))
251 (define_attr "arm_neg_pool_range" "" (const_int 0))
252 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
254 (define_attr "pool_range" ""
255 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
256 (attr "arm_pool_range")))
257 (define_attr "neg_pool_range" ""
258 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
259 (attr "arm_neg_pool_range")))
261 ; An assembler sequence may clobber the condition codes without us knowing.
262 ; If such an insn references the pool, then we have no way of knowing how,
263 ; so use the most conservative value for pool_range.
264 (define_asm_attributes
265 [(set_attr "conds" "clob")
266 (set_attr "length" "4")
267 (set_attr "pool_range" "250")])
269 ;; The instruction used to implement a particular pattern. This
270 ;; information is used by pipeline descriptions to provide accurate
271 ;; scheduling information.
274 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
275 (const_string "other"))
277 ; TYPE attribute is used to detect floating point instructions which, if
278 ; running on a co-processor can run in parallel with other, basic instructions
279 ; If write-buffer scheduling is enabled then it can also be used in the
280 ; scheduling of writes.
282 ; Classification of each insn
283 ; Note: vfp.md has different meanings for some of these, and some further
284 ; types as well. See that file for details.
285 ; alu any alu instruction that doesn't hit memory or fp
286 ; regs or have a shifted source operand
287 ; alu_shift any data instruction that doesn't hit memory or fp
288 ; regs, but has a source operand shifted by a constant
289 ; alu_shift_reg any data instruction that doesn't hit memory or fp
290 ; regs, but has a source operand shifted by a register value
291 ; mult a multiply instruction
292 ; block blockage insn, this blocks all functional units
293 ; float a floating point arithmetic operation (subject to expansion)
294 ; fdivd DFmode floating point division
295 ; fdivs SFmode floating point division
296 ; fmul Floating point multiply
297 ; ffmul Fast floating point multiply
298 ; farith Floating point arithmetic (4 cycle)
299 ; ffarith Fast floating point arithmetic (2 cycle)
300 ; float_em a floating point arithmetic operation that is normally emulated
301 ; even on a machine with an fpa.
302 ; f_fpa_load a floating point load from memory. Only for the FPA.
303 ; f_fpa_store a floating point store to memory. Only for the FPA.
304 ; f_load[sd] A single/double load from memory. Used for VFP unit.
305 ; f_store[sd] A single/double store to memory. Used for VFP unit.
306 ; f_flag a transfer of co-processor flags to the CPSR
307 ; f_mem_r a transfer of a floating point register to a real reg via mem
308 ; r_mem_f the reverse of f_mem_r
309 ; f_2_r fast transfer float to arm (no memory needed)
310 ; r_2_f fast transfer arm to float
311 ; f_cvt convert floating<->integral
313 ; call a subroutine call
314 ; load_byte load byte(s) from memory to arm registers
315 ; load1 load 1 word from memory to arm registers
316 ; load2 load 2 words from memory to arm registers
317 ; load3 load 3 words from memory to arm registers
318 ; load4 load 4 words from memory to arm registers
319 ; store store 1 word to memory from arm registers
320 ; store2 store 2 words
321 ; store3 store 3 words
322 ; store4 store 4 (or more) words
323 ; Additions for Cirrus Maverick co-processor:
324 ; mav_farith Floating point arithmetic (4 cycle)
325 ; mav_dmult Double multiplies (7 cycle)
329 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
331 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
332 (const_string "mult")
333 (const_string "alu")))
335 ; Load scheduling, set from the arm_ld_sched variable
336 ; initialized by arm_option_override()
337 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
339 ;; Classification of NEON instructions for scheduling purposes.
340 ;; Do not set this attribute and the "type" attribute together in
341 ;; any one instruction pattern.
342 (define_attr "neon_type"
353 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
354 neon_mul_qqq_8_16_32_ddd_32,\
355 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
356 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
358 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
359 neon_mla_qqq_32_qqd_32_scalar,\
360 neon_mul_ddd_16_scalar_32_16_long_scalar,\
361 neon_mul_qqd_32_scalar,\
362 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
367 neon_vqshl_vrshl_vqrshl_qqq,\
369 neon_fp_vadd_ddd_vabs_dd,\
370 neon_fp_vadd_qqq_vabs_qq,\
376 neon_fp_vmla_ddd_scalar,\
377 neon_fp_vmla_qqq_scalar,\
378 neon_fp_vrecps_vrsqrts_ddd,\
379 neon_fp_vrecps_vrsqrts_qqq,\
387 neon_vld2_2_regs_vld1_vld2_all_lanes,\
390 neon_vst1_1_2_regs_vst2_2_regs,\
392 neon_vst2_4_regs_vst3_vst4,\
394 neon_vld1_vld2_lane,\
395 neon_vld3_vld4_lane,\
396 neon_vst1_vst2_lane,\
397 neon_vst3_vst4_lane,\
398 neon_vld3_vld4_all_lanes,\
406 (const_string "none"))
408 ; condition codes: this one is used by final_prescan_insn to speed up
409 ; conditionalizing instructions. It saves having to scan the rtl to see if
410 ; it uses or alters the condition codes.
412 ; USE means that the condition codes are used by the insn in the process of
413 ; outputting code, this means (at present) that we can't use the insn in
416 ; SET means that the purpose of the insn is to set the condition codes in a
417 ; well defined manner.
419 ; CLOB means that the condition codes are altered in an undefined manner, if
420 ; they are altered at all
422 ; UNCONDITIONAL means the instruction can not be conditionally executed and
423 ; that the instruction does not use or alter the condition codes.
425 ; NOCOND means that the instruction does not use or alter the condition
426 ; codes but can be converted into a conditionally executed instruction.
428 (define_attr "conds" "use,set,clob,unconditional,nocond"
430 (ior (eq_attr "is_thumb1" "yes")
431 (eq_attr "type" "call"))
432 (const_string "clob")
433 (if_then_else (eq_attr "neon_type" "none")
434 (const_string "nocond")
435 (const_string "unconditional"))))
437 ; Predicable means that the insn can be conditionally executed based on
438 ; an automatically added predicate (additional patterns are generated by
439 ; gen...). We default to 'no' because no Thumb patterns match this rule
440 ; and not all ARM patterns do.
441 (define_attr "predicable" "no,yes" (const_string "no"))
443 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
444 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
445 ; suffer blockages enough to warrant modelling this (and it can adversely
446 ; affect the schedule).
447 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
449 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
450 ; to stall the processor. Used with model_wbuf above.
451 (define_attr "write_conflict" "no,yes"
452 (if_then_else (eq_attr "type"
453 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
455 (const_string "no")))
457 ; Classify the insns into those that take one cycle and those that take more
458 ; than one on the main cpu execution unit.
459 (define_attr "core_cycles" "single,multi"
460 (if_then_else (eq_attr "type"
461 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
462 (const_string "single")
463 (const_string "multi")))
465 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
466 ;; distant label. Only applicable to Thumb code.
467 (define_attr "far_jump" "yes,no" (const_string "no"))
470 ;; The number of machine instructions this pattern expands to.
471 ;; Used for Thumb-2 conditional execution.
472 (define_attr "ce_count" "" (const_int 1))
474 ;;---------------------------------------------------------------------------
477 (include "iterators.md")
479 ;;---------------------------------------------------------------------------
482 (include "predicates.md")
483 (include "constraints.md")
485 ;;---------------------------------------------------------------------------
486 ;; Pipeline descriptions
488 ;; Processor type. This is created automatically from arm-cores.def.
489 (include "arm-tune.md")
491 (define_attr "tune_cortexr4" "yes,no"
493 (eq_attr "tune" "cortexr4,cortexr4f")
495 (const_string "no"))))
497 ;; True if the generic scheduling description should be used.
499 (define_attr "generic_sched" "yes,no"
501 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
502 (eq_attr "tune_cortexr4" "yes"))
504 (const_string "yes"))))
506 (define_attr "generic_vfp" "yes,no"
508 (and (eq_attr "fpu" "vfp")
509 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
510 (eq_attr "tune_cortexr4" "no"))
512 (const_string "no"))))
514 (include "arm-generic.md")
515 (include "arm926ejs.md")
516 (include "arm1020e.md")
517 (include "arm1026ejs.md")
518 (include "arm1136jfs.md")
520 (include "fa606te.md")
521 (include "fa626te.md")
522 (include "fmp626.md")
523 (include "fa726te.md")
524 (include "cortex-a5.md")
525 (include "cortex-a8.md")
526 (include "cortex-a9.md")
527 (include "cortex-r4.md")
528 (include "cortex-r4f.md")
529 (include "cortex-m4.md")
530 (include "cortex-m4-fpu.md")
534 ;;---------------------------------------------------------------------------
539 ;; Note: For DImode insns, there is normally no reason why operands should
540 ;; not be in the same register, what we don't want is for something being
541 ;; written to partially overlap something that is an input.
542 ;; Cirrus 64bit additions should not be split because we have a native
543 ;; 64bit addition instructions.
545 (define_expand "adddi3"
547 [(set (match_operand:DI 0 "s_register_operand" "")
548 (plus:DI (match_operand:DI 1 "s_register_operand" "")
549 (match_operand:DI 2 "s_register_operand" "")))
550 (clobber (reg:CC CC_REGNUM))])]
553 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
555 if (!cirrus_fp_register (operands[0], DImode))
556 operands[0] = force_reg (DImode, operands[0]);
557 if (!cirrus_fp_register (operands[1], DImode))
558 operands[1] = force_reg (DImode, operands[1]);
559 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
565 if (GET_CODE (operands[1]) != REG)
566 operands[1] = force_reg (DImode, operands[1]);
567 if (GET_CODE (operands[2]) != REG)
568 operands[2] = force_reg (DImode, operands[2]);
573 (define_insn "*thumb1_adddi3"
574 [(set (match_operand:DI 0 "register_operand" "=l")
575 (plus:DI (match_operand:DI 1 "register_operand" "%0")
576 (match_operand:DI 2 "register_operand" "l")))
577 (clobber (reg:CC CC_REGNUM))
580 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
581 [(set_attr "length" "4")]
584 (define_insn_and_split "*arm_adddi3"
585 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
586 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
587 (match_operand:DI 2 "s_register_operand" "r, 0")))
588 (clobber (reg:CC CC_REGNUM))]
589 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
591 "TARGET_32BIT && reload_completed
592 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
593 [(parallel [(set (reg:CC_C CC_REGNUM)
594 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
596 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
597 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
598 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
601 operands[3] = gen_highpart (SImode, operands[0]);
602 operands[0] = gen_lowpart (SImode, operands[0]);
603 operands[4] = gen_highpart (SImode, operands[1]);
604 operands[1] = gen_lowpart (SImode, operands[1]);
605 operands[5] = gen_highpart (SImode, operands[2]);
606 operands[2] = gen_lowpart (SImode, operands[2]);
608 [(set_attr "conds" "clob")
609 (set_attr "length" "8")]
612 (define_insn_and_split "*adddi_sesidi_di"
613 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
614 (plus:DI (sign_extend:DI
615 (match_operand:SI 2 "s_register_operand" "r,r"))
616 (match_operand:DI 1 "s_register_operand" "0,r")))
617 (clobber (reg:CC CC_REGNUM))]
618 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
620 "TARGET_32BIT && reload_completed"
621 [(parallel [(set (reg:CC_C CC_REGNUM)
622 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
624 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
625 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
628 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
631 operands[3] = gen_highpart (SImode, operands[0]);
632 operands[0] = gen_lowpart (SImode, operands[0]);
633 operands[4] = gen_highpart (SImode, operands[1]);
634 operands[1] = gen_lowpart (SImode, operands[1]);
635 operands[2] = gen_lowpart (SImode, operands[2]);
637 [(set_attr "conds" "clob")
638 (set_attr "length" "8")]
641 (define_insn_and_split "*adddi_zesidi_di"
642 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
643 (plus:DI (zero_extend:DI
644 (match_operand:SI 2 "s_register_operand" "r,r"))
645 (match_operand:DI 1 "s_register_operand" "0,r")))
646 (clobber (reg:CC CC_REGNUM))]
647 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
649 "TARGET_32BIT && reload_completed"
650 [(parallel [(set (reg:CC_C CC_REGNUM)
651 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
653 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
654 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
655 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
658 operands[3] = gen_highpart (SImode, operands[0]);
659 operands[0] = gen_lowpart (SImode, operands[0]);
660 operands[4] = gen_highpart (SImode, operands[1]);
661 operands[1] = gen_lowpart (SImode, operands[1]);
662 operands[2] = gen_lowpart (SImode, operands[2]);
664 [(set_attr "conds" "clob")
665 (set_attr "length" "8")]
668 (define_expand "addsi3"
669 [(set (match_operand:SI 0 "s_register_operand" "")
670 (plus:SI (match_operand:SI 1 "s_register_operand" "")
671 (match_operand:SI 2 "reg_or_int_operand" "")))]
674 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
676 arm_split_constant (PLUS, SImode, NULL_RTX,
677 INTVAL (operands[2]), operands[0], operands[1],
678 optimize && can_create_pseudo_p ());
684 ; If there is a scratch available, this will be faster than synthesizing the
687 [(match_scratch:SI 3 "r")
688 (set (match_operand:SI 0 "arm_general_register_operand" "")
689 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
690 (match_operand:SI 2 "const_int_operand" "")))]
692 !(const_ok_for_arm (INTVAL (operands[2]))
693 || const_ok_for_arm (-INTVAL (operands[2])))
694 && const_ok_for_arm (~INTVAL (operands[2]))"
695 [(set (match_dup 3) (match_dup 2))
696 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
700 ;; The r/r/k alternative is required when reloading the address
701 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
702 ;; put the duplicated register first, and not try the commutative version.
703 (define_insn_and_split "*arm_addsi3"
704 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
705 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
706 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
716 && GET_CODE (operands[2]) == CONST_INT
717 && !(const_ok_for_arm (INTVAL (operands[2]))
718 || const_ok_for_arm (-INTVAL (operands[2])))
719 && (reload_completed || !arm_eliminable_register (operands[1]))"
720 [(clobber (const_int 0))]
722 arm_split_constant (PLUS, SImode, curr_insn,
723 INTVAL (operands[2]), operands[0],
727 [(set_attr "length" "4,4,4,4,4,16")
728 (set_attr "predicable" "yes")]
731 (define_insn_and_split "*thumb1_addsi3"
732 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
733 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
734 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
737 static const char * const asms[] =
739 \"add\\t%0, %0, %2\",
740 \"sub\\t%0, %0, #%n2\",
741 \"add\\t%0, %1, %2\",
742 \"add\\t%0, %0, %2\",
743 \"add\\t%0, %0, %2\",
744 \"add\\t%0, %1, %2\",
745 \"add\\t%0, %1, %2\",
750 if ((which_alternative == 2 || which_alternative == 6)
751 && GET_CODE (operands[2]) == CONST_INT
752 && INTVAL (operands[2]) < 0)
753 return \"sub\\t%0, %1, #%n2\";
754 return asms[which_alternative];
756 "&& reload_completed && CONST_INT_P (operands[2])
757 && ((operands[1] != stack_pointer_rtx
758 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
759 || (operands[1] == stack_pointer_rtx
760 && INTVAL (operands[2]) > 1020))"
761 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
762 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
764 HOST_WIDE_INT offset = INTVAL (operands[2]);
765 if (operands[1] == stack_pointer_rtx)
771 else if (offset < -255)
774 operands[3] = GEN_INT (offset);
775 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
777 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
780 ;; Reloading and elimination of the frame pointer can
781 ;; sometimes cause this optimization to be missed.
783 [(set (match_operand:SI 0 "arm_general_register_operand" "")
784 (match_operand:SI 1 "const_int_operand" ""))
786 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
788 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
789 && (INTVAL (operands[1]) & 3) == 0"
790 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
794 (define_insn "*addsi3_compare0"
795 [(set (reg:CC_NOOV CC_REGNUM)
797 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
798 (match_operand:SI 2 "arm_add_operand" "rI,L"))
800 (set (match_operand:SI 0 "s_register_operand" "=r,r")
801 (plus:SI (match_dup 1) (match_dup 2)))]
805 sub%.\\t%0, %1, #%n2"
806 [(set_attr "conds" "set")]
809 (define_insn "*addsi3_compare0_scratch"
810 [(set (reg:CC_NOOV CC_REGNUM)
812 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
813 (match_operand:SI 1 "arm_add_operand" "rI,L"))
819 [(set_attr "conds" "set")]
822 (define_insn "*compare_negsi_si"
823 [(set (reg:CC_Z CC_REGNUM)
825 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
826 (match_operand:SI 1 "s_register_operand" "r")))]
829 [(set_attr "conds" "set")]
832 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
833 ;; addend is a constant.
834 (define_insn "*cmpsi2_addneg"
835 [(set (reg:CC CC_REGNUM)
837 (match_operand:SI 1 "s_register_operand" "r,r")
838 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
839 (set (match_operand:SI 0 "s_register_operand" "=r,r")
840 (plus:SI (match_dup 1)
841 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
842 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
845 sub%.\\t%0, %1, #%n3"
846 [(set_attr "conds" "set")]
849 ;; Convert the sequence
851 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
855 ;; bcs dest ((unsigned)rn >= 1)
856 ;; similarly for the beq variant using bcc.
857 ;; This is a common looping idiom (while (n--))
859 [(set (match_operand:SI 0 "arm_general_register_operand" "")
860 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
862 (set (match_operand 2 "cc_register" "")
863 (compare (match_dup 0) (const_int -1)))
865 (if_then_else (match_operator 3 "equality_operator"
866 [(match_dup 2) (const_int 0)])
867 (match_operand 4 "" "")
868 (match_operand 5 "" "")))]
869 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
873 (match_dup 1) (const_int 1)))
874 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
876 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
879 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
880 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
883 operands[2], const0_rtx);"
886 ;; The next four insns work because they compare the result with one of
887 ;; the operands, and we know that the use of the condition code is
888 ;; either GEU or LTU, so we can use the carry flag from the addition
889 ;; instead of doing the compare a second time.
890 (define_insn "*addsi3_compare_op1"
891 [(set (reg:CC_C CC_REGNUM)
893 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
894 (match_operand:SI 2 "arm_add_operand" "rI,L"))
896 (set (match_operand:SI 0 "s_register_operand" "=r,r")
897 (plus:SI (match_dup 1) (match_dup 2)))]
901 sub%.\\t%0, %1, #%n2"
902 [(set_attr "conds" "set")]
905 (define_insn "*addsi3_compare_op2"
906 [(set (reg:CC_C CC_REGNUM)
908 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
909 (match_operand:SI 2 "arm_add_operand" "rI,L"))
911 (set (match_operand:SI 0 "s_register_operand" "=r,r")
912 (plus:SI (match_dup 1) (match_dup 2)))]
916 sub%.\\t%0, %1, #%n2"
917 [(set_attr "conds" "set")]
920 (define_insn "*compare_addsi2_op0"
921 [(set (reg:CC_C CC_REGNUM)
923 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
924 (match_operand:SI 1 "arm_add_operand" "rI,L"))
930 [(set_attr "conds" "set")]
933 (define_insn "*compare_addsi2_op1"
934 [(set (reg:CC_C CC_REGNUM)
936 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
937 (match_operand:SI 1 "arm_add_operand" "rI,L"))
943 [(set_attr "conds" "set")]
;; Add-with-carry: operand1 + operand2 + C, where the carry-in comes from
;; CC_REGNUM via the LTUGEU code iterator (LTU or GEU view of the flags).
946 (define_insn "*addsi3_carryin_<optab>"
947 [(set (match_operand:SI 0 "s_register_operand" "=r")
948 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
949 (match_operand:SI 2 "arm_rhs_operand" "rI"))
950 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
953 [(set_attr "conds" "use")]
;; Same add-with-carry as above, matched with the carry term appearing
;; first in the canonical RTL ordering.
956 (define_insn "*addsi3_carryin_alt2_<optab>"
957 [(set (match_operand:SI 0 "s_register_operand" "=r")
958 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
959 (match_operand:SI 1 "s_register_operand" "%r"))
960 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
963 [(set_attr "conds" "use")]
;; Add-with-carry where one addend is a shifted register: ADC with a
;; shifter operand (%S2 prints the shift).  The "type" attribute picks
;; alu_shift for an immediate shift amount, alu_shift_reg for a register
;; shift amount (operand 4 decides which).
966 (define_insn "*addsi3_carryin_shift_<optab>"
967 [(set (match_operand:SI 0 "s_register_operand" "=r")
969 (match_operator:SI 2 "shift_operator"
970 [(match_operand:SI 3 "s_register_operand" "r")
971 (match_operand:SI 4 "reg_or_int_operand" "rM")])
972 (match_operand:SI 1 "s_register_operand" "r"))
973 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
975 "adc%?\\t%0, %1, %3%S2"
976 [(set_attr "conds" "use")
977 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
978 (const_string "alu_shift")
979 (const_string "alu_shift_reg")))]
;; Expander: operand0 = operand1 + (comparison result), i.e. increment
;; operand1 when the condition in CC (operand 3) holds.
982 (define_expand "incscc"
983 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
984 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
985 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
986 (match_operand:SI 1 "s_register_operand" "0,?r")))]
;; ARM implementation of incscc.  Second alternative (operand 0 distinct
;; from operand 1) needs two insns -- conditional MOV then conditional
;; ADD #1 -- hence length "4,8".  Uses (does not set) the flags.
991 (define_insn "*arm_incscc"
992 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
993 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
994 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
995 (match_operand:SI 1 "s_register_operand" "0,?r")))]
999 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1000 [(set_attr "conds" "use")
1001 (set_attr "length" "4,8")]
1004 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
;; Split (see the comment line above): materialise ~(x - 1) in the
;; scratch (operand 3), then compute the NOT of the shifted scratch.
;; The preparation statement rewrites operand 1 accordingly.  The
;; define_split header line is missing from this extract.
1006 [(set (match_operand:SI 0 "s_register_operand" "")
1007 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1008 (match_operand:SI 2 "s_register_operand" ""))
1010 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1012 [(set (match_dup 3) (match_dup 1))
1013 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1015 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; SFmode add expander.  The visible preparation forces operand 2 into a
;; register when it is not acceptable to the Cirrus (Maverick) FP unit;
;; the guarding condition line is missing from this extract.
1018 (define_expand "addsf3"
1019 [(set (match_operand:SF 0 "s_register_operand" "")
1020 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1021 (match_operand:SF 2 "arm_float_add_operand" "")))]
1022 "TARGET_32BIT && TARGET_HARD_FLOAT"
1025 && !cirrus_fp_register (operands[2], SFmode))
1026 operands[2] = force_reg (SFmode, operands[2]);
;; DFmode add expander; mirrors addsf3 but excluded on single-precision-
;; only VFP.  Forces operand 2 to a register for the Cirrus FP case.
1029 (define_expand "adddf3"
1030 [(set (match_operand:DF 0 "s_register_operand" "")
1031 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1032 (match_operand:DF 2 "arm_float_add_operand" "")))]
1033 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1036 && !cirrus_fp_register (operands[2], DFmode))
1037 operands[2] = force_reg (DFmode, operands[2]);
;; DImode subtraction expander.  On Maverick (Cirrus) with suitable FP
;; registers it emits cirrus_subdi3 directly; otherwise it forces both
;; source operands into registers and falls through to the generic
;; subs/sbc patterns below (which clobber the condition codes).
1040 (define_expand "subdi3"
1042 [(set (match_operand:DI 0 "s_register_operand" "")
1043 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1044 (match_operand:DI 2 "s_register_operand" "")))
1045 (clobber (reg:CC CC_REGNUM))])]
1048 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1050 && cirrus_fp_register (operands[0], DImode)
1051 && cirrus_fp_register (operands[1], DImode))
1053 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1059 if (GET_CODE (operands[1]) != REG)
1060 operands[1] = force_reg (DImode, operands[1]);
1061 if (GET_CODE (operands[2]) != REG)
1062 operands[2] = force_reg (DImode, operands[2]);
;; 64-bit subtract: SUBS on the low words, then SBC (subtract with
;; borrow) on the high words.  Two 4-byte insns; clobbers the flags.
;; Earlyclobber outputs keep the halves from overlapping the inputs.
1067 (define_insn "*arm_subdi3"
1068 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1069 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1070 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1071 (clobber (reg:CC CC_REGNUM))]
1072 "TARGET_32BIT && !TARGET_NEON"
1073 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1074 [(set_attr "conds" "clob")
1075 (set_attr "length" "8")]
;; Thumb-1 64-bit subtract: operand 0 is tied to operand 1, low registers
;; only.  Two 2-byte insns (length 4); the condition string line is
;; missing from this extract.
1078 (define_insn "*thumb_subdi3"
1079 [(set (match_operand:DI 0 "register_operand" "=l")
1080 (minus:DI (match_operand:DI 1 "register_operand" "0")
1081 (match_operand:DI 2 "register_operand" "l")))
1082 (clobber (reg:CC CC_REGNUM))]
1084 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1085 [(set_attr "length" "4")]
;; DI minus zero-extended SI: subtract from the low word, then propagate
;; the borrow into the high word with SBC ... #0.
1088 (define_insn "*subdi_di_zesidi"
1089 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1090 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1092 (match_operand:SI 2 "s_register_operand" "r,r"))))
1093 (clobber (reg:CC CC_REGNUM))]
1095 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1096 [(set_attr "conds" "clob")
1097 (set_attr "length" "8")]
;; DI minus sign-extended SI: the high-word SBC uses "%2, asr #31" to
;; subtract the sign bits of operand 2.
1100 (define_insn "*subdi_di_sesidi"
1101 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1102 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1104 (match_operand:SI 2 "s_register_operand" "r,r"))))
1105 (clobber (reg:CC CC_REGNUM))]
1107 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1108 [(set_attr "conds" "clob")
1109 (set_attr "length" "8")]
;; Zero-extended SI minus DI: reverse subtract (RSBS low, RSC high with
;; #0) since the DI value is the subtrahend's register pair.
1112 (define_insn "*subdi_zesidi_di"
1113 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1114 (minus:DI (zero_extend:DI
1115 (match_operand:SI 2 "s_register_operand" "r,r"))
1116 (match_operand:DI 1 "s_register_operand" "0,r")))
1117 (clobber (reg:CC CC_REGNUM))]
1119 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1120 [(set_attr "conds" "clob")
1121 (set_attr "length" "8")]
;; Sign-extended SI minus DI: RSBS/RSC with "%2, asr #31" supplying the
;; sign word of operand 2 for the high half.
1124 (define_insn "*subdi_sesidi_di"
1125 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1126 (minus:DI (sign_extend:DI
1127 (match_operand:SI 2 "s_register_operand" "r,r"))
1128 (match_operand:DI 1 "s_register_operand" "0,r")))
1129 (clobber (reg:CC CC_REGNUM))]
1131 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1132 [(set_attr "conds" "clob")
1133 (set_attr "length" "8")]
;; Zero-extended SI minus zero-extended SI.  "sbc %R0, %1, %1" computes
;; reg - reg - !C, i.e. the high word becomes 0 or -1 depending on the
;; borrow from the low-word subtraction.
1136 (define_insn "*subdi_zesidi_zesidi"
1137 [(set (match_operand:DI 0 "s_register_operand" "=r")
1138 (minus:DI (zero_extend:DI
1139 (match_operand:SI 1 "s_register_operand" "r"))
1141 (match_operand:SI 2 "s_register_operand" "r"))))
1142 (clobber (reg:CC CC_REGNUM))]
1144 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1145 [(set_attr "conds" "clob")
1146 (set_attr "length" "8")]
;; SImode subtraction expander.  A constant minuend is split into an
;; efficient instruction sequence via arm_split_constant on 32-bit
;; targets; Thumb-1 instead forces the constant into a register.
1149 (define_expand "subsi3"
1150 [(set (match_operand:SI 0 "s_register_operand" "")
1151 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1152 (match_operand:SI 2 "s_register_operand" "")))]
1155 if (GET_CODE (operands[1]) == CONST_INT)
1159 arm_split_constant (MINUS, SImode, NULL_RTX,
1160 INTVAL (operands[1]), operands[0],
1161 operands[2], optimize && can_create_pseudo_p ());
1164 else /* TARGET_THUMB1 */
1165 operands[1] = force_reg (SImode, operands[1]);
;; Thumb-1 subtract: 2-byte encoding, always sets the flags ("conds"
;; "set").  Operand 2 may be a register or a small immediate (Pd).
1170 (define_insn "thumb1_subsi3_insn"
1171 [(set (match_operand:SI 0 "register_operand" "=l")
1172 (minus:SI (match_operand:SI 1 "register_operand" "l")
1173 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1176 [(set_attr "length" "2")
1177 (set_attr "conds" "set")])
1179 ; ??? Check Thumb-2 split length
;; ARM/Thumb-2 subtract, with a split: when either operand is a constant
;; that is not a valid ARM immediate, re-expand through
;; arm_split_constant into a multi-insn sequence (lengths 16 for the
;; constant alternatives).  The assembler templates are missing from
;; this extract.
1180 (define_insn_and_split "*arm_subsi3_insn"
1181 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1182 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1183 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1191 "&& ((GET_CODE (operands[1]) == CONST_INT
1192 && !const_ok_for_arm (INTVAL (operands[1])))
1193 || (GET_CODE (operands[2]) == CONST_INT
1194 && !const_ok_for_arm (INTVAL (operands[2]))))"
1195 [(clobber (const_int 0))]
1197 arm_split_constant (MINUS, SImode, curr_insn,
1198 INTVAL (operands[1]), operands[0], operands[2], 0);
1201 [(set_attr "length" "4,4,4,16,16")
1202 (set_attr "predicable" "yes")]
;; Peephole2 (header line missing from this extract): when the constant
;; minuend is not a valid immediate but its complement is, load it into
;; a scratch first and subtract from that.  The preparation code that
;; rewrites operand 1 is presumably in the missing lines -- confirm
;; against the full file.
1206 [(match_scratch:SI 3 "r")
1207 (set (match_operand:SI 0 "arm_general_register_operand" "")
1208 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1209 (match_operand:SI 2 "arm_general_register_operand" "")))]
1211 && !const_ok_for_arm (INTVAL (operands[1]))
1212 && const_ok_for_arm (~INTVAL (operands[1]))"
1213 [(set (match_dup 3) (match_dup 1))
1214 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Subtract that also sets the flags (CC_NOOV: overflow bit unusable).
;; The compare wrapper and assembler templates are missing from this
;; extract.
1218 (define_insn "*subsi3_compare0"
1219 [(set (reg:CC_NOOV CC_REGNUM)
1221 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1222 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1224 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1225 (minus:SI (match_dup 1) (match_dup 2)))]
1230 [(set_attr "conds" "set")]
;; Subtract whose CC result is a full compare of the two inputs (CCmode,
;; all flags valid), in parallel with storing the difference.
1233 (define_insn "*subsi3_compare"
1234 [(set (reg:CC CC_REGNUM)
1235 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1236 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1237 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1238 (minus:SI (match_dup 1) (match_dup 2)))]
1243 [(set_attr "conds" "set")]
;; Expander: operand0 = operand1 - (comparison result), i.e. decrement
;; operand1 when the condition in the CC register (operand 3) holds.
1246 (define_expand "decscc"
1247 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1248 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1249 (match_operator:SI 2 "arm_comparison_operator"
1250 [(match_operand 3 "cc_register" "") (const_int 0)])))]
;; ARM implementation of decscc: conditional SUB #1, or conditional
;; MOV followed by conditional SUB when operand 0 differs from operand 1
;; (length "*,8").  Uses the flags without changing them.
1255 (define_insn "*arm_decscc"
1256 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1257 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1258 (match_operator:SI 2 "arm_comparison_operator"
1259 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1263 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1264 [(set_attr "conds" "use")
1265 (set_attr "length" "*,8")]
;; SFmode subtract expander.  For Maverick (Cirrus), both operands must
;; live in Cirrus FP registers, so each is forced into a register when
;; cirrus_fp_register rejects it.
1268 (define_expand "subsf3"
1269 [(set (match_operand:SF 0 "s_register_operand" "")
1270 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1271 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1272 "TARGET_32BIT && TARGET_HARD_FLOAT"
1274 if (TARGET_MAVERICK)
1276 if (!cirrus_fp_register (operands[1], SFmode))
1277 operands[1] = force_reg (SFmode, operands[1]);
1278 if (!cirrus_fp_register (operands[2], SFmode))
1279 operands[2] = force_reg (SFmode, operands[2]);
;; DFmode subtract expander; mirrors subsf3 but excluded on single-
;; precision-only VFP.  Forces both operands into Cirrus FP registers
;; on Maverick.
1283 (define_expand "subdf3"
1284 [(set (match_operand:DF 0 "s_register_operand" "")
1285 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1286 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1287 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1289 if (TARGET_MAVERICK)
1291 if (!cirrus_fp_register (operands[1], DFmode))
1292 operands[1] = force_reg (DFmode, operands[1]);
1293 if (!cirrus_fp_register (operands[2], DFmode))
1294 operands[2] = force_reg (DFmode, operands[2]);
1299 ;; Multiplication insns
;; SImode multiply expander.  Note the swapped operand numbering
;; (operand 2 first) to match the insn patterns below.
1301 (define_expand "mulsi3"
1302 [(set (match_operand:SI 0 "s_register_operand" "")
1303 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1304 (match_operand:SI 1 "s_register_operand" "")))]
1309 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; Pre-ARMv6 MUL: the destination must not be the same register as the
;; multiplier (operand 2), hence the earlyclobber '&' combined with the
;; tied-'0' alternative (see the comment line above).
1311 (define_insn "*arm_mulsi3"
1312 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1313 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1314 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1315 "TARGET_32BIT && !arm_arch6"
1316 "mul%?\\t%0, %2, %1"
1317 [(set_attr "insn" "mul")
1318 (set_attr "predicable" "yes")]
;; ARMv6+ MUL: the Rd == Rm restriction is lifted, so no earlyclobber
;; or tied operand is needed.
1320 (define_insn "*arm_mulsi3_v6"
1321 [(set (match_operand:SI 0 "s_register_operand" "=r")
1322 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1323 (match_operand:SI 2 "s_register_operand" "r")))]
1324 "TARGET_32BIT && arm_arch6"
1325 "mul%?\\t%0, %1, %2"
1326 [(set_attr "insn" "mul")
1327 (set_attr "predicable" "yes")]
1330 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1331 ; 1 and 2 are the same, because reload will make operand 0 match
1332 ; operand 1 without realizing that this conflicts with operand 2. We fix
1333 ; this by adding another alternative to match this case, and then `reload'
1334 ; it ourselves. This alternative must come first.
;; Thumb-1 (pre-v6) MUL.  Alternatives 0/1 copy operand 1 into the
;; destination first (mov + mul, 4 bytes); alternative 2 ties operand 1
;; to the destination (2 bytes).  See the workaround comment above.
1335 (define_insn "*thumb_mulsi3"
1336 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1337 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1338 (match_operand:SI 2 "register_operand" "l,l,l")))]
1339 "TARGET_THUMB1 && !arm_arch6"
1341 if (which_alternative < 2)
1342 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1344 return \"mul\\t%0, %2\";
1346 [(set_attr "length" "4,4,2")
1347 (set_attr "insn" "mul")]
;; Thumb-1 MUL on ARMv6: always a single 2-byte insn; the destination is
;; tied to one of the inputs in every alternative.  Assembler templates
;; are missing from this extract.
1350 (define_insn "*thumb_mulsi3_v6"
1351 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1352 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1353 (match_operand:SI 2 "register_operand" "l,0,0")))]
1354 "TARGET_THUMB1 && arm_arch6"
1359 [(set_attr "length" "2")
1360 (set_attr "insn" "mul")]
;; Pre-v6 MULS: multiply that also sets the flags (overflow invalid,
;; hence CC_NOOV), keeping the product.  Earlyclobber/tied-0 rules as
;; for *arm_mulsi3.
1363 (define_insn "*mulsi3_compare0"
1364 [(set (reg:CC_NOOV CC_REGNUM)
1365 (compare:CC_NOOV (mult:SI
1366 (match_operand:SI 2 "s_register_operand" "r,r")
1367 (match_operand:SI 1 "s_register_operand" "%0,r"))
1369 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1370 (mult:SI (match_dup 2) (match_dup 1)))]
1371 "TARGET_ARM && !arm_arch6"
1372 "mul%.\\t%0, %2, %1"
1373 [(set_attr "conds" "set")
1374 (set_attr "insn" "muls")]
;; ARMv6 MULS, kept only when optimizing for size (on v6 a separate
;; CMP is usually preferred for speed).
1377 (define_insn "*mulsi3_compare0_v6"
1378 [(set (reg:CC_NOOV CC_REGNUM)
1379 (compare:CC_NOOV (mult:SI
1380 (match_operand:SI 2 "s_register_operand" "r")
1381 (match_operand:SI 1 "s_register_operand" "r"))
1383 (set (match_operand:SI 0 "s_register_operand" "=r")
1384 (mult:SI (match_dup 2) (match_dup 1)))]
1385 "TARGET_ARM && arm_arch6 && optimize_size"
1386 "mul%.\\t%0, %2, %1"
1387 [(set_attr "conds" "set")
1388 (set_attr "insn" "muls")]
;; Pre-v6 MULS where only the flags are needed: the product goes to a
;; clobbered scratch register.
1391 (define_insn "*mulsi_compare0_scratch"
1392 [(set (reg:CC_NOOV CC_REGNUM)
1393 (compare:CC_NOOV (mult:SI
1394 (match_operand:SI 2 "s_register_operand" "r,r")
1395 (match_operand:SI 1 "s_register_operand" "%0,r"))
1397 (clobber (match_scratch:SI 0 "=&r,&r"))]
1398 "TARGET_ARM && !arm_arch6"
1399 "mul%.\\t%0, %2, %1"
1400 [(set_attr "conds" "set")
1401 (set_attr "insn" "muls")]
;; ARMv6 flags-only MULS (size optimization only); product discarded
;; into a scratch.
1404 (define_insn "*mulsi_compare0_scratch_v6"
1405 [(set (reg:CC_NOOV CC_REGNUM)
1406 (compare:CC_NOOV (mult:SI
1407 (match_operand:SI 2 "s_register_operand" "r")
1408 (match_operand:SI 1 "s_register_operand" "r"))
1410 (clobber (match_scratch:SI 0 "=r"))]
1411 "TARGET_ARM && arm_arch6 && optimize_size"
1412 "mul%.\\t%0, %2, %1"
1413 [(set_attr "conds" "set")
1414 (set_attr "insn" "muls")]
1417 ;; Unnamed templates to match MLA instruction.
;; Pre-v6 MLA (multiply-accumulate): 0 = 2 * 1 + 3.  Earlyclobber keeps
;; the destination away from the multiplier on pre-v6 cores.
1419 (define_insn "*mulsi3addsi"
1420 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1422 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1423 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1424 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1425 "TARGET_32BIT && !arm_arch6"
1426 "mla%?\\t%0, %2, %1, %3"
1427 [(set_attr "insn" "mla")
1428 (set_attr "predicable" "yes")]
;; ARMv6+ MLA: no register-overlap restrictions.
1431 (define_insn "*mulsi3addsi_v6"
1432 [(set (match_operand:SI 0 "s_register_operand" "=r")
1434 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1435 (match_operand:SI 1 "s_register_operand" "r"))
1436 (match_operand:SI 3 "s_register_operand" "r")))]
1437 "TARGET_32BIT && arm_arch6"
1438 "mla%?\\t%0, %2, %1, %3"
1439 [(set_attr "insn" "mla")
1440 (set_attr "predicable" "yes")]
;; MLAS: multiply-accumulate that also sets the flags, keeping the sum.
;; NOTE(review): the condition reads "TARGET_ARM && arm_arch6", yet the
;; earlyclobber/tied-0 constraints match the pre-v6 style and the
;; "_v6" variant below also requires arm_arch6 -- this looks like it
;; should be "!arm_arch6" to mirror *mulsi3_compare0; confirm against
;; the canonical file.
1443 (define_insn "*mulsi3addsi_compare0"
1444 [(set (reg:CC_NOOV CC_REGNUM)
1447 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1448 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1449 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1451 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1452 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1454 "TARGET_ARM && arm_arch6"
1455 "mla%.\\t%0, %2, %1, %3"
1456 [(set_attr "conds" "set")
1457 (set_attr "insn" "mlas")]
;; ARMv6 MLAS keeping the result; retained only when optimizing for size.
1460 (define_insn "*mulsi3addsi_compare0_v6"
1461 [(set (reg:CC_NOOV CC_REGNUM)
1464 (match_operand:SI 2 "s_register_operand" "r")
1465 (match_operand:SI 1 "s_register_operand" "r"))
1466 (match_operand:SI 3 "s_register_operand" "r"))
1468 (set (match_operand:SI 0 "s_register_operand" "=r")
1469 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1471 "TARGET_ARM && arm_arch6 && optimize_size"
1472 "mla%.\\t%0, %2, %1, %3"
1473 [(set_attr "conds" "set")
1474 (set_attr "insn" "mlas")]
;; Pre-v6 MLAS where only the flags are needed: the sum is written to a
;; clobbered scratch register.
1477 (define_insn "*mulsi3addsi_compare0_scratch"
1478 [(set (reg:CC_NOOV CC_REGNUM)
1481 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1482 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1483 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1485 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1486 "TARGET_ARM && !arm_arch6"
1487 "mla%.\\t%0, %2, %1, %3"
1488 [(set_attr "conds" "set")
1489 (set_attr "insn" "mlas")]
;; ARMv6 flags-only MLAS (size optimization only); result discarded.
1492 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1493 [(set (reg:CC_NOOV CC_REGNUM)
1496 (match_operand:SI 2 "s_register_operand" "r")
1497 (match_operand:SI 1 "s_register_operand" "r"))
1498 (match_operand:SI 3 "s_register_operand" "r"))
1500 (clobber (match_scratch:SI 0 "=r"))]
1501 "TARGET_ARM && arm_arch6 && optimize_size"
1502 "mla%.\\t%0, %2, %1, %3"
1503 [(set_attr "conds" "set")
1504 (set_attr "insn" "mlas")]
;; MLS (multiply-and-subtract, Thumb-2 era): 0 = 3 - 2 * 1.  The "insn"
;; attribute deliberately reuses the "mla" scheduling class.
1507 (define_insn "*mulsi3subsi"
1508 [(set (match_operand:SI 0 "s_register_operand" "=r")
1510 (match_operand:SI 3 "s_register_operand" "r")
1511 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1512 (match_operand:SI 1 "s_register_operand" "r"))))]
1513 "TARGET_32BIT && arm_arch_thumb2"
1514 "mls%?\\t%0, %2, %1, %3"
1515 [(set_attr "insn" "mla")
1516 (set_attr "predicable" "yes")]
;; Expander for signed 32x32+64 -> 64 multiply-accumulate (SMLAL).
1519 (define_expand "maddsidi4"
1520 [(set (match_operand:DI 0 "s_register_operand" "")
1523 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1524 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1525 (match_operand:DI 3 "s_register_operand" "")))]
1526 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 SMLAL: accumulator (operand 1) tied to the destination pair,
;; earlyclobbered to avoid overlap with the multiplier inputs.
1529 (define_insn "*mulsidi3adddi"
1530 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1533 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1534 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1535 (match_operand:DI 1 "s_register_operand" "0")))]
1536 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1537 "smlal%?\\t%Q0, %R0, %3, %2"
1538 [(set_attr "insn" "smlal")
1539 (set_attr "predicable" "yes")]
;; ARMv6+ SMLAL: overlap restriction lifted, no earlyclobber needed.
1542 (define_insn "*mulsidi3adddi_v6"
1543 [(set (match_operand:DI 0 "s_register_operand" "=r")
1546 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1547 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1548 (match_operand:DI 1 "s_register_operand" "0")))]
1549 "TARGET_32BIT && arm_arch6"
1550 "smlal%?\\t%Q0, %R0, %3, %2"
1551 [(set_attr "insn" "smlal")
1552 (set_attr "predicable" "yes")]
1555 ;; 32x32->64 widening multiply.
1556 ;; As with mulsi3, the only difference between the v3-5 and v6+
1557 ;; versions of these patterns is the requirement that the output not
1558 ;; overlap the inputs, but that still means we have to have a named
1559 ;; expander and two different starred insns.
;; Expander for signed 32x32 -> 64 widening multiply (SMULL); see the
;; comment block above on why separate pre-v6/v6 insns exist.
1561 (define_expand "mulsidi3"
1562 [(set (match_operand:DI 0 "s_register_operand" "")
1564 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1565 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1566 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 SMULL: destination pair earlyclobbered so neither half
;; overlaps the inputs.
1570 (define_insn "*mulsidi3_nov6"
1571 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1573 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1574 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1575 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1576 "smull%?\\t%Q0, %R0, %1, %2"
1577 [(set_attr "insn" "smull")
1578 (set_attr "predicable" "yes")]
;; ARMv6+ SMULL: outputs may overlap inputs, no earlyclobber.
1581 (define_insn "*mulsidi3_v6"
1582 [(set (match_operand:DI 0 "s_register_operand" "=r")
1584 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1585 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1586 "TARGET_32BIT && arm_arch6"
1587 "smull%?\\t%Q0, %R0, %1, %2"
1588 [(set_attr "insn" "smull")
1589 (set_attr "predicable" "yes")]
;; Expander for unsigned 32x32 -> 64 widening multiply (UMULL).
1592 (define_expand "umulsidi3"
1593 [(set (match_operand:DI 0 "s_register_operand" "")
1595 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1596 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1597 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 UMULL with earlyclobbered destination pair.
1601 (define_insn "*umulsidi3_nov6"
1602 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1604 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1605 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1606 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1607 "umull%?\\t%Q0, %R0, %1, %2"
1608 [(set_attr "insn" "umull")
1609 (set_attr "predicable" "yes")]
;; ARMv6+ UMULL, no overlap restrictions.
1612 (define_insn "*umulsidi3_v6"
1613 [(set (match_operand:DI 0 "s_register_operand" "=r")
1615 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1616 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1617 "TARGET_32BIT && arm_arch6"
1618 "umull%?\\t%Q0, %R0, %1, %2"
1619 [(set_attr "insn" "umull")
1620 (set_attr "predicable" "yes")]
;; Expander for unsigned 32x32+64 -> 64 multiply-accumulate (UMLAL).
1623 (define_expand "umaddsidi4"
1624 [(set (match_operand:DI 0 "s_register_operand" "")
1627 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1628 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1629 (match_operand:DI 3 "s_register_operand" "")))]
1630 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 UMLAL: accumulator tied to the earlyclobbered destination pair.
1633 (define_insn "*umulsidi3adddi"
1634 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1637 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1638 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1639 (match_operand:DI 1 "s_register_operand" "0")))]
1640 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1641 "umlal%?\\t%Q0, %R0, %3, %2"
1642 [(set_attr "insn" "umlal")
1643 (set_attr "predicable" "yes")]
;; ARMv6+ UMLAL, no earlyclobber needed.
1646 (define_insn "*umulsidi3adddi_v6"
1647 [(set (match_operand:DI 0 "s_register_operand" "=r")
1650 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1651 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1652 (match_operand:DI 1 "s_register_operand" "0")))]
1653 "TARGET_32BIT && arm_arch6"
1654 "umlal%?\\t%Q0, %R0, %3, %2"
1655 [(set_attr "insn" "umlal")
1656 (set_attr "predicable" "yes")]
;; Expander: high 32 bits of a signed 64-bit product.  The low half of
;; SMULL lands in a clobbered scratch (operand 3).
1659 (define_expand "smulsi3_highpart"
1661 [(set (match_operand:SI 0 "s_register_operand" "")
1665 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1666 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1668 (clobber (match_scratch:SI 3 ""))])]
1669 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 SMULL high-part: low word goes to scratch %3, high word to %0.
1673 (define_insn "*smulsi3_highpart_nov6"
1674 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1678 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1679 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1681 (clobber (match_scratch:SI 3 "=&r,&r"))]
1682 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1683 "smull%?\\t%3, %0, %2, %1"
1684 [(set_attr "insn" "smull")
1685 (set_attr "predicable" "yes")]
;; ARMv6+ SMULL high-part, no overlap restrictions.
1688 (define_insn "*smulsi3_highpart_v6"
1689 [(set (match_operand:SI 0 "s_register_operand" "=r")
1693 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1694 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1696 (clobber (match_scratch:SI 3 "=r"))]
1697 "TARGET_32BIT && arm_arch6"
1698 "smull%?\\t%3, %0, %2, %1"
1699 [(set_attr "insn" "smull")
1700 (set_attr "predicable" "yes")]
;; Expander: high 32 bits of an unsigned 64-bit product (UMULL); low
;; half discarded into the scratch.
1703 (define_expand "umulsi3_highpart"
1705 [(set (match_operand:SI 0 "s_register_operand" "")
1709 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1710 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1712 (clobber (match_scratch:SI 3 ""))])]
1713 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 UMULL high-part: low word to scratch %3, high word to %0.
1717 (define_insn "*umulsi3_highpart_nov6"
1718 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1722 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1723 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1725 (clobber (match_scratch:SI 3 "=&r,&r"))]
1726 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1727 "umull%?\\t%3, %0, %2, %1"
1728 [(set_attr "insn" "umull")
1729 (set_attr "predicable" "yes")]
;; ARMv6+ UMULL high-part, no overlap restrictions.
1732 (define_insn "*umulsi3_highpart_v6"
1733 [(set (match_operand:SI 0 "s_register_operand" "=r")
1737 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1738 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1740 (clobber (match_scratch:SI 3 "=r"))]
1741 "TARGET_32BIT && arm_arch6"
1742 "umull%?\\t%3, %0, %2, %1"
1743 [(set_attr "insn" "umull")
1744 (set_attr "predicable" "yes")]
;; SMULBB: signed 16x16 -> 32 multiply of the bottom halfwords (DSP
;; multiply extension).
1747 (define_insn "mulhisi3"
1748 [(set (match_operand:SI 0 "s_register_operand" "=r")
1749 (mult:SI (sign_extend:SI
1750 (match_operand:HI 1 "s_register_operand" "%r"))
1752 (match_operand:HI 2 "s_register_operand" "r"))))]
1753 "TARGET_DSP_MULTIPLY"
1754 "smulbb%?\\t%0, %1, %2"
1755 [(set_attr "insn" "smulxy")
1756 (set_attr "predicable" "yes")]
;; SMULTB: top halfword of operand 1 (matched as an arithmetic shift
;; right of the SI value) times bottom halfword of operand 2.
1759 (define_insn "*mulhisi3tb"
1760 [(set (match_operand:SI 0 "s_register_operand" "=r")
1761 (mult:SI (ashiftrt:SI
1762 (match_operand:SI 1 "s_register_operand" "r")
1765 (match_operand:HI 2 "s_register_operand" "r"))))]
1766 "TARGET_DSP_MULTIPLY"
1767 "smultb%?\\t%0, %1, %2"
1768 [(set_attr "insn" "smulxy")
1769 (set_attr "predicable" "yes")]
;; SMULBT: bottom halfword of operand 1 times top halfword of operand 2.
1772 (define_insn "*mulhisi3bt"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1774 (mult:SI (sign_extend:SI
1775 (match_operand:HI 1 "s_register_operand" "r"))
1777 (match_operand:SI 2 "s_register_operand" "r")
1779 "TARGET_DSP_MULTIPLY"
1780 "smulbt%?\\t%0, %1, %2"
1781 [(set_attr "insn" "smulxy")
1782 (set_attr "predicable" "yes")]
;; SMULTT: top halfwords of both operands multiplied.
1785 (define_insn "*mulhisi3tt"
1786 [(set (match_operand:SI 0 "s_register_operand" "=r")
1787 (mult:SI (ashiftrt:SI
1788 (match_operand:SI 1 "s_register_operand" "r")
1791 (match_operand:SI 2 "s_register_operand" "r")
1793 "TARGET_DSP_MULTIPLY"
1794 "smultt%?\\t%0, %1, %2"
1795 [(set_attr "insn" "smulxy")
1796 (set_attr "predicable" "yes")]
;; SMLABB: 16x16 multiply of the bottom halfwords plus a 32-bit
;; accumulator (operand 3).
1799 (define_insn "maddhisi4"
1800 [(set (match_operand:SI 0 "s_register_operand" "=r")
1801 (plus:SI (mult:SI (sign_extend:SI
1802 (match_operand:HI 1 "s_register_operand" "r"))
1804 (match_operand:HI 2 "s_register_operand" "r")))
1805 (match_operand:SI 3 "s_register_operand" "r")))]
1806 "TARGET_DSP_MULTIPLY"
1807 "smlabb%?\\t%0, %1, %2, %3"
1808 [(set_attr "insn" "smlaxy")
1809 (set_attr "predicable" "yes")]
;; SMLALBB: 16x16 multiply of the bottom halfwords accumulated into a
;; 64-bit destination tied to operand 3.
1812 (define_insn "*maddhidi4"
1813 [(set (match_operand:DI 0 "s_register_operand" "=r")
1815 (mult:DI (sign_extend:DI
1816 (match_operand:HI 1 "s_register_operand" "r"))
1818 (match_operand:HI 2 "s_register_operand" "r")))
1819 (match_operand:DI 3 "s_register_operand" "0")))]
1820 "TARGET_DSP_MULTIPLY"
1821 "smlalbb%?\\t%Q0, %R0, %1, %2"
1822 [(set_attr "insn" "smlalxy")
1823 (set_attr "predicable" "yes")])
;; SFmode multiply expander; forces operand 2 into a register for the
;; Cirrus (Maverick) FP case, as with addsf3.
1825 (define_expand "mulsf3"
1826 [(set (match_operand:SF 0 "s_register_operand" "")
1827 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1828 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1829 "TARGET_32BIT && TARGET_HARD_FLOAT"
1832 && !cirrus_fp_register (operands[2], SFmode))
1833 operands[2] = force_reg (SFmode, operands[2]);
;; DFmode multiply expander; excluded on single-precision-only VFP.
1836 (define_expand "muldf3"
1837 [(set (match_operand:DF 0 "s_register_operand" "")
1838 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1839 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1840 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1843 && !cirrus_fp_register (operands[2], DFmode))
1844 operands[2] = force_reg (DFmode, operands[2]);
;; SFmode divide: only FPA and VFP provide hardware division.
1849 (define_expand "divsf3"
1850 [(set (match_operand:SF 0 "s_register_operand" "")
1851 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1852 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1853 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; DFmode divide: FPA or double-precision VFP only.
1856 (define_expand "divdf3"
1857 [(set (match_operand:DF 0 "s_register_operand" "")
1858 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1859 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1860 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; SFmode remainder: FPA only (no VFP remainder instruction).
1865 (define_expand "modsf3"
1866 [(set (match_operand:SF 0 "s_register_operand" "")
1867 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1868 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1869 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
;; DFmode remainder: FPA only.
1872 (define_expand "moddf3"
1873 [(set (match_operand:DF 0 "s_register_operand" "")
1874 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1875 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1876 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1879 ;; Boolean and,ior,xor insns
1881 ;; Split up double word logical operations
1883 ;; Split up simple DImode logical operations. Simply perform the logical
1884 ;; operation on the upper and lower halves of the registers.
;; Post-reload split (header line missing from this extract): a DImode
;; AND/IOR/XOR of two register pairs becomes two independent SImode
;; operations on the low and high halves.  Skipped when the destination
;; lives in a NEON or iWMMXt register, where DImode ops are native.
1886 [(set (match_operand:DI 0 "s_register_operand" "")
1887 (match_operator:DI 6 "logical_binary_operator"
1888 [(match_operand:DI 1 "s_register_operand" "")
1889 (match_operand:DI 2 "s_register_operand" "")]))]
1890 "TARGET_32BIT && reload_completed
1891 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1892 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1893 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1894 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1897 operands[3] = gen_highpart (SImode, operands[0]);
1898 operands[0] = gen_lowpart (SImode, operands[0]);
1899 operands[4] = gen_highpart (SImode, operands[1]);
1900 operands[1] = gen_lowpart (SImode, operands[1]);
1901 operands[5] = gen_highpart (SImode, operands[2]);
1902 operands[2] = gen_lowpart (SImode, operands[2]);
;; Post-reload split (header line missing): DImode logical op with a
;; sign-extended SI operand.  The high-half operation uses
;; "operand2 asr 31" as the sign word in place of a real high register.
1907 [(set (match_operand:DI 0 "s_register_operand" "")
1908 (match_operator:DI 6 "logical_binary_operator"
1909 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1910 (match_operand:DI 1 "s_register_operand" "")]))]
1911 "TARGET_32BIT && reload_completed"
1912 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1913 (set (match_dup 3) (match_op_dup:SI 6
1914 [(ashiftrt:SI (match_dup 2) (const_int 31))
1918 operands[3] = gen_highpart (SImode, operands[0]);
1919 operands[0] = gen_lowpart (SImode, operands[0]);
1920 operands[4] = gen_highpart (SImode, operands[1]);
1921 operands[1] = gen_lowpart (SImode, operands[1]);
1922 operands[5] = gen_highpart (SImode, operands[2]);
1923 operands[2] = gen_lowpart (SImode, operands[2]);
1927 ;; The zero extend of operand 2 means we can just copy the high part of
1928 ;; operand1 into operand0.
;; Post-reload split (header line missing): IOR of a DI with a
;; zero-extended SI.  Per the comment above, the high word of the
;; extended operand is zero, so the destination's high word is just a
;; copy of operand 1's high word.
1930 [(set (match_operand:DI 0 "s_register_operand" "")
1932 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1933 (match_operand:DI 1 "s_register_operand" "")))]
1934 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1935 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1936 (set (match_dup 3) (match_dup 4))]
1939 operands[4] = gen_highpart (SImode, operands[1]);
1940 operands[3] = gen_highpart (SImode, operands[0]);
1941 operands[0] = gen_lowpart (SImode, operands[0]);
1942 operands[1] = gen_lowpart (SImode, operands[1]);
1946 ;; The zero extend of operand 2 means we can just copy the high part of
1947 ;; operand1 into operand0.
;; Post-reload split (header line missing): XOR of a DI with a
;; zero-extended SI; same high-word-copy trick as the IOR split above.
1949 [(set (match_operand:DI 0 "s_register_operand" "")
1951 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1952 (match_operand:DI 1 "s_register_operand" "")))]
1953 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1954 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1955 (set (match_dup 3) (match_dup 4))]
1958 operands[4] = gen_highpart (SImode, operands[1]);
1959 operands[3] = gen_highpart (SImode, operands[0]);
1960 operands[0] = gen_lowpart (SImode, operands[0]);
1961 operands[1] = gen_lowpart (SImode, operands[1]);
;; DImode AND expander; operand 2 predicate admits NEON's invertible
;; logic-immediate operands.  Condition/body lines are missing from
;; this extract.
1965 (define_expand "anddi3"
1966 [(set (match_operand:DI 0 "s_register_operand" "")
1967 (and:DI (match_operand:DI 1 "s_register_operand" "")
1968 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
;; Core-register DImode AND (8 bytes: one AND per word); excluded when
;; iWMMXt or NEON provide native DImode AND.  The template/split lines
;; are missing from this extract.
1973 (define_insn "*anddi3_insn"
1974 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1975 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1976 (match_operand:DI 2 "s_register_operand" "r,r")))]
1977 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1979 [(set_attr "length" "8")]
;; AND of a DI with a zero-extended SI.  After reload it splits into an
;; SImode AND of the low words plus clearing the high word, since the
;; zero extension zeroes the upper half (see the inline comment).
1982 (define_insn_and_split "*anddi_zesidi_di"
1983 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1984 (and:DI (zero_extend:DI
1985 (match_operand:SI 2 "s_register_operand" "r,r"))
1986 (match_operand:DI 1 "s_register_operand" "0,r")))]
1989 "TARGET_32BIT && reload_completed"
1990 ; The zero extend of operand 2 clears the high word of the output
1992 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1993 (set (match_dup 3) (const_int 0))]
1996 operands[3] = gen_highpart (SImode, operands[0]);
1997 operands[0] = gen_lowpart (SImode, operands[0]);
1998 operands[1] = gen_lowpart (SImode, operands[1]);
2000 [(set_attr "length" "8")]
2003 (define_insn "*anddi_sesdi_di"
2004 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2005 (and:DI (sign_extend:DI
2006 (match_operand:SI 2 "s_register_operand" "r,r"))
2007 (match_operand:DI 1 "s_register_operand" "0,r")))]
2010 [(set_attr "length" "8")]
;; 32-bit AND expander.  Handles constant operand 2 specially: splits awkward
;; immediates, uses zero-extension for 0xff masks, and (for Thumb-1) rewrites
;; masks as BIC, extzv, or shift pairs.  NOTE(review): several interior lines
;; are missing from this extract; comments only, code untouched.
2013 (define_expand "andsi3"
2014 [(set (match_operand:SI 0 "s_register_operand" "")
2015 (and:SI (match_operand:SI 1 "s_register_operand" "")
2016 (match_operand:SI 2 "reg_or_int_operand" "")))]
2021 if (GET_CODE (operands[2]) == CONST_INT)
;; AND with 255 on v6+ is just a byte zero-extension (uxtb).
2023 if (INTVAL (operands[2]) == 255 && arm_arch6)
2025 operands[1] = convert_to_mode (QImode, operands[1], 1);
2026 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
;; Otherwise break the constant into ARM-encodable pieces.
2030 arm_split_constant (AND, SImode, NULL_RTX,
2031 INTVAL (operands[2]), operands[0],
2033 optimize && can_create_pseudo_p ());
2038 else /* TARGET_THUMB1 */
2040 if (GET_CODE (operands[2]) != CONST_INT)
2042 rtx tmp = force_reg (SImode, operands[2]);
2043 if (rtx_equal_p (operands[0], operands[1]))
2047 operands[2] = operands[1];
;; Mask whose complement fits in 8 bits: use BIC with the inverted constant.
2055 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2057 operands[2] = force_reg (SImode,
2058 GEN_INT (~INTVAL (operands[2])));
2060 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
;; Look for masks of the form (1<<i)-1 (low-part extract) or its
;; complement (clear low bits via shift down then up).
2065 for (i = 9; i <= 31; i++)
2067 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2069 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2073 else if ((((HOST_WIDE_INT) 1) << i) - 1
2074 == ~INTVAL (operands[2]))
2076 rtx shift = GEN_INT (i);
2077 rtx reg = gen_reg_rtx (SImode);
2079 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2080 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
;; Fallback: load the constant into a register.
2086 operands[2] = force_reg (SImode, operands[2]);
2092 ; ??? Check split length for Thumb-2
;; SI AND: and with an encodable immediate, bic with an encodable inverted
;; immediate, or a post-reload split of an arbitrary constant via
;; arm_split_constant.  NOTE(review): some lines missing from this extract.
2093 (define_insn_and_split "*arm_andsi3_insn"
2094 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2095 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2096 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2100 bic%?\\t%0, %1, #%B2
;; Split only when neither the constant nor its complement is encodable.
2103 && GET_CODE (operands[2]) == CONST_INT
2104 && !(const_ok_for_arm (INTVAL (operands[2]))
2105 || const_ok_for_arm (~INTVAL (operands[2])))"
2106 [(clobber (const_int 0))]
2108 arm_split_constant (AND, SImode, curr_insn,
2109 INTVAL (operands[2]), operands[0], operands[1], 0);
2112 [(set_attr "length" "4,4,16")
2113 (set_attr "predicable" "yes")]
;; Thumb-1 two-register AND (destination tied to operand 1).
2116 (define_insn "*thumb1_andsi3_insn"
2117 [(set (match_operand:SI 0 "register_operand" "=l")
2118 (and:SI (match_operand:SI 1 "register_operand" "%0")
2119 (match_operand:SI 2 "register_operand" "l")))]
2122 [(set_attr "length" "2")
2123 (set_attr "conds" "set")])
;; AND that also sets the condition codes (ands / bics with inverted
;; immediate).  NOTE(review): the compare and template lines are partially
;; missing from this extract.
2125 (define_insn "*andsi3_compare0"
2126 [(set (reg:CC_NOOV CC_REGNUM)
2128 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2129 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2131 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2132 (and:SI (match_dup 1) (match_dup 2)))]
2136 bic%.\\t%0, %1, #%B2"
2137 [(set_attr "conds" "set")]
;; Flag-setting AND whose result is discarded (tst, or bics to a scratch
;; when only the inverted constant is encodable).
2140 (define_insn "*andsi3_compare0_scratch"
2141 [(set (reg:CC_NOOV CC_REGNUM)
2143 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2144 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2146 (clobber (match_scratch:SI 2 "=X,r"))]
2150 bic%.\\t%2, %0, #%B1"
2151 [(set_attr "conds" "set")]
;; Compare a zero_extract bitfield against zero with no live result: emitted
;; as TST against the mask ((1<<width)-1) << start.  The condition restricts
;; width/position so the mask is a valid ARM immediate.
2154 (define_insn "*zeroextractsi_compare0_scratch"
2155 [(set (reg:CC_NOOV CC_REGNUM)
2156 (compare:CC_NOOV (zero_extract:SI
2157 (match_operand:SI 0 "s_register_operand" "r")
2158 (match_operand 1 "const_int_operand" "n")
2159 (match_operand 2 "const_int_operand" "n"))
2162 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2163 && INTVAL (operands[1]) > 0
2164 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2165 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
;; Build the immediate mask from width (op 1) and start bit (op 2).
2167 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2168 << INTVAL (operands[2]));
2169 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2172 [(set_attr "conds" "set")]
;; (ne (zero_extract ...) 0) patterns: test a bitfield and produce 0/1 (or an
;; if_then_else value).  Each splits into a flag-setting AND (or shift) plus a
;; conditional select on the resulting CC_NOOV flags.  NOTE(review): output
;; templates and some closing lines are missing from this extract; comments
;; only, code untouched.
2175 (define_insn_and_split "*ne_zeroextractsi"
2176 [(set (match_operand:SI 0 "s_register_operand" "=r")
2177 (ne:SI (zero_extract:SI
2178 (match_operand:SI 1 "s_register_operand" "r")
2179 (match_operand:SI 2 "const_int_operand" "n")
2180 (match_operand:SI 3 "const_int_operand" "n"))
2182 (clobber (reg:CC CC_REGNUM))]
;; Width/position limits keep the derived mask a legal ARM immediate.
2184 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2185 && INTVAL (operands[2]) > 0
2186 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2187 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2190 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2191 && INTVAL (operands[2]) > 0
2192 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2193 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2194 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2195 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2197 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2199 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2200 (match_dup 0) (const_int 1)))]
;; Turn width/start into the immediate AND mask.
2202 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2203 << INTVAL (operands[3]))
2205 [(set_attr "conds" "clob")
2206 (set (attr "length")
2207 (if_then_else (eq_attr "is_thumb" "yes")
;; Variant for a bitfield that reaches bit 31: an ASHIFT alone sets the
;; needed flags, no mask required.
2212 (define_insn_and_split "*ne_zeroextractsi_shifted"
2213 [(set (match_operand:SI 0 "s_register_operand" "=r")
2214 (ne:SI (zero_extract:SI
2215 (match_operand:SI 1 "s_register_operand" "r")
2216 (match_operand:SI 2 "const_int_operand" "n")
2219 (clobber (reg:CC CC_REGNUM))]
2223 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2224 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2226 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2228 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2229 (match_dup 0) (const_int 1)))]
2231 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2233 [(set_attr "conds" "clob")
2234 (set_attr "length" "8")]
;; if_then_else form: select between the extracted field and operand 4.
;; Operand 0 must not overlap operand 4, since operand 0 is written first.
2237 (define_insn_and_split "*ite_ne_zeroextractsi"
2238 [(set (match_operand:SI 0 "s_register_operand" "=r")
2239 (if_then_else:SI (ne (zero_extract:SI
2240 (match_operand:SI 1 "s_register_operand" "r")
2241 (match_operand:SI 2 "const_int_operand" "n")
2242 (match_operand:SI 3 "const_int_operand" "n"))
2244 (match_operand:SI 4 "arm_not_operand" "rIK")
2246 (clobber (reg:CC CC_REGNUM))]
2248 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2249 && INTVAL (operands[2]) > 0
2250 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2251 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2252 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2255 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2256 && INTVAL (operands[2]) > 0
2257 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2258 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2259 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2260 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2261 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2263 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2265 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2266 (match_dup 0) (match_dup 4)))]
2268 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2269 << INTVAL (operands[3]));
2271 [(set_attr "conds" "clob")
2272 (set_attr "length" "8")]
;; Top-bit variant of the if_then_else form, using an ASHIFT instead of a
;; masked AND.
2275 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2276 [(set (match_operand:SI 0 "s_register_operand" "=r")
2277 (if_then_else:SI (ne (zero_extract:SI
2278 (match_operand:SI 1 "s_register_operand" "r")
2279 (match_operand:SI 2 "const_int_operand" "n")
2282 (match_operand:SI 3 "arm_not_operand" "rIK")
2284 (clobber (reg:CC CC_REGNUM))]
2285 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2287 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2288 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2289 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2291 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2293 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2294 (match_dup 0) (match_dup 3)))]
2296 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2298 [(set_attr "conds" "clob")
2299 (set_attr "length" "8")]
;; NOTE(review): split of a zero_extract into shift-left then logical
;; shift-right (clears bits above the field, then moves it to bit 0).  The
;; opening define_split line is not visible in this extract.
2303 [(set (match_operand:SI 0 "s_register_operand" "")
2304 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2305 (match_operand:SI 2 "const_int_operand" "")
2306 (match_operand:SI 3 "const_int_operand" "")))
2307 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2309 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2310 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
;; Convert (width, start) into the two shift counts.
2312 HOST_WIDE_INT temp = INTVAL (operands[2]);
2314 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2315 operands[3] = GEN_INT (32 - temp);
2319 ;; ??? Use the Thumb-2 bitfield insert/extract instructions.
;; NOTE(review): three related splits follow; each rewrites an extract feeding
;; a shiftable operator into an explicit shift pair.  Opening define_split
;; lines are not visible in this extract; comments only, code untouched.
;; zero_extract combined with a shiftable operator: shift left into a scratch,
;; then apply the operator to the logical right shift of the scratch.
2321 [(set (match_operand:SI 0 "s_register_operand" "")
2322 (match_operator:SI 1 "shiftable_operator"
2323 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2324 (match_operand:SI 3 "const_int_operand" "")
2325 (match_operand:SI 4 "const_int_operand" ""))
2326 (match_operand:SI 5 "s_register_operand" "")]))
2327 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2329 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2332 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2335 HOST_WIDE_INT temp = INTVAL (operands[3]);
2337 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2338 operands[4] = GEN_INT (32 - temp);
;; sign_extract alone: shift left then arithmetic shift right.
2343 [(set (match_operand:SI 0 "s_register_operand" "")
2344 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2345 (match_operand:SI 2 "const_int_operand" "")
2346 (match_operand:SI 3 "const_int_operand" "")))]
2348 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2349 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2351 HOST_WIDE_INT temp = INTVAL (operands[2]);
2353 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2354 operands[3] = GEN_INT (32 - temp);
;; sign_extract combined with a shiftable operator (arithmetic right shift
;; preserves the sign of the extracted field).
2359 [(set (match_operand:SI 0 "s_register_operand" "")
2360 (match_operator:SI 1 "shiftable_operator"
2361 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2362 (match_operand:SI 3 "const_int_operand" "")
2363 (match_operand:SI 4 "const_int_operand" ""))
2364 (match_operand:SI 5 "s_register_operand" "")]))
2365 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2367 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2370 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2373 HOST_WIDE_INT temp = INTVAL (operands[3]);
2375 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2376 operands[4] = GEN_INT (32 - temp);
2380 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2381 ;;; represented by the bitfield, then this will produce incorrect results.
2382 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2383 ;;; which have a real bit-field insert instruction, the truncation happens
2384 ;;; in the bit-field insert instruction itself. Since arm does not have a
2385 ;;; bit-field insert instruction, we would have to emit code here to truncate
2386 ;;; the value before we insert. This loses some of the advantage of having
2387 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  On Thumb-2 it prefers the BFI/BFC instructions;
;; otherwise it builds the insertion from shifts, AND/BIC and OR sequences.
;; See the ??? comment above: operand 3 is assumed to fit the field.
;; NOTE(review): many interior C lines are missing from this extract (brace
;; structure incomplete); comments only, code untouched.
2389 (define_expand "insv"
2390 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2391 (match_operand:SI 1 "general_operand" "")
2392 (match_operand:SI 2 "general_operand" ""))
2393 (match_operand:SI 3 "reg_or_int_operand" ""))]
2394 "TARGET_ARM || arm_arch_thumb2"
2397 int start_bit = INTVAL (operands[2]);
2398 int width = INTVAL (operands[1]);
2399 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2400 rtx target, subtarget;
;; Thumb-2 path: bfc for inserting zero, orr for an all-ones field with an
;; encodable immediate, bfi otherwise.
2402 if (arm_arch_thumb2)
2404 bool use_bfi = TRUE;
2406 if (GET_CODE (operands[3]) == CONST_INT)
2408 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2412 emit_insn (gen_insv_zero (operands[0], operands[1],
2417 /* See if the set can be done with a single orr instruction.  */
2418 if (val == mask && const_ok_for_arm (val << start_bit))
2424 if (GET_CODE (operands[3]) != REG)
2425 operands[3] = force_reg (SImode, operands[3]);
2427 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
;; ARM path: compute into SUBTARGET, then move into the real target.
2433 target = copy_rtx (operands[0]);
2434 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2435 subreg as the final target.  */
2436 if (GET_CODE (target) == SUBREG)
2438 subtarget = gen_reg_rtx (SImode);
2439 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2440 < GET_MODE_SIZE (SImode))
2441 target = SUBREG_REG (target);
;; Constant insertion: clear only the bits that differ, then OR in the value.
2446 if (GET_CODE (operands[3]) == CONST_INT)
2448 /* Since we are inserting a known constant, we may be able to
2449 reduce the number of bits that we have to clear so that
2450 the mask becomes simple.  */
2451 /* ??? This code does not check to see if the new mask is actually
2452 simpler.  It may not be.  */
2453 rtx op1 = gen_reg_rtx (SImode);
2454 /* ??? Truncate operand3 to fit in the bitfield.  See comment before
2455 start of this pattern.  */
2456 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2457 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2459 emit_insn (gen_andsi3 (op1, operands[0],
2460 gen_int_mode (~mask2, SImode)));
2461 emit_insn (gen_iorsi3 (subtarget, op1,
2462 gen_int_mode (op3_value << start_bit, SImode)));
;; Field at bit 0 with an awkward mask: rotate trick (3 insns).
2464 else if (start_bit == 0
2465 && !(const_ok_for_arm (mask)
2466 || const_ok_for_arm (~mask)))
2468 /* A Trick, since we are setting the bottom bits in the word,
2469 we can shift operand[3] up, operand[0] down, OR them together
2470 and rotate the result back again.  This takes 3 insns, and
2471 the third might be mergeable into another op.  */
2472 /* The shift up copes with the possibility that operand[3] is
2473 wider than the bitfield.  */
2474 rtx op0 = gen_reg_rtx (SImode);
2475 rtx op1 = gen_reg_rtx (SImode);
2477 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2478 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2479 emit_insn (gen_iorsi3 (op1, op1, op0));
2480 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
;; Field ending at bit 31: same idea without the final rotate.
2482 else if ((width + start_bit == 32)
2483 && !(const_ok_for_arm (mask)
2484 || const_ok_for_arm (~mask)))
2486 /* Similar trick, but slightly less efficient.  */
2488 rtx op0 = gen_reg_rtx (SImode);
2489 rtx op1 = gen_reg_rtx (SImode);
2491 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2492 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2493 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2494 emit_insn (gen_iorsi3 (subtarget, op1, op0));
;; General case: mask operand 3, clear the field in operand 0, OR together.
2498 rtx op0 = gen_int_mode (mask, SImode);
2499 rtx op1 = gen_reg_rtx (SImode);
2500 rtx op2 = gen_reg_rtx (SImode);
2502 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2504 rtx tmp = gen_reg_rtx (SImode);
2506 emit_insn (gen_movsi (tmp, op0));
2510 /* Mask out any bits in operand[3] that are not needed.  */
2511 emit_insn (gen_andsi3 (op1, operands[3], op0));
2513 if (GET_CODE (op0) == CONST_INT
2514 && (const_ok_for_arm (mask << start_bit)
2515 || const_ok_for_arm (~(mask << start_bit))))
2517 op0 = gen_int_mode (~(mask << start_bit), SImode);
2518 emit_insn (gen_andsi3 (op2, operands[0], op0));
2522 if (GET_CODE (op0) == CONST_INT)
2524 rtx tmp = gen_reg_rtx (SImode);
2526 emit_insn (gen_movsi (tmp, op0));
2531 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2533 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2537 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2539 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2542 if (subtarget != target)
2544 /* If TARGET is still a SUBREG, then it must be wider than a word,
2545 so we must be careful only to set the subword we were asked to.  */
2546 if (GET_CODE (target) == SUBREG)
2547 emit_move_insn (target, subtarget);
2549 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Thumb-2 bit-field clear: write zeros into a (width, start) field.
2556 (define_insn "insv_zero"
2557 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2558 (match_operand:SI 1 "const_int_operand" "M")
2559 (match_operand:SI 2 "const_int_operand" "M"))
2563 [(set_attr "length" "4")
2564 (set_attr "predicable" "yes")]
;; Thumb-2 bit-field insert from a register: bfi dst, src, start, width.
2567 (define_insn "insv_t2"
2568 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2569 (match_operand:SI 1 "const_int_operand" "M")
2570 (match_operand:SI 2 "const_int_operand" "M"))
2571 (match_operand:SI 3 "s_register_operand" "r"))]
2573 "bfi%?\t%0, %3, %2, %1"
2574 [(set_attr "length" "4")
2575 (set_attr "predicable" "yes")]
2578 ; constants for op 2 will never be given to these patterns.
;; DI AND-NOT (bic) of two register pairs; splits after reload into two SI
;; bic operations, provided the value did not end up in a Neon or iWMMXt
;; register (those have their own DI patterns).
2579 (define_insn_and_split "*anddi_notdi_di"
2580 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2581 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2582 (match_operand:DI 2 "s_register_operand" "r,0")))]
2585 "TARGET_32BIT && reload_completed
2586 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2587 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2588 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2589 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
;; Decompose all three DI operands into SI high/low halves.
2592 operands[3] = gen_highpart (SImode, operands[0]);
2593 operands[0] = gen_lowpart (SImode, operands[0]);
2594 operands[4] = gen_highpart (SImode, operands[1]);
2595 operands[1] = gen_lowpart (SImode, operands[1]);
2596 operands[5] = gen_highpart (SImode, operands[2]);
2597 operands[2] = gen_lowpart (SImode, operands[2]);
2599 [(set_attr "length" "8")
2600 (set_attr "predicable" "yes")]
;; DI AND with the complement of a zero-extended SI value: only the low word
;; needs a bic; (not (zero_extend x)) has an all-ones high word, so the high
;; word of operand 1 is copied through unchanged.
2603 (define_insn_and_split "*anddi_notzesidi_di"
2604 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2605 (and:DI (not:DI (zero_extend:DI
2606 (match_operand:SI 2 "s_register_operand" "r,r")))
2607 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2610 bic%?\\t%Q0, %Q1, %2
2612 ; (not (zero_extend ...)) allows us to just copy the high word from
2613 ; operand1 to operand0.
2616 && operands[0] != operands[1]"
2617 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2618 (set (match_dup 3) (match_dup 4))]
2621 operands[3] = gen_highpart (SImode, operands[0]);
2622 operands[0] = gen_lowpart (SImode, operands[0]);
2623 operands[4] = gen_highpart (SImode, operands[1]);
2624 operands[1] = gen_lowpart (SImode, operands[1]);
2626 [(set_attr "length" "4,8")
2627 (set_attr "predicable" "yes")]
;; Sign-extended variant: the high word is bic'ed with the sign bits of
;; operand 2 (arithmetic shift right by 31).
2630 (define_insn_and_split "*anddi_notsesidi_di"
2631 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2632 (and:DI (not:DI (sign_extend:DI
2633 (match_operand:SI 2 "s_register_operand" "r,r")))
2634 (match_operand:DI 1 "s_register_operand" "0,r")))]
2637 "TARGET_32BIT && reload_completed"
2638 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2639 (set (match_dup 3) (and:SI (not:SI
2640 (ashiftrt:SI (match_dup 2) (const_int 31)))
2644 operands[3] = gen_highpart (SImode, operands[0]);
2645 operands[0] = gen_lowpart (SImode, operands[0]);
2646 operands[4] = gen_highpart (SImode, operands[1]);
2647 operands[1] = gen_lowpart (SImode, operands[1]);
2649 [(set_attr "length" "8")
2650 (set_attr "predicable" "yes")]
;; SI AND-NOT: maps directly onto the BIC instruction.
2653 (define_insn "andsi_notsi_si"
2654 [(set (match_operand:SI 0 "s_register_operand" "=r")
2655 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2656 (match_operand:SI 1 "s_register_operand" "r")))]
2658 "bic%?\\t%0, %1, %2"
2659 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC (destination tied to operand 2).
2662 (define_insn "thumb1_bicsi3"
2663 [(set (match_operand:SI 0 "register_operand" "=l")
2664 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2665 (match_operand:SI 2 "register_operand" "0")))]
2668 [(set_attr "length" "2")
2669 (set_attr "conds" "set")])
;; BIC with a shifted second operand (register or immediate shift count);
;; the type attribute distinguishes the two for scheduling.
2671 (define_insn "andsi_not_shiftsi_si"
2672 [(set (match_operand:SI 0 "s_register_operand" "=r")
2673 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2674 [(match_operand:SI 2 "s_register_operand" "r")
2675 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2676 (match_operand:SI 1 "s_register_operand" "r")))]
2678 "bic%?\\t%0, %1, %2%S4"
2679 [(set_attr "predicable" "yes")
2680 (set_attr "shift" "2")
2681 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2682 (const_string "alu_shift")
2683 (const_string "alu_shift_reg")))]
;; Flag-setting BIC keeping the result.
2686 (define_insn "*andsi_notsi_si_compare0"
2687 [(set (reg:CC_NOOV CC_REGNUM)
2689 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2690 (match_operand:SI 1 "s_register_operand" "r"))
2692 (set (match_operand:SI 0 "s_register_operand" "=r")
2693 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2695 "bic%.\\t%0, %1, %2"
2696 [(set_attr "conds" "set")]
;; Flag-setting BIC discarding the result (scratch destination).
2699 (define_insn "*andsi_notsi_si_compare0_scratch"
2700 [(set (reg:CC_NOOV CC_REGNUM)
2702 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2703 (match_operand:SI 1 "s_register_operand" "r"))
2705 (clobber (match_scratch:SI 0 "=r"))]
2707 "bic%.\\t%0, %1, %2"
2708 [(set_attr "conds" "set")]
;; 64-bit inclusive-OR patterns; structure parallels the anddi3 family above.
;; NOTE(review): interior lines are missing from this extract.
2711 (define_expand "iordi3"
2712 [(set (match_operand:DI 0 "s_register_operand" "")
2713 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2714 (match_operand:DI 2 "neon_logic_op2" "")))]
;; Core-register DI OR (two SI orr instructions).
2719 (define_insn "*iordi3_insn"
2720 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2721 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2722 (match_operand:DI 2 "s_register_operand" "r,r")))]
2723 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2725 [(set_attr "length" "8")
2726 (set_attr "predicable" "yes")]
;; OR with a zero-extended SI value: orr low words; high word passes through.
2729 (define_insn "*iordi_zesidi_di"
2730 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2731 (ior:DI (zero_extend:DI
2732 (match_operand:SI 2 "s_register_operand" "r,r"))
2733 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2736 orr%?\\t%Q0, %Q1, %2
2738 [(set_attr "length" "4,8")
2739 (set_attr "predicable" "yes")]
;; OR with a sign-extended SI value (sign bits OR into the high word).
2742 (define_insn "*iordi_sesidi_di"
2743 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2744 (ior:DI (sign_extend:DI
2745 (match_operand:SI 2 "s_register_operand" "r,r"))
2746 (match_operand:DI 1 "s_register_operand" "0,r")))]
2749 [(set_attr "length" "8")
2750 (set_attr "predicable" "yes")]
;; 32-bit inclusive-OR expander: constants are broken into encodable pieces
;; via arm_split_constant; Thumb-1 forces the constant into a register.
;; NOTE(review): interior lines are missing from this extract.
2753 (define_expand "iorsi3"
2754 [(set (match_operand:SI 0 "s_register_operand" "")
2755 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2756 (match_operand:SI 2 "reg_or_int_operand" "")))]
2759 if (GET_CODE (operands[2]) == CONST_INT)
2763 arm_split_constant (IOR, SImode, NULL_RTX,
2764 INTVAL (operands[2]), operands[0], operands[1],
2765 optimize && can_create_pseudo_p ());
2768 else /* TARGET_THUMB1 */
2770 rtx tmp = force_reg (SImode, operands[2]);
2771 if (rtx_equal_p (operands[0], operands[1]))
2775 operands[2] = operands[1];
;; SI OR: orr with an immediate, orn (Thumb-2 only) with the complement, or
;; a post-reload constant split.
2783 (define_insn_and_split "*iorsi3_insn"
2784 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2785 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2786 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2790 orn%?\\t%0, %1, #%B2
2793 && GET_CODE (operands[2]) == CONST_INT
2794 && !(const_ok_for_arm (INTVAL (operands[2]))
2795 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2796 [(clobber (const_int 0))]
2798 arm_split_constant (IOR, SImode, curr_insn,
2799 INTVAL (operands[2]), operands[0], operands[1], 0);
2802 [(set_attr "length" "4,4,16")
2803 (set_attr "arch" "32,t2,32")
2804 (set_attr "predicable" "yes")])
;; Thumb-1 two-register OR.
2806 (define_insn "*thumb1_iorsi3_insn"
2807 [(set (match_operand:SI 0 "register_operand" "=l")
2808 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2809 (match_operand:SI 2 "register_operand" "l")))]
2812 [(set_attr "length" "2")
2813 (set_attr "conds" "set")])
;; NOTE(review): peephole2 that loads an awkward OR constant (whose
;; complement IS encodable) into a scratch register first.  The opening
;; define_peephole2 line is not visible in this extract.
2816 [(match_scratch:SI 3 "r")
2817 (set (match_operand:SI 0 "arm_general_register_operand" "")
2818 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2819 (match_operand:SI 2 "const_int_operand" "")))]
2821 && !const_ok_for_arm (INTVAL (operands[2]))
2822 && const_ok_for_arm (~INTVAL (operands[2]))"
2823 [(set (match_dup 3) (match_dup 2))
2824 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; Flag-setting OR keeping the result (orrs).
2828 (define_insn "*iorsi3_compare0"
2829 [(set (reg:CC_NOOV CC_REGNUM)
2830 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2831 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2833 (set (match_operand:SI 0 "s_register_operand" "=r")
2834 (ior:SI (match_dup 1) (match_dup 2)))]
2836 "orr%.\\t%0, %1, %2"
2837 [(set_attr "conds" "set")]
;; Flag-setting OR discarding the result (scratch destination).
2840 (define_insn "*iorsi3_compare0_scratch"
2841 [(set (reg:CC_NOOV CC_REGNUM)
2842 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2843 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2845 (clobber (match_scratch:SI 0 "=r"))]
2847 "orr%.\\t%0, %1, %2"
2848 [(set_attr "conds" "set")]
;; 64-bit exclusive-OR patterns; same shape as the iordi3 family above.
;; NOTE(review): interior lines are missing from this extract.
2851 (define_expand "xordi3"
2852 [(set (match_operand:DI 0 "s_register_operand" "")
2853 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2854 (match_operand:DI 2 "s_register_operand" "")))]
;; Core-register DI XOR (two SI eor instructions).
2859 (define_insn "*xordi3_insn"
2860 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2861 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2862 (match_operand:DI 2 "s_register_operand" "r,r")))]
2863 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2865 [(set_attr "length" "8")
2866 (set_attr "predicable" "yes")]
;; XOR with a zero-extended SI value: eor low words; high word unchanged.
2869 (define_insn "*xordi_zesidi_di"
2870 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2871 (xor:DI (zero_extend:DI
2872 (match_operand:SI 2 "s_register_operand" "r,r"))
2873 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2876 eor%?\\t%Q0, %Q1, %2
2878 [(set_attr "length" "4,8")
2879 (set_attr "predicable" "yes")]
;; XOR with a sign-extended SI value (sign bits XOR into the high word).
2882 (define_insn "*xordi_sesidi_di"
2883 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2884 (xor:DI (sign_extend:DI
2885 (match_operand:SI 2 "s_register_operand" "r,r"))
2886 (match_operand:DI 1 "s_register_operand" "0,r")))]
2889 [(set_attr "length" "8")
2890 (set_attr "predicable" "yes")]
;; 32-bit exclusive-OR expander and insns; same shape as the iorsi3 family.
;; NOTE(review): interior lines are missing from this extract.
2893 (define_expand "xorsi3"
2894 [(set (match_operand:SI 0 "s_register_operand" "")
2895 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2896 (match_operand:SI 2 "reg_or_int_operand" "")))]
2898 "if (GET_CODE (operands[2]) == CONST_INT)
2902 arm_split_constant (XOR, SImode, NULL_RTX,
2903 INTVAL (operands[2]), operands[0], operands[1],
2904 optimize && can_create_pseudo_p ());
2907 else /* TARGET_THUMB1 */
2909 rtx tmp = force_reg (SImode, operands[2]);
2910 if (rtx_equal_p (operands[0], operands[1]))
2914 operands[2] = operands[1];
;; SI XOR with a register or encodable immediate.
2921 (define_insn "*arm_xorsi3"
2922 [(set (match_operand:SI 0 "s_register_operand" "=r")
2923 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2924 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2926 "eor%?\\t%0, %1, %2"
2927 [(set_attr "predicable" "yes")]
;; Thumb-1 two-register XOR.
2930 (define_insn "*thumb1_xorsi3_insn"
2931 [(set (match_operand:SI 0 "register_operand" "=l")
2932 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2933 (match_operand:SI 2 "register_operand" "l")))]
2936 [(set_attr "length" "2")
2937 (set_attr "conds" "set")])
;; Flag-setting XOR keeping the result (eors).
2939 (define_insn "*xorsi3_compare0"
2940 [(set (reg:CC_NOOV CC_REGNUM)
2941 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2942 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2944 (set (match_operand:SI 0 "s_register_operand" "=r")
2945 (xor:SI (match_dup 1) (match_dup 2)))]
2947 "eor%.\\t%0, %1, %2"
2948 [(set_attr "conds" "set")]
;; Flag-setting XOR discarding the result (teq).
2951 (define_insn "*xorsi3_compare0_scratch"
2952 [(set (reg:CC_NOOV CC_REGNUM)
2953 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2954 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2958 [(set_attr "conds" "set")]
2961 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2962 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; NOTE(review): De Morgan split described in the comment above: rewrites
;; (ior (and (not A) (not B)) C) as (not (and (ior A B) (not C))) so the
;; final NOT may merge into a following insn.  The opening define_split line
;; is not visible in this extract.
2966 [(set (match_operand:SI 0 "s_register_operand" "")
2967 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2968 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2969 (match_operand:SI 3 "arm_rhs_operand" "")))
2970 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2972 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2973 (not:SI (match_dup 3))))
2974 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (and (ior A B) (not C)) as a single two-instruction pattern: orr then bic.
2978 (define_insn "*andsi_iorsi3_notsi"
2979 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2980 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2981 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2982 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2984 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2985 [(set_attr "length" "8")
2986 (set_attr "ce_count" "2")
2987 (set_attr "predicable" "yes")]
2990 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2991 ; insns are available?
;; NOTE(review): four symmetric splits (zero/sign extract, operand order
;; swapped) that rewrite a bitfield extract combined with a matching shift
;; expression into explicit shift pairs.  The opening define_split lines are
;; not visible in this extract; comments only, code untouched.
;; zero_extract on the left of the outer operator.
2993 [(set (match_operand:SI 0 "s_register_operand" "")
2994 (match_operator:SI 1 "logical_binary_operator"
2995 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2996 (match_operand:SI 3 "const_int_operand" "")
2997 (match_operand:SI 4 "const_int_operand" ""))
2998 (match_operator:SI 9 "logical_binary_operator"
2999 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3000 (match_operand:SI 6 "const_int_operand" ""))
3001 (match_operand:SI 7 "s_register_operand" "")])]))
3002 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3004 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3005 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3008 [(ashift:SI (match_dup 2) (match_dup 4))
3012 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3015 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; zero_extract on the right of the outer operator.
3019 [(set (match_operand:SI 0 "s_register_operand" "")
3020 (match_operator:SI 1 "logical_binary_operator"
3021 [(match_operator:SI 9 "logical_binary_operator"
3022 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3023 (match_operand:SI 6 "const_int_operand" ""))
3024 (match_operand:SI 7 "s_register_operand" "")])
3025 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3026 (match_operand:SI 3 "const_int_operand" "")
3027 (match_operand:SI 4 "const_int_operand" ""))]))
3028 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3030 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3031 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3034 [(ashift:SI (match_dup 2) (match_dup 4))
3038 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3041 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; sign_extract on the left (uses ashiftrt to preserve the sign).
3045 [(set (match_operand:SI 0 "s_register_operand" "")
3046 (match_operator:SI 1 "logical_binary_operator"
3047 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3048 (match_operand:SI 3 "const_int_operand" "")
3049 (match_operand:SI 4 "const_int_operand" ""))
3050 (match_operator:SI 9 "logical_binary_operator"
3051 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3052 (match_operand:SI 6 "const_int_operand" ""))
3053 (match_operand:SI 7 "s_register_operand" "")])]))
3054 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3056 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3057 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3060 [(ashift:SI (match_dup 2) (match_dup 4))
3064 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3067 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; sign_extract on the right.
3071 [(set (match_operand:SI 0 "s_register_operand" "")
3072 (match_operator:SI 1 "logical_binary_operator"
3073 [(match_operator:SI 9 "logical_binary_operator"
3074 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3075 (match_operand:SI 6 "const_int_operand" ""))
3076 (match_operand:SI 7 "s_register_operand" "")])
3077 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3078 (match_operand:SI 3 "const_int_operand" "")
3079 (match_operand:SI 4 "const_int_operand" ""))]))
3080 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3082 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3083 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3086 [(ashift:SI (match_dup 2) (match_dup 4))
3090 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3093 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3097 ;; Minimum and maximum insns
;; Signed max expander.  When op2 is 0 or -1 the operation can be done
;; with a single data-processing insn (see *smax_0 / *smax_m1 below), so
;; the CC clobber is dropped in that case.
3099 (define_expand "smaxsi3"
3101 (set (match_operand:SI 0 "s_register_operand" "")
3102 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3103 (match_operand:SI 2 "arm_rhs_operand" "")))
3104 (clobber (reg:CC CC_REGNUM))])]
3107 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3109 /* No need for a clobber of the condition code register here. */
3110 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3111 gen_rtx_SMAX (SImode, operands[1],
;; smax(x, 0): clear x where its own sign bit (asr #31 mask) is set.
3117 (define_insn "*smax_0"
3118 [(set (match_operand:SI 0 "s_register_operand" "=r")
3119 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3122 "bic%?\\t%0, %1, %1, asr #31"
3123 [(set_attr "predicable" "yes")]
;; smax(x, -1): OR in the sign-bit mask, forcing negative values to -1.
3126 (define_insn "*smax_m1"
3127 [(set (match_operand:SI 0 "s_register_operand" "=r")
3128 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3131 "orr%?\\t%0, %1, %1, asr #31"
3132 [(set_attr "predicable" "yes")]
;; General signed max: compare then conditional move(s); clobbers CC.
3135 (define_insn "*arm_smax_insn"
3136 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3137 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3138 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3139 (clobber (reg:CC CC_REGNUM))]
3142 cmp\\t%1, %2\;movlt\\t%0, %2
3143 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3144 [(set_attr "conds" "clob")
3145 (set_attr "length" "8,12")]
;; Signed min expander; the op2 == 0 case maps to the single-insn *smin_0.
3148 (define_expand "sminsi3"
3150 (set (match_operand:SI 0 "s_register_operand" "")
3151 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3152 (match_operand:SI 2 "arm_rhs_operand" "")))
3153 (clobber (reg:CC CC_REGNUM))])]
3156 if (operands[2] == const0_rtx)
3158 /* No need for a clobber of the condition code register here. */
3159 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3160 gen_rtx_SMIN (SImode, operands[1],
;; smin(x, 0): AND with the sign-bit mask keeps negatives, zeroes positives.
3166 (define_insn "*smin_0"
3167 [(set (match_operand:SI 0 "s_register_operand" "=r")
3168 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3171 "and%?\\t%0, %1, %1, asr #31"
3172 [(set_attr "predicable" "yes")]
;; General signed min: compare + conditional move(s); clobbers CC.
3175 (define_insn "*arm_smin_insn"
3176 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3177 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3178 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3179 (clobber (reg:CC CC_REGNUM))]
3182 cmp\\t%1, %2\;movge\\t%0, %2
3183 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3184 [(set_attr "conds" "clob")
3185 (set_attr "length" "8,12")]
;; Unsigned max/min: no single-insn shortcut, always via compare + cmov,
;; using the unsigned condition codes (cc/cs).
3188 (define_expand "umaxsi3"
3190 (set (match_operand:SI 0 "s_register_operand" "")
3191 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3192 (match_operand:SI 2 "arm_rhs_operand" "")))
3193 (clobber (reg:CC CC_REGNUM))])]
3198 (define_insn "*arm_umaxsi3"
3199 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3200 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3201 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3202 (clobber (reg:CC CC_REGNUM))]
3205 cmp\\t%1, %2\;movcc\\t%0, %2
3206 cmp\\t%1, %2\;movcs\\t%0, %1
3207 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3208 [(set_attr "conds" "clob")
3209 (set_attr "length" "8,8,12")]
3212 (define_expand "uminsi3"
3214 (set (match_operand:SI 0 "s_register_operand" "")
3215 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3216 (match_operand:SI 2 "arm_rhs_operand" "")))
3217 (clobber (reg:CC CC_REGNUM))])]
3222 (define_insn "*arm_uminsi3"
3223 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3224 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3225 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3226 (clobber (reg:CC CC_REGNUM))]
3229 cmp\\t%1, %2\;movcs\\t%0, %2
3230 cmp\\t%1, %2\;movcc\\t%0, %1
3231 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3232 [(set_attr "conds" "clob")
3233 (set_attr "length" "8,8,12")]
;; Store the min/max of two registers directly to memory: compare, then a
;; pair of conditional stores (Thumb-2 needs an IT block first — see the
;; conditional "ite" emission below).
3236 (define_insn "*store_minmaxsi"
3237 [(set (match_operand:SI 0 "memory_operand" "=m")
3238 (match_operator:SI 3 "minmax_operator"
3239 [(match_operand:SI 1 "s_register_operand" "r")
3240 (match_operand:SI 2 "s_register_operand" "r")]))
3241 (clobber (reg:CC CC_REGNUM))]
;; Rebuild op3 with explicit operands so %d3/%D3 pick the right condition.
3244 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3245 operands[1], operands[2]);
3246 output_asm_insn (\"cmp\\t%1, %2\", operands);
3248 output_asm_insn (\"ite\t%d3\", operands);
3249 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3250 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3253 [(set_attr "conds" "clob")
3254 (set (attr "length")
3255 (if_then_else (eq_attr "is_thumb" "yes")
3258 (set_attr "type" "store1")]
3261 ; Reject the frame pointer in operand[1], since reloading this after
3262 ; it has been eliminated can cause carnage.
;; Combine a min/max with a following shiftable arithmetic op: compare,
;; then two conditionally-executed ALU ops selecting op2 or op3.
3263 (define_insn "*minmax_arithsi"
3264 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3265 (match_operator:SI 4 "shiftable_operator"
3266 [(match_operator:SI 5 "minmax_operator"
3267 [(match_operand:SI 2 "s_register_operand" "r,r")
3268 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3269 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3270 (clobber (reg:CC CC_REGNUM))]
3271 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3274 enum rtx_code code = GET_CODE (operands[4]);
;; NOTE(review): the visible test gates some special case on alternative 0
;; with op3 == 0 and a commutative-ish opcode set; the branch bodies are
;; among the missing lines, so the exact fast path cannot be confirmed here.
3277 if (which_alternative != 0 || operands[3] != const0_rtx
3278 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3283 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3284 operands[2], operands[3]);
3285 output_asm_insn (\"cmp\\t%2, %3\", operands);
3289 output_asm_insn (\"ite\\t%d5\", operands);
3291 output_asm_insn (\"it\\t%d5\", operands);
3293 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3295 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3298 [(set_attr "conds" "clob")
3299 (set (attr "length")
3300 (if_then_else (eq_attr "is_thumb" "yes")
3306 ;; Shift and rotation insns
;; DImode left-shift expander.  A shift by exactly 1 uses the dedicated
;; two-insn pattern below; other constant/register counts fall through to
;; the (not visible here) generic expansion, avoiding iwmmxt/Maverick regs.
3308 (define_expand "ashldi3"
3309 [(set (match_operand:DI 0 "s_register_operand" "")
3310 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3311 (match_operand:SI 2 "reg_or_int_operand" "")))]
3314 if (GET_CODE (operands[2]) == CONST_INT)
3316 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3318 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3321 /* Ideally we shouldn't fail here if we could know that operands[1]
3322 ends up already living in an iwmmxt register. Otherwise it's
3323 cheaper to have the alternate code being generated than moving
3324 values to iwmmxt regs and back. */
3327 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DI << 1: shift the low word setting carry, then add-with-carry doubles
;; the high word and folds the carried-out bit in.
3332 (define_insn "arm_ashldi3_1bit"
3333 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3334 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3336 (clobber (reg:CC CC_REGNUM))]
3338 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3339 [(set_attr "conds" "clob")
3340 (set_attr "length" "8")]
;; SImode left shift: counts > 31 are folded to a constant 0 result.
3343 (define_expand "ashlsi3"
3344 [(set (match_operand:SI 0 "s_register_operand" "")
3345 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3346 (match_operand:SI 2 "arm_rhs_operand" "")))]
3349 if (GET_CODE (operands[2]) == CONST_INT
3350 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3352 emit_insn (gen_movsi (operands[0], const0_rtx));
3358 (define_insn "*thumb1_ashlsi3"
3359 [(set (match_operand:SI 0 "register_operand" "=l,l")
3360 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3361 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3364 [(set_attr "length" "2")
3365 (set_attr "conds" "set")])
;; DImode arithmetic right shift; same structure as ashldi3 above.
3367 (define_expand "ashrdi3"
3368 [(set (match_operand:DI 0 "s_register_operand" "")
3369 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3370 (match_operand:SI 2 "reg_or_int_operand" "")))]
3373 if (GET_CODE (operands[2]) == CONST_INT)
3375 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3377 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3380 /* Ideally we shouldn't fail here if we could know that operands[1]
3381 ends up already living in an iwmmxt register. Otherwise it's
3382 cheaper to have the alternate code being generated than moving
3383 values to iwmmxt regs and back. */
3386 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (arithmetic): shift high word (sign preserved, bit 0 -> carry),
;; then rotate the low word right through carry (rrx).
3391 (define_insn "arm_ashrdi3_1bit"
3392 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3393 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3395 (clobber (reg:CC CC_REGNUM))]
3397 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3398 [(set_attr "conds" "clob")
3399 (set_attr "insn" "mov")
3400 (set_attr "length" "8")]
;; SImode arithmetic right shift: counts > 31 are clamped to 31, which
;; yields the same all-sign-bits result.
3403 (define_expand "ashrsi3"
3404 [(set (match_operand:SI 0 "s_register_operand" "")
3405 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3406 (match_operand:SI 2 "arm_rhs_operand" "")))]
3409 if (GET_CODE (operands[2]) == CONST_INT
3410 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3411 operands[2] = GEN_INT (31);
3415 (define_insn "*thumb1_ashrsi3"
3416 [(set (match_operand:SI 0 "register_operand" "=l,l")
3417 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3418 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3421 [(set_attr "length" "2")
3422 (set_attr "conds" "set")])
;; DImode logical right shift; same structure as ashrdi3 above.
3424 (define_expand "lshrdi3"
3425 [(set (match_operand:DI 0 "s_register_operand" "")
3426 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3427 (match_operand:SI 2 "reg_or_int_operand" "")))]
3430 if (GET_CODE (operands[2]) == CONST_INT)
3432 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3434 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3437 /* Ideally we shouldn't fail here if we could know that operands[1]
3438 ends up already living in an iwmmxt register. Otherwise it's
3439 cheaper to have the alternate code being generated than moving
3440 values to iwmmxt regs and back. */
3443 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (logical): lsr the high word (bit 0 -> carry), rrx the low word.
3448 (define_insn "arm_lshrdi3_1bit"
3449 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3450 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3452 (clobber (reg:CC CC_REGNUM))]
3454 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3455 [(set_attr "conds" "clob")
3456 (set_attr "insn" "mov")
3457 (set_attr "length" "8")]
;; SImode logical right shift: counts > 31 are folded to a constant 0.
3460 (define_expand "lshrsi3"
3461 [(set (match_operand:SI 0 "s_register_operand" "")
3462 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3463 (match_operand:SI 2 "arm_rhs_operand" "")))]
3466 if (GET_CODE (operands[2]) == CONST_INT
3467 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3469 emit_insn (gen_movsi (operands[0], const0_rtx));
3475 (define_insn "*thumb1_lshrsi3"
3476 [(set (match_operand:SI 0 "register_operand" "=l,l")
3477 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3478 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3481 [(set_attr "length" "2")
3482 (set_attr "conds" "set")])
;; Rotate left is implemented as rotate right by (32 - n): ARM has no
;; rotate-left instruction.  Variable counts go through a subtraction
;; into a fresh register.
3484 (define_expand "rotlsi3"
3485 [(set (match_operand:SI 0 "s_register_operand" "")
3486 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3487 (match_operand:SI 2 "reg_or_int_operand" "")))]
3490 if (GET_CODE (operands[2]) == CONST_INT)
3491 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3494 rtx reg = gen_reg_rtx (SImode);
3495 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant counts are reduced mod 32; Thumb-1 needs the
;; count in a register.
3501 (define_expand "rotrsi3"
3502 [(set (match_operand:SI 0 "s_register_operand" "")
3503 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3504 (match_operand:SI 2 "arm_rhs_operand" "")))]
3509 if (GET_CODE (operands[2]) == CONST_INT
3510 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3511 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3513 else /* TARGET_THUMB1 */
3515 if (GET_CODE (operands [2]) == CONST_INT)
3516 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 register-count rotate right; two-byte encoding, op0 tied to op1.
3521 (define_insn "*thumb1_rotrsi3"
3522 [(set (match_operand:SI 0 "register_operand" "=l")
3523 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3524 (match_operand:SI 2 "register_operand" "l")))]
3527 [(set_attr "length" "2")]
;; Generic SImode shift: the actual mnemonic is produced by
;; arm_output_shift (second argument 0 = no flag setting).
3530 (define_insn "*arm_shiftsi3"
3531 [(set (match_operand:SI 0 "s_register_operand" "=r")
3532 (match_operator:SI 3 "shift_operator"
3533 [(match_operand:SI 1 "s_register_operand" "r")
3534 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3536 "* return arm_output_shift(operands, 0);"
3537 [(set_attr "predicable" "yes")
3538 (set_attr "shift" "1")
3539 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3540 (const_string "alu_shift")
3541 (const_string "alu_shift_reg")))]
;; Shift that also sets the condition codes (arm_output_shift with 1).
3544 (define_insn "*shiftsi3_compare0"
3545 [(set (reg:CC_NOOV CC_REGNUM)
3546 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3547 [(match_operand:SI 1 "s_register_operand" "r")
3548 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3550 (set (match_operand:SI 0 "s_register_operand" "=r")
3551 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3553 "* return arm_output_shift(operands, 1);"
3554 [(set_attr "conds" "set")
3555 (set_attr "shift" "1")
3556 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3557 (const_string "alu_shift")
3558 (const_string "alu_shift_reg")))]
;; As above but the shifted value itself is discarded (scratch destination);
;; only the flags are wanted.
3561 (define_insn "*shiftsi3_compare0_scratch"
3562 [(set (reg:CC_NOOV CC_REGNUM)
3563 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3564 [(match_operand:SI 1 "s_register_operand" "r")
3565 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3567 (clobber (match_scratch:SI 0 "=r"))]
3569 "* return arm_output_shift(operands, 1);"
3570 [(set_attr "conds" "set")
3571 (set_attr "shift" "1")]
;; MVN with a shifted operand: NOT of a shifted register in one insn.
3574 (define_insn "*not_shiftsi"
3575 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3576 (not:SI (match_operator:SI 3 "shift_operator"
3577 [(match_operand:SI 1 "s_register_operand" "r,r")
3578 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3581 [(set_attr "predicable" "yes")
3582 (set_attr "shift" "1")
3583 (set_attr "insn" "mvn")
3584 (set_attr "arch" "32,a")
3585 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVNS variant: same operation, also setting the condition codes.
3587 (define_insn "*not_shiftsi_compare0"
3588 [(set (reg:CC_NOOV CC_REGNUM)
3590 (not:SI (match_operator:SI 3 "shift_operator"
3591 [(match_operand:SI 1 "s_register_operand" "r,r")
3592 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3594 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3595 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3598 [(set_attr "conds" "set")
3599 (set_attr "shift" "1")
3600 (set_attr "insn" "mvn")
3601 (set_attr "arch" "32,a")
3602 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flags-only MVNS: result discarded into a scratch register.
3604 (define_insn "*not_shiftsi_compare0_scratch"
3605 [(set (reg:CC_NOOV CC_REGNUM)
3607 (not:SI (match_operator:SI 3 "shift_operator"
3608 [(match_operand:SI 1 "s_register_operand" "r,r")
3609 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3611 (clobber (match_scratch:SI 0 "=r,r"))]
3614 [(set_attr "conds" "set")
3615 (set_attr "shift" "1")
3616 (set_attr "insn" "mvn")
3617 (set_attr "arch" "32,a")
3618 (set_attr "type" "alu_shift,alu_shift_reg")])
3620 ;; We don't really have extzv, but defining this using shifts helps
3621 ;; to reduce register pressure later on.
;; Bit-field extract (unsigned) expander: on Thumb-2 use ubfx via
;; extzv_t2; otherwise synthesise with a left shift followed by a
;; logical right shift.  Operands: 2 = width, 3 = bit position.
3623 (define_expand "extzv"
3625 (ashift:SI (match_operand:SI 1 "register_operand" "")
3626 (match_operand:SI 2 "const_int_operand" "")))
3627 (set (match_operand:SI 0 "register_operand" "")
3628 (lshiftrt:SI (match_dup 4)
3629 (match_operand:SI 3 "const_int_operand" "")))]
3630 "TARGET_THUMB1 || arm_arch_thumb2"
3633 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3634 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3636 if (arm_arch_thumb2)
3638 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3643 operands[3] = GEN_INT (rshift);
3647 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3651 operands[2] = GEN_INT (lshift);
3652 operands[4] = gen_reg_rtx (SImode);
;; Signed bit-field extract: single sbfx insn (width %2 at lsb %3).
3657 [(set (match_operand:SI 0 "s_register_operand" "=r")
3658 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3659 (match_operand:SI 2 "const_int_operand" "M")
3660 (match_operand:SI 3 "const_int_operand" "M")))]
3662 "sbfx%?\t%0, %1, %3, %2"
3663 [(set_attr "length" "4")
3664 (set_attr "predicable" "yes")]
;; Unsigned bit-field extract: single ubfx insn (Thumb-2 path of extzv).
3667 (define_insn "extzv_t2"
3668 [(set (match_operand:SI 0 "s_register_operand" "=r")
3669 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3670 (match_operand:SI 2 "const_int_operand" "M")
3671 (match_operand:SI 3 "const_int_operand" "M")))]
3673 "ubfx%?\t%0, %1, %3, %2"
3674 [(set_attr "length" "4")
3675 (set_attr "predicable" "yes")]
3679 ;; Unary arithmetic insns
;; 64-bit negate: expander wraps the insn with its CC clobber.
3681 (define_expand "negdi2"
3683 [(set (match_operand:DI 0 "s_register_operand" "")
3684 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3685 (clobber (reg:CC CC_REGNUM))])]
3690 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3691 ;; The first alternative allows the common case of a *full* overlap.
;; rsbs/rsc pair: negate low word setting carry, then reverse-subtract
;; the high word with carry to complete the 64-bit two's complement.
3692 (define_insn "*arm_negdi2"
3693 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3694 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3695 (clobber (reg:CC CC_REGNUM))]
3697 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3698 [(set_attr "conds" "clob")
3699 (set_attr "length" "8")]
;; Thumb-1 64-bit negate: 0 - op1 via neg + subtract-with-carry.
3702 (define_insn "*thumb1_negdi2"
3703 [(set (match_operand:DI 0 "register_operand" "=&l")
3704 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3705 (clobber (reg:CC CC_REGNUM))]
3707 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3708 [(set_attr "length" "6")]
;; 32-bit negate expander and its ARM (rsb) / Thumb-1 implementations.
3711 (define_expand "negsi2"
3712 [(set (match_operand:SI 0 "s_register_operand" "")
3713 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3718 (define_insn "*arm_negsi2"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r")
3720 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3722 "rsb%?\\t%0, %1, #0"
3723 [(set_attr "predicable" "yes")]
3726 (define_insn "*thumb1_negsi2"
3727 [(set (match_operand:SI 0 "register_operand" "=l")
3728 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3731 [(set_attr "length" "2")]
;; FP negation expanders; gated on hard-float with FPA or VFP
;; (double needs TARGET_VFP_DOUBLE).
3734 (define_expand "negsf2"
3735 [(set (match_operand:SF 0 "s_register_operand" "")
3736 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3737 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3741 (define_expand "negdf2"
3742 [(set (match_operand:DF 0 "s_register_operand" "")
3743 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3744 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3747 ;; abssi2 doesn't really clobber the condition codes if a different register
3748 ;; is being set. To keep things simple, assume during rtl manipulations that
3749 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: the clobber is a real CC clobber or just a scratch,
;; depending on a condition whose line is missing from this extraction.
3752 (define_expand "abssi2"
3754 [(set (match_operand:SI 0 "s_register_operand" "")
3755 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3756 (clobber (match_dup 2))])]
3760 operands[2] = gen_rtx_SCRATCH (SImode);
3762 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; ARM abs: either cmp + conditional rsb (clobbers CC), or the branch-free
;; eor/sub sequence with the sign mask (asr #31) which leaves CC alone.
3765 (define_insn "*arm_abssi2"
3766 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3767 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3768 (clobber (reg:CC CC_REGNUM))]
3771 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3772 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3773 [(set_attr "conds" "clob,*")
3774 (set_attr "shift" "1")
3775 ;; predicable can't be set based on the variant, so left as no
3776 (set_attr "length" "8")]
;; Thumb-1 abs: split after reload into asr/add/xor (mask-based abs).
3779 (define_insn_and_split "*thumb1_abssi2"
3780 [(set (match_operand:SI 0 "s_register_operand" "=l")
3781 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3782 (clobber (match_scratch:SI 2 "=&l"))]
3785 "TARGET_THUMB1 && reload_completed"
3786 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3787 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3788 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3790 [(set_attr "length" "6")]
;; -abs(x): same two strategies as *arm_abssi2 with inverted condition
;; (rsbgt) / reversed subtraction (rsb).
3793 (define_insn "*arm_neg_abssi2"
3794 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3795 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3796 (clobber (reg:CC CC_REGNUM))]
3799 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3800 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3801 [(set_attr "conds" "clob,*")
3802 (set_attr "shift" "1")
3803 ;; predicable can't be set based on the variant, so left as no
3804 (set_attr "length" "8")]
;; Thumb-1 -abs(x): split into asr/minus/xor after reload.
3807 (define_insn_and_split "*thumb1_neg_abssi2"
3808 [(set (match_operand:SI 0 "s_register_operand" "=l")
3809 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3810 (clobber (match_scratch:SI 2 "=&l"))]
3813 "TARGET_THUMB1 && reload_completed"
3814 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3815 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3816 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3818 [(set_attr "length" "6")]
;; FP abs/sqrt expanders; the actual insns live in the FPA/VFP files.
3821 (define_expand "abssf2"
3822 [(set (match_operand:SF 0 "s_register_operand" "")
3823 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3824 "TARGET_32BIT && TARGET_HARD_FLOAT"
3827 (define_expand "absdf2"
3828 [(set (match_operand:DF 0 "s_register_operand" "")
3829 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3830 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3833 (define_expand "sqrtsf2"
3834 [(set (match_operand:SF 0 "s_register_operand" "")
3835 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3836 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3839 (define_expand "sqrtdf2"
3840 [(set (match_operand:DF 0 "s_register_operand" "")
3841 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3842 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; 64-bit bitwise NOT: split after reload into two SImode mvn's over the
;; low/high halves.
3845 (define_insn_and_split "one_cmpldi2"
3846 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3847 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3850 "TARGET_32BIT && reload_completed"
3851 [(set (match_dup 0) (not:SI (match_dup 1)))
3852 (set (match_dup 2) (not:SI (match_dup 3)))]
3855 operands[2] = gen_highpart (SImode, operands[0]);
3856 operands[0] = gen_lowpart (SImode, operands[0]);
3857 operands[3] = gen_highpart (SImode, operands[1]);
3858 operands[1] = gen_lowpart (SImode, operands[1]);
3860 [(set_attr "length" "8")
3861 (set_attr "predicable" "yes")]
;; 32-bit bitwise NOT expander and ARM / Thumb-1 mvn implementations.
3864 (define_expand "one_cmplsi2"
3865 [(set (match_operand:SI 0 "s_register_operand" "")
3866 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3871 (define_insn "*arm_one_cmplsi2"
3872 [(set (match_operand:SI 0 "s_register_operand" "=r")
3873 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3876 [(set_attr "predicable" "yes")
3877 (set_attr "insn" "mvn")]
3880 (define_insn "*thumb1_one_cmplsi2"
3881 [(set (match_operand:SI 0 "register_operand" "=l")
3882 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3885 [(set_attr "length" "2")
3886 (set_attr "insn" "mvn")]
;; NOT that also sets the flags (mvns); scratch variant discards the value.
3889 (define_insn "*notsi_compare0"
3890 [(set (reg:CC_NOOV CC_REGNUM)
3891 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3893 (set (match_operand:SI 0 "s_register_operand" "=r")
3894 (not:SI (match_dup 1)))]
3897 [(set_attr "conds" "set")
3898 (set_attr "insn" "mvn")]
3901 (define_insn "*notsi_compare0_scratch"
3902 [(set (reg:CC_NOOV CC_REGNUM)
3903 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3905 (clobber (match_scratch:SI 0 "=r"))]
3908 [(set_attr "conds" "set")
3909 (set_attr "insn" "mvn")]
3912 ;; Fixed <--> Floating conversion insns
;; SI/DI -> HF conversions go through SFmode: expand the float to SF,
;; then convert SF -> HF and move into place.
3914 (define_expand "floatsihf2"
3915 [(set (match_operand:HF 0 "general_operand" "")
3916 (float:HF (match_operand:SI 1 "general_operand" "")))]
3920 rtx op1 = gen_reg_rtx (SFmode);
3921 expand_float (op1, operands[1], 0);
3922 op1 = convert_to_mode (HFmode, op1, 0);
3923 emit_move_insn (operands[0], op1);
3928 (define_expand "floatdihf2"
3929 [(set (match_operand:HF 0 "general_operand" "")
3930 (float:HF (match_operand:DI 1 "general_operand" "")))]
3934 rtx op1 = gen_reg_rtx (SFmode);
3935 expand_float (op1, operands[1], 0);
3936 op1 = convert_to_mode (HFmode, op1, 0);
3937 emit_move_insn (operands[0], op1);
;; int -> float expanders; Maverick (Cirrus) has its own conversion insns.
3942 (define_expand "floatsisf2"
3943 [(set (match_operand:SF 0 "s_register_operand" "")
3944 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3945 "TARGET_32BIT && TARGET_HARD_FLOAT"
3947 if (TARGET_MAVERICK)
3949 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3954 (define_expand "floatsidf2"
3955 [(set (match_operand:DF 0 "s_register_operand" "")
3956 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3957 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3959 if (TARGET_MAVERICK)
3961 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; HF -> SI/DI truncation also routes through SFmode.
3966 (define_expand "fix_trunchfsi2"
3967 [(set (match_operand:SI 0 "general_operand" "")
3968 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3972 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3973 expand_fix (operands[0], op1, 0);
3978 (define_expand "fix_trunchfdi2"
3979 [(set (match_operand:DI 0 "general_operand" "")
3980 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3984 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3985 expand_fix (operands[0], op1, 0);
;; float -> int truncation; Maverick operands must live in Cirrus FP regs.
;; NOTE(review): both Maverick paths force operands[1] from operands[0]
;; ("force_reg (SFmode, operands[0])" / "(DFmode, operands[0])"), which
;; looks like a long-standing operands[0]/operands[1] typo — verify against
;; upstream GCC history before relying on this path.
3990 (define_expand "fix_truncsfsi2"
3991 [(set (match_operand:SI 0 "s_register_operand" "")
3992 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3993 "TARGET_32BIT && TARGET_HARD_FLOAT"
3995 if (TARGET_MAVERICK)
3997 if (!cirrus_fp_register (operands[0], SImode))
3998 operands[0] = force_reg (SImode, operands[0]);
3999 if (!cirrus_fp_register (operands[1], SFmode))
4000 operands[1] = force_reg (SFmode, operands[0]);
4001 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4006 (define_expand "fix_truncdfsi2"
4007 [(set (match_operand:SI 0 "s_register_operand" "")
4008 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4009 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4011 if (TARGET_MAVERICK)
4013 if (!cirrus_fp_register (operands[1], DFmode))
4014 operands[1] = force_reg (DFmode, operands[0]);
4015 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; DF -> SF truncation expander.
4022 (define_expand "truncdfsf2"
4023 [(set (match_operand:SF 0 "s_register_operand" "")
4025 (match_operand:DF 1 "s_register_operand" "")))]
4026 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4030 /* DFmode -> HFmode conversions have to go through SFmode. */
4031 (define_expand "truncdfhf2"
4032 [(set (match_operand:HF 0 "general_operand" "")
4034 (match_operand:DF 1 "general_operand" "")))]
4039 op1 = convert_to_mode (SFmode, operands[1], 0);
4040 op1 = convert_to_mode (HFmode, op1, 0);
4041 emit_move_insn (operands[0], op1);
4046 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI extensions via mode iterator; emitted as two insns
;; (length 8) and split below after the condexec/reload passes.
4048 (define_insn "zero_extend<mode>di2"
4049 [(set (match_operand:DI 0 "s_register_operand" "=r")
4050 (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4051 "<qhs_extenddi_cstr>")))]
4052 "TARGET_32BIT <qhs_zextenddi_cond>"
4054 [(set_attr "length" "8")
4055 (set_attr "ce_count" "2")
4056 (set_attr "predicable" "yes")]
4059 (define_insn "extend<mode>di2"
4060 [(set (match_operand:DI 0 "s_register_operand" "=r")
4061 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4062 "<qhs_extenddi_cstr>")))]
4063 "TARGET_32BIT <qhs_sextenddi_cond>"
4065 [(set_attr "length" "8")
4066 (set_attr "ce_count" "2")
4067 (set_attr "shift" "1")
4068 (set_attr "predicable" "yes")]
4071 ;; Splits for all extensions to DImode
;; Zero-extend split: extend/copy into the low word, then set the high
;; word to zero.  The clobber tells dataflow the whole DI reg is written.
4073 [(set (match_operand:DI 0 "s_register_operand" "")
4074 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4076 [(set (match_dup 0) (match_dup 1))]
4078 rtx lo_part = gen_lowpart (SImode, operands[0]);
4079 enum machine_mode src_mode = GET_MODE (operands[1]);
4081 if (REG_P (operands[0])
4082 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4083 emit_clobber (operands[0]);
4084 if (!REG_P (lo_part) || src_mode != SImode
4085 || !rtx_equal_p (lo_part, operands[1]))
4087 if (src_mode == SImode)
4088 emit_move_insn (lo_part, operands[1]);
4090 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4091 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4092 operands[1] = lo_part;
4094 operands[0] = gen_highpart (SImode, operands[0]);
4095 operands[1] = const0_rtx;
;; Sign-extend split: low word as above, then high word = low >> 31.
4099 [(set (match_operand:DI 0 "s_register_operand" "")
4100 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4102 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4104 rtx lo_part = gen_lowpart (SImode, operands[0]);
4105 enum machine_mode src_mode = GET_MODE (operands[1]);
4107 if (REG_P (operands[0])
4108 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4109 emit_clobber (operands[0]);
4111 if (!REG_P (lo_part) || src_mode != SImode
4112 || !rtx_equal_p (lo_part, operands[1]))
4114 if (src_mode == SImode)
4115 emit_move_insn (lo_part, operands[1]);
4117 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4118 gen_rtx_SIGN_EXTEND (SImode, operands[1])))
4119 operands[1] = lo_part;
4121 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero extension.  Pre-v4 ARM has no ldrh, so memory sources go
;; through movhi_bytes; pre-v6 register sources use shift-left/shift-right
;; by 16 (no uxth available).
4124 (define_expand "zero_extendhisi2"
4125 [(set (match_operand:SI 0 "s_register_operand" "")
4126 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4129 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4131 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4134 if (!arm_arch6 && !MEM_P (operands[1]))
4136 rtx t = gen_lowpart (SImode, operands[1]);
4137 rtx tmp = gen_reg_rtx (SImode);
4138 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4139 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Pre-v6 split: the same lsl #16 / lsr #16 pair, applied after combine.
4145 [(set (match_operand:SI 0 "s_register_operand" "")
4146 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4147 "!TARGET_THUMB2 && !arm_arch6"
4148 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4149 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4151 operands[2] = gen_lowpart (SImode, operands[1]);
;; Thumb-1 HI zero-extend: uxth on v6, else ldrh, with a fix-up for
;; SP-relative addresses that reload can leave behind (see comment below).
4154 (define_insn "*thumb1_zero_extendhisi2"
4155 [(set (match_operand:SI 0 "register_operand" "=l,l")
4156 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4161 if (which_alternative == 0 && arm_arch6)
4162 return "uxth\t%0, %1";
4163 if (which_alternative == 0)
4166 mem = XEXP (operands[1], 0);
4168 if (GET_CODE (mem) == CONST)
4169 mem = XEXP (mem, 0);
4171 if (GET_CODE (mem) == PLUS)
4173 rtx a = XEXP (mem, 0);
4175 /* This can happen due to bugs in reload. */
4176 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4179 ops[0] = operands[0];
4182 output_asm_insn ("mov\t%0, %1", ops);
4184 XEXP (mem, 0) = operands[0];
4188 return "ldrh\t%0, %1";
4190 [(set_attr_alternative "length"
4191 [(if_then_else (eq_attr "is_arch6" "yes")
4192 (const_int 2) (const_int 4))
4194 (set_attr "type" "alu_shift,load_byte")]
;; ARM HI zero-extend: v4..v5 (ldrh-based) and v6 (uxth) variants.
4197 (define_insn "*arm_zero_extendhisi2"
4198 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4199 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4200 "TARGET_ARM && arm_arch4 && !arm_arch6"
4204 [(set_attr "type" "alu_shift,load_byte")
4205 (set_attr "predicable" "yes")]
4208 (define_insn "*arm_zero_extendhisi2_v6"
4209 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4210 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4211 "TARGET_ARM && arm_arch6"
4215 [(set_attr "type" "alu_shift,load_byte")
4216 (set_attr "predicable" "yes")]
;; Fused zero-extend + add: single uxtah insn.
4219 (define_insn "*arm_zero_extendhisi2addsi"
4220 [(set (match_operand:SI 0 "s_register_operand" "=r")
4221 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4222 (match_operand:SI 2 "s_register_operand" "r")))]
4224 "uxtah%?\\t%0, %2, %1"
4225 [(set_attr "type" "alu_shift")
4226 (set_attr "predicable" "yes")]
;; Expander for QImode -> SImode zero-extension.  For register sources on
;; pre-v6 ARM it emits an AND with 255; otherwise (pre-v6, non-memory) it
;; falls back to a shift-left/shift-right-logical by 24 pair.
;; NOTE(review): non-contiguous embedded line numbers — parts of the
;; expander body and the split header below appear elided in this listing.
4229 (define_expand "zero_extendqisi2"
4230 [(set (match_operand:SI 0 "s_register_operand" "")
4231 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4234 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4236 emit_insn (gen_andsi3 (operands[0],
4237 gen_lowpart (SImode, operands[1]),
4241 if (!arm_arch6 && !MEM_P (operands[1]))
4243 rtx t = gen_lowpart (SImode, operands[1]);
4244 rtx tmp = gen_reg_rtx (SImode);
4245 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4246 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split: rewrite reg-to-reg QI zero-extend as ashift 24 + lshiftrt 24 on
;; the SImode view of the source register.
4252 [(set (match_operand:SI 0 "s_register_operand" "")
4253 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4255 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4256 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4258 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4261 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; QImode -> SImode zero-extension insns for Thumb-1 and ARM, in pre-v6 and
;; v6 flavours.  Memory alternatives load with LDRB; the register
;; alternative uses a shift pair pre-v6 (hence length 8 on ARM, 4 on
;; Thumb-1) and presumably UXTB on v6.
;; NOTE(review): output templates for the Thumb-1 patterns and the register
;; alternative of the ARM patterns appear elided (non-contiguous numbering).
4266 (define_insn "*thumb1_zero_extendqisi2"
4267 [(set (match_operand:SI 0 "register_operand" "=l,l")
4268 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4269 "TARGET_THUMB1 && !arm_arch6"
4273 [(set_attr "length" "4,2")
4274 (set_attr "type" "alu_shift,load_byte")
4275 (set_attr "pool_range" "*,32")]
4278 (define_insn "*thumb1_zero_extendqisi2_v6"
4279 [(set (match_operand:SI 0 "register_operand" "=l,l")
4280 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4281 "TARGET_THUMB1 && arm_arch6"
4285 [(set_attr "length" "2")
4286 (set_attr "type" "alu_shift,load_byte")]
4289 (define_insn "*arm_zero_extendqisi2"
4290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4291 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4292 "TARGET_ARM && !arm_arch6"
4295 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4296 [(set_attr "length" "8,4")
4297 (set_attr "type" "alu_shift,load_byte")
4298 (set_attr "predicable" "yes")]
4301 (define_insn "*arm_zero_extendqisi2_v6"
4302 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4303 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4304 "TARGET_ARM && arm_arch6"
4307 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4308 [(set_attr "type" "alu_shift,load_byte")
4309 (set_attr "predicable" "yes")]
;; Combined zero-extend-and-add: dst = op2 + zero_extend(op1) via UXTAB.
4312 (define_insn "*arm_zero_extendqisi2addsi"
4313 [(set (match_operand:SI 0 "s_register_operand" "=r")
4314 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4315 (match_operand:SI 2 "s_register_operand" "r")))]
4317 "uxtab%?\\t%0, %2, %1"
4318 [(set_attr "predicable" "yes")
4319 (set_attr "insn" "xtab")
4320 (set_attr "type" "alu_shift")]
;; Splits that turn a zero-extension of a QImode subreg of an SImode value
;; into a move plus an AND with 255.  The first handles little-endian
;; (subreg byte 0), the second big-endian (subreg byte 3).
;; NOTE(review): the (define_split ...) header lines themselves appear
;; elided here (non-contiguous embedded numbering).
4324 [(set (match_operand:SI 0 "s_register_operand" "")
4325 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4326 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4327 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4328 [(set (match_dup 2) (match_dup 1))
4329 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4334 [(set (match_operand:SI 0 "s_register_operand" "")
4335 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4336 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4337 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4338 [(set (match_dup 2) (match_dup 1))
4339 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split an IOR/XOR of a masked shifted value with a low-part subreg into a
;; shift-combine followed by a zero-extension, when the mask (operand 3)
;; equals the subreg mode's mask ANDed with itself shifted by operand 2.
4345 [(set (match_operand:SI 0 "s_register_operand" "")
4346 (ior_xor:SI (and:SI (ashift:SI
4347 (match_operand:SI 1 "s_register_operand" "")
4348 (match_operand:SI 2 "const_int_operand" ""))
4349 (match_operand:SI 3 "const_int_operand" ""))
4351 (match_operator 5 "subreg_lowpart_operator"
4352 [(match_operand:SI 4 "s_register_operand" "")]))))]
4354 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4355 == (GET_MODE_MASK (GET_MODE (operands[5]))
4356 & (GET_MODE_MASK (GET_MODE (operands[5]))
4357 << (INTVAL (operands[2])))))"
4358 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4360 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4361 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Z mode on the CC register).
;; NOTE(review): the comparison's second operand and the output template
;; appear elided from this listing.
4364 (define_insn "*compareqi_eq0"
4365 [(set (reg:CC_Z CC_REGNUM)
4366 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4370 [(set_attr "conds" "set")]
;; Expander for HImode -> SImode sign extension.  Dispatches to
;; thumb1_extendhisi2 on Thumb-1, to extendhisi2_mem for memory sources on
;; pre-arch4 ARM, and to an ashift-16/ashiftrt-16 pair for pre-v6 register
;; sources.
4373 (define_expand "extendhisi2"
4374 [(set (match_operand:SI 0 "s_register_operand" "")
4375 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4380 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4383 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4385 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4389 if (!arm_arch6 && !MEM_P (operands[1]))
4391 rtx t = gen_lowpart (SImode, operands[1]);
4392 rtx tmp = gen_reg_rtx (SImode);
4393 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4394 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split a reg-to-reg HI sign-extend (with scratch) into shift-left /
;; arithmetic-shift-right by 16 on the SImode view of the source.
4401 [(set (match_operand:SI 0 "register_operand" "")
4402 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4403 (clobber (match_scratch:SI 2 ""))])]
4405 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4406 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4408 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4411 ;; We used to have an early-clobber on the scratch register here.
4412 ;; However, there's a bug somewhere in reload which means that this
4413 ;; can be partially ignored during spill allocation if the memory
4414 ;; address also needs reloading; this causes us to die later on when
4415 ;; we try to verify the operands.  Fortunately, we don't really need
4416 ;; the early-clobber: we can always use operand 0 if operand 2
4417 ;; overlaps the address.
;; Thumb-1 HImode -> SImode sign extension.  Register alternative uses
;; SXTH on v6; the memory alternative hand-crafts LDR/LDRSH sequences in C
;; depending on the address shape (label, label+const, reg+reg), using the
;; scratch (or operand 0, see note above) to materialize an offset when
;; needed.
;; NOTE(review): several interior lines (condition string, parts of the
;; C body) appear elided from this listing — confirm against full arm.md.
4418 (define_insn "thumb1_extendhisi2"
4419 [(set (match_operand:SI 0 "register_operand" "=l,l")
4420 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4421 (clobber (match_scratch:SI 2 "=X,l"))]
4428 if (which_alternative == 0 && !arm_arch6)
4430 if (which_alternative == 0)
4431 return \"sxth\\t%0, %1\";
4433 mem = XEXP (operands[1], 0);
4435 /* This code used to try to use 'V', and fix the address only if it was
4436 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4437 range of QImode offsets, and offsettable_address_p does a QImode
4440 if (GET_CODE (mem) == CONST)
4441 mem = XEXP (mem, 0);
4443 if (GET_CODE (mem) == LABEL_REF)
4444 return \"ldr\\t%0, %1\";
4446 if (GET_CODE (mem) == PLUS)
4448 rtx a = XEXP (mem, 0);
4449 rtx b = XEXP (mem, 1);
4451 if (GET_CODE (a) == LABEL_REF
4452 && GET_CODE (b) == CONST_INT)
4453 return \"ldr\\t%0, %1\";
4455 if (GET_CODE (b) == REG)
4456 return \"ldrsh\\t%0, %1\";
4464 ops[2] = const0_rtx;
4467 gcc_assert (GET_CODE (ops[1]) == REG);
4469 ops[0] = operands[0];
4470 if (reg_mentioned_p (operands[2], ops[1]))
4473 ops[3] = operands[2];
4474 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4477 [(set_attr_alternative "length"
4478 [(if_then_else (eq_attr "is_arch6" "yes")
4479 (const_int 2) (const_int 4))
4481 (set_attr "type" "alu_shift,load_byte")
4482 (set_attr "pool_range" "*,1020")]
4485 ;; This pattern will only be used when ldsh is not available
;; Synthesizes a sign-extending halfword load from two QImode byte loads:
;; low/high bytes are zero-extended into fresh pseudos, the sign byte is
;; shifted left 24 then arithmetically right 16, and the halves are OR'd.
;; Byte roles (operands 4/5) swap under BYTES_BIG_ENDIAN.
4486 (define_expand "extendhisi2_mem"
4487 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4489 (zero_extend:SI (match_dup 7)))
4490 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4491 (set (match_operand:SI 0 "" "")
4492 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4497 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4499 mem1 = change_address (operands[1], QImode, addr);
4500 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4501 operands[0] = gen_lowpart (SImode, operands[0]);
4503 operands[2] = gen_reg_rtx (SImode);
4504 operands[3] = gen_reg_rtx (SImode);
4505 operands[6] = gen_reg_rtx (SImode);
4508 if (BYTES_BIG_ENDIAN)
4510 operands[4] = operands[2];
4511 operands[5] = operands[3];
4515 operands[4] = operands[3];
4516 operands[5] = operands[2];
;; Split a reg-to-reg HI sign-extend into a shift-left / arithmetic
;; shift-right by 16 pair on the SImode subreg of the source.
4522 [(set (match_operand:SI 0 "register_operand" "")
4523 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4525 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4526 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4528 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; HImode -> SImode sign-extension insns for ARM (pre-v6) and 32-bit v6+.
;; Memory alternatives have constrained literal-pool ranges (256/-244) to
;; match LDRSH addressing limits.
;; NOTE(review): output templates appear elided from this listing.
4531 (define_insn "*arm_extendhisi2"
4532 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4533 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4534 "TARGET_ARM && arm_arch4 && !arm_arch6"
4538 [(set_attr "length" "8,4")
4539 (set_attr "type" "alu_shift,load_byte")
4540 (set_attr "predicable" "yes")
4541 (set_attr "pool_range" "*,256")
4542 (set_attr "neg_pool_range" "*,244")]
4545 ;; ??? Check Thumb-2 pool range
4546 (define_insn "*arm_extendhisi2_v6"
4547 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4548 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4549 "TARGET_32BIT && arm_arch6"
4553 [(set_attr "type" "alu_shift,load_byte")
4554 (set_attr "predicable" "yes")
4555 (set_attr "pool_range" "*,256")
4556 (set_attr "neg_pool_range" "*,244")]
;; Combined sign-extend-and-add: dst = op2 + sign_extend(op1) via SXTAH.
4559 (define_insn "*arm_extendhisi2addsi"
4560 [(set (match_operand:SI 0 "s_register_operand" "=r")
4561 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4562 (match_operand:SI 2 "s_register_operand" "r")))]
4564 "sxtah%?\\t%0, %2, %1"
;; Expander for QImode -> HImode sign extension.  With arch4 and a memory
;; source it emits a direct sign-extending load (see insn below); otherwise
;; it forces the source into a register and uses a shift pair on SImode
;; low-part views of the operands.
4567 (define_expand "extendqihi2"
4569 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4571 (set (match_operand:HI 0 "s_register_operand" "")
4572 (ashiftrt:SI (match_dup 2)
4577 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4579 emit_insn (gen_rtx_SET (VOIDmode,
4581 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4584 if (!s_register_operand (operands[1], QImode))
4585 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4586 operands[0] = gen_lowpart (SImode, operands[0]);
4587 operands[1] = gen_lowpart (SImode, operands[1]);
4588 operands[2] = gen_reg_rtx (SImode);
;; Sign-extending byte load into an HImode register (LDRSB), arch4+ ARM.
;; 'Uq' restricts the address to LDRSB-compatible forms.
4592 (define_insn "*arm_extendqihi_insn"
4593 [(set (match_operand:HI 0 "s_register_operand" "=r")
4594 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4595 "TARGET_ARM && arm_arch4"
4596 "ldr%(sb%)\\t%0, %1"
4597 [(set_attr "type" "load_byte")
4598 (set_attr "predicable" "yes")
4599 (set_attr "pool_range" "256")
4600 (set_attr "neg_pool_range" "244")]
;; Expander for QImode -> SImode sign extension.  Pre-arch4 memory sources
;; are copied through a QImode register; pre-v6 register sources expand to
;; an ashift-24 / ashiftrt-24 pair.
4603 (define_expand "extendqisi2"
4604 [(set (match_operand:SI 0 "s_register_operand" "")
4605 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4608 if (!arm_arch4 && MEM_P (operands[1]))
4609 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4611 if (!arm_arch6 && !MEM_P (operands[1]))
4613 rtx t = gen_lowpart (SImode, operands[1]);
4614 rtx tmp = gen_reg_rtx (SImode);
4615 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4616 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split a reg-to-reg QI sign-extend into the shift-by-24 pair.
4622 [(set (match_operand:SI 0 "register_operand" "")
4623 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4625 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4626 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4628 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; QI -> SI sign-extension insns: pre-v6 (shift pair or LDRSB) and v6.
;; NOTE(review): output templates appear elided from this listing.
4631 (define_insn "*arm_extendqisi"
4632 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4633 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4634 "TARGET_ARM && arm_arch4 && !arm_arch6"
4638 [(set_attr "length" "8,4")
4639 (set_attr "type" "alu_shift,load_byte")
4640 (set_attr "predicable" "yes")
4641 (set_attr "pool_range" "*,256")
4642 (set_attr "neg_pool_range" "*,244")]
4645 (define_insn "*arm_extendqisi_v6"
4646 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4648 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4649 "TARGET_ARM && arm_arch6"
4653 [(set_attr "type" "alu_shift,load_byte")
4654 (set_attr "predicable" "yes")
4655 (set_attr "pool_range" "*,256")
4656 (set_attr "neg_pool_range" "*,244")]
;; Combined sign-extend-and-add: dst = op2 + sign_extend(op1) via SXTAB.
4659 (define_insn "*arm_extendqisi2addsi"
4660 [(set (match_operand:SI 0 "s_register_operand" "=r")
4661 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4662 (match_operand:SI 2 "s_register_operand" "r")))]
4664 "sxtab%?\\t%0, %2, %1"
4665 [(set_attr "type" "alu_shift")
4666 (set_attr "insn" "xtab")
4667 (set_attr "predicable" "yes")]
;; Thumb-1 split for sign-extending byte loads from memory: LDRSB only
;; supports reg+reg addressing, so addresses of other shapes are rewritten
;; into a constant move plus a reg+reg access, reusing operand 0 as the
;; index register (falling back to a low-part copy when the destination
;; overlaps the address).
;; NOTE(review): the (define_split) header and some interior lines appear
;; elided from this listing (non-contiguous embedded numbering).
4671 [(set (match_operand:SI 0 "register_operand" "")
4672 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4673 "TARGET_THUMB1 && reload_completed"
4674 [(set (match_dup 0) (match_dup 2))
4675 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4677 rtx addr = XEXP (operands[1], 0);
4679 if (GET_CODE (addr) == CONST)
4680 addr = XEXP (addr, 0);
4682 if (GET_CODE (addr) == PLUS
4683 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4684 /* No split necessary.  */
4687 if (GET_CODE (addr) == PLUS
4688 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4691 if (reg_overlap_mentioned_p (operands[0], addr))
4693 rtx t = gen_lowpart (QImode, operands[0]);
4694 emit_move_insn (t, operands[1]);
4695 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4701 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4702 operands[2] = const0_rtx;
4704 else if (GET_CODE (addr) != PLUS)
4706 else if (REG_P (XEXP (addr, 0)))
4708 operands[2] = XEXP (addr, 1);
4709 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4713 operands[2] = XEXP (addr, 0);
4714 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4717 operands[3] = change_address (operands[1], QImode, addr);
;; Peephole2: fold "add base, #imm; mov idx, #0; ldrsb" back into
;; "mov idx, #imm; ldrsb [base, idx]" when the registers are dead or equal
;; to the destination afterwards.
4721 [(set (match_operand:SI 0 "register_operand" "")
4722 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4723 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4724 (set (match_operand:SI 3 "register_operand" "")
4725 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4727 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4728 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4729 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4730 && (peep2_reg_dead_p (3, operands[0])
4731 || rtx_equal_p (operands[0], operands[3]))
4732 && (peep2_reg_dead_p (3, operands[2])
4733 || rtx_equal_p (operands[2], operands[3]))"
4734 [(set (match_dup 2) (match_dup 1))
4735 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4737 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4738 operands[4] = change_address (operands[4], QImode, addr);
;; Thumb-1 QI -> SI sign-extension insn: SXTB for registers on v6,
;; LDRSB for reg+reg memory; other alternatives handled in the C body.
4741 (define_insn "thumb1_extendqisi2"
4742 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4743 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4748 if (which_alternative == 0 && arm_arch6)
4749 return "sxtb\\t%0, %1";
4750 if (which_alternative == 0)
4753 addr = XEXP (operands[1], 0);
4754 if (GET_CODE (addr) == PLUS
4755 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4756 return "ldrsb\\t%0, %1";
4760 [(set_attr_alternative "length"
4761 [(if_then_else (eq_attr "is_arch6" "yes")
4762 (const_int 2) (const_int 4))
4764 (if_then_else (eq_attr "is_arch6" "yes")
4765 (const_int 4) (const_int 6))])
4766 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; SFmode -> DFmode extension, available with hard float (not
;; single-precision-only VFP).
4769 (define_expand "extendsfdf2"
4770 [(set (match_operand:DF 0 "s_register_operand" "")
4771 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4772 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4776 /* HFmode -> DFmode conversions have to go through SFmode.  */
;; Two-step HF -> DF conversion: HF -> SF -> DF, then a DF move into the
;; destination.
4777 (define_expand "extendhfdf2"
4778 [(set (match_operand:DF 0 "general_operand" "")
4779 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4784 op1 = convert_to_mode (SFmode, operands[1], 0);
4785 op1 = convert_to_mode (DFmode, op1, 0);
4786 emit_insn (gen_movdf (operands[0], op1));
4791 ;; Move insns (including loads and stores)
4793 ;; XXX Just some ideas about movti.
4794 ;; I don't think these are a good idea on the arm, there just aren't enough
4796 ;;(define_expand "loadti"
4797 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4798 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4801 ;;(define_expand "storeti"
4802 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4803 ;; (match_operand:TI 1 "s_register_operand" ""))]
4806 ;;(define_expand "movti"
4807 ;; [(set (match_operand:TI 0 "general_operand" "")
4808 ;; (match_operand:TI 1 "general_operand" ""))]
4814 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4815 ;; operands[1] = copy_to_reg (operands[1]);
4816 ;; if (GET_CODE (operands[0]) == MEM)
4817 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4818 ;; else if (GET_CODE (operands[1]) == MEM)
4819 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4823 ;; emit_insn (insn);
4827 ;; Recognize garbage generated above.
4830 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4831 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4835 ;; register mem = (which_alternative < 3);
4836 ;; register const char *template;
4838 ;; operands[mem] = XEXP (operands[mem], 0);
4839 ;; switch (which_alternative)
4841 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4842 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4843 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4844 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4845 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4846 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4848 ;; output_asm_insn (template, operands);
;; DImode move expander: before reload, memory destinations force the
;; source into a register so at most one operand is memory.
4852 (define_expand "movdi"
4853 [(set (match_operand:DI 0 "general_operand" "")
4854 (match_operand:DI 1 "general_operand" ""))]
4857 if (can_create_pseudo_p ())
4859 if (GET_CODE (operands[0]) != REG)
4860 operands[1] = force_reg (DImode, operands[1]);
;; ARM DImode move (no IWMMXT/Maverick/VFP).  Constant alternatives
;; (Da/Db/Dc) and memory alternatives all funnel through
;; output_move_double; lengths 8..16 reflect 2-4 component insns.
4865 (define_insn "*arm_movdi"
4866 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4867 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4869 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4871 && ( register_operand (operands[0], DImode)
4872 || register_operand (operands[1], DImode))"
4874 switch (which_alternative)
4881 return output_move_double (operands);
4884 [(set_attr "length" "8,12,16,8,8")
4885 (set_attr "type" "*,*,*,load2,store2")
4886 (set_attr "arm_pool_range" "*,*,*,1020,*")
4887 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4888 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4889 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split a 64-bit constant move into two arm_split_constant calls (low and
;; high SImode halves) when inlining the constant is cheap enough.
;; NOTE(review): the (define_split) header line appears elided here.
4893 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4894 (match_operand:ANY64 1 "const_double_operand" ""))]
4897 && (arm_const_double_inline_cost (operands[1])
4898 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4901 arm_split_constant (SET, SImode, curr_insn,
4902 INTVAL (gen_lowpart (SImode, operands[1])),
4903 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4904 arm_split_constant (SET, SImode, curr_insn,
4905 INTVAL (gen_highpart_mode (SImode,
4906 GET_MODE (operands[0]),
4908 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4913 ; If optimizing for size, or if we have load delay slots, then
4914 ; we want to split the constant into two separate operations.
4915 ; In both cases this may split a trivial part into a single data op
4916 ; leaving a single complex constant to load.  We can also get longer
4917 ; offsets in a LDR which means we get better chances of sharing the pool
4918 ; entries.  Finally, we can normally do a better job of scheduling
4919 ; LDR instructions than we can with LDM.
4920 ; This pattern will only match if the one above did not.
4922 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4923 (match_operand:ANY64 1 "const_double_operand" ""))]
4924 "TARGET_ARM && reload_completed
4925 && arm_const_double_by_parts (operands[1])"
4926 [(set (match_dup 0) (match_dup 1))
4927 (set (match_dup 2) (match_dup 3))]
4929 operands[2] = gen_highpart (SImode, operands[0]);
4930 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4932 operands[0] = gen_lowpart (SImode, operands[0]);
4933 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a 64-bit reg-to-reg move into two SImode moves, swapping the
;; order when the first destination half overlaps the source high half.
4938 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4939 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4940 "TARGET_EITHER && reload_completed"
4941 [(set (match_dup 0) (match_dup 1))
4942 (set (match_dup 2) (match_dup 3))]
4944 operands[2] = gen_highpart (SImode, operands[0]);
4945 operands[3] = gen_highpart (SImode, operands[1]);
4946 operands[0] = gen_lowpart (SImode, operands[0]);
4947 operands[1] = gen_lowpart (SImode, operands[1]);
4949 /* Handle a partial overlap.  */
4950 if (rtx_equal_p (operands[0], operands[3]))
4952 rtx tmp0 = operands[0];
4953 rtx tmp1 = operands[1];
4955 operands[0] = operands[2];
4956 operands[1] = operands[3];
4963 ;; We can't actually do base+index doubleword loads if the index and
4964 ;; destination overlap.  Split here so that we at least have chance to
;; Split: compute base+index into the destination's first SImode half,
;; then load the DImode value through that register.
4967 [(set (match_operand:DI 0 "s_register_operand" "")
4968 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4969 (match_operand:SI 2 "s_register_operand" ""))))]
4971 && reg_overlap_mentioned_p (operands[0], operands[1])
4972 && reg_overlap_mentioned_p (operands[0], operands[2])"
4974 (plus:SI (match_dup 1)
4977 (mem:DI (match_dup 4)))]
4979 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4983 ;;; ??? This should have alternatives for constants.
4984 ;;; ??? This was originally identical to the movdf_insn pattern.
4985 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4986 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move.  The C body selects a two-instruction sequence per
;; alternative, ordering the halves (%0/%H0 vs %Q0/%R0) so a partial
;; register overlap between source and destination is handled correctly.
4987 (define_insn "*thumb1_movdi_insn"
4988 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4989 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4991 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4992 && ( register_operand (operands[0], DImode)
4993 || register_operand (operands[1], DImode))"
4996 switch (which_alternative)
5000 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5001 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5002 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5004 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5006 operands[1] = GEN_INT (- INTVAL (operands[1]));
5007 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5009 return \"ldmia\\t%1, {%0, %H0}\";
5011 return \"stmia\\t%0, {%1, %H1}\";
5013 return thumb_load_double_from_address (operands);
5015 operands[2] = gen_rtx_MEM (SImode,
5016 plus_constant (XEXP (operands[0], 0), 4));
5017 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5020 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5021 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5022 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5025 [(set_attr "length" "4,4,6,2,2,6,4,4")
5026 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5027 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5028 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles, in order: forcing mem=const/mem=mem into
;; legal shapes, splitting awkward immediates via arm_split_constant, the
;; MOVW/MOVT pair for symbols (arm_emit_movpair), Thumb-1 register forcing,
;; cross-section offset fixups, TLS symbol legitimization, and PIC address
;; legitimization.
;; NOTE(review): several control-flow lines are elided in this listing, so
;; the exact branch structure should be confirmed against full arm.md.
5031 (define_expand "movsi"
5032 [(set (match_operand:SI 0 "general_operand" "")
5033 (match_operand:SI 1 "general_operand" ""))]
5037 rtx base, offset, tmp;
5041 /* Everything except mem = const or mem = mem can be done easily.  */
5042 if (GET_CODE (operands[0]) == MEM)
5043 operands[1] = force_reg (SImode, operands[1]);
5044 if (arm_general_register_operand (operands[0], SImode)
5045 && GET_CODE (operands[1]) == CONST_INT
5046 && !(const_ok_for_arm (INTVAL (operands[1]))
5047 || const_ok_for_arm (~INTVAL (operands[1]))))
5049 arm_split_constant (SET, SImode, NULL_RTX,
5050 INTVAL (operands[1]), operands[0], NULL_RTX,
5051 optimize && can_create_pseudo_p ());
5055 if (TARGET_USE_MOVT && !target_word_relocations
5056 && GET_CODE (operands[1]) == SYMBOL_REF
5057 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5059 arm_emit_movpair (operands[0], operands[1]);
5063 else /* TARGET_THUMB1...  */
5065 if (can_create_pseudo_p ())
5067 if (GET_CODE (operands[0]) != REG)
5068 operands[1] = force_reg (SImode, operands[1]);
5072 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5074 split_const (operands[1], &base, &offset);
5075 if (GET_CODE (base) == SYMBOL_REF
5076 && !offset_within_block_p (base, INTVAL (offset)))
5078 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5079 emit_move_insn (tmp, base);
5080 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5085 /* Recognize the case where operand[1] is a reference to thread-local
5086 data and load its address to a register.  */
5087 if (arm_tls_referenced_p (operands[1]))
5089 rtx tmp = operands[1];
5092 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5094 addend = XEXP (XEXP (tmp, 0), 1);
5095 tmp = XEXP (XEXP (tmp, 0), 0);
5098 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5099 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5101 tmp = legitimize_tls_address (tmp,
5102 !can_create_pseudo_p () ? operands[0] : 0);
5105 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5106 tmp = force_operand (tmp, operands[0]);
5111 && (CONSTANT_P (operands[1])
5112 || symbol_mentioned_p (operands[1])
5113 || label_mentioned_p (operands[1])))
5114 operands[1] = legitimize_pic_address (operands[1], SImode,
5115 (!can_create_pseudo_p ()
5122 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5123 ;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
5124 ;; so this does not matter.
;; MOVT: writes the upper 16 bits of the destination with the :upper16:
;; relocation of operand 2, keeping the low half (tied operand 1).
5125 (define_insn "*arm_movt"
5126 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5127 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5128 (match_operand:SI 2 "general_operand" "i")))]
5130 "movt%?\t%0, #:upper16:%c2"
5131 [(set_attr "predicable" "yes")
5132 (set_attr "length" "4")]
;; ARM SImode move (no IWMMXT, no VFP): mov/mvn/movw immediates, register
;; moves, and LDR/STR with literal-pool range 4096/-4084.
;; NOTE(review): the output templates appear elided from this listing.
5135 (define_insn "*arm_movsi_insn"
5136 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5137 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5138 "TARGET_ARM && ! TARGET_IWMMXT
5139 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5140 && ( register_operand (operands[0], SImode)
5141 || register_operand (operands[1], SImode))"
5149 [(set_attr "type" "*,*,*,*,load1,store1")
5150 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5151 (set_attr "predicable" "yes")
5152 (set_attr "pool_range" "*,*,*,*,4096,*")
5153 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split an SImode constant move whose value (and complement) cannot be
;; encoded as an ARM immediate: synthesize it with arm_split_constant.
;; NOTE(review): the (define_split) header line appears elided here.
5157 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5158 (match_operand:SI 1 "const_int_operand" ""))]
5160 && (!(const_ok_for_arm (INTVAL (operands[1]))
5161 || const_ok_for_arm (~INTVAL (operands[1]))))"
5162 [(clobber (const_int 0))]
5164 arm_split_constant (SET, SImode, NULL_RTX,
5165 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move; alternatives cover lo-reg moves, immediates,
;; loads/stores (including LDM/STM-style '>' addresses) and hi-reg moves.
;; NOTE(review): the output templates appear elided from this listing.
5170 (define_insn "*thumb1_movsi_insn"
5171 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5172 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5174 && ( register_operand (operands[0], SImode)
5175 || register_operand (operands[1], SImode))"
5186 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5187 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5188 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5189 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Thumb-1 split for 'J' constants (negatable immediates): load the
;; negated value, then NEG it into the destination.
;; NOTE(review): the (define_split) header lines appear elided here.
5192 [(set (match_operand:SI 0 "register_operand" "")
5193 (match_operand:SI 1 "const_int_operand" ""))]
5194 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5195 [(set (match_dup 2) (match_dup 1))
5196 (set (match_dup 0) (neg:SI (match_dup 2)))]
5199 operands[1] = GEN_INT (- INTVAL (operands[1]));
5200 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Thumb-1 split for 'K' constants (8-bit value shiftable into place):
;; find the smallest left shift i such that (val >> i) fits in 8 bits,
;; load that byte, then shift it left by i.
5205 [(set (match_operand:SI 0 "register_operand" "")
5206 (match_operand:SI 1 "const_int_operand" ""))]
5207 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5208 [(set (match_dup 2) (match_dup 1))
5209 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5212 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5213 unsigned HOST_WIDE_INT mask = 0xff;
5216 for (i = 0; i < 25; i++)
5217 if ((val & (mask << i)) == val)
5220 /* Don't split if the shift is zero.  */
5224 operands[1] = GEN_INT (val >> i);
5225 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5226 operands[3] = GEN_INT (i);
5230 ;; When generating pic, we need to load the symbol offset into a register.
5231 ;; So that the optimizer does not confuse this with a normal symbol load
5232 ;; we use an unspec.  The offset will be loaded from a constant pool entry,
5233 ;; since that is the only type of relocation we can use.
5235 ;; Wrap calculation of the whole PIC address in a single pattern for the
5236 ;; benefit of optimizers, particularly, PRE and HOIST.  Calculation of
5237 ;; a PIC address involves two loads from memory, so we want to CSE it
5238 ;; as often as possible.
5239 ;; This pattern will be split into one of the pic_load_addr_* patterns
5240 ;; and a move after GCSE optimizations.
5242 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5243 (define_expand "calculate_pic_address"
5244 [(set (match_operand:SI 0 "register_operand" "")
5245 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5246 (unspec:SI [(match_operand:SI 2 "" "")]
5251 ;; Split calculate_pic_address into pic_load_addr_* and a move.
;; The split materializes the UNSPEC_PIC_SYM offset into a scratch (or
;; operand 0 after reload), then loads through base+offset.
5253 [(set (match_operand:SI 0 "register_operand" "")
5254 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5255 (unspec:SI [(match_operand:SI 2 "" "")]
5258 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5259 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5260 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5263 ;; The rather odd constraints on the following are to force reload to leave
5264 ;; the insn alone, and to force the minipool generation pass to then move
5265 ;; the GOT symbol to memory.
;; Load a PIC symbol address (UNSPEC_PIC_SYM) from the literal pool;
;; 32-bit variant with a pool range of 4096.
5267 (define_insn "pic_load_addr_32bit"
5268 [(set (match_operand:SI 0 "s_register_operand" "=r")
5269 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5270 "TARGET_32BIT && flag_pic"
5272 [(set_attr "type" "load1")
5273 (set_attr "pool_range" "4096")
5274 (set (attr "neg_pool_range")
5275 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: lo registers only, pool range 1024.
5280 (define_insn "pic_load_addr_thumb1"
5281 [(set (match_operand:SI 0 "s_register_operand" "=l")
5282 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5283 "TARGET_THUMB1 && flag_pic"
5285 [(set_attr "type" "load1")
5286 (set (attr "pool_range") (const_int 1024))]
;; Add PC to the PIC base (Thumb flavour): emits the LPICn label used by
;; the corresponding "ldr rX, LPICn+offset" pool entry, then "add rX, pc".
5289 (define_insn "pic_add_dot_plus_four"
5290 [(set (match_operand:SI 0 "register_operand" "=r")
5291 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5293 (match_operand 2 "" "")]
5297 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5298 INTVAL (operands[2]));
5299 return \"add\\t%0, %|pc\";
5301 [(set_attr "length" "2")]
;; ARM flavour: PC-relative add with an 8-byte pipeline offset, predicable.
5304 (define_insn "pic_add_dot_plus_eight"
5305 [(set (match_operand:SI 0 "register_operand" "=r")
5306 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5308 (match_operand 2 "" "")]
5312 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5313 INTVAL (operands[2]));
5314 return \"add%?\\t%0, %|pc, %1\";
5316 [(set_attr "predicable" "yes")]
;; Fused PC-relative load for TLS: "ldr rX, [pc, rY]" with LPIC label.
5319 (define_insn "tls_load_dot_plus_eight"
5320 [(set (match_operand:SI 0 "register_operand" "=r")
5321 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5323 (match_operand 2 "" "")]
5327 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5328 INTVAL (operands[2]));
5329 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5331 [(set_attr "predicable" "yes")]
5334 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5335 ;; followed by a load.  These sequences can be crunched down to
5336 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole2 performing that fusion when the intermediate register dies.
5339 [(set (match_operand:SI 0 "register_operand" "")
5340 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5342 (match_operand 1 "" "")]
5344 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5345 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5347 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load a PIC-offset (UNSPEC_PIC_OFFSET) word via base+offset.
5354 (define_insn "pic_offset_arm"
5355 [(set (match_operand:SI 0 "register_operand" "=r")
5356 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5357 (unspec:SI [(match_operand:SI 2 "" "X")]
5358 UNSPEC_PIC_OFFSET))))]
5359 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5360 "ldr%?\\t%0, [%1,%2]"
5361 [(set_attr "type" "load1")]
;; Reload the PIC register after a longjmp, using r3 as scratch (r3 is
;; clobbered by set/longjmp per the comment below).
5364 (define_expand "builtin_setjmp_receiver"
5365 [(label_ref (match_operand 0 "" ""))]
5369 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5371 if (arm_pic_register != INVALID_REGNUM)
5372 arm_load_pic_register (1UL << 3);
5376 ;; If copying one reg to another we can set the condition codes according to
5377 ;; its value.  Such a move is common after a return from subroutine and the
5378 ;; result is being tested against zero.
;; Move-and-compare-against-zero: sets CC from the moved value (conds set).
;; NOTE(review): the compare's zero operand, the parallel set and output
;; templates appear elided from this listing.
5380 (define_insn "*movsi_compare0"
5381 [(set (reg:CC CC_REGNUM)
5382 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5384 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5390 [(set_attr "conds" "set")]
5393 ;; Subroutine to store a half word from a register into memory.
5394 ;; Operand 0 is the source register (HImode)
5395 ;; Operand 1 is the destination address in a register (SImode)
5397 ;; In both this routine and the next, we must be careful not to spill
5398 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5399 ;; can generate unrecognizable rtl.
;; Little-endian byte order: low byte first, then the high byte
;; (extracted with an ashiftrt by 8) at address + 1.
5401 (define_expand "storehi"
5402 [;; store the low byte
5403 (set (match_operand 1 "" "") (match_dup 3))
5404 ;; extract the high byte
5406 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5407 ;; store the high byte
5408 (set (match_dup 4) (match_dup 5))]
5412 rtx op1 = operands[1];
5413 rtx addr = XEXP (op1, 0);
5414 enum rtx_code code = GET_CODE (addr);
5416 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5418 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5420 operands[4] = adjust_address (op1, QImode, 1);
5421 operands[1] = adjust_address (operands[1], QImode, 0);
5422 operands[3] = gen_lowpart (QImode, operands[0]);
5423 operands[0] = gen_lowpart (SImode, operands[0]);
5424 operands[2] = gen_reg_rtx (SImode);
5425 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of the above: the byte stores are swapped
;; (high byte at the base address, low byte at address + 1).
5429 (define_expand "storehi_bigend"
5430 [(set (match_dup 4) (match_dup 3))
5432 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5433 (set (match_operand 1 "" "") (match_dup 5))]
5437 rtx op1 = operands[1];
5438 rtx addr = XEXP (op1, 0);
5439 enum rtx_code code = GET_CODE (addr);
5441 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5443 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5445 operands[4] = adjust_address (op1, QImode, 1);
5446 operands[1] = adjust_address (operands[1], QImode, 0);
5447 operands[3] = gen_lowpart (QImode, operands[0]);
5448 operands[0] = gen_lowpart (SImode, operands[0]);
5449 operands[2] = gen_reg_rtx (SImode);
5450 operands[5] = gen_lowpart (QImode, operands[2]);
5454 ;; Subroutine to store a half word integer constant into memory.
;; Splits the HImode constant into its two bytes and stores each with a
;; QImode move; reuses one register when both bytes are equal.  The
;; BYTES_BIG_ENDIAN branches choose which byte goes at the base address.
5455 (define_expand "storeinthi"
5456 [(set (match_operand 0 "" "")
5457 (match_operand 1 "" ""))
5458 (set (match_dup 3) (match_dup 2))]
5462 HOST_WIDE_INT value = INTVAL (operands[1]);
5463 rtx addr = XEXP (operands[0], 0);
5464 rtx op0 = operands[0];
5465 enum rtx_code code = GET_CODE (addr);
5467 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5469 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5471 operands[1] = gen_reg_rtx (SImode);
5472 if (BYTES_BIG_ENDIAN)
5474 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5475 if ((value & 255) == ((value >> 8) & 255))
5476 operands[2] = operands[1];
5479 operands[2] = gen_reg_rtx (SImode);
5480 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5485 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5486 if ((value & 255) == ((value >> 8) & 255))
5487 operands[2] = operands[1];
5490 operands[2] = gen_reg_rtx (SImode);
5491 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5495 operands[3] = adjust_address (op0, QImode, 1);
5496 operands[0] = adjust_address (operands[0], QImode, 0);
5497 operands[2] = gen_lowpart (QImode, operands[2]);
5498 operands[1] = gen_lowpart (QImode, operands[1]);
;; ARMv4+ has strh, so a halfword store needs only the source forced
;; into a register.
5502 (define_expand "storehi_single_op"
5503 [(set (match_operand:HI 0 "memory_operand" "")
5504 (match_operand:HI 1 "general_operand" ""))]
5505 "TARGET_32BIT && arm_arch4"
5507 if (!s_register_operand (operands[1], HImode))
5508 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; Main HImode move expander.  Dispatches on target (ARM / Thumb-2 /
;; Thumb-1), destination kind (mem vs reg) and source kind (const, mem,
;; reg), delegating to storehi* / storeinthi / movhi_bytes / movsi as
;; appropriate, and legitimizing constants and addresses along the way.
;; NOTE(review): many interior lines (braces, else arms, conditions) are
;; missing from this extract — the original numbering jumps repeatedly —
;; so the control flow below cannot be read as complete C.
5512 (define_expand "movhi"
5513 [(set (match_operand:HI 0 "general_operand" "")
5514 (match_operand:HI 1 "general_operand" ""))]
5519 if (can_create_pseudo_p ())
5521 if (GET_CODE (operands[0]) == MEM)
5525 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5528 if (GET_CODE (operands[1]) == CONST_INT)
5529 emit_insn (gen_storeinthi (operands[0], operands[1]));
5532 if (GET_CODE (operands[1]) == MEM)
5533 operands[1] = force_reg (HImode, operands[1]);
5534 if (BYTES_BIG_ENDIAN)
5535 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5537 emit_insn (gen_storehi (operands[1], operands[0]));
5541 /* Sign extend a constant, and keep it in an SImode reg. */
5542 else if (GET_CODE (operands[1]) == CONST_INT)
5544 rtx reg = gen_reg_rtx (SImode);
5545 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5547 /* If the constant is already valid, leave it alone. */
5548 if (!const_ok_for_arm (val))
5550 /* If setting all the top bits will make the constant
5551 loadable in a single instruction, then set them.
5552 Otherwise, sign extend the number. */
5554 if (const_ok_for_arm (~(val | ~0xffff)))
5556 else if (val & 0x8000)
5560 emit_insn (gen_movsi (reg, GEN_INT (val)));
5561 operands[1] = gen_lowpart (HImode, reg);
5563 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5564 && GET_CODE (operands[1]) == MEM)
5566 rtx reg = gen_reg_rtx (SImode);
5568 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5569 operands[1] = gen_lowpart (HImode, reg);
5571 else if (!arm_arch4)
5573 if (GET_CODE (operands[1]) == MEM)
5576 rtx offset = const0_rtx;
5577 rtx reg = gen_reg_rtx (SImode);
5579 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5580 || (GET_CODE (base) == PLUS
5581 && (GET_CODE (offset = XEXP (base, 1))
5583 && ((INTVAL(offset) & 1) != 1)
5584 && GET_CODE (base = XEXP (base, 0)) == REG))
5585 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5589 new_rtx = widen_memory_access (operands[1], SImode,
5590 ((INTVAL (offset) & ~3)
5591 - INTVAL (offset)));
5592 emit_insn (gen_movsi (reg, new_rtx));
5593 if (((INTVAL (offset) & 2) != 0)
5594 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5596 rtx reg2 = gen_reg_rtx (SImode);
5598 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5603 emit_insn (gen_movhi_bytes (reg, operands[1]));
5605 operands[1] = gen_lowpart (HImode, reg);
5609 /* Handle loading a large integer during reload. */
5610 else if (GET_CODE (operands[1]) == CONST_INT
5611 && !const_ok_for_arm (INTVAL (operands[1]))
5612 && !const_ok_for_arm (~INTVAL (operands[1])))
5614 /* Writing a constant to memory needs a scratch, which should
5615 be handled with SECONDARY_RELOADs. */
5616 gcc_assert (GET_CODE (operands[0]) == REG);
5618 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5619 emit_insn (gen_movsi (operands[0], operands[1]));
5623 else if (TARGET_THUMB2)
5625 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5626 if (can_create_pseudo_p ())
5628 if (GET_CODE (operands[0]) != REG)
5629 operands[1] = force_reg (HImode, operands[1]);
5630 /* Zero extend a constant, and keep it in an SImode reg. */
5631 else if (GET_CODE (operands[1]) == CONST_INT)
5633 rtx reg = gen_reg_rtx (SImode);
5634 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5636 emit_insn (gen_movsi (reg, GEN_INT (val)));
5637 operands[1] = gen_lowpart (HImode, reg);
5641 else /* TARGET_THUMB1 */
5643 if (can_create_pseudo_p ())
5645 if (GET_CODE (operands[1]) == CONST_INT)
5647 rtx reg = gen_reg_rtx (SImode);
5649 emit_insn (gen_movsi (reg, operands[1]));
5650 operands[1] = gen_lowpart (HImode, reg);
5653 /* ??? We shouldn't really get invalid addresses here, but this can
5654 happen if we are passed a SP (never OK for HImode/QImode) or
5655 virtual register (also rejected as illegitimate for HImode/QImode)
5656 relative address. */
5657 /* ??? This should perhaps be fixed elsewhere, for instance, in
5658 fixup_stack_1, by checking for other kinds of invalid addresses,
5659 e.g. a bare reference to a virtual register. This may confuse the
5660 alpha though, which must handle this case differently. */
5661 if (GET_CODE (operands[0]) == MEM
5662 && !memory_address_p (GET_MODE (operands[0]),
5663 XEXP (operands[0], 0)))
5665 = replace_equiv_address (operands[0],
5666 copy_to_reg (XEXP (operands[0], 0)));
5668 if (GET_CODE (operands[1]) == MEM
5669 && !memory_address_p (GET_MODE (operands[1]),
5670 XEXP (operands[1], 0)))
5672 = replace_equiv_address (operands[1],
5673 copy_to_reg (XEXP (operands[1], 0)));
5675 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5677 rtx reg = gen_reg_rtx (SImode);
5679 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5680 operands[1] = gen_lowpart (HImode, reg);
5683 if (GET_CODE (operands[0]) == MEM)
5684 operands[1] = force_reg (HImode, operands[1]);
5686 else if (GET_CODE (operands[1]) == CONST_INT
5687 && !satisfies_constraint_I (operands[1]))
5689 /* Handle loading a large integer during reload. */
5691 /* Writing a constant to memory needs a scratch, which should
5692 be handled with SECONDARY_RELOADs. */
5693 gcc_assert (GET_CODE (operands[0]) == REG);
5695 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5696 emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move insn.  Alternatives: reg copy (add #0), memory
;; load (ldrh, with a special fix-up when sp was chosen as index reg),
;; memory store (strh), hi/lo register moves, and small immediate.
;; Extract is missing several interior lines (case 1 body, braces).
5703 (define_insn "*thumb1_movhi_insn"
5704 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5705 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5707 && ( register_operand (operands[0], HImode)
5708 || register_operand (operands[1], HImode))"
5710 switch (which_alternative)
5712 case 0: return \"add %0, %1, #0\";
5713 case 2: return \"strh %1, %0\";
5714 case 3: return \"mov %0, %1\";
5715 case 4: return \"mov %0, %1\";
5716 case 5: return \"mov %0, %1\";
5717 default: gcc_unreachable ();
5719 /* The stack pointer can end up being taken as an index register.
5720 Catch this case here and deal with it. */
5721 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5722 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5723 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5726 ops[0] = operands[0];
5727 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5729 output_asm_insn (\"mov %0, %1\", ops);
5731 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5734 return \"ldrh %0, %1\";
5736 [(set_attr "length" "2,4,2,2,2,2")
5737 (set_attr "type" "*,load1,store1,*,*,*")
5738 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Load a halfword as two QImode zero-extending loads and combine them
;; with shift+or; BYTES_BIG_ENDIAN selects which byte becomes the high
;; part.  Used when no ldrh is available (pre-ARMv4 path in movhi).
5741 (define_expand "movhi_bytes"
5742 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5744 (zero_extend:SI (match_dup 6)))
5745 (set (match_operand:SI 0 "" "")
5746 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5751 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5753 mem1 = change_address (operands[1], QImode, addr);
5754 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5755 operands[0] = gen_lowpart (SImode, operands[0]);
5757 operands[2] = gen_reg_rtx (SImode);
5758 operands[3] = gen_reg_rtx (SImode);
5761 if (BYTES_BIG_ENDIAN)
5763 operands[4] = operands[2];
5764 operands[5] = operands[3];
5768 operands[4] = operands[3];
5769 operands[5] = operands[2];
;; Big-endian halfword load via SImode rotate + arithmetic shift right.
5774 (define_expand "movhi_bigend"
5776 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5779 (ashiftrt:SI (match_dup 2) (const_int 16)))
5780 (set (match_operand:HI 0 "s_register_operand" "")
5784 operands[2] = gen_reg_rtx (SImode);
5785 operands[3] = gen_reg_rtx (SImode);
5786 operands[4] = gen_lowpart (HImode, operands[3]);
5790 ;; Pattern to recognize insn generated default case above
;; ARMv4+ HImode move: mov/mvn for register/immediate forms, strh/ldrh
;; for memory (hence the pool_range attributes on the load alternative).
5791 (define_insn "*movhi_insn_arch4"
5792 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5793 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
5796 && (register_operand (operands[0], HImode)
5797 || register_operand (operands[1], HImode))"
5799 mov%?\\t%0, %1\\t%@ movhi
5800 mvn%?\\t%0, #%B1\\t%@ movhi
5801 str%(h%)\\t%1, %0\\t%@ movhi
5802 ldr%(h%)\\t%0, %1\\t%@ movhi"
5803 [(set_attr "type" "*,*,store1,load1")
5804 (set_attr "predicable" "yes")
5805 (set_attr "insn" "mov,mvn,*,*")
5806 (set_attr "pool_range" "*,*,*,256")
5807 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register/immediate-only HImode move (no memory alternatives).
5810 (define_insn "*movhi_bytes"
5811 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5812 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5815 mov%?\\t%0, %1\\t%@ movhi
5816 mvn%?\\t%0, #%B1\\t%@ movhi"
5817 [(set_attr "predicable" "yes")
5818 (set_attr "insn" "mov,mvn")]
;; Thumb HImode store with an explicit DImode scratch clobber; only the
;; easy case (strict address, low source register) is handled here.
5821 (define_expand "thumb_movhi_clobber"
5822 [(set (match_operand:HI 0 "memory_operand" "")
5823 (match_operand:HI 1 "register_operand" ""))
5824 (clobber (match_operand:DI 2 "register_operand" ""))]
5827 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5828 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5830 emit_insn (gen_movhi (operands[0], operands[1]));
5833 /* XXX Fixme, need to handle other cases here as well. */
5838 ;; We use a DImode scratch because we may occasionally need an additional
5839 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5840 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; HImode secondary-reload expanders; they dispatch to the arm_* or
;; thumb_* helper in arm.c.  NOTE(review): reload_inhi also calls
;; thumb_reload_out_hi below — matches the visible text; confirm against
;; the unabridged file before assuming it is a typo.
5841 (define_expand "reload_outhi"
5842 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5843 (match_operand:HI 1 "s_register_operand" "r")
5844 (match_operand:DI 2 "s_register_operand" "=&l")])]
5847 arm_reload_out_hi (operands);
5849 thumb_reload_out_hi (operands);
5854 (define_expand "reload_inhi"
5855 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5856 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5857 (match_operand:DI 2 "s_register_operand" "=&r")])]
5861 arm_reload_in_hi (operands);
5863 thumb_reload_out_hi (operands);
;; QImode move expander: legitimizes constants (masked to 8 bits for
;; Thumb so a movs can be used), fixes invalid sp/virtual-reg relative
;; addresses, widens optimized memory loads via zero_extendqisi2, and
;; handles the large-constant-during-reload case.  Extract is missing
;; interior lines (braces/else arms), so flow below is partial.
5867 (define_expand "movqi"
5868 [(set (match_operand:QI 0 "general_operand" "")
5869 (match_operand:QI 1 "general_operand" ""))]
5872 /* Everything except mem = const or mem = mem can be done easily */
5874 if (can_create_pseudo_p ())
5876 if (GET_CODE (operands[1]) == CONST_INT)
5878 rtx reg = gen_reg_rtx (SImode);
5880 /* For thumb we want an unsigned immediate, then we are more likely
5881 to be able to use a movs insn. */
5883 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5885 emit_insn (gen_movsi (reg, operands[1]));
5886 operands[1] = gen_lowpart (QImode, reg);
5891 /* ??? We shouldn't really get invalid addresses here, but this can
5892 happen if we are passed a SP (never OK for HImode/QImode) or
5893 virtual register (also rejected as illegitimate for HImode/QImode)
5894 relative address. */
5895 /* ??? This should perhaps be fixed elsewhere, for instance, in
5896 fixup_stack_1, by checking for other kinds of invalid addresses,
5897 e.g. a bare reference to a virtual register. This may confuse the
5898 alpha though, which must handle this case differently. */
5899 if (GET_CODE (operands[0]) == MEM
5900 && !memory_address_p (GET_MODE (operands[0]),
5901 XEXP (operands[0], 0)))
5903 = replace_equiv_address (operands[0],
5904 copy_to_reg (XEXP (operands[0], 0)));
5905 if (GET_CODE (operands[1]) == MEM
5906 && !memory_address_p (GET_MODE (operands[1]),
5907 XEXP (operands[1], 0)))
5909 = replace_equiv_address (operands[1],
5910 copy_to_reg (XEXP (operands[1], 0)));
5913 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5915 rtx reg = gen_reg_rtx (SImode);
5917 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5918 operands[1] = gen_lowpart (QImode, reg);
5921 if (GET_CODE (operands[0]) == MEM)
5922 operands[1] = force_reg (QImode, operands[1]);
5924 else if (TARGET_THUMB
5925 && GET_CODE (operands[1]) == CONST_INT
5926 && !satisfies_constraint_I (operands[1]))
5928 /* Handle loading a large integer during reload. */
5930 /* Writing a constant to memory needs a scratch, which should
5931 be handled with SECONDARY_RELOADs. */
5932 gcc_assert (GET_CODE (operands[0]) == REG);
5934 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5935 emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM/Thumb-2 QImode move: mov/mvn/ldrb/strb alternatives (the output
;; templates themselves are among the lines missing from this extract).
5942 (define_insn "*arm_movqi_insn"
5943 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5944 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5946 && ( register_operand (operands[0], QImode)
5947 || register_operand (operands[1], QImode))"
5953 [(set_attr "type" "*,*,load1,store1")
5954 (set_attr "insn" "mov,mvn,*,*")
5955 (set_attr "predicable" "yes")]
;; Thumb-1 QImode move; templates likewise missing from the extract.
5958 (define_insn "*thumb1_movqi_insn"
5959 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5960 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5962 && ( register_operand (operands[0], QImode)
5963 || register_operand (operands[1], QImode))"
5971 [(set_attr "length" "2")
5972 (set_attr "type" "*,load1,store1,*,*,*")
5973 (set_attr "insn" "*,*,*,mov,mov,mov")
5974 (set_attr "pool_range" "*,32,*,*,*,*")
5975 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode (__fp16) move expander: forces operands into registers when
;; the destination is memory / not a register.
5978 (define_expand "movhf"
5979 [(set (match_operand:HF 0 "general_operand" "")
5980 (match_operand:HF 1 "general_operand" ""))]
5985 if (GET_CODE (operands[0]) == MEM)
5986 operands[1] = force_reg (HFmode, operands[1]);
5988 else /* TARGET_THUMB1 */
5990 if (can_create_pseudo_p ())
5992 if (GET_CODE (operands[0]) != REG)
5993 operands[1] = force_reg (HFmode, operands[1]);
;; 32-bit soft __fp16 move.  The constant alternative materializes the
;; 16-bit image of the float via movw (Thumb-2) or a mov/orr pair.
5999 (define_insn "*arm32_movhf"
6000 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6001 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6002 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6003 && ( s_register_operand (operands[0], HFmode)
6004 || s_register_operand (operands[1], HFmode))"
6006 switch (which_alternative)
6008 case 0: /* ARM register from memory */
6009 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6010 case 1: /* memory from ARM register */
6011 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6012 case 2: /* ARM register from ARM register */
6013 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6014 case 3: /* ARM register from constant */
6020 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6021 bits = real_to_target (NULL, &r, HFmode);
6022 ops[0] = operands[0];
6023 ops[1] = GEN_INT (bits);
6024 ops[2] = GEN_INT (bits & 0xff00);
6025 ops[3] = GEN_INT (bits & 0x00ff);
6027 if (arm_arch_thumb2)
6028 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6030 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6037 [(set_attr "conds" "unconditional")
6038 (set_attr "type" "load1,store1,*,*")
6039 (set_attr "insn" "*,*,mov,mov")
6040 (set_attr "length" "4,4,4,8")
6041 (set_attr "predicable" "yes")]
;; Thumb-1 __fp16 move.  The load alternative distinguishes a constant
;; pool reference (label or label+offset address -> word ldr) from an
;; ordinary halfword load (ldrh).
6044 (define_insn "*thumb1_movhf"
6045 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6046 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6048 && ( s_register_operand (operands[0], HFmode)
6049 || s_register_operand (operands[1], HFmode))"
6051 switch (which_alternative)
6056 gcc_assert (GET_CODE(operands[1]) == MEM);
6057 addr = XEXP (operands[1], 0);
6058 if (GET_CODE (addr) == LABEL_REF
6059 || (GET_CODE (addr) == CONST
6060 && GET_CODE (XEXP (addr, 0)) == PLUS
6061 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6062 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6064 /* Constant pool entry. */
6065 return \"ldr\\t%0, %1\";
6067 return \"ldrh\\t%0, %1\";
6069 case 2: return \"strh\\t%1, %0\";
6070 default: return \"mov\\t%0, %1\";
6073 [(set_attr "length" "2")
6074 (set_attr "type" "*,load1,store1,*,*")
6075 (set_attr "insn" "mov,*,*,mov,mov")
6076 (set_attr "pool_range" "*,1020,*,*,*")
6077 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; SFmode move expander: force register operands as needed.
6079 (define_expand "movsf"
6080 [(set (match_operand:SF 0 "general_operand" "")
6081 (match_operand:SF 1 "general_operand" ""))]
6086 if (GET_CODE (operands[0]) == MEM)
6087 operands[1] = force_reg (SFmode, operands[1]);
6089 else /* TARGET_THUMB1 */
6091 if (can_create_pseudo_p ())
6093 if (GET_CODE (operands[0]) != REG)
6094 operands[1] = force_reg (SFmode, operands[1]);
6100 ;; Transform a floating-point move of a constant into a core register into
6101 ;; an SImode operation.
;; NOTE(review): the "(define_split" header line (original line ~6102)
;; is absent from this extract; only the split's pattern/body remain.
6103 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6104 (match_operand:SF 1 "immediate_operand" ""))]
6107 && GET_CODE (operands[1]) == CONST_DOUBLE"
6108 [(set (match_dup 2) (match_dup 3))]
6110 operands[2] = gen_lowpart (SImode, operands[0]);
6111 operands[3] = gen_lowpart (SImode, operands[1]);
6112 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move for 32-bit targets: mov / ldr / str.
6117 (define_insn "*arm_movsf_soft_insn"
6118 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6119 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6121 && TARGET_SOFT_FLOAT
6122 && (GET_CODE (operands[0]) != MEM
6123 || register_operand (operands[1], SFmode))"
6126 ldr%?\\t%0, %1\\t%@ float
6127 str%?\\t%1, %0\\t%@ float"
6128 [(set_attr "predicable" "yes")
6129 (set_attr "type" "*,load1,store1")
6130 (set_attr "insn" "mov,*,*")
6131 (set_attr "pool_range" "*,4096,*")
6132 (set_attr "arm_neg_pool_range" "*,4084,*")
6133 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6136 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move; output templates are missing from the extract.
6137 (define_insn "*thumb1_movsf_insn"
6138 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6139 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6141 && ( register_operand (operands[0], SFmode)
6142 || register_operand (operands[1], SFmode))"
6151 [(set_attr "length" "2")
6152 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6153 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6154 (set_attr "insn" "*,*,*,*,*,mov,mov")
6155 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; DFmode move expander: force register operands as needed.
6158 (define_expand "movdf"
6159 [(set (match_operand:DF 0 "general_operand" "")
6160 (match_operand:DF 1 "general_operand" ""))]
6165 if (GET_CODE (operands[0]) == MEM)
6166 operands[1] = force_reg (DFmode, operands[1]);
6168 else /* TARGET_THUMB */
6170 if (can_create_pseudo_p ())
6172 if (GET_CODE (operands[0]) != REG)
6173 operands[1] = force_reg (DFmode, operands[1]);
6179 ;; Reloading a df mode value stored in integer regs to memory can require a
;; DF store-reload expander.  Dispatches on the destination address code
;; (PLUS / POST_INC / PRE_DEC / PRE_INC / POST_DEC), computing the address
;; into the SImode scratch (operand 2) and compensating afterwards for
;; the POST_DEC case.  Extract is missing interior lines.
6181 (define_expand "reload_outdf"
6182 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6183 (match_operand:DF 1 "s_register_operand" "r")
6184 (match_operand:SI 2 "s_register_operand" "=&r")]
6188 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6191 operands[2] = XEXP (operands[0], 0);
6192 else if (code == POST_INC || code == PRE_DEC)
6194 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6195 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6196 emit_insn (gen_movdi (operands[0], operands[1]));
6199 else if (code == PRE_INC)
6201 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6203 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6206 else if (code == POST_DEC)
6207 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6209 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6210 XEXP (XEXP (operands[0], 0), 1)));
6212 emit_insn (gen_rtx_SET (VOIDmode,
6213 replace_equiv_address (operands[0], operands[2]),
6216 if (code == POST_DEC)
6217 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move; non-trivial alternatives fall through to
;; output_move_double.
6223 (define_insn "*movdf_soft_insn"
6224 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6225 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6226 "TARGET_32BIT && TARGET_SOFT_FLOAT
6227 && ( register_operand (operands[0], DFmode)
6228 || register_operand (operands[1], DFmode))"
6230 switch (which_alternative)
6237 return output_move_double (operands);
6240 [(set_attr "length" "8,12,16,8,8")
6241 (set_attr "type" "*,*,*,load2,store2")
6242 (set_attr "pool_range" "*,*,*,1020,*")
6243 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6244 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6247 ;;; ??? This should have alternatives for constants.
6248 ;;; ??? This was originally identical to the movdi_insn pattern.
6249 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6250 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DFmode move: register-pair copies ordered to avoid clobbering
;; an overlapping source (%0/%H0 are the low/high words of the pair),
;; ldmia/stmia for register-indirect memory, and str pairs for stores.
6251 (define_insn "*thumb_movdf_insn"
6252 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6253 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6255 && ( register_operand (operands[0], DFmode)
6256 || register_operand (operands[1], DFmode))"
6258 switch (which_alternative)
6262 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6263 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6264 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6266 return \"ldmia\\t%1, {%0, %H0}\";
6268 return \"stmia\\t%0, {%1, %H1}\";
6270 return thumb_load_double_from_address (operands);
6272 operands[2] = gen_rtx_MEM (SImode,
6273 plus_constant (XEXP (operands[0], 0), 4));
6274 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6277 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6278 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6279 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6282 [(set_attr "length" "4,2,2,6,4,4")
6283 (set_attr "type" "*,load2,store2,load2,store2,*")
6284 (set_attr "insn" "*,*,*,*,*,mov")
6285 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode move expander (legacy FPA extended precision only).
6288 (define_expand "movxf"
6289 [(set (match_operand:XF 0 "general_operand" "")
6290 (match_operand:XF 1 "general_operand" ""))]
6291 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6293 if (GET_CODE (operands[0]) == MEM)
6294 operands[1] = force_reg (XFmode, operands[1]);
6300 ;; load- and store-multiple insns
6301 ;; The arm can load/store any set of registers, provided that they are in
6302 ;; ascending order, but these expanders assume a contiguous set.
;; Validates operand shapes and a 2..14 register count within the core
;; register file, then delegates RTL generation to arm_gen_load_multiple.
6304 (define_expand "load_multiple"
6305 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6306 (match_operand:SI 1 "" ""))
6307 (use (match_operand:SI 2 "" ""))])]
6310 HOST_WIDE_INT offset = 0;
6312 /* Support only fixed point registers. */
6313 if (GET_CODE (operands[2]) != CONST_INT
6314 || INTVAL (operands[2]) > 14
6315 || INTVAL (operands[2]) < 2
6316 || GET_CODE (operands[1]) != MEM
6317 || GET_CODE (operands[0]) != REG
6318 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6319 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6323 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6324 INTVAL (operands[2]),
6325 force_reg (SImode, XEXP (operands[1], 0)),
6326 FALSE, operands[1], &offset);
;; Mirror image of load_multiple: same validation with the mem/reg
;; roles of operands 0 and 1 swapped; delegates to arm_gen_store_multiple.
6329 (define_expand "store_multiple"
6330 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6331 (match_operand:SI 1 "" ""))
6332 (use (match_operand:SI 2 "" ""))])]
6335 HOST_WIDE_INT offset = 0;
6337 /* Support only fixed point registers. */
6338 if (GET_CODE (operands[2]) != CONST_INT
6339 || INTVAL (operands[2]) > 14
6340 || INTVAL (operands[2]) < 2
6341 || GET_CODE (operands[1]) != REG
6342 || GET_CODE (operands[0]) != MEM
6343 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6344 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6348 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6349 INTVAL (operands[2]),
6350 force_reg (SImode, XEXP (operands[0], 0)),
6351 FALSE, operands[0], &offset);
6355 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6356 ;; We could let this apply for blocks of less than this, but it clobbers so
6357 ;; many registers that there is then probably a better way.
;; Operands: 0 = dest (BLK), 1 = src (BLK), 2 = byte count, 3 = alignment.
;; ARM path uses arm_gen_movmemqi; Thumb-1 requires word alignment and
;; a count of at most 48 bytes before using thumb_expand_movmemqi.
6359 (define_expand "movmemqi"
6360 [(match_operand:BLK 0 "general_operand" "")
6361 (match_operand:BLK 1 "general_operand" "")
6362 (match_operand:SI 2 "const_int_operand" "")
6363 (match_operand:SI 3 "const_int_operand" "")]
6368 if (arm_gen_movmemqi (operands))
6372 else /* TARGET_THUMB1 */
6374 if ( INTVAL (operands[3]) != 4
6375 || INTVAL (operands[2]) > 48)
6378 thumb_expand_movmemqi (operands);
6384 ;; Thumb block-move insns
;; Copy 12 bytes (3 words) with post-incremented pointers; output is
;; produced by thumb_output_move_mem_multiple (3, ...).
6386 (define_insn "movmem12b"
6387 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6388 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6389 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6390 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6391 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6392 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6393 (set (match_operand:SI 0 "register_operand" "=l")
6394 (plus:SI (match_dup 2) (const_int 12)))
6395 (set (match_operand:SI 1 "register_operand" "=l")
6396 (plus:SI (match_dup 3) (const_int 12)))
6397 (clobber (match_scratch:SI 4 "=&l"))
6398 (clobber (match_scratch:SI 5 "=&l"))
6399 (clobber (match_scratch:SI 6 "=&l"))]
6401 "* return thumb_output_move_mem_multiple (3, operands);"
6402 [(set_attr "length" "4")
6403 ; This isn't entirely accurate... It loads as well, but in terms of
6404 ; scheduling the following insn it is better to consider it as a store
6405 (set_attr "type" "store3")]
;; 8-byte (2-word) variant of the pattern above.
6408 (define_insn "movmem8b"
6409 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6410 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6411 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6412 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6413 (set (match_operand:SI 0 "register_operand" "=l")
6414 (plus:SI (match_dup 2) (const_int 8)))
6415 (set (match_operand:SI 1 "register_operand" "=l")
6416 (plus:SI (match_dup 3) (const_int 8)))
6417 (clobber (match_scratch:SI 4 "=&l"))
6418 (clobber (match_scratch:SI 5 "=&l"))
6420 "* return thumb_output_move_mem_multiple (2, operands);"
6421 [(set_attr "length" "4")
6422 ; This isn't entirely accurate... It loads as well, but in terms of
6423 ; scheduling the following insn it is better to consider it as a store
6424 (set_attr "type" "store2")]
6429 ;; Compare & branch insns
6430 ;; The range calculations are based as follows:
6431 ;; For forward branches, the address calculation returns the address of
6432 ;; the next instruction. This is 2 beyond the branch instruction.
6433 ;; For backward branches, the address calculation returns the address of
6434 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6435 ;; instruction for the shortest sequence, and 4 before the branch instruction
6436 ;; if we have to jump around an unconditional branch.
6437 ;; To the basic branch range the PC offset must be added (this is +4).
6438 ;; So for forward branches we have
6439 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6440 ;; And for backward branches we have
6441 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6443 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6444 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch expander.  32-bit targets legitimize the
;; comparison operand and emit cbranch_cc; Thumb-1 prefers the negated-
;; immediate scratch pattern when thumb1_cmpneg_operand accepts it.
6446 (define_expand "cbranchsi4"
6447 [(set (pc) (if_then_else
6448 (match_operator 0 "arm_comparison_operator"
6449 [(match_operand:SI 1 "s_register_operand" "")
6450 (match_operand:SI 2 "nonmemory_operand" "")])
6451 (label_ref (match_operand 3 "" ""))
6453 "TARGET_THUMB1 || TARGET_32BIT"
6457 if (!arm_add_operand (operands[2], SImode))
6458 operands[2] = force_reg (SImode, operands[2]);
6459 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6463 if (thumb1_cmpneg_operand (operands[2], SImode))
6465 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6466 operands[3], operands[0]))
6469 if (!thumb1_cmp_operand (operands[2], SImode))
6470 operands[2] = force_reg (SImode, operands[2]);
6473 ;; A pattern to recognize a special situation and optimize for it.
6474 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6475 ;; due to the available addressing modes. Hence, convert a signed comparison
6476 ;; with zero into an unsigned comparison with 127 if possible.
;; (x >= 0) becomes (zext(x) <= 127) and (x < 0) becomes (zext(x) > 127);
;; the rewritten comparison is re-expanded through cbranchsi4.
6477 (define_expand "cbranchqi4"
6478 [(set (pc) (if_then_else
6479 (match_operator 0 "lt_ge_comparison_operator"
6480 [(match_operand:QI 1 "memory_operand" "")
6481 (match_operand:QI 2 "const0_operand" "")])
6482 (label_ref (match_operand 3 "" ""))
6487 xops[1] = gen_reg_rtx (SImode);
6488 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6489 xops[2] = GEN_INT (127);
6490 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6491 VOIDmode, xops[1], xops[2]);
6492 xops[3] = operands[3];
6493 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; cbranchsf4: SFmode compare-and-branch, lowered via cbranch_cc.
;; Hard-float targets only.
6497 (define_expand "cbranchsf4"
6498 [(set (pc) (if_then_else
6499 (match_operator 0 "arm_comparison_operator"
6500 [(match_operand:SF 1 "s_register_operand" "")
6501 (match_operand:SF 2 "arm_float_compare_operand" "")])
6502 (label_ref (match_operand 3 "" ""))
6504 "TARGET_32BIT && TARGET_HARD_FLOAT"
6505 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6506 operands[3])); DONE;"
;; cbranchdf4: DFmode variant of the above; excluded when the FPU is
;; single-precision-only VFP.
6509 (define_expand "cbranchdf4"
6510 [(set (pc) (if_then_else
6511 (match_operator 0 "arm_comparison_operator"
6512 [(match_operand:DF 1 "s_register_operand" "")
6513 (match_operand:DF 2 "arm_float_compare_operand" "")])
6514 (label_ref (match_operand 3 "" ""))
6516 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6517 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6518 operands[3])); DONE;"
;; cbranchdi4: DImode compare-and-branch.  Comparisons that
;; arm_gen_compare_reg cannot handle directly are rewritten with the
;; operands exchanged (the gen_rtx_LT/GE/LTU/GEU calls below) before the
;; cbranch_cc jump is emitted.
6521 (define_expand "cbranchdi4"
6522 [(set (pc) (if_then_else
6523 (match_operator 0 "arm_comparison_operator"
6524 [(match_operand:DI 1 "cmpdi_operand" "")
6525 (match_operand:DI 2 "cmpdi_operand" "")])
6526 (label_ref (match_operand 3 "" ""))
6530 rtx swap = NULL_RTX;
6531 enum rtx_code code = GET_CODE (operands[0]);
6533 /* We should not have two constants. */
6534 gcc_assert (GET_MODE (operands[1]) == DImode
6535 || GET_MODE (operands[2]) == DImode);
6537 /* Flip unimplemented DImode comparisons to a form that
6538 arm_gen_compare_reg can handle. */
6542 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6544 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6546 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6548 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6553 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6556 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; cbranchsi4_insn: Thumb-1 compare-and-branch.  "cmp %1, %2" is emitted
;; only when the flags (tracked in cfun->machine->thumb1_cc_*) do not
;; already hold this comparison.  The branch form is picked by the length
;; attribute: 4 = short b<cond>; 6 = inverted branch over an
;; unconditional "b"; otherwise an inverted branch over a far "bl" jump.
6562 (define_insn "cbranchsi4_insn"
6563 [(set (pc) (if_then_else
6564 (match_operator 0 "arm_comparison_operator"
6565 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6566 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6567 (label_ref (match_operand 3 "" ""))
6571 rtx t = cfun->machine->thumb1_cc_insn;
6574 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6575 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6577 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6579 if (!noov_comparison_operator (operands[0], VOIDmode))
6582 else if (cfun->machine->thumb1_cc_mode != CCmode)
6587 output_asm_insn ("cmp\t%1, %2", operands);
6588 cfun->machine->thumb1_cc_insn = insn;
6589 cfun->machine->thumb1_cc_op0 = operands[1];
6590 cfun->machine->thumb1_cc_op1 = operands[2];
6591 cfun->machine->thumb1_cc_mode = CCmode;
6594 /* Ensure we emit the right type of condition code on the jump. */
6595 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6598 switch (get_attr_length (insn))
6600 case 4: return \"b%d0\\t%l3\";
6601 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6602 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6605 [(set (attr "far_jump")
6607 (eq_attr "length" "8")
6608 (const_string "yes")
6609 (const_string "no")))
6610 (set (attr "length")
6612 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6613 (le (minus (match_dup 3) (pc)) (const_int 256)))
6616 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6617 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; cbranchsi4_scratch: compare against a negated immediate by emitting
;; "add %0, %1, #%n2" into a scratch register, then branch with the same
;; short/long/far selection as cbranchsi4_insn.
6622 (define_insn "cbranchsi4_scratch"
6623 [(set (pc) (if_then_else
6624 (match_operator 4 "arm_comparison_operator"
6625 [(match_operand:SI 1 "s_register_operand" "l,0")
6626 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6627 (label_ref (match_operand 3 "" ""))
6629 (clobber (match_scratch:SI 0 "=l,l"))]
6632 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6634 switch (get_attr_length (insn))
6636 case 4: return \"b%d4\\t%l3\";
6637 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6638 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6641 [(set (attr "far_jump")
6643 (eq_attr "length" "8")
6644 (const_string "yes")
6645 (const_string "no")))
6646 (set (attr "length")
6648 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6649 (le (minus (match_dup 3) (pc)) (const_int 256)))
6652 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6653 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6658 ;; Two peepholes to generate subtract of 0 instead of a move if the
6659 ;; condition codes will be useful.
;; Each peephole rewrites the move as (minus (match_dup 1) (const_int 0))
;; so the flags set by the subtract feed the following conditional branch.
6661 [(set (match_operand:SI 0 "low_register_operand" "")
6662 (match_operand:SI 1 "low_register_operand" ""))
6664 (if_then_else (match_operator 2 "arm_comparison_operator"
6665 [(match_dup 1) (const_int 0)])
6666 (label_ref (match_operand 3 "" ""))
6669 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6671 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6672 (label_ref (match_dup 3))
6676 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6677 ;; merge cases like this because the op1 is a hard register in
6678 ;; arm_class_likely_spilled_p.
6680 [(set (match_operand:SI 0 "low_register_operand" "")
6681 (match_operand:SI 1 "low_register_operand" ""))
6683 (if_then_else (match_operator 2 "arm_comparison_operator"
6684 [(match_dup 0) (const_int 0)])
6685 (label_ref (match_operand 3 "" ""))
6688 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6690 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6691 (label_ref (match_dup 3))
;; *negated_cbranchsi4: compare %1 against the negation of %2 with "cmn",
;; then branch (short/long/far form picked by the length attribute).
6695 (define_insn "*negated_cbranchsi4"
6698 (match_operator 0 "equality_operator"
6699 [(match_operand:SI 1 "s_register_operand" "l")
6700 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6701 (label_ref (match_operand 3 "" ""))
6705 output_asm_insn (\"cmn\\t%1, %2\", operands);
6706 switch (get_attr_length (insn))
6708 case 4: return \"b%d0\\t%l3\";
6709 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6710 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6713 [(set (attr "far_jump")
6715 (eq_attr "length" "8")
6716 (const_string "yes")
6717 (const_string "no")))
6718 (set (attr "length")
6720 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6721 (le (minus (match_dup 3) (pc)) (const_int 256)))
6724 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6725 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tbit_cbranch: branch on a single bit -- "lsl" by (32 - 1 - %2) moves
;; bit %2 into the sign position of a scratch, then the branch tests it.
6730 (define_insn "*tbit_cbranch"
6733 (match_operator 0 "equality_operator"
6734 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6736 (match_operand:SI 2 "const_int_operand" "i"))
6738 (label_ref (match_operand 3 "" ""))
6740 (clobber (match_scratch:SI 4 "=l"))]
6745 op[0] = operands[4];
6746 op[1] = operands[1];
6747 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6749 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6750 switch (get_attr_length (insn))
6752 case 4: return \"b%d0\\t%l3\";
6753 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6754 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6757 [(set (attr "far_jump")
6759 (eq_attr "length" "8")
6760 (const_string "yes")
6761 (const_string "no")))
6762 (set (attr "length")
6764 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6765 (le (minus (match_dup 3) (pc)) (const_int 256)))
6768 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6769 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tlobits_cbranch: branch on the low %2 bits -- "lsl" by (32 - %2)
;; moves the extracted field to the top of a scratch, then branches.
6774 (define_insn "*tlobits_cbranch"
6777 (match_operator 0 "equality_operator"
6778 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6779 (match_operand:SI 2 "const_int_operand" "i")
6782 (label_ref (match_operand 3 "" ""))
6784 (clobber (match_scratch:SI 4 "=l"))]
6789 op[0] = operands[4];
6790 op[1] = operands[1];
6791 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6793 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6794 switch (get_attr_length (insn))
6796 case 4: return \"b%d0\\t%l3\";
6797 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6798 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6801 [(set (attr "far_jump")
6803 (eq_attr "length" "8")
6804 (const_string "yes")
6805 (const_string "no")))
6806 (set (attr "length")
6808 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6809 (le (minus (match_dup 3) (pc)) (const_int 256)))
6812 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6813 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tstsi3_cbranch: branch on (%0 AND %1) via "tst", with the usual
;; short/long/far branch selection by the length attribute.
6818 (define_insn "*tstsi3_cbranch"
6821 (match_operator 3 "equality_operator"
6822 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6823 (match_operand:SI 1 "s_register_operand" "l"))
6825 (label_ref (match_operand 2 "" ""))
6830 output_asm_insn (\"tst\\t%0, %1\", operands);
6831 switch (get_attr_length (insn))
6833 case 4: return \"b%d3\\t%l2\";
6834 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6835 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6838 [(set (attr "far_jump")
6840 (eq_attr "length" "8")
6841 (const_string "yes")
6842 (const_string "no")))
6843 (set (attr "length")
6845 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6846 (le (minus (match_dup 2) (pc)) (const_int 256)))
6849 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6850 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; *cbranchne_decr1: decrement-and-branch.  Stores %2 - 1 into a low reg
;; (alt 0), a high reg via "mov" (alt 1), or memory via "str" (alts 2-3,
;; through scratch %1), while branching on the comparison of the original
;; value with 1 (cond is rebuilt against const1_rtx below).
6855 (define_insn "*cbranchne_decr1"
6857 (if_then_else (match_operator 3 "equality_operator"
6858 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6860 (label_ref (match_operand 4 "" ""))
6862 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6863 (plus:SI (match_dup 2) (const_int -1)))
6864 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6869 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6871 VOIDmode, operands[2], const1_rtx);
6872 cond[1] = operands[4];
6874 if (which_alternative == 0)
6875 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6876 else if (which_alternative == 1)
6878 /* We must provide an alternative for a hi reg because reload
6879 cannot handle output reloads on a jump instruction, but we
6880 can't subtract into that. Fortunately a mov from lo to hi
6881 does not clobber the condition codes. */
6882 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6883 output_asm_insn (\"mov\\t%0, %1\", operands);
6887 /* Similarly, but the target is memory. */
6888 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6889 output_asm_insn (\"str\\t%1, %0\", operands);
6892 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6895 output_asm_insn (\"b%d0\\t%l1\", cond);
6898 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6899 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6901 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6902 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6906 [(set (attr "far_jump")
6908 (ior (and (eq (symbol_ref ("which_alternative"))
6910 (eq_attr "length" "8"))
6911 (eq_attr "length" "10"))
6912 (const_string "yes")
6913 (const_string "no")))
6914 (set_attr_alternative "length"
6918 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6919 (le (minus (match_dup 4) (pc)) (const_int 256)))
6922 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6923 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6928 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6929 (le (minus (match_dup 4) (pc)) (const_int 256)))
6932 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6933 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6938 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6939 (le (minus (match_dup 4) (pc)) (const_int 256)))
6942 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6943 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6948 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6949 (le (minus (match_dup 4) (pc)) (const_int 256)))
6952 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6953 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *addsi3_cbranch: add %2 and %3 (or subtract when %3 is a negative
;; immediate) and branch on the comparison of the sum; alternatives place
;; the result in a low reg, a high reg (via "mov") or memory (via "str"
;; through scratch %1).  Only EQ/NE/GE/LT comparisons are accepted.
6958 (define_insn "*addsi3_cbranch"
6961 (match_operator 4 "arm_comparison_operator"
6963 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6964 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6966 (label_ref (match_operand 5 "" ""))
6969 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6970 (plus:SI (match_dup 2) (match_dup 3)))
6971 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6973 && (GET_CODE (operands[4]) == EQ
6974 || GET_CODE (operands[4]) == NE
6975 || GET_CODE (operands[4]) == GE
6976 || GET_CODE (operands[4]) == LT)"
6981 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6982 cond[1] = operands[2];
6983 cond[2] = operands[3];
6985 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6986 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6988 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6990 if (which_alternative >= 2
6991 && which_alternative < 4)
6992 output_asm_insn (\"mov\\t%0, %1\", operands);
6993 else if (which_alternative >= 4)
6994 output_asm_insn (\"str\\t%1, %0\", operands);
6996 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
6999 return \"b%d4\\t%l5\";
7001 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7003 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7007 [(set (attr "far_jump")
7009 (ior (and (lt (symbol_ref ("which_alternative"))
7011 (eq_attr "length" "8"))
7012 (eq_attr "length" "10"))
7013 (const_string "yes")
7014 (const_string "no")))
7015 (set (attr "length")
7017 (lt (symbol_ref ("which_alternative"))
7020 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7021 (le (minus (match_dup 5) (pc)) (const_int 256)))
7024 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7025 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7029 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7030 (le (minus (match_dup 5) (pc)) (const_int 256)))
7033 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7034 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; *addsi3_cbranch_scratch: as above, but the sum itself is dead -- uses
;; "cmp"/"cmn" for the immediate alternatives, or "add"/"sub" into an
;; optional scratch register.
7039 (define_insn "*addsi3_cbranch_scratch"
7042 (match_operator 3 "arm_comparison_operator"
7044 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7045 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7047 (label_ref (match_operand 4 "" ""))
7049 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7051 && (GET_CODE (operands[3]) == EQ
7052 || GET_CODE (operands[3]) == NE
7053 || GET_CODE (operands[3]) == GE
7054 || GET_CODE (operands[3]) == LT)"
7057 switch (which_alternative)
7060 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7063 output_asm_insn (\"cmn\t%1, %2\", operands);
7066 if (INTVAL (operands[2]) < 0)
7067 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7069 output_asm_insn (\"add\t%0, %1, %2\", operands);
7072 if (INTVAL (operands[2]) < 0)
7073 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7075 output_asm_insn (\"add\t%0, %0, %2\", operands);
7079 switch (get_attr_length (insn))
7082 return \"b%d3\\t%l4\";
7084 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7086 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7090 [(set (attr "far_jump")
7092 (eq_attr "length" "8")
7093 (const_string "yes")
7094 (const_string "no")))
7095 (set (attr "length")
7097 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7098 (le (minus (match_dup 4) (pc)) (const_int 256)))
7101 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7102 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7108 ;; Comparison and test insns
;; *arm_cmpsi_insn: set CC from an SImode compare of a register with an
;; add-operand (the output template is elided in this extract).
7110 (define_insn "*arm_cmpsi_insn"
7111 [(set (reg:CC CC_REGNUM)
7112 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7113 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7118 [(set_attr "conds" "set")]
;; *cmpsi_shiftsi: compare a register against a shifted register.
7121 (define_insn "*cmpsi_shiftsi"
7122 [(set (reg:CC CC_REGNUM)
7123 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7124 (match_operator:SI 3 "shift_operator"
7125 [(match_operand:SI 1 "s_register_operand" "r,r")
7126 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7129 [(set_attr "conds" "set")
7130 (set_attr "shift" "1")
7131 (set_attr "arch" "32,a")
7132 (set_attr "type" "alu_shift,alu_shift_reg")])
;; *cmpsi_shiftsi_swp: the same comparison with the operands exchanged,
;; so the result is recorded in CC_SWP mode.
7134 (define_insn "*cmpsi_shiftsi_swp"
7135 [(set (reg:CC_SWP CC_REGNUM)
7136 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7137 [(match_operand:SI 1 "s_register_operand" "r,r")
7138 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7139 (match_operand:SI 0 "s_register_operand" "r,r")))]
7142 [(set_attr "conds" "set")
7143 (set_attr "shift" "1")
7144 (set_attr "arch" "32,a")
7145 (set_attr "type" "alu_shift,alu_shift_reg")])
;; *arm_cmpsi_negshiftsi_si: CC_Z compare of a register against the
;; negation of a shifted register (a CMN-style test).
7147 (define_insn "*arm_cmpsi_negshiftsi_si"
7148 [(set (reg:CC_Z CC_REGNUM)
7150 (neg:SI (match_operator:SI 1 "shift_operator"
7151 [(match_operand:SI 2 "s_register_operand" "r")
7152 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7153 (match_operand:SI 0 "s_register_operand" "r")))]
7156 [(set_attr "conds" "set")
7157 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7158 (const_string "alu_shift")
7159 (const_string "alu_shift_reg")))]
7162 ;; DImode comparisons. The generic code generates branches that
7163 ;; if-conversion can not reduce to a conditional compare, so we do
;; *arm_cmpdi_insn: "cmp" the low words, then "sbcs" the high words into
;; a scratch register; not used when Maverick hard-float is enabled.
7166 (define_insn "*arm_cmpdi_insn"
7167 [(set (reg:CC_NCV CC_REGNUM)
7168 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7169 (match_operand:DI 1 "arm_di_operand" "rDi")))
7170 (clobber (match_scratch:SI 2 "=r"))]
7171 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7172 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7173 [(set_attr "conds" "set")
7174 (set_attr "length" "8")]
;; *arm_cmpdi_unsigned: "cmp" the high words, then "cmpeq" the low words.
7177 (define_insn "*arm_cmpdi_unsigned"
7178 [(set (reg:CC_CZ CC_REGNUM)
7179 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7180 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7182 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7183 [(set_attr "conds" "set")
7184 (set_attr "length" "8")]
;; *arm_cmpdi_zero: test a DImode value against zero with a flag-setting
;; ORR of its two halves into a scratch.
7187 (define_insn "*arm_cmpdi_zero"
7188 [(set (reg:CC_Z CC_REGNUM)
7189 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7191 (clobber (match_scratch:SI 1 "=r"))]
7193 "orr%.\\t%1, %Q0, %R0"
7194 [(set_attr "conds" "set")]
;; *thumb_cmpdi_zero: Thumb-1 version of the same ORR trick.
7197 (define_insn "*thumb_cmpdi_zero"
7198 [(set (reg:CC_Z CC_REGNUM)
7199 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7201 (clobber (match_scratch:SI 1 "=l"))]
7203 "orr\\t%1, %Q0, %R0"
7204 [(set_attr "conds" "set")
7205 (set_attr "length" "2")]
7208 ;; Cirrus SF compare instruction
;; The cfcmps/cfcmpd/cfcmp64 forms below write the flags by naming r15 as
;; the destination register.
7209 (define_insn "*cirrus_cmpsf"
7210 [(set (reg:CCFP CC_REGNUM)
7211 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7212 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7213 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7214 "cfcmps%?\\tr15, %V0, %V1"
7215 [(set_attr "type" "mav_farith")
7216 (set_attr "cirrus" "compare")]
7219 ;; Cirrus DF compare instruction
7220 (define_insn "*cirrus_cmpdf"
7221 [(set (reg:CCFP CC_REGNUM)
7222 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7223 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7224 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7225 "cfcmpd%?\\tr15, %V0, %V1"
7226 [(set_attr "type" "mav_farith")
7227 (set_attr "cirrus" "compare")]
;; Cirrus DImode compare instruction.
7230 (define_insn "*cirrus_cmpdi"
7231 [(set (reg:CC CC_REGNUM)
7232 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7233 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7234 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7235 "cfcmp64%?\\tr15, %V0, %V1"
7236 [(set_attr "type" "mav_farith")
7237 (set_attr "cirrus" "compare")]
7240 ; This insn allows redundant compares to be removed by cse, nothing should
7241 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7242 ; is deleted later on. The match_dup will match the mode here, so that
7243 ; mode changes of the condition codes aren't lost by this even though we don't
7244 ; specify what they are.
7246 (define_insn "*deleted_compare"
7247 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7249 "\\t%@ deleted compare"
7250 [(set_attr "conds" "set")
7251 (set_attr "length" "0")]
7255 ;; Conditional branch insns
;; cbranch_cc: materialize the comparison with arm_gen_compare_reg, then
;; branch on the resulting CC register compared against zero.
7257 (define_expand "cbranch_cc"
7259 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7260 (match_operand 2 "" "")])
7261 (label_ref (match_operand 3 "" ""))
7264 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7265 operands[1], operands[2]);
7266 operands[2] = const0_rtx;"
7270 ;; Patterns to match conditional branch insns.
;; *arm_cond_branch: branch on the CC register.  When the ccfsm
;; conditional-execution state machine is active (arm_ccfsm_state 1 or 2)
;; the branch is absorbed into it; otherwise emit "b%d1".
7273 (define_insn "*arm_cond_branch"
7275 (if_then_else (match_operator 1 "arm_comparison_operator"
7276 [(match_operand 2 "cc_register" "") (const_int 0)])
7277 (label_ref (match_operand 0 "" ""))
7281 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7283 arm_ccfsm_state += 2;
7286 return \"b%d1\\t%l0\";
7288 [(set_attr "conds" "use")
7289 (set_attr "type" "branch")]
;; *arm_cond_branch_reversed: as above but the label is on the else arm,
;; so the inverted condition "b%D1" is emitted.
7292 (define_insn "*arm_cond_branch_reversed"
7294 (if_then_else (match_operator 1 "arm_comparison_operator"
7295 [(match_operand 2 "cc_register" "") (const_int 0)])
7297 (label_ref (match_operand 0 "" ""))))]
7300 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7302 arm_ccfsm_state += 2;
7305 return \"b%D1\\t%l0\";
7307 [(set_attr "conds" "use")
7308 (set_attr "type" "branch")]
;; cstore_cc: expand a store-flag by generating the compare with
;; arm_gen_compare_reg, then matching operator 1 against CC and zero.
7315 (define_expand "cstore_cc"
7316 [(set (match_operand:SI 0 "s_register_operand" "")
7317 (match_operator:SI 1 "" [(match_operand 2 "" "")
7318 (match_operand 3 "" "")]))]
7320 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7321 operands[2], operands[3]);
7322 operands[3] = const0_rtx;"
;; *mov_scc: 0/1 store-flag via "mov%D1 #0; mov%d1 #1".
7325 (define_insn "*mov_scc"
7326 [(set (match_operand:SI 0 "s_register_operand" "=r")
7327 (match_operator:SI 1 "arm_comparison_operator"
7328 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7330 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7331 [(set_attr "conds" "use")
7332 (set_attr "insn" "mov")
7333 (set_attr "length" "8")]
;; *mov_negscc: 0/-1 store-flag via "mov%D1 #0; mvn%d1 #0".
7336 (define_insn "*mov_negscc"
7337 [(set (match_operand:SI 0 "s_register_operand" "=r")
7338 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7339 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7341 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7342 [(set_attr "conds" "use")
7343 (set_attr "insn" "mov")
7344 (set_attr "length" "8")]
;; *mov_notscc: 0/~1 store-flag via "mov%D1 #0; mvn%d1 #1".
7347 (define_insn "*mov_notscc"
7348 [(set (match_operand:SI 0 "s_register_operand" "=r")
7349 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7350 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7352 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7353 [(set_attr "conds" "use")
7354 (set_attr "insn" "mov")
7355 (set_attr "length" "8")]
;; cstoresi4: expand an SImode store-flag.  TARGET_32BIT legitimizes
;; operand 3 and defers to cstore_cc; Thumb-1 open-codes each condition
;; from shifts, adds and carry tricks (the expand_binop/expand_unop
;; sequences below), with dedicated helpers for eq0/ne0/ltu.  Per the
;; comment near the end, GT/LT with a nonzero operand have no good
;; Thumb-1 sequences.
7358 (define_expand "cstoresi4"
7359 [(set (match_operand:SI 0 "s_register_operand" "")
7360 (match_operator:SI 1 "arm_comparison_operator"
7361 [(match_operand:SI 2 "s_register_operand" "")
7362 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7363 "TARGET_32BIT || TARGET_THUMB1"
7365 rtx op3, scratch, scratch2;
7369 if (!arm_add_operand (operands[3], SImode))
7370 operands[3] = force_reg (SImode, operands[3]);
7371 emit_insn (gen_cstore_cc (operands[0], operands[1],
7372 operands[2], operands[3]));
7376 if (operands[3] == const0_rtx)
7378 switch (GET_CODE (operands[1]))
7381 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7385 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7389 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7390 NULL_RTX, 0, OPTAB_WIDEN);
7391 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7392 NULL_RTX, 0, OPTAB_WIDEN);
7393 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7394 operands[0], 1, OPTAB_WIDEN);
7398 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7400 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7401 NULL_RTX, 1, OPTAB_WIDEN);
7405 scratch = expand_binop (SImode, ashr_optab, operands[2],
7406 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7407 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7408 NULL_RTX, 0, OPTAB_WIDEN);
7409 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7413 /* LT is handled by generic code. No need for unsigned with 0. */
7420 switch (GET_CODE (operands[1]))
7423 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7424 NULL_RTX, 0, OPTAB_WIDEN);
7425 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7429 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7430 NULL_RTX, 0, OPTAB_WIDEN);
7431 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7435 op3 = force_reg (SImode, operands[3]);
7437 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7438 NULL_RTX, 1, OPTAB_WIDEN);
7439 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7440 NULL_RTX, 0, OPTAB_WIDEN);
7441 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7447 if (!thumb1_cmp_operand (op3, SImode))
7448 op3 = force_reg (SImode, op3);
7449 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7450 NULL_RTX, 0, OPTAB_WIDEN);
7451 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7452 NULL_RTX, 1, OPTAB_WIDEN);
7453 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7458 op3 = force_reg (SImode, operands[3]);
7459 scratch = force_reg (SImode, const0_rtx);
7460 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7466 if (!thumb1_cmp_operand (op3, SImode))
7467 op3 = force_reg (SImode, op3);
7468 scratch = force_reg (SImode, const0_rtx);
7469 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7475 if (!thumb1_cmp_operand (op3, SImode))
7476 op3 = force_reg (SImode, op3);
7477 scratch = gen_reg_rtx (SImode);
7478 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7482 op3 = force_reg (SImode, operands[3]);
7483 scratch = gen_reg_rtx (SImode);
7484 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7487 /* No good sequences for GT, LT. */
;; cstoresf4: SFmode store-flag via cstore_cc (hard float only).
7494 (define_expand "cstoresf4"
7495 [(set (match_operand:SI 0 "s_register_operand" "")
7496 (match_operator:SI 1 "arm_comparison_operator"
7497 [(match_operand:SF 2 "s_register_operand" "")
7498 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7499 "TARGET_32BIT && TARGET_HARD_FLOAT"
7500 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7501 operands[2], operands[3])); DONE;"
;; cstoredf4: DFmode store-flag; requires double-precision FP support.
7504 (define_expand "cstoredf4"
7505 [(set (match_operand:SI 0 "s_register_operand" "")
7506 (match_operator:SI 1 "arm_comparison_operator"
7507 [(match_operand:DF 2 "s_register_operand" "")
7508 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7509 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7510 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7511 operands[2], operands[3])); DONE;"
;; cstoredi4: DImode store-flag.  Like cbranchdi4, comparisons that
;; arm_gen_compare_reg cannot handle are rewritten with the operands
;; exchanged (the gen_rtx_* calls below) before emitting cstore_cc.
7514 (define_expand "cstoredi4"
7515 [(set (match_operand:SI 0 "s_register_operand" "")
7516 (match_operator:SI 1 "arm_comparison_operator"
7517 [(match_operand:DI 2 "cmpdi_operand" "")
7518 (match_operand:DI 3 "cmpdi_operand" "")]))]
7521 rtx swap = NULL_RTX;
7522 enum rtx_code code = GET_CODE (operands[1]);
7524 /* We should not have two constants. */
7525 gcc_assert (GET_MODE (operands[2]) == DImode
7526 || GET_MODE (operands[3]) == DImode);
7528 /* Flip unimplemented DImode comparisons to a form that
7529 arm_gen_compare_reg can handle. */
7533 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7535 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7537 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7539 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7544 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7547 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; cstoresi_eq0_thumb1 / cstoresi_ne0_thumb1: expanders that allocate the
;; scratch register clobbered by the matching *_insn patterns below.
7553 (define_expand "cstoresi_eq0_thumb1"
7555 [(set (match_operand:SI 0 "s_register_operand" "")
7556 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7558 (clobber (match_dup:SI 2))])]
7560 "operands[2] = gen_reg_rtx (SImode);"
7563 (define_expand "cstoresi_ne0_thumb1"
7565 [(set (match_operand:SI 0 "s_register_operand" "")
7566 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7568 (clobber (match_dup:SI 2))])]
7570 "operands[2] = gen_reg_rtx (SImode);"
;; *cstoresi_eq0_thumb1_insn: compute (x == 0) with a "neg"/"adc" pair
;; (second alternative routes through the scratch when %0 and %1 tie).
7573 (define_insn "*cstoresi_eq0_thumb1_insn"
7574 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7575 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7577 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7580 neg\\t%0, %1\;adc\\t%0, %0, %1
7581 neg\\t%2, %1\;adc\\t%0, %1, %2"
7582 [(set_attr "length" "4")]
;; *cstoresi_ne0_thumb1_insn: compute (x != 0) as
;; "sub %2, %1, #1; sbc %0, %1, %2".
7585 (define_insn "*cstoresi_ne0_thumb1_insn"
7586 [(set (match_operand:SI 0 "s_register_operand" "=l")
7587 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7589 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7591 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7592 [(set_attr "length" "4")]
7595 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; cstoresi_nltu_thumb1: "cmp; sbc %0, %0, %0" leaves 0 or -1, i.e. the
;; negation of (op1 <u op2).
7596 (define_insn "cstoresi_nltu_thumb1"
7597 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7598 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7599 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7601 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7602 [(set_attr "length" "4")]
;; cstoresi_ltu_thumb1: split into the nltu form above followed by a
;; negation of the temporary.
7605 (define_insn_and_split "cstoresi_ltu_thumb1"
7606 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7607 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7608 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7613 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7614 (set (match_dup 0) (neg:SI (match_dup 3)))]
7615 "operands[3] = gen_reg_rtx (SImode);"
7616 [(set_attr "length" "4")]
7619 ;; Used as part of the expansion of thumb les sequence.
;; thumb1_addsi3_addgeu: %0 = %1 + %2 + (%3 >=u %4), emitted as
;; "cmp %3, %4; adc %0, %1, %2".
7620 (define_insn "thumb1_addsi3_addgeu"
7621 [(set (match_operand:SI 0 "s_register_operand" "=l")
7622 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7623 (match_operand:SI 2 "s_register_operand" "l"))
7624 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7625 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7627 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7628 [(set_attr "length" "4")]
7632 ;; Conditional move insns
;; movsicc: expand an SImode conditional move.  UNEQ/LTGT comparisons are
;; special-cased (the consequent line is elided in this extract);
;; otherwise the compare is materialized into a CC register and operand 1
;; rewritten to test it against zero.
7634 (define_expand "movsicc"
7635 [(set (match_operand:SI 0 "s_register_operand" "")
7636 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7637 (match_operand:SI 2 "arm_not_operand" "")
7638 (match_operand:SI 3 "arm_not_operand" "")))]
7642 enum rtx_code code = GET_CODE (operands[1]);
7645 if (code == UNEQ || code == LTGT)
7648 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7649 XEXP (operands[1], 1));
7650 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movsfcc: SFmode conditional move; additionally forces operand 3 into a
;; register unless the target has hard-float FPA and it is a valid FP add
;; operand (see the comment in the body).
7654 (define_expand "movsfcc"
7655 [(set (match_operand:SF 0 "s_register_operand" "")
7656 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7657 (match_operand:SF 2 "s_register_operand" "")
7658 (match_operand:SF 3 "nonmemory_operand" "")))]
7659 "TARGET_32BIT && TARGET_HARD_FLOAT"
7662 enum rtx_code code = GET_CODE (operands[1]);
7665 if (code == UNEQ || code == LTGT)
7668 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7669 Otherwise, ensure it is a valid FP add operand */
7670 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7671 || (!arm_float_add_operand (operands[3], SFmode)))
7672 operands[3] = force_reg (SFmode, operands[3]);
7674 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7675 XEXP (operands[1], 1));
7676 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movdfcc: DFmode conditional move, for FPA or double-precision VFP.
7680 (define_expand "movdfcc"
7681 [(set (match_operand:DF 0 "s_register_operand" "")
7682 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7683 (match_operand:DF 2 "s_register_operand" "")
7684 (match_operand:DF 3 "arm_float_add_operand" "")))]
7685 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7688 enum rtx_code code = GET_CODE (operands[1]);
7691 if (code == UNEQ || code == LTGT)
7694 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7695 XEXP (operands[1], 1));
7696 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; *movsicc_insn: SImode conditional move.  The eight alternatives cover
;; conditional "mov"/"mvn" of either arm into %0; lengths are 4 bytes
;; when one arm already ties to %0 and 8 bytes for the two-insn forms.
7700 (define_insn "*movsicc_insn"
7701 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7703 (match_operator 3 "arm_comparison_operator"
7704 [(match_operand 4 "cc_register" "") (const_int 0)])
7705 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7706 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7713 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7714 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7715 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7716 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7717 [(set_attr "length" "4,4,4,4,8,8,8,8")
7718 (set_attr "conds" "use")
7719 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
;; *movsfcc_soft_insn: soft-float SF conditional move between core
;; registers (output template elided in this extract).
7722 (define_insn "*movsfcc_soft_insn"
7723 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7724 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7725 [(match_operand 4 "cc_register" "") (const_int 0)])
7726 (match_operand:SF 1 "s_register_operand" "0,r")
7727 (match_operand:SF 2 "s_register_operand" "r,0")))]
7728 "TARGET_ARM && TARGET_SOFT_FLOAT"
7732 [(set_attr "conds" "use")
7733 (set_attr "insn" "mov")]
7737 ;; Jump and linkage insns
;; Unconditional jump expander (target-independent name).
7739 (define_expand "jump"
7741 (label_ref (match_operand 0 "" "")))]
;; ARM-state unconditional branch.  The ccfsm state machine (conditional
;; execution tracking) may swallow this branch: states 1/2 advance to 3/4
;; and no instruction is emitted.
7746 (define_insn "*arm_jump"
7748 (label_ref (match_operand 0 "" "")))]
7752 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7754 arm_ccfsm_state += 2;
7757 return \"b%?\\t%l0\";
7760 [(set_attr "predicable" "yes")]
;; Thumb-1 jump: a 2-byte B when the target is within roughly -2044..+2048
;; bytes, otherwise a 4-byte BL used as a far jump.
7763 (define_insn "*thumb_jump"
7765 (label_ref (match_operand 0 "" "")))]
7768 if (get_attr_length (insn) == 2)
7770 return \"bl\\t%l0\\t%@ far jump\";
7772 [(set (attr "far_jump")
7774 (eq_attr "length" "4")
7775 (const_string "yes")
7776 (const_string "no")))
7777 (set (attr "length")
7779 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7780 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; "call" expander: normalises a NULL third operand, forces long-call
;; targets (symbol marked long-call, or non-symbol addresses) into a
;; register, then emits call_internal via arm_emit_call_insn.
7785 (define_expand "call"
7786 [(parallel [(call (match_operand 0 "memory_operand" "")
7787 (match_operand 1 "general_operand" ""))
7788 (use (match_operand 2 "" ""))
7789 (clobber (reg:SI LR_REGNUM))])]
7795 /* In an untyped call, we can get NULL for operand 2. */
7796 if (operands[2] == NULL_RTX)
7797 operands[2] = const0_rtx;
7799 /* Decide if we should generate indirect calls by loading the
7800 32-bit address of the callee into a register before performing the
7802 callee = XEXP (operands[0], 0);
7803 if (GET_CODE (callee) == SYMBOL_REF
7804 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7806 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7808 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7809 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Pure pattern-carrier expander; the insn patterns below match it.
7814 (define_expand "call_internal"
7815 [(parallel [(call (match_operand 0 "memory_operand" "")
7816 (match_operand 1 "general_operand" ""))
7817 (use (match_operand 2 "" ""))
7818 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call through a register, ARMv5+: template (BLX, per the
;; comment at 7845-7847) is on a line not visible in this extract.
7820 (define_insn "*call_reg_armv5"
7821 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7822 (match_operand 1 "" ""))
7823 (use (match_operand 2 "" ""))
7824 (clobber (reg:SI LR_REGNUM))]
7825 "TARGET_ARM && arm_arch5"
7827 [(set_attr "type" "call")]
;; Pre-v5 indirect call; sequence built by output_call().  Length 12 is
;; the worst case (see comment at 7839).
7830 (define_insn "*call_reg_arm"
7831 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7832 (match_operand 1 "" ""))
7833 (use (match_operand 2 "" ""))
7834 (clobber (reg:SI LR_REGNUM))]
7835 "TARGET_ARM && !arm_arch5"
7837 return output_call (operands);
7839 ;; length is worst case, normally it is only two
7840 [(set_attr "length" "12")
7841 (set_attr "type" "call")]
7845 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7846 ;; considered a function call by the branch predictor of some cores (PR40887).
7847 ;; Falls back to blx rN (*call_reg_armv5).
;; Call through a memory operand (pre-v5 only, per the note above).
7849 (define_insn "*call_mem"
7850 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7851 (match_operand 1 "" ""))
7852 (use (match_operand 2 "" ""))
7853 (clobber (reg:SI LR_REGNUM))]
7854 "TARGET_ARM && !arm_arch5"
7856 return output_call_mem (operands);
7858 [(set_attr "length" "12")
7859 (set_attr "type" "call")]
;; Thumb-1 indirect call on v5 (BLX available): 2-byte encoding.
7862 (define_insn "*call_reg_thumb1_v5"
7863 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7864 (match_operand 1 "" ""))
7865 (use (match_operand 2 "" ""))
7866 (clobber (reg:SI LR_REGNUM))]
7867 "TARGET_THUMB1 && arm_arch5"
7869 [(set_attr "length" "2")
7870 (set_attr "type" "call")]
;; Thumb-1 indirect call, pre-v5: no BLX, so call via a helper stub.
;; With caller interworking the stub choice depends on whether a frame
;; pointer (r7 in Thumb) is in use.
7873 (define_insn "*call_reg_thumb1"
7874 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7875 (match_operand 1 "" ""))
7876 (use (match_operand 2 "" ""))
7877 (clobber (reg:SI LR_REGNUM))]
7878 "TARGET_THUMB1 && !arm_arch5"
7881 if (!TARGET_CALLER_INTERWORKING)
7882 return thumb_call_via_reg (operands[0]);
7883 else if (operands[1] == const0_rtx)
7884 return \"bl\\t%__interwork_call_via_%0\";
7885 else if (frame_pointer_needed)
7886 return \"bl\\t%__interwork_r7_call_via_%0\";
7888 return \"bl\\t%__interwork_r11_call_via_%0\";
7890 [(set_attr "type" "call")]
;; "call_value" expander: same long-call handling as "call" above, with
;; an extra operand 0 receiving the return value.
7893 (define_expand "call_value"
7894 [(parallel [(set (match_operand 0 "" "")
7895 (call (match_operand 1 "memory_operand" "")
7896 (match_operand 2 "general_operand" "")))
7897 (use (match_operand 3 "" ""))
7898 (clobber (reg:SI LR_REGNUM))])]
7904 /* In an untyped call, we can get NULL for operand 3. */
7905 if (operands[3] == 0)
7906 operands[3] = const0_rtx;
7908 /* Decide if we should generate indirect calls by loading the
7909 32-bit address of the callee into a register before performing the
7911 callee = XEXP (operands[1], 0);
7912 if (GET_CODE (callee) == SYMBOL_REF
7913 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7915 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7917 pat = gen_call_value_internal (operands[0], operands[1],
7918 operands[2], operands[3]);
7919 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Pure pattern-carrier expander; matched by the insns below.
7924 (define_expand "call_value_internal"
7925 [(parallel [(set (match_operand 0 "" "")
7926 (call (match_operand 1 "memory_operand" "")
7927 (match_operand 2 "general_operand" "")))
7928 (use (match_operand 3 "" ""))
7929 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning counterparts of the indirect-call insns above.
;; ARMv5+ register call (template line not visible in this extract).
7931 (define_insn "*call_value_reg_armv5"
7932 [(set (match_operand 0 "" "")
7933 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7934 (match_operand 2 "" "")))
7935 (use (match_operand 3 "" ""))
7936 (clobber (reg:SI LR_REGNUM))]
7937 "TARGET_ARM && arm_arch5"
7939 [(set_attr "type" "call")]
;; Pre-v5: output_call starting at operand 1 (the callee address).
7942 (define_insn "*call_value_reg_arm"
7943 [(set (match_operand 0 "" "")
7944 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7945 (match_operand 2 "" "")))
7946 (use (match_operand 3 "" ""))
7947 (clobber (reg:SI LR_REGNUM))]
7948 "TARGET_ARM && !arm_arch5"
7950 return output_call (&operands[1]);
7952 [(set_attr "length" "12")
7953 (set_attr "type" "call")]
7956 ;; Note: see *call_mem
;; Call through memory; disallows constant addresses (handled by the
;; symbol patterns further down).
7958 (define_insn "*call_value_mem"
7959 [(set (match_operand 0 "" "")
7960 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7961 (match_operand 2 "" "")))
7962 (use (match_operand 3 "" ""))
7963 (clobber (reg:SI LR_REGNUM))]
7964 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7966 return output_call_mem (&operands[1]);
7968 [(set_attr "length" "12")
7969 (set_attr "type" "call")]
;; Thumb-1 + v5: 2-byte BLX-style register call.
7972 (define_insn "*call_value_reg_thumb1_v5"
7973 [(set (match_operand 0 "" "")
7974 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7975 (match_operand 2 "" "")))
7976 (use (match_operand 3 "" ""))
7977 (clobber (reg:SI LR_REGNUM))]
7978 "TARGET_THUMB1 && arm_arch5"
7980 [(set_attr "length" "2")
7981 (set_attr "type" "call")]
;; Thumb-1 pre-v5: via-register helper stubs, as in *call_reg_thumb1.
7984 (define_insn "*call_value_reg_thumb1"
7985 [(set (match_operand 0 "" "")
7986 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7987 (match_operand 2 "" "")))
7988 (use (match_operand 3 "" ""))
7989 (clobber (reg:SI LR_REGNUM))]
7990 "TARGET_THUMB1 && !arm_arch5"
7993 if (!TARGET_CALLER_INTERWORKING)
7994 return thumb_call_via_reg (operands[1]);
7995 else if (operands[2] == const0_rtx)
7996 return \"bl\\t%__interwork_call_via_%1\";
7997 else if (frame_pointer_needed)
7998 return \"bl\\t%__interwork_r7_call_via_%1\";
8000 return \"bl\\t%__interwork_r11_call_via_%1\";
8002 [(set_attr "type" "call")]
8005 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8006 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct BL to a symbol, optionally via the PLT, for non-long-call
;; targets.  (Enabling conditions are partially missing in this extract.)
8008 (define_insn "*call_symbol"
8009 [(call (mem:SI (match_operand:SI 0 "" ""))
8010 (match_operand 1 "" ""))
8011 (use (match_operand 2 "" ""))
8012 (clobber (reg:SI LR_REGNUM))]
8014 && (GET_CODE (operands[0]) == SYMBOL_REF)
8015 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8018 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8020 [(set_attr "type" "call")]
;; Value-returning direct BL to a symbol.
8023 (define_insn "*call_value_symbol"
8024 [(set (match_operand 0 "" "")
8025 (call (mem:SI (match_operand:SI 1 "" ""))
8026 (match_operand:SI 2 "" "")))
8027 (use (match_operand 3 "" ""))
8028 (clobber (reg:SI LR_REGNUM))]
8030 && (GET_CODE (operands[1]) == SYMBOL_REF)
8031 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8034 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8036 [(set_attr "type" "call")]
;; Direct call, 4-byte form (template/condition lines partly missing).
8039 (define_insn "*call_insn"
8040 [(call (mem:SI (match_operand:SI 0 "" ""))
8041 (match_operand:SI 1 "" ""))
8042 (use (match_operand 2 "" ""))
8043 (clobber (reg:SI LR_REGNUM))]
8045 && GET_CODE (operands[0]) == SYMBOL_REF
8046 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8048 [(set_attr "length" "4")
8049 (set_attr "type" "call")]
;; Value-returning direct call, 4-byte form.
8052 (define_insn "*call_value_insn"
8053 [(set (match_operand 0 "" "")
8054 (call (mem:SI (match_operand 1 "" ""))
8055 (match_operand 2 "" "")))
8056 (use (match_operand 3 "" ""))
8057 (clobber (reg:SI LR_REGNUM))]
8059 && GET_CODE (operands[1]) == SYMBOL_REF
8060 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8062 [(set_attr "length" "4")
8063 (set_attr "type" "call")]
8066 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail-call (sibcall) expanders: normalise a NULL use operand to 0.
8067 (define_expand "sibcall"
8068 [(parallel [(call (match_operand 0 "memory_operand" "")
8069 (match_operand 1 "general_operand" ""))
8071 (use (match_operand 2 "" ""))])]
8075 if (operands[2] == NULL_RTX)
8076 operands[2] = const0_rtx;
8080 (define_expand "sibcall_value"
8081 [(parallel [(set (match_operand 0 "" "")
8082 (call (match_operand 1 "memory_operand" "")
8083 (match_operand 2 "general_operand" "")))
8085 (use (match_operand 3 "" ""))])]
8089 if (operands[3] == NULL_RTX)
8090 operands[3] = const0_rtx;
;; Sibcall to a symbol: a plain B (no link), optionally via the PLT.
8094 (define_insn "*sibcall_insn"
8095 [(call (mem:SI (match_operand:SI 0 "" "X"))
8096 (match_operand 1 "" ""))
8098 (use (match_operand 2 "" ""))]
8099 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8101 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8103 [(set_attr "type" "call")]
;; Value-returning sibcall: identical B, value left in place by callee.
8106 (define_insn "*sibcall_value_insn"
8107 [(set (match_operand 0 "" "")
8108 (call (mem:SI (match_operand:SI 1 "" "X"))
8109 (match_operand 2 "" "")))
8111 (use (match_operand 3 "" ""))]
8112 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8114 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8116 [(set_attr "type" "call")]
;; Function return patterns.  output_return_instruction builds the
;; actual epilogue sequence; the third argument selects the inverted
;; form of the condition for *cond_return_inverted.
8119 (define_expand "return"
8121 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8124 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM-state return; ccfsm state 2 advances to 4 (the
;; return is being conditionalised away).
8125 (define_insn "*arm_return"
8127 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8130 if (arm_ccfsm_state == 2)
8132 arm_ccfsm_state += 2;
8135 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8137 [(set_attr "type" "load1")
8138 (set_attr "length" "12")
8139 (set_attr "predicable" "yes")]
;; Conditional return: return taken when operand 0's condition holds.
8142 (define_insn "*cond_return"
8144 (if_then_else (match_operator 0 "arm_comparison_operator"
8145 [(match_operand 1 "cc_register" "") (const_int 0)])
8148 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8151 if (arm_ccfsm_state == 2)
8153 arm_ccfsm_state += 2;
8156 return output_return_instruction (operands[0], TRUE, FALSE);
8158 [(set_attr "conds" "use")
8159 (set_attr "length" "12")
8160 (set_attr "type" "load1")]
;; As above but with the branch arms swapped, so the condition is
;; inverted when emitting (third argument TRUE).
8163 (define_insn "*cond_return_inverted"
8165 (if_then_else (match_operator 0 "arm_comparison_operator"
8166 [(match_operand 1 "cc_register" "") (const_int 0)])
8169 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8172 if (arm_ccfsm_state == 2)
8174 arm_ccfsm_state += 2;
8177 return output_return_instruction (operands[0], TRUE, TRUE);
8179 [(set_attr "conds" "use")
8180 (set_attr "length" "12")
8181 (set_attr "type" "load1")]
8184 ;; Generate a sequence of instructions to determine if the processor is
8185 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; Produces 0x03fffffc as the mask when the architecture check comparison
;; yields zero (26-bit mode); operand 1 is the CC_NOOV flags register.
8188 (define_expand "return_addr_mask"
8190 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8192 (set (match_operand:SI 0 "s_register_operand" "")
8193 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8195 (const_int 67108860)))] ; 0x03fffffc
8198 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; TEQ r0,r0 sets Z; TEQ pc,pc distinguishes 26/32-bit PC encodings.
8201 (define_insn "*check_arch2"
8202 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8203 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8206 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8207 [(set_attr "length" "8")
8208 (set_attr "conds" "set")]
8211 ;; Call subroutine returning any type.
;; Calls operand 0, then stores every possible return register (listed in
;; the PARALLEL operand 2) into the result block operand 1.  r0 is widened
;; to TImode so r0-r3 are all captured.  Thumb uses a write-back store
;; multiple, ARM a plain one followed by manual address bumps.
8213 (define_expand "untyped_call"
8214 [(parallel [(call (match_operand 0 "" "")
8216 (match_operand 1 "" "")
8217 (match_operand 2 "" "")])]
8222 rtx par = gen_rtx_PARALLEL (VOIDmode,
8223 rtvec_alloc (XVECLEN (operands[2], 0)));
8224 rtx addr = gen_reg_rtx (Pmode);
8228 emit_move_insn (addr, XEXP (operands[1], 0));
8229 mem = change_address (operands[1], BLKmode, addr);
8231 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8233 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8235 /* Default code only uses r0 as a return value, but we could
8236 be using anything up to 4 registers. */
8237 if (REGNO (src) == R0_REGNUM)
8238 src = gen_rtx_REG (TImode, R0_REGNUM);
8240 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8242 size += GET_MODE_SIZE (GET_MODE (src));
8245 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
;; After the call, copy each returned register out to the result block.
8250 for (i = 0; i < XVECLEN (par, 0); i++)
8252 HOST_WIDE_INT offset = 0;
8253 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8256 emit_move_insn (addr, plus_constant (addr, size));
8258 mem = change_address (mem, GET_MODE (reg), NULL);
8259 if (REGNO (reg) == R0_REGNUM)
8261 /* On thumb we have to use a write-back instruction. */
8262 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8263 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8264 size = TARGET_ARM ? 16 : 0;
8268 emit_move_insn (mem, reg);
8269 size = GET_MODE_SIZE (GET_MODE (reg));
8273 /* The optimizer does not know that the call sets the function value
8274 registers we stored in the result block. We avoid problems by
8275 claiming that all hard registers are used and clobbered at this
8277 emit_insn (gen_blockage ());
;; Inverse of untyped_call: reload every possible return register from
;; the result block (operand 0) per the PARALLEL in operand 1, emit USEs
;; so the values stay live, then perform a naked return.
8283 (define_expand "untyped_return"
8284 [(match_operand:BLK 0 "memory_operand" "")
8285 (match_operand 1 "" "")]
8290 rtx addr = gen_reg_rtx (Pmode);
8294 emit_move_insn (addr, XEXP (operands[0], 0));
8295 mem = change_address (operands[0], BLKmode, addr);
8297 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8299 HOST_WIDE_INT offset = 0;
8300 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8303 emit_move_insn (addr, plus_constant (addr, size));
8305 mem = change_address (mem, GET_MODE (reg), NULL);
8306 if (REGNO (reg) == R0_REGNUM)
8308 /* On thumb we have to use a write-back instruction. */
8309 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8310 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8311 size = TARGET_ARM ? 16 : 0;
8315 emit_move_insn (reg, mem);
8316 size = GET_MODE_SIZE (GET_MODE (reg));
8320 /* Emit USE insns before the return. */
8321 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8322 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8324 /* Construct the return. */
8325 expand_naked_return ();
8331 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8332 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no machine code.
8334 (define_insn "blockage"
8335 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8338 [(set_attr "length" "0")
8339 (set_attr "type" "block")]
;; Switch-statement dispatch.  Biases the index by the lower bound when
;; nonzero, selects the per-target internal casesi pattern, forces the
;; range into a register if the pattern's predicate demands it, then
;; emits the dispatch jump.
8342 (define_expand "casesi"
8343 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8344 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8345 (match_operand:SI 2 "const_int_operand" "") ; total range
8346 (match_operand:SI 3 "" "") ; table label
8347 (match_operand:SI 4 "" "")] ; Out of range label
8348 "TARGET_32BIT || optimize_size || flag_pic"
8351 enum insn_code code;
8352 if (operands[1] != const0_rtx)
8354 rtx reg = gen_reg_rtx (SImode);
8356 emit_insn (gen_addsi3 (reg, operands[0],
8357 GEN_INT (-INTVAL (operands[1]))));
;; Dispatch-pattern selection (some condition lines missing here).
8362 code = CODE_FOR_arm_casesi_internal;
8363 else if (TARGET_THUMB1)
8364 code = CODE_FOR_thumb1_casesi_internal_pic;
8366 code = CODE_FOR_thumb2_casesi_internal_pic;
8368 code = CODE_FOR_thumb2_casesi_internal;
8370 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8371 operands[2] = force_reg (SImode, operands[2]);
8373 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8374 operands[3], operands[4]));
8379 ;; The USE in this pattern is needed to tell flow analysis that this is
8380 ;; a CASESI insn. It has no other purpose.
;; ARM-state table dispatch: compare index against range, then either
;; add scaled index to PC (first template form) or load PC from the
;; jump table (second form); out-of-range falls through to B %l3.
8381 (define_insn "arm_casesi_internal"
8382 [(parallel [(set (pc)
8384 (leu (match_operand:SI 0 "s_register_operand" "r")
8385 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8386 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8387 (label_ref (match_operand 2 "" ""))))
8388 (label_ref (match_operand 3 "" ""))))
8389 (clobber (reg:CC CC_REGNUM))
8390 (use (label_ref (match_dup 2)))])]
8394 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8395 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8397 [(set_attr "conds" "clob")
8398 (set_attr "length" "12")]
;; Thumb-1 PIC dispatch: branch out of range via cbranchsi4, move the
;; index into r0, then hand off to thumb1_casesi_dispatch.
8401 (define_expand "thumb1_casesi_internal_pic"
8402 [(match_operand:SI 0 "s_register_operand" "")
8403 (match_operand:SI 1 "thumb1_cmp_operand" "")
8404 (match_operand 2 "" "")
8405 (match_operand 3 "" "")]
8409 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8410 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8412 reg0 = gen_rtx_REG (SImode, 0);
8413 emit_move_insn (reg0, operands[0]);
8414 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; Dispatch stub; index implicitly in r0, clobbers ip and lr.
8419 (define_insn "thumb1_casesi_dispatch"
8420 [(parallel [(set (pc) (unspec [(reg:SI 0)
8421 (label_ref (match_operand 0 "" ""))
8422 ;; (label_ref (match_operand 1 "" ""))
8424 UNSPEC_THUMB1_CASESI))
8425 (clobber (reg:SI IP_REGNUM))
8426 (clobber (reg:SI LR_REGNUM))])]
8428 "* return thumb1_output_casesi(operands);"
8429 [(set_attr "length" "4")]
;; Indirect jumps.  Thumb-2 has no "mov pc, reg", so the expander sets
;; the low (Thumb) bit of the target and uses BX instead.
8432 (define_expand "indirect_jump"
8434 (match_operand:SI 0 "s_register_operand" ""))]
8437 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8438 address and use bx. */
8442 tmp = gen_reg_rtx (SImode);
8443 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8449 ;; NB Never uses BX.
;; ARM-state indirect jump through a register.
8450 (define_insn "*arm_indirect_jump"
8452 (match_operand:SI 0 "s_register_operand" "r"))]
8454 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8455 [(set_attr "predicable" "yes")]
;; Indirect jump loading PC straight from memory.
8458 (define_insn "*load_indirect_jump"
8460 (match_operand:SI 0 "memory_operand" "m"))]
8462 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8463 [(set_attr "type" "load1")
8464 (set_attr "pool_range" "4096")
8465 (set_attr "neg_pool_range" "4084")
8466 (set_attr "predicable" "yes")]
8469 ;; NB Never uses BX.
;; Thumb-1 indirect jump (template line not visible in this extract).
8470 (define_insn "*thumb1_indirect_jump"
8472 (match_operand:SI 0 "register_operand" "l*r"))]
8475 [(set_attr "conds" "clob")
8476 (set_attr "length" "2")]
;; NOTE(review): tail of a pattern whose opening lines (8477-8485) are
;; missing from this extract — presumably the "nop" insn: a mov r0,r0
;; in ARM/unified-asm state and the canonical Thumb nop "mov r8, r8"
;; otherwise; length depends on is_thumb.  Confirm against full source.
8486 if (TARGET_UNIFIED_ASM)
8489 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8490 return \"mov\\tr8, r8\";
8492 [(set (attr "length")
8493 (if_then_else (eq_attr "is_thumb" "yes")
8499 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; op0 = op2 <i1> (op4 shifted by op5): one ALU instruction with a
;; shifted second operand.  Register shift amounts (second alternative)
;; are ARM-state only ("a").
8501 (define_insn "*arith_shiftsi"
8502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8503 (match_operator:SI 1 "shiftable_operator"
8504 [(match_operator:SI 3 "shift_operator"
8505 [(match_operand:SI 4 "s_register_operand" "r,r")
8506 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8507 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8509 "%i1%?\\t%0, %2, %4%S3"
8510 [(set_attr "predicable" "yes")
8511 (set_attr "shift" "4")
8512 (set_attr "arch" "32,a")
8513 ;; We have to make sure to disable the second alternative if
8514 ;; the shift_operator is MULT, since otherwise the insn will
8515 ;; also match a multiply_accumulate pattern and validate_change
8516 ;; will allow a replacement of the constant with a register
8517 ;; despite the checks done in shift_operator.
8518 (set_attr_alternative "insn_enabled"
8519 [(const_string "yes")
8521 (match_operand:SI 3 "mult_operator" "")
8522 (const_string "no") (const_string "yes"))])
8523 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Splitter: rewrite nested shiftable ops so the shift feeds the inner
;; operation via the scratch register operand 8.
8526 [(set (match_operand:SI 0 "s_register_operand" "")
8527 (match_operator:SI 1 "shiftable_operator"
8528 [(match_operator:SI 2 "shiftable_operator"
8529 [(match_operator:SI 3 "shift_operator"
8530 [(match_operand:SI 4 "s_register_operand" "")
8531 (match_operand:SI 5 "reg_or_int_operand" "")])
8532 (match_operand:SI 6 "s_register_operand" "")])
8533 (match_operand:SI 7 "arm_rhs_operand" "")]))
8534 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8537 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8540 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi but also comparing the result against zero
;; (CC_NOOV — flags valid only where overflow is irrelevant).
8543 (define_insn "*arith_shiftsi_compare0"
8544 [(set (reg:CC_NOOV CC_REGNUM)
8546 (match_operator:SI 1 "shiftable_operator"
8547 [(match_operator:SI 3 "shift_operator"
8548 [(match_operand:SI 4 "s_register_operand" "r,r")
8549 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8550 (match_operand:SI 2 "s_register_operand" "r,r")])
8552 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8553 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8556 "%i1%.\\t%0, %2, %4%S3"
8557 [(set_attr "conds" "set")
8558 (set_attr "shift" "4")
8559 (set_attr "arch" "32,a")
8560 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flag-setting variant where only the flags matter (result discarded
;; into a scratch register).
8562 (define_insn "*arith_shiftsi_compare0_scratch"
8563 [(set (reg:CC_NOOV CC_REGNUM)
8565 (match_operator:SI 1 "shiftable_operator"
8566 [(match_operator:SI 3 "shift_operator"
8567 [(match_operand:SI 4 "s_register_operand" "r,r")
8568 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8569 (match_operand:SI 2 "s_register_operand" "r,r")])
8571 (clobber (match_scratch:SI 0 "=r,r"))]
8573 "%i1%.\\t%0, %2, %4%S3"
8574 [(set_attr "conds" "set")
8575 (set_attr "shift" "4")
8576 (set_attr "arch" "32,a")
8577 (set_attr "type" "alu_shift,alu_shift_reg")])
;; op0 = op1 - (op3 shifted by op4): SUB with shifted second operand.
8579 (define_insn "*sub_shiftsi"
8580 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8581 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8582 (match_operator:SI 2 "shift_operator"
8583 [(match_operand:SI 3 "s_register_operand" "r,r")
8584 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8586 "sub%?\\t%0, %1, %3%S2"
8587 [(set_attr "predicable" "yes")
8588 (set_attr "shift" "3")
8589 (set_attr "arch" "32,a")
8590 (set_attr "type" "alu_shift,alu_shift_reg")])
;; SUBS variant: same subtraction, also setting the flags (CC_NOOV).
8592 (define_insn "*sub_shiftsi_compare0"
8593 [(set (reg:CC_NOOV CC_REGNUM)
8595 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8596 (match_operator:SI 2 "shift_operator"
8597 [(match_operand:SI 3 "s_register_operand" "r,r")
8598 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8600 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8601 (minus:SI (match_dup 1)
8602 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8604 "sub%.\\t%0, %1, %3%S2"
8605 [(set_attr "conds" "set")
8606 (set_attr "shift" "3")
8607 (set_attr "arch" "32,a")
8608 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flags-only SUBS: result discarded into a scratch register.
8610 (define_insn "*sub_shiftsi_compare0_scratch"
8611 [(set (reg:CC_NOOV CC_REGNUM)
8613 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8614 (match_operator:SI 2 "shift_operator"
8615 [(match_operand:SI 3 "s_register_operand" "r,r")
8616 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8618 (clobber (match_scratch:SI 0 "=r,r"))]
8620 "sub%.\\t%0, %1, %3%S2"
8621 [(set_attr "conds" "set")
8622 (set_attr "shift" "3")
8623 (set_attr "arch" "32,a")
8624 (set_attr "type" "alu_shift,alu_shift_reg")])
;; op0 = (cond ? 1 : 0) AND op2: zero the dest on the false condition,
;; otherwise AND in bit 0.
8627 (define_insn "*and_scc"
8628 [(set (match_operand:SI 0 "s_register_operand" "=r")
8629 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8630 [(match_operand 3 "cc_register" "") (const_int 0)])
8631 (match_operand:SI 2 "s_register_operand" "r")))]
8633 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8634 [(set_attr "conds" "use")
8635 (set_attr "insn" "mov")
8636 (set_attr "length" "8")]
;; op0 = (cond ? 1 : 0) OR op1: a single conditional ORR when op1 is
;; already in the destination, otherwise a mov + conditional ORR.
8639 (define_insn "*ior_scc"
8640 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8641 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8642 [(match_operand 3 "cc_register" "") (const_int 0)])
8643 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8647 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8648 [(set_attr "conds" "use")
8649 (set_attr "length" "4,8")]
8652 ; A series of splitters for the compare_scc pattern below. Note that
8653 ; order is important.
;; x < 0  ->  logical shift right by 31 (sign bit).
8655 [(set (match_operand:SI 0 "s_register_operand" "")
8656 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8658 (clobber (reg:CC CC_REGNUM))]
8659 "TARGET_32BIT && reload_completed"
8660 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; x >= 0  ->  invert, then shift the (flipped) sign bit down.
8663 [(set (match_operand:SI 0 "s_register_operand" "")
8664 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8666 (clobber (reg:CC CC_REGNUM))]
8667 "TARGET_32BIT && reload_completed"
8668 [(set (match_dup 0) (not:SI (match_dup 1)))
8669 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; Equality against a constant via RSBS 1-x; clear the result on
;; unsigned borrow (some intermediate lines missing in this extract).
8672 [(set (match_operand:SI 0 "s_register_operand" "")
8673 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8675 (clobber (reg:CC CC_REGNUM))]
8676 "TARGET_32BIT && reload_completed"
8678 [(set (reg:CC CC_REGNUM)
8679 (compare:CC (const_int 1) (match_dup 1)))
8681 (minus:SI (const_int 1) (match_dup 1)))])
8682 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8683 (set (match_dup 0) (const_int 0)))])
;; x != const: SUBS x-const into dest (operand 3 = -const), then force
;; the result to 1 when the flags say non-zero.
8686 [(set (match_operand:SI 0 "s_register_operand" "")
8687 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8688 (match_operand:SI 2 "const_int_operand" "")))
8689 (clobber (reg:CC CC_REGNUM))]
8690 "TARGET_32BIT && reload_completed"
8692 [(set (reg:CC CC_REGNUM)
8693 (compare:CC (match_dup 1) (match_dup 2)))
8694 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8695 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8696 (set (match_dup 0) (const_int 1)))]
8698 operands[3] = GEN_INT (-INTVAL (operands[2]));
;; x != y (general add-operand): SUBS difference into dest, set 1 if
;; non-zero.
8702 [(set (match_operand:SI 0 "s_register_operand" "")
8703 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8704 (match_operand:SI 2 "arm_add_operand" "")))
8705 (clobber (reg:CC CC_REGNUM))]
8706 "TARGET_32BIT && reload_completed"
8708 [(set (reg:CC_NOOV CC_REGNUM)
8709 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8711 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8712 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8713 (set (match_dup 0) (const_int 1)))])
;; Generic scc: op0 = (op2 <cmp> op3) as 0/1.  Splits after reload into
;; a compare plus two cond_exec stores; operand 4 holds the reversed
;; condition (unordered-aware reversal for FP compare modes), operand 5
;; the original condition, both rebuilt on the derived CC mode.
8715 (define_insn_and_split "*compare_scc"
8716 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8717 (match_operator:SI 1 "arm_comparison_operator"
8718 [(match_operand:SI 2 "s_register_operand" "r,r")
8719 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8720 (clobber (reg:CC CC_REGNUM))]
8723 "&& reload_completed"
8724 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8725 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8726 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8729 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8730 operands[2], operands[3]);
8731 enum rtx_code rc = GET_CODE (operands[1]);
8733 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8735 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8736 if (mode == CCFPmode || mode == CCFPEmode)
8737 rc = reverse_condition_maybe_unordered (rc);
8739 rc = reverse_condition (rc);
8740 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8743 ;; Attempt to improve the sequence generated by the compare_scc splitters
8744 ;; not to use conditional execution.
;; Peephole-style rewrite of the ne-scc cond_exec sequence into
;; SUB / RSBS 0-x / ADC, which needs no conditional execution.
8746 [(set (reg:CC CC_REGNUM)
8747 (compare:CC (match_operand:SI 1 "register_operand" "")
8748 (match_operand:SI 2 "arm_rhs_operand" "")))
8749 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8750 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8751 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8752 (set (match_dup 0) (const_int 1)))
8753 (match_scratch:SI 3 "r")]
8755 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8757 [(set (reg:CC CC_REGNUM)
8758 (compare:CC (const_int 0) (match_dup 3)))
8759 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8761 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8762 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
;; Conditional move keyed on an equality test of a stored comparison:
;; emits up to two predicated MOVs, skipping whichever source already
;; occupies the destination; NE (operand 3) swaps the roles of op1/op2.
8764 (define_insn "*cond_move"
8765 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8766 (if_then_else:SI (match_operator 3 "equality_operator"
8767 [(match_operator 4 "arm_comparison_operator"
8768 [(match_operand 5 "cc_register" "") (const_int 0)])
8770 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8771 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8774 if (GET_CODE (operands[3]) == NE)
8776 if (which_alternative != 1)
8777 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8778 if (which_alternative != 0)
8779 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8782 if (which_alternative != 0)
8783 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8784 if (which_alternative != 1)
8785 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8788 [(set_attr "conds" "use")
8789 (set_attr "insn" "mov")
8790 (set_attr "length" "4,4,8")]
;; op0 = op1 <op5> (op2 <cmp4> op3): fold the 0/1 comparison result into
;; a shiftable operation.  Fast path: (x < 0) contributes via LSR #31.
;; Otherwise compare, fix up the false case per the operator (AND -> 0,
;; MINUS -> RSB, else copy op1), then apply the op with #1 conditionally.
8793 (define_insn "*cond_arith"
8794 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8795 (match_operator:SI 5 "shiftable_operator"
8796 [(match_operator:SI 4 "arm_comparison_operator"
8797 [(match_operand:SI 2 "s_register_operand" "r,r")
8798 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8799 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8800 (clobber (reg:CC CC_REGNUM))]
8803 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8804 return \"%i5\\t%0, %1, %2, lsr #31\";
8806 output_asm_insn (\"cmp\\t%2, %3\", operands);
8807 if (GET_CODE (operands[5]) == AND)
8808 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8809 else if (GET_CODE (operands[5]) == MINUS)
8810 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8811 else if (which_alternative != 0)
8812 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8813 return \"%i5%d4\\t%0, %1, #1\";
8815 [(set_attr "conds" "clob")
8816 (set_attr "length" "12")]
;; op0 = op1 - (op2 <cmp4> op3): compare, copy op1 if needed, then
;; conditionally subtract 1.
8819 (define_insn "*cond_sub"
8820 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8821 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8822 (match_operator:SI 4 "arm_comparison_operator"
8823 [(match_operand:SI 2 "s_register_operand" "r,r")
8824 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8825 (clobber (reg:CC CC_REGNUM))]
8828 output_asm_insn (\"cmp\\t%2, %3\", operands);
8829 if (which_alternative != 0)
8830 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8831 return \"sub%d4\\t%0, %1, #1\";
8833 [(set_attr "conds" "clob")
8834 (set_attr "length" "8,12")]
8837 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
;; *cmp_ite0: set a dominant CC register from an if_then_else of two
;; comparisons (operators 4 and 5).  The opcode table is indexed first by
;; which_alternative (selecting cmp vs. cmn forms for the rI/L operand
;; variants) and then by whether comparison_dominates_p says the two
;; compares must be issued in swapped order.
8838 (define_insn "*cmp_ite0"
8839 [(set (match_operand 6 "dominant_cc_register" "")
8842 (match_operator 4 "arm_comparison_operator"
8843 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8844 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8845 (match_operator:SI 5 "arm_comparison_operator"
8846 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8847 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8853 static const char * const opcodes[4][2] =
8855 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8856 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8857 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8858 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8859 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8860 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8861 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8862 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8865 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8867 return opcodes[which_alternative][swap];
8869 [(set_attr "conds" "set")
8870 (set_attr "length" "8")]
;; *cmp_ite1: as *cmp_ite0, but the dominance test is made against the
;; REVERSED first comparison, and the conditional second compares use the
;; inverse condition (%D5) in the swapped column accordingly.
8873 (define_insn "*cmp_ite1"
8874 [(set (match_operand 6 "dominant_cc_register" "")
8877 (match_operator 4 "arm_comparison_operator"
8878 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8879 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8880 (match_operator:SI 5 "arm_comparison_operator"
8881 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8882 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8888 static const char * const opcodes[4][2] =
8890 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8891 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8892 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8893 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8894 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8895 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8896 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8897 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8900 comparison_dominates_p (GET_CODE (operands[5]),
8901 reverse_condition (GET_CODE (operands[4])));
8903 return opcodes[which_alternative][swap];
8905 [(set_attr "conds" "set")
8906 (set_attr "length" "8")]
;; *cmp_and: set a dominant CC register from the AND of two comparisons.
;; Each arm of the opcode table pairs a compare (cmp, or cmn for negated
;; constants) with a conditional second compare; column choice depends on
;; which comparison dominates the other.
8909 (define_insn "*cmp_and"
8910 [(set (match_operand 6 "dominant_cc_register" "")
8913 (match_operator 4 "arm_comparison_operator"
8914 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8915 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8916 (match_operator:SI 5 "arm_comparison_operator"
8917 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8918 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8923 static const char *const opcodes[4][2] =
8925 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8926 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8927 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8928 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8929 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8930 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8931 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8932 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8935 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8937 return opcodes[which_alternative][swap];
8939 [(set_attr "conds" "set")
8940 (set_attr "predicable" "no")
8941 (set_attr "length" "8")]
;; *cmp_ior: as *cmp_and, but for the inclusive-OR of two comparisons;
;; the second compare therefore executes on the INVERSE condition (%D4/%D5)
;; of the first.
8944 (define_insn "*cmp_ior"
8945 [(set (match_operand 6 "dominant_cc_register" "")
8948 (match_operator 4 "arm_comparison_operator"
8949 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8950 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8951 (match_operator:SI 5 "arm_comparison_operator"
8952 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8953 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8958 static const char *const opcodes[4][2] =
8960 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8961 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8962 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8963 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8964 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8965 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8966 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8967 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8970 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8972 return opcodes[which_alternative][swap];
8975 [(set_attr "conds" "set")
8976 (set_attr "length" "8")]
;; *ior_scc_scc: %0 = (scc 3) | (scc 6).  After reload this is split into
;; a dominance compare of the two conditions (result in a fresh CC
;; register, operand 7) followed by an NE store of 0/1 into %0.
8979 (define_insn_and_split "*ior_scc_scc"
8980 [(set (match_operand:SI 0 "s_register_operand" "=r")
8981 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8982 [(match_operand:SI 1 "s_register_operand" "r")
8983 (match_operand:SI 2 "arm_add_operand" "rIL")])
8984 (match_operator:SI 6 "arm_comparison_operator"
8985 [(match_operand:SI 4 "s_register_operand" "r")
8986 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8987 (clobber (reg:CC CC_REGNUM))]
8989 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8992 "TARGET_ARM && reload_completed"
8996 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8997 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8999 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9001 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9004 [(set_attr "conds" "clob")
9005 (set_attr "length" "16")])
9007 ; If the above pattern is followed by a CMP insn, then the compare is
9008 ; redundant, since we can rework the conditional instruction that follows.
;; *ior_scc_scc_cmp: combined form that also leaves the dominance compare
;; result live in CC (operand 0), so the trailing comparison is absorbed.
9009 (define_insn_and_split "*ior_scc_scc_cmp"
9010 [(set (match_operand 0 "dominant_cc_register" "")
9011 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9012 [(match_operand:SI 1 "s_register_operand" "r")
9013 (match_operand:SI 2 "arm_add_operand" "rIL")])
9014 (match_operator:SI 6 "arm_comparison_operator"
9015 [(match_operand:SI 4 "s_register_operand" "r")
9016 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9018 (set (match_operand:SI 7 "s_register_operand" "=r")
9019 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9020 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9023 "TARGET_ARM && reload_completed"
9027 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9028 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9030 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9032 [(set_attr "conds" "set")
9033 (set_attr "length" "16")])
;; *and_scc_scc: %0 = (scc 3) & (scc 6).  Split after reload into a
;; DOM_CC_X_AND_Y dominance compare (CC result in operand 7) plus an NE
;; store of 0/1 — the AND analogue of *ior_scc_scc above.
9035 (define_insn_and_split "*and_scc_scc"
9036 [(set (match_operand:SI 0 "s_register_operand" "=r")
9037 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9038 [(match_operand:SI 1 "s_register_operand" "r")
9039 (match_operand:SI 2 "arm_add_operand" "rIL")])
9040 (match_operator:SI 6 "arm_comparison_operator"
9041 [(match_operand:SI 4 "s_register_operand" "r")
9042 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9043 (clobber (reg:CC CC_REGNUM))]
9045 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9048 "TARGET_ARM && reload_completed
9049 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9054 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9055 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9057 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9059 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9062 [(set_attr "conds" "clob")
9063 (set_attr "length" "16")])
9065 ; If the above pattern is followed by a CMP insn, then the compare is
9066 ; redundant, since we can rework the conditional instruction that follows.
;; *and_scc_scc_cmp: as above but the dominance-compare CC result
;; (operand 0) stays live, absorbing a following compare against zero.
9067 (define_insn_and_split "*and_scc_scc_cmp"
9068 [(set (match_operand 0 "dominant_cc_register" "")
9069 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9070 [(match_operand:SI 1 "s_register_operand" "r")
9071 (match_operand:SI 2 "arm_add_operand" "rIL")])
9072 (match_operator:SI 6 "arm_comparison_operator"
9073 [(match_operand:SI 4 "s_register_operand" "r")
9074 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9076 (set (match_operand:SI 7 "s_register_operand" "=r")
9077 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9078 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9081 "TARGET_ARM && reload_completed"
9085 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9086 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9088 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9090 [(set_attr "conds" "set")
9091 (set_attr "length" "16")])
9093 ;; If there is no dominance in the comparison, then we can still save an
9094 ;; instruction in the AND case, since we can know that the second compare
9095 ;; need only zero the value if false (if true, then the value is already correct).
;; *and_scc_scc_nodom: split into (1) a store of the first scc into %0,
;; (2) the second compare into a fresh CC reg (operand 7, mode chosen by
;; SELECT_CC_MODE on operator 6), and (3) a conditional zeroing of %0.
9097 (define_insn_and_split "*and_scc_scc_nodom"
9098 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9099 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9100 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9101 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9102 (match_operator:SI 6 "arm_comparison_operator"
9103 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9104 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9105 (clobber (reg:CC CC_REGNUM))]
9107 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9110 "TARGET_ARM && reload_completed"
9111 [(parallel [(set (match_dup 0)
9112 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9113 (clobber (reg:CC CC_REGNUM))])
9114 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9116 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9119 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9120 operands[4], operands[5]),
9122 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9124 [(set_attr "conds" "clob")
9125 (set_attr "length" "20")])
;; Two define_splits (headers fall outside this extract — NOTE(review):
;; confirm against the full file) that rewrite a CC_NOOV compare of
;; (ior (and reg ...) (scc ...)) — in either operand order — into an OR of
;; the comparison with the masked register in scratch operand 4, followed
;; by a CC_NOOV test of (and scratch 1).
9128 [(set (reg:CC_NOOV CC_REGNUM)
9129 (compare:CC_NOOV (ior:SI
9130 (and:SI (match_operand:SI 0 "s_register_operand" "")
9132 (match_operator:SI 1 "arm_comparison_operator"
9133 [(match_operand:SI 2 "s_register_operand" "")
9134 (match_operand:SI 3 "arm_add_operand" "")]))
9136 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9139 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9141 (set (reg:CC_NOOV CC_REGNUM)
9142 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Mirror split: same transformation with the scc term first and the
;; (and reg ...) term second inside the ior.
9147 [(set (reg:CC_NOOV CC_REGNUM)
9148 (compare:CC_NOOV (ior:SI
9149 (match_operator:SI 1 "arm_comparison_operator"
9150 [(match_operand:SI 2 "s_register_operand" "")
9151 (match_operand:SI 3 "arm_add_operand" "")])
9152 (and:SI (match_operand:SI 0 "s_register_operand" "")
9155 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9158 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9160 (set (reg:CC_NOOV CC_REGNUM)
9161 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9164 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: %0 = -(comparison %1,%2), i.e. 0 or all-ones.
;; Special cases: LT against zero becomes a single "asr #31"; NE uses
;; subs/mvnne; otherwise cmp + conditional mov/mvn pair.
9166 (define_insn "*negscc"
9167 [(set (match_operand:SI 0 "s_register_operand" "=r")
9168 (neg:SI (match_operator 3 "arm_comparison_operator"
9169 [(match_operand:SI 1 "s_register_operand" "r")
9170 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9171 (clobber (reg:CC CC_REGNUM))]
9174 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9175 return \"mov\\t%0, %1, asr #31\";
9177 if (GET_CODE (operands[3]) == NE)
9178 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9180 output_asm_insn (\"cmp\\t%1, %2\", operands);
9181 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9182 return \"mvn%d3\\t%0, #0\";
9184 [(set_attr "conds" "clob")
9185 (set_attr "length" "12")]
;; movcond: conditional move %0 = (cmp %3,%4) ? %1 : %2, clobbering CC.
;; For LT/GE against zero with register data operands it uses two-insn
;; and/bic sequences on "%3, asr #31/#32" masks; otherwise it emits
;; cmp (or cmn for negated constants) followed by up to two conditional
;; movs, guarded by which_alternative to skip the tied-operand copy.
9188 (define_insn "movcond"
9189 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9191 (match_operator 5 "arm_comparison_operator"
9192 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9193 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9194 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9195 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9196 (clobber (reg:CC CC_REGNUM))]
9199 if (GET_CODE (operands[5]) == LT
9200 && (operands[4] == const0_rtx))
9202 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9204 if (operands[2] == const0_rtx)
9205 return \"and\\t%0, %1, %3, asr #31\";
9206 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9208 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9210 if (operands[1] == const0_rtx)
9211 return \"bic\\t%0, %2, %3, asr #31\";
9212 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9214 /* The only case that falls through to here is when both ops 1 & 2
9218 if (GET_CODE (operands[5]) == GE
9219 && (operands[4] == const0_rtx))
9221 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9223 if (operands[2] == const0_rtx)
9224 return \"bic\\t%0, %1, %3, asr #31\";
9225 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9227 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9229 if (operands[1] == const0_rtx)
9230 return \"and\\t%0, %2, %3, asr #31\";
9231 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9233 /* The only case that falls through to here is when both ops 1 & 2
9236 if (GET_CODE (operands[4]) == CONST_INT
9237 && !const_ok_for_arm (INTVAL (operands[4])))
9238 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9240 output_asm_insn (\"cmp\\t%3, %4\", operands);
9241 if (which_alternative != 0)
9242 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9243 if (which_alternative != 1)
9244 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9247 [(set_attr "conds" "clob")
9248 (set_attr "length" "8,8,12")]
9251 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; *ifcompare_plus_move: if (cmp %4,%5) then %0 = %2 + %3 else %0 = %1;
;; performs its own compare and clobbers CC.
9253 (define_insn "*ifcompare_plus_move"
9254 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9255 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9256 [(match_operand:SI 4 "s_register_operand" "r,r")
9257 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9259 (match_operand:SI 2 "s_register_operand" "r,r")
9260 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9261 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9262 (clobber (reg:CC CC_REGNUM))]
9265 [(set_attr "conds" "clob")
9266 (set_attr "length" "8,12")]
;; *if_plus_move: same selection but driven by an already-set CC register
;; (operand 5); emits conditional add/sub, plus an inverse mov when %1 is
;; not tied to %0.
9269 (define_insn "*if_plus_move"
9270 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9272 (match_operator 4 "arm_comparison_operator"
9273 [(match_operand 5 "cc_register" "") (const_int 0)])
9275 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9276 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9277 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9281 sub%d4\\t%0, %2, #%n3
9282 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9283 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9284 [(set_attr "conds" "use")
9285 (set_attr "length" "4,4,8,8")
9286 (set_attr "type" "*,*,*,*")]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the plus on
;; the else-arm.
9289 (define_insn "*ifcompare_move_plus"
9290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9291 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9292 [(match_operand:SI 4 "s_register_operand" "r,r")
9293 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9294 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9296 (match_operand:SI 2 "s_register_operand" "r,r")
9297 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9298 (clobber (reg:CC CC_REGNUM))]
9301 [(set_attr "conds" "clob")
9302 (set_attr "length" "8,12")]
;; *if_move_plus: CC-register-driven mirror of *if_plus_move (inverse
;; conditions, plus on the else-arm).
9305 (define_insn "*if_move_plus"
9306 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9308 (match_operator 4 "arm_comparison_operator"
9309 [(match_operand 5 "cc_register" "") (const_int 0)])
9310 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9312 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9313 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9317 sub%D4\\t%0, %2, #%n3
9318 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9319 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9320 [(set_attr "conds" "use")
9321 (set_attr "length" "4,4,8,8")
9322 (set_attr "type" "*,*,*,*")]
;; *ifcompare_arith_arith: if (cmp %5,%6) then %0 = op8(%1,%2)
;; else %0 = op7(%3,%4); does its own compare, clobbers CC.
9325 (define_insn "*ifcompare_arith_arith"
9326 [(set (match_operand:SI 0 "s_register_operand" "=r")
9327 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9328 [(match_operand:SI 5 "s_register_operand" "r")
9329 (match_operand:SI 6 "arm_add_operand" "rIL")])
9330 (match_operator:SI 8 "shiftable_operator"
9331 [(match_operand:SI 1 "s_register_operand" "r")
9332 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9333 (match_operator:SI 7 "shiftable_operator"
9334 [(match_operand:SI 3 "s_register_operand" "r")
9335 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9336 (clobber (reg:CC CC_REGNUM))]
9339 [(set_attr "conds" "clob")
9340 (set_attr "length" "12")]
;; *if_arith_arith: same selection from an existing CC register — one
;; conditional arith op per arm (%I6 on true, %I7 on false).
9343 (define_insn "*if_arith_arith"
9344 [(set (match_operand:SI 0 "s_register_operand" "=r")
9345 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9346 [(match_operand 8 "cc_register" "") (const_int 0)])
9347 (match_operator:SI 6 "shiftable_operator"
9348 [(match_operand:SI 1 "s_register_operand" "r")
9349 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9350 (match_operator:SI 7 "shiftable_operator"
9351 [(match_operand:SI 3 "s_register_operand" "r")
9352 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9354 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9355 [(set_attr "conds" "use")
9356 (set_attr "length" "8")]
;; *ifcompare_arith_move: if (cmp %2,%3) then %0 = op7(%4,%5) else %0 = %1.
;; When comparing against zero with LT/GE, identity-preserving operators
;; and suitably-tied registers, a two-insn and/bic mask sequence is used;
;; otherwise cmp/cmn + conditional arith (+ inverse mov for alt 1).
9359 (define_insn "*ifcompare_arith_move"
9360 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9361 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9362 [(match_operand:SI 2 "s_register_operand" "r,r")
9363 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9364 (match_operator:SI 7 "shiftable_operator"
9365 [(match_operand:SI 4 "s_register_operand" "r,r")
9366 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9367 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9368 (clobber (reg:CC CC_REGNUM))]
9371 /* If we have an operation where (op x 0) is the identity operation and
9372 the conditional operator is LT or GE and we are comparing against zero and
9373 everything is in registers then we can do this in two instructions. */
9374 if (operands[3] == const0_rtx
9375 && GET_CODE (operands[7]) != AND
9376 && GET_CODE (operands[5]) == REG
9377 && GET_CODE (operands[1]) == REG
9378 && REGNO (operands[1]) == REGNO (operands[4])
9379 && REGNO (operands[4]) != REGNO (operands[0]))
9381 if (GET_CODE (operands[6]) == LT)
9382 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9383 else if (GET_CODE (operands[6]) == GE)
9384 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9386 if (GET_CODE (operands[3]) == CONST_INT
9387 && !const_ok_for_arm (INTVAL (operands[3])))
9388 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9390 output_asm_insn (\"cmp\\t%2, %3\", operands);
9391 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9392 if (which_alternative != 0)
9393 return \"mov%D6\\t%0, %1\";
9396 [(set_attr "conds" "clob")
9397 (set_attr "length" "8,12")]
;; *if_arith_move: CC-register-driven variant — conditional arith op,
;; plus an inverse mov when %1 is not tied to %0.
9400 (define_insn "*if_arith_move"
9401 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9402 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9403 [(match_operand 6 "cc_register" "") (const_int 0)])
9404 (match_operator:SI 5 "shiftable_operator"
9405 [(match_operand:SI 2 "s_register_operand" "r,r")
9406 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9407 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9411 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9412 [(set_attr "conds" "use")
9413 (set_attr "length" "4,8")
9414 (set_attr "type" "*,*")]
;; *ifcompare_move_arith: mirror of *ifcompare_arith_move with the arith
;; op on the else-arm (so the and/bic mask cases swap GE and LT).
9417 (define_insn "*ifcompare_move_arith"
9418 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9419 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9420 [(match_operand:SI 4 "s_register_operand" "r,r")
9421 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9422 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9423 (match_operator:SI 7 "shiftable_operator"
9424 [(match_operand:SI 2 "s_register_operand" "r,r")
9425 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9426 (clobber (reg:CC CC_REGNUM))]
9429 /* If we have an operation where (op x 0) is the identity operation and
9430 the conditional operator is LT or GE and we are comparing against zero and
9431 everything is in registers then we can do this in two instructions */
9432 if (operands[5] == const0_rtx
9433 && GET_CODE (operands[7]) != AND
9434 && GET_CODE (operands[3]) == REG
9435 && GET_CODE (operands[1]) == REG
9436 && REGNO (operands[1]) == REGNO (operands[2])
9437 && REGNO (operands[2]) != REGNO (operands[0]))
9439 if (GET_CODE (operands[6]) == GE)
9440 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9441 else if (GET_CODE (operands[6]) == LT)
9442 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9445 if (GET_CODE (operands[5]) == CONST_INT
9446 && !const_ok_for_arm (INTVAL (operands[5])))
9447 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9449 output_asm_insn (\"cmp\\t%4, %5\", operands);
9451 if (which_alternative != 0)
9452 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9453 return \"%I7%D6\\t%0, %2, %3\";
9455 [(set_attr "conds" "clob")
9456 (set_attr "length" "8,12")]
;; *if_move_arith: CC-register-driven mirror of *if_arith_move (inverse
;; conditions, arith on the else-arm).
9459 (define_insn "*if_move_arith"
9460 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9462 (match_operator 4 "arm_comparison_operator"
9463 [(match_operand 6 "cc_register" "") (const_int 0)])
9464 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9465 (match_operator:SI 5 "shiftable_operator"
9466 [(match_operand:SI 2 "s_register_operand" "r,r")
9467 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9471 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9472 [(set_attr "conds" "use")
9473 (set_attr "length" "4,8")
9474 (set_attr "type" "*,*")]
;; *ifcompare_move_not: if (cmp %3,%4) then %0 = %1 else %0 = ~%2;
;; does its own compare, clobbers CC.
9477 (define_insn "*ifcompare_move_not"
9478 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9480 (match_operator 5 "arm_comparison_operator"
9481 [(match_operand:SI 3 "s_register_operand" "r,r")
9482 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9483 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9485 (match_operand:SI 2 "s_register_operand" "r,r"))))
9486 (clobber (reg:CC CC_REGNUM))]
9489 [(set_attr "conds" "clob")
9490 (set_attr "length" "8,12")]
;; *if_move_not: CC-register-driven form — conditional mvn of %2 on the
;; inverse condition, with a mov or mvn of %1 (K constants inverted via
;; %B1) when %1 is not tied to %0.
9493 (define_insn "*if_move_not"
9494 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9496 (match_operator 4 "arm_comparison_operator"
9497 [(match_operand 3 "cc_register" "") (const_int 0)])
9498 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9499 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9503 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9504 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9505 [(set_attr "conds" "use")
9506 (set_attr "insn" "mvn")
9507 (set_attr "length" "4,8,8")]
;; *ifcompare_not_move: mirror with the NOT on the then-arm.
9510 (define_insn "*ifcompare_not_move"
9511 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9513 (match_operator 5 "arm_comparison_operator"
9514 [(match_operand:SI 3 "s_register_operand" "r,r")
9515 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9517 (match_operand:SI 2 "s_register_operand" "r,r"))
9518 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9519 (clobber (reg:CC CC_REGNUM))]
9522 [(set_attr "conds" "clob")
9523 (set_attr "length" "8,12")]
;; *if_not_move: CC-register-driven mirror of *if_move_not.
9526 (define_insn "*if_not_move"
9527 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9529 (match_operator 4 "arm_comparison_operator"
9530 [(match_operand 3 "cc_register" "") (const_int 0)])
9531 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9532 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9536 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9537 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9538 [(set_attr "conds" "use")
9539 (set_attr "insn" "mvn")
9540 (set_attr "length" "4,8,8")]
;; *ifcompare_shift_move: if (cmp %4,%5) then %0 = shift7(%2,%3)
;; else %0 = %1; does its own compare, clobbers CC.
9543 (define_insn "*ifcompare_shift_move"
9544 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9546 (match_operator 6 "arm_comparison_operator"
9547 [(match_operand:SI 4 "s_register_operand" "r,r")
9548 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9549 (match_operator:SI 7 "shift_operator"
9550 [(match_operand:SI 2 "s_register_operand" "r,r")
9551 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9552 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9553 (clobber (reg:CC CC_REGNUM))]
9556 [(set_attr "conds" "clob")
9557 (set_attr "length" "8,12")]
;; *if_shift_move: CC-register-driven form — conditional shifted mov
;; (%2%S4), with mov/mvn of %1 on the inverse condition when not tied.
;; Type is alu_shift for a constant shift amount, alu_shift_reg otherwise.
9560 (define_insn "*if_shift_move"
9561 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9563 (match_operator 5 "arm_comparison_operator"
9564 [(match_operand 6 "cc_register" "") (const_int 0)])
9565 (match_operator:SI 4 "shift_operator"
9566 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9567 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9568 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9572 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9573 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9574 [(set_attr "conds" "use")
9575 (set_attr "shift" "2")
9576 (set_attr "length" "4,8,8")
9577 (set_attr "insn" "mov")
9578 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9579 (const_string "alu_shift")
9580 (const_string "alu_shift_reg")))]
;; *ifcompare_move_shift: mirror with the shift on the else-arm.
9583 (define_insn "*ifcompare_move_shift"
9584 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9586 (match_operator 6 "arm_comparison_operator"
9587 [(match_operand:SI 4 "s_register_operand" "r,r")
9588 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9589 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9590 (match_operator:SI 7 "shift_operator"
9591 [(match_operand:SI 2 "s_register_operand" "r,r")
9592 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9593 (clobber (reg:CC CC_REGNUM))]
9596 [(set_attr "conds" "clob")
9597 (set_attr "length" "8,12")]
;; *if_move_shift: CC-register-driven mirror of *if_shift_move.
9600 (define_insn "*if_move_shift"
9601 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9603 (match_operator 5 "arm_comparison_operator"
9604 [(match_operand 6 "cc_register" "") (const_int 0)])
9605 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9606 (match_operator:SI 4 "shift_operator"
9607 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9608 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9612 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9613 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9614 [(set_attr "conds" "use")
9615 (set_attr "shift" "2")
9616 (set_attr "length" "4,8,8")
9617 (set_attr "insn" "mov")
9618 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9619 (const_string "alu_shift")
9620 (const_string "alu_shift_reg")))]
;; *ifcompare_shift_shift: if (cmp %5,%6) then %0 = shift8(%1,%2)
;; else %0 = shift9(%3,%4); does its own compare, clobbers CC.
9623 (define_insn "*ifcompare_shift_shift"
9624 [(set (match_operand:SI 0 "s_register_operand" "=r")
9626 (match_operator 7 "arm_comparison_operator"
9627 [(match_operand:SI 5 "s_register_operand" "r")
9628 (match_operand:SI 6 "arm_add_operand" "rIL")])
9629 (match_operator:SI 8 "shift_operator"
9630 [(match_operand:SI 1 "s_register_operand" "r")
9631 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9632 (match_operator:SI 9 "shift_operator"
9633 [(match_operand:SI 3 "s_register_operand" "r")
9634 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9635 (clobber (reg:CC CC_REGNUM))]
9638 [(set_attr "conds" "clob")
9639 (set_attr "length" "12")]
;; *if_shift_shift: CC-register-driven — one conditional shifted mov per
;; arm; alu_shift type only when both shift amounts are constants.
9642 (define_insn "*if_shift_shift"
9643 [(set (match_operand:SI 0 "s_register_operand" "=r")
9645 (match_operator 5 "arm_comparison_operator"
9646 [(match_operand 8 "cc_register" "") (const_int 0)])
9647 (match_operator:SI 6 "shift_operator"
9648 [(match_operand:SI 1 "s_register_operand" "r")
9649 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9650 (match_operator:SI 7 "shift_operator"
9651 [(match_operand:SI 3 "s_register_operand" "r")
9652 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9654 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9655 [(set_attr "conds" "use")
9656 (set_attr "shift" "1")
9657 (set_attr "length" "8")
9658 (set_attr "insn" "mov")
9659 (set (attr "type") (if_then_else
9660 (and (match_operand 2 "const_int_operand" "")
9661 (match_operand 4 "const_int_operand" ""))
9662 (const_string "alu_shift")
9663 (const_string "alu_shift_reg")))]
;; *ifcompare_not_arith: if (cmp %4,%5) then %0 = ~%1
;; else %0 = op7(%2,%3); does its own compare, clobbers CC.
9666 (define_insn "*ifcompare_not_arith"
9667 [(set (match_operand:SI 0 "s_register_operand" "=r")
9669 (match_operator 6 "arm_comparison_operator"
9670 [(match_operand:SI 4 "s_register_operand" "r")
9671 (match_operand:SI 5 "arm_add_operand" "rIL")])
9672 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9673 (match_operator:SI 7 "shiftable_operator"
9674 [(match_operand:SI 2 "s_register_operand" "r")
9675 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9676 (clobber (reg:CC CC_REGNUM))]
9679 [(set_attr "conds" "clob")
9680 (set_attr "length" "12")]
;; *if_not_arith: CC-register-driven — conditional mvn then inverse
;; conditional arith op.
9683 (define_insn "*if_not_arith"
9684 [(set (match_operand:SI 0 "s_register_operand" "=r")
9686 (match_operator 5 "arm_comparison_operator"
9687 [(match_operand 4 "cc_register" "") (const_int 0)])
9688 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9689 (match_operator:SI 6 "shiftable_operator"
9690 [(match_operand:SI 2 "s_register_operand" "r")
9691 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9693 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9694 [(set_attr "conds" "use")
9695 (set_attr "insn" "mvn")
9696 (set_attr "length" "8")]
;; *ifcompare_arith_not: mirror with the NOT on the else-arm.
9699 (define_insn "*ifcompare_arith_not"
9700 [(set (match_operand:SI 0 "s_register_operand" "=r")
9702 (match_operator 6 "arm_comparison_operator"
9703 [(match_operand:SI 4 "s_register_operand" "r")
9704 (match_operand:SI 5 "arm_add_operand" "rIL")])
9705 (match_operator:SI 7 "shiftable_operator"
9706 [(match_operand:SI 2 "s_register_operand" "r")
9707 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9708 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9709 (clobber (reg:CC CC_REGNUM))]
9712 [(set_attr "conds" "clob")
9713 (set_attr "length" "12")]
;; *if_arith_not: CC-register-driven mirror of *if_not_arith.
9716 (define_insn "*if_arith_not"
9717 [(set (match_operand:SI 0 "s_register_operand" "=r")
9719 (match_operator 5 "arm_comparison_operator"
9720 [(match_operand 4 "cc_register" "") (const_int 0)])
9721 (match_operator:SI 6 "shiftable_operator"
9722 [(match_operand:SI 2 "s_register_operand" "r")
9723 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9724 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9726 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9727 [(set_attr "conds" "use")
9728 (set_attr "insn" "mvn")
9729 (set_attr "length" "8")]
;; *ifcompare_neg_move: if (cmp %3,%4) then %0 = -%2 else %0 = %1;
;; does its own compare, clobbers CC.
9732 (define_insn "*ifcompare_neg_move"
9733 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9735 (match_operator 5 "arm_comparison_operator"
9736 [(match_operand:SI 3 "s_register_operand" "r,r")
9737 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9738 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9739 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9740 (clobber (reg:CC CC_REGNUM))]
9743 [(set_attr "conds" "clob")
9744 (set_attr "length" "8,12")]
;; *if_neg_move: CC-register-driven — conditional "rsb #0" for the
;; negate, with mov/mvn of %1 on the inverse condition when not tied.
9747 (define_insn "*if_neg_move"
9748 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9750 (match_operator 4 "arm_comparison_operator"
9751 [(match_operand 3 "cc_register" "") (const_int 0)])
9752 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9753 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9757 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9758 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9759 [(set_attr "conds" "use")
9760 (set_attr "length" "4,8,8")]
;; *ifcompare_move_neg: mirror with the negate on the else-arm.
9763 (define_insn "*ifcompare_move_neg"
9764 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9766 (match_operator 5 "arm_comparison_operator"
9767 [(match_operand:SI 3 "s_register_operand" "r,r")
9768 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9769 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9770 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9771 (clobber (reg:CC CC_REGNUM))]
9774 [(set_attr "conds" "clob")
9775 (set_attr "length" "8,12")]
;; *if_move_neg: CC-register-driven mirror of *if_neg_move.
9778 (define_insn "*if_move_neg"
9779 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9781 (match_operator 4 "arm_comparison_operator"
9782 [(match_operand 3 "cc_register" "") (const_int 0)])
9783 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9784 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9788 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9789 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9790 [(set_attr "conds" "use")
9791 (set_attr "length" "4,8,8")]
;; *arith_adjacentmem: %0 = op1(mem2, mem3) where the two memory operands
;; are adjacent words (adjacent_mem_locations).  Loads both words with a
;; single ldm (ib/ia/da chosen from the offsets, register order fixed up
;; so the ldm register list is ascending), falling back to an
;; add-immediate + ldm or two ldr's when the offset is awkward, then
;; applies the operator (%I3).  Clobbers a scratch register (operand 4).
9794 (define_insn "*arith_adjacentmem"
9795 [(set (match_operand:SI 0 "s_register_operand" "=r")
9796 (match_operator:SI 1 "shiftable_operator"
9797 [(match_operand:SI 2 "memory_operand" "m")
9798 (match_operand:SI 3 "memory_operand" "m")]))
9799 (clobber (match_scratch:SI 4 "=r"))]
9800 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9806 HOST_WIDE_INT val1 = 0, val2 = 0;
9808 if (REGNO (operands[0]) > REGNO (operands[4]))
9810 ldm[1] = operands[4];
9811 ldm[2] = operands[0];
9815 ldm[1] = operands[0];
9816 ldm[2] = operands[4];
9819 base_reg = XEXP (operands[2], 0);
9821 if (!REG_P (base_reg))
9823 val1 = INTVAL (XEXP (base_reg, 1));
9824 base_reg = XEXP (base_reg, 0);
9827 if (!REG_P (XEXP (operands[3], 0)))
9828 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9830 arith[0] = operands[0];
9831 arith[3] = operands[1];
9845 if (val1 !=0 && val2 != 0)
9849 if (val1 == 4 || val2 == 4)
9850 /* Other val must be 8, since we know they are adjacent and neither
9852 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9853 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9855 ldm[0] = ops[0] = operands[4];
9857 ops[2] = GEN_INT (val1);
9858 output_add_immediate (ops);
9860 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9862 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9866 /* Offset is out of range for a single add, so use two ldr. */
9869 ops[2] = GEN_INT (val1);
9870 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9872 ops[2] = GEN_INT (val2);
9873 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9879 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9881 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9886 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9888 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9890 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9893 [(set_attr "length" "12")
9894 (set_attr "predicable" "yes")
9895 (set_attr "type" "load1")]
9898 ; This pattern is never tried by combine, so do it as a peephole
;; Peephole (definition keyword falls outside this extract — NOTE(review):
;; confirm against the full file): fold a register copy followed by a
;; compare of the source against zero into a single compare-and-move
;; parallel, so the flag-setting mov (movs) does both jobs.
9901 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9902 (match_operand:SI 1 "arm_general_register_operand" ""))
9903 (set (reg:CC CC_REGNUM)
9904 (compare:CC (match_dup 1) (const_int 0)))]
9906 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9907 (set (match_dup 0) (match_dup 1))])]
;; Split (header also outside this extract): rewrite
;; (ge %1 0) & -(scc 2) into a not/ashiftrt mask in scratch operand 5
;; ANDed with the comparison result.
9912 [(set (match_operand:SI 0 "s_register_operand" "")
9913 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9915 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9916 [(match_operand:SI 3 "s_register_operand" "")
9917 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9918 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9920 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9921 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9926 ;; This split can be used because CC_Z mode implies that the following
9927 ;; branch will be an equality, or an unsigned inequality, so the sign
9928 ;; extension is not needed.
9931 [(set (reg:CC_Z CC_REGNUM)
9933 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9935 (match_operand 1 "const_int_operand" "")))
9936 (clobber (match_scratch:SI 2 ""))]
9938 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9939 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9940 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9941 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9943 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9946 ;; ??? Check the patterns above for Thumb-2 usefulness
9948 (define_expand "prologue"
9949 [(clobber (const_int 0))]
9952 arm_expand_prologue ();
9954 thumb1_expand_prologue ();
9959 (define_expand "epilogue"
9960 [(clobber (const_int 0))]
9963 if (crtl->calls_eh_return)
9964 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9966 thumb1_expand_epilogue ();
9967 else if (USE_RETURN_INSN (FALSE))
9969 emit_jump_insn (gen_return ());
9972 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9974 gen_rtx_RETURN (VOIDmode)),
9980 ;; Note - although unspec_volatile's USE all hard registers,
9981 ;; USEs are ignored after reload has completed.  Thus we need
9982 ;; to add an unspec of the link register to ensure that flow
9983 ;; does not think that it is unused by the sibcall branch that
9984 ;; will replace the standard function epilogue.
9985 (define_insn "sibcall_epilogue"
9986 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9987 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9990 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9991 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9992 return arm_output_epilogue (next_nonnote_insn (insn));
9994 ;; Length is absolute worst case
9995 [(set_attr "length" "44")
9996 (set_attr "type" "block")
9997 ;; We don't clobber the conditions, but the potential length of this
9998 ;; operation is sufficient to make conditionalizing the sequence
9999 ;; unlikely to be profitable.
10000 (set_attr "conds" "clob")]
10003 (define_insn "*epilogue_insns"
10004 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10008 return arm_output_epilogue (NULL);
10009 else /* TARGET_THUMB1 */
10010 return thumb_unexpanded_epilogue ();
10012 ; Length is absolute worst case
10013 [(set_attr "length" "44")
10014 (set_attr "type" "block")
10015 ;; We don't clobber the conditions, but the potential length of this
10016 ;; operation is sufficient to make conditionalizing the sequence
10017 ;; unlikely to be profitable.
10018 (set_attr "conds" "clob")]
10021 (define_expand "eh_epilogue"
10022 [(use (match_operand:SI 0 "register_operand" ""))
10023 (use (match_operand:SI 1 "register_operand" ""))
10024 (use (match_operand:SI 2 "register_operand" ""))]
10028 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10029 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10031 rtx ra = gen_rtx_REG (Pmode, 2);
10033 emit_move_insn (ra, operands[2]);
10036   /* This is a hack -- we may have crystallized the function type too
10038 cfun->machine->func_type = 0;
10042 ;; This split is only used during output to reduce the number of patterns
10043 ;; that need assembler instructions adding to them. We allowed the setting
10044 ;; of the conditions to be implicit during rtl generation so that
10045 ;; the conditional compare patterns would work. However this conflicts to
10046 ;; some extent with the conditional data operations, so we have to split them
10049 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10050 ;; conditional execution sufficient?
10053 [(set (match_operand:SI 0 "s_register_operand" "")
10054 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10055 [(match_operand 2 "" "") (match_operand 3 "" "")])
10057 (match_operand 4 "" "")))
10058 (clobber (reg:CC CC_REGNUM))]
10059 "TARGET_ARM && reload_completed"
10060 [(set (match_dup 5) (match_dup 6))
10061 (cond_exec (match_dup 7)
10062 (set (match_dup 0) (match_dup 4)))]
10065 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10066 operands[2], operands[3]);
10067 enum rtx_code rc = GET_CODE (operands[1]);
10069 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10070 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10071 if (mode == CCFPmode || mode == CCFPEmode)
10072 rc = reverse_condition_maybe_unordered (rc);
10074 rc = reverse_condition (rc);
10076 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10081 [(set (match_operand:SI 0 "s_register_operand" "")
10082 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10083 [(match_operand 2 "" "") (match_operand 3 "" "")])
10084 (match_operand 4 "" "")
10086 (clobber (reg:CC CC_REGNUM))]
10087 "TARGET_ARM && reload_completed"
10088 [(set (match_dup 5) (match_dup 6))
10089 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10090 (set (match_dup 0) (match_dup 4)))]
10093 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10094 operands[2], operands[3]);
10096 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10097 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10102 [(set (match_operand:SI 0 "s_register_operand" "")
10103 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10104 [(match_operand 2 "" "") (match_operand 3 "" "")])
10105 (match_operand 4 "" "")
10106 (match_operand 5 "" "")))
10107 (clobber (reg:CC CC_REGNUM))]
10108 "TARGET_ARM && reload_completed"
10109 [(set (match_dup 6) (match_dup 7))
10110 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10111 (set (match_dup 0) (match_dup 4)))
10112 (cond_exec (match_dup 8)
10113 (set (match_dup 0) (match_dup 5)))]
10116 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10117 operands[2], operands[3]);
10118 enum rtx_code rc = GET_CODE (operands[1]);
10120 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10121 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10122 if (mode == CCFPmode || mode == CCFPEmode)
10123 rc = reverse_condition_maybe_unordered (rc);
10125 rc = reverse_condition (rc);
10127 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10132 [(set (match_operand:SI 0 "s_register_operand" "")
10133 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10134 [(match_operand:SI 2 "s_register_operand" "")
10135 (match_operand:SI 3 "arm_add_operand" "")])
10136 (match_operand:SI 4 "arm_rhs_operand" "")
10138 (match_operand:SI 5 "s_register_operand" ""))))
10139 (clobber (reg:CC CC_REGNUM))]
10140 "TARGET_ARM && reload_completed"
10141 [(set (match_dup 6) (match_dup 7))
10142 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10143 (set (match_dup 0) (match_dup 4)))
10144 (cond_exec (match_dup 8)
10145 (set (match_dup 0) (not:SI (match_dup 5))))]
10148 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10149 operands[2], operands[3]);
10150 enum rtx_code rc = GET_CODE (operands[1]);
10152 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10153 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10154 if (mode == CCFPmode || mode == CCFPEmode)
10155 rc = reverse_condition_maybe_unordered (rc);
10157 rc = reverse_condition (rc);
10159 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10163 (define_insn "*cond_move_not"
10164 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10165 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10166 [(match_operand 3 "cc_register" "") (const_int 0)])
10167 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10169 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10173 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10174 [(set_attr "conds" "use")
10175 (set_attr "insn" "mvn")
10176 (set_attr "length" "4,8")]
10179 ;; The next two patterns occur when an AND operation is followed by a
10180 ;; scc insn sequence
10182 (define_insn "*sign_extract_onebit"
10183 [(set (match_operand:SI 0 "s_register_operand" "=r")
10184 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10186 (match_operand:SI 2 "const_int_operand" "n")))
10187 (clobber (reg:CC CC_REGNUM))]
10190 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10191 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10192 return \"mvnne\\t%0, #0\";
10194 [(set_attr "conds" "clob")
10195 (set_attr "length" "8")]
10198 (define_insn "*not_signextract_onebit"
10199 [(set (match_operand:SI 0 "s_register_operand" "=r")
10201 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10203 (match_operand:SI 2 "const_int_operand" "n"))))
10204 (clobber (reg:CC CC_REGNUM))]
10207 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10208 output_asm_insn (\"tst\\t%1, %2\", operands);
10209 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10210 return \"movne\\t%0, #0\";
10212 [(set_attr "conds" "clob")
10213 (set_attr "length" "12")]
10215 ;; ??? The above patterns need auditing for Thumb-2
10217 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10218 ;; expressions. For simplicity, the first register is also in the unspec
10220 (define_insn "*push_multi"
10221 [(match_parallel 2 "multi_register_push"
10222 [(set (match_operand:BLK 0 "memory_operand" "=m")
10223 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10224 UNSPEC_PUSH_MULT))])]
10228 int num_saves = XVECLEN (operands[2], 0);
10230 /* For the StrongARM at least it is faster to
10231 use STR to store only a single register.
10232 In Thumb mode always use push, and the assembler will pick
10233 something appropriate. */
10234 if (num_saves == 1 && TARGET_ARM)
10235 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10242 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10244 strcpy (pattern, \"push\\t{%1\");
10246 for (i = 1; i < num_saves; i++)
10248 strcat (pattern, \", %|\");
10250 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10253 strcat (pattern, \"}\");
10254 output_asm_insn (pattern, operands);
10259 [(set_attr "type" "store4")]
10262 (define_insn "stack_tie"
10263 [(set (mem:BLK (scratch))
10264 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10265 (match_operand:SI 1 "s_register_operand" "rk")]
10269 [(set_attr "length" "0")]
10272 ;; Similarly for the floating point registers
10273 (define_insn "*push_fp_multi"
10274 [(match_parallel 2 "multi_register_push"
10275 [(set (match_operand:BLK 0 "memory_operand" "=m")
10276 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10277 UNSPEC_PUSH_MULT))])]
10278 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10283 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10284 output_asm_insn (pattern, operands);
10287 [(set_attr "type" "f_fpa_store")]
10290 ;; Special patterns for dealing with the constant pool
10292 (define_insn "align_4"
10293 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10296 assemble_align (32);
10301 (define_insn "align_8"
10302 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10305 assemble_align (64);
10310 (define_insn "consttable_end"
10311 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10314 making_const_table = FALSE;
10319 (define_insn "consttable_1"
10320 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10323 making_const_table = TRUE;
10324 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10325 assemble_zeros (3);
10328 [(set_attr "length" "4")]
10331 (define_insn "consttable_2"
10332 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10335 making_const_table = TRUE;
10336 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10337 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10338 assemble_zeros (2);
10341 [(set_attr "length" "4")]
10344 (define_insn "consttable_4"
10345 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10349 rtx x = operands[0];
10350 making_const_table = TRUE;
10351 switch (GET_MODE_CLASS (GET_MODE (x)))
10354 if (GET_MODE (x) == HFmode)
10355 arm_emit_fp16_const (x);
10359 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10360 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10364 /* XXX: Sometimes gcc does something really dumb and ends up with
10365 a HIGH in a constant pool entry, usually because it's trying to
10366 load into a VFP register. We know this will always be used in
10367 combination with a LO_SUM which ignores the high bits, so just
10368 strip off the HIGH. */
10369 if (GET_CODE (x) == HIGH)
10371 assemble_integer (x, 4, BITS_PER_WORD, 1);
10372 mark_symbol_refs_as_used (x);
10377 [(set_attr "length" "4")]
10380 (define_insn "consttable_8"
10381 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10385 making_const_table = TRUE;
10386 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10391 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10392 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10396 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10401 [(set_attr "length" "8")]
10404 (define_insn "consttable_16"
10405 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10409 making_const_table = TRUE;
10410 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10415 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10416 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10420 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10425 [(set_attr "length" "16")]
10428 ;; Miscellaneous Thumb patterns
10430 (define_expand "tablejump"
10431 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10432 (use (label_ref (match_operand 1 "" "")))])]
10437 /* Hopefully, CSE will eliminate this copy. */
10438 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10439 rtx reg2 = gen_reg_rtx (SImode);
10441 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10442 operands[0] = reg2;
10447 ;; NB never uses BX.
10448 (define_insn "*thumb1_tablejump"
10449 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10450 (use (label_ref (match_operand 1 "" "")))]
10453 [(set_attr "length" "2")]
10456 ;; V5 Instructions,
10458 (define_insn "clzsi2"
10459 [(set (match_operand:SI 0 "s_register_operand" "=r")
10460 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10461 "TARGET_32BIT && arm_arch5"
10463 [(set_attr "predicable" "yes")
10464 (set_attr "insn" "clz")])
10466 (define_insn "rbitsi2"
10467 [(set (match_operand:SI 0 "s_register_operand" "=r")
10468 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10469 "TARGET_32BIT && arm_arch_thumb2"
10471 [(set_attr "predicable" "yes")
10472 (set_attr "insn" "clz")])
10474 (define_expand "ctzsi2"
10475 [(set (match_operand:SI 0 "s_register_operand" "")
10476 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10477 "TARGET_32BIT && arm_arch_thumb2"
10480 rtx tmp = gen_reg_rtx (SImode);
10481 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10482 emit_insn (gen_clzsi2 (operands[0], tmp));
10488 ;; V5E instructions.
10490 (define_insn "prefetch"
10491 [(prefetch (match_operand:SI 0 "address_operand" "p")
10492 (match_operand:SI 1 "" "")
10493 (match_operand:SI 2 "" ""))]
10494 "TARGET_32BIT && arm_arch5e"
10497 ;; General predication pattern
10500 [(match_operator 0 "arm_comparison_operator"
10501 [(match_operand 1 "cc_register" "")
10507 (define_insn "prologue_use"
10508 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10510 "%@ %0 needed for prologue"
10511 [(set_attr "length" "0")]
10515 ;; Patterns for exception handling
10517 (define_expand "eh_return"
10518 [(use (match_operand 0 "general_operand" ""))]
10523 emit_insn (gen_arm_eh_return (operands[0]));
10525 emit_insn (gen_thumb_eh_return (operands[0]));
10530 ;; We can't expand this before we know where the link register is stored.
10531 (define_insn_and_split "arm_eh_return"
10532 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10534 (clobber (match_scratch:SI 1 "=&r"))]
10537 "&& reload_completed"
10541 arm_set_return_address (operands[0], operands[1]);
10546 (define_insn_and_split "thumb_eh_return"
10547 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10549 (clobber (match_scratch:SI 1 "=&l"))]
10552 "&& reload_completed"
10556 thumb_set_return_address (operands[0], operands[1]);
10564 (define_insn "load_tp_hard"
10565 [(set (match_operand:SI 0 "register_operand" "=r")
10566 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10568 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10569 [(set_attr "predicable" "yes")]
10572 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10573 (define_insn "load_tp_soft"
10574 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10575 (clobber (reg:SI LR_REGNUM))
10576 (clobber (reg:SI IP_REGNUM))
10577 (clobber (reg:CC CC_REGNUM))]
10579 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10580 [(set_attr "conds" "clob")]
10583 (define_insn "*arm_movtas_ze"
10584 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10587 (match_operand:SI 1 "const_int_operand" ""))]
10590 [(set_attr "predicable" "yes")
10591 (set_attr "length" "4")]
10594 (define_insn "*arm_rev"
10595 [(set (match_operand:SI 0 "s_register_operand" "=r")
10596 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10597 "TARGET_32BIT && arm_arch6"
10599 [(set_attr "predicable" "yes")
10600 (set_attr "length" "4")]
10603 (define_insn "*thumb1_rev"
10604 [(set (match_operand:SI 0 "s_register_operand" "=l")
10605 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10606 "TARGET_THUMB1 && arm_arch6"
10608 [(set_attr "length" "2")]
10611 (define_expand "arm_legacy_rev"
10612 [(set (match_operand:SI 2 "s_register_operand" "")
10613 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10617 (lshiftrt:SI (match_dup 2)
10619 (set (match_operand:SI 3 "s_register_operand" "")
10620 (rotatert:SI (match_dup 1)
10623 (and:SI (match_dup 2)
10624 (const_int -65281)))
10625 (set (match_operand:SI 0 "s_register_operand" "")
10626 (xor:SI (match_dup 3)
10632 ;; Reuse temporaries to keep register pressure down.
10633 (define_expand "thumb_legacy_rev"
10634 [(set (match_operand:SI 2 "s_register_operand" "")
10635 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10637 (set (match_operand:SI 3 "s_register_operand" "")
10638 (lshiftrt:SI (match_dup 1)
10641 (ior:SI (match_dup 3)
10643 (set (match_operand:SI 4 "s_register_operand" "")
10645 (set (match_operand:SI 5 "s_register_operand" "")
10646 (rotatert:SI (match_dup 1)
10649 (ashift:SI (match_dup 5)
10652 (lshiftrt:SI (match_dup 5)
10655 (ior:SI (match_dup 5)
10658 (rotatert:SI (match_dup 5)
10660 (set (match_operand:SI 0 "s_register_operand" "")
10661 (ior:SI (match_dup 5)
10667 (define_expand "bswapsi2"
10668 [(set (match_operand:SI 0 "s_register_operand" "=r")
10669 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10670 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10674 rtx op2 = gen_reg_rtx (SImode);
10675 rtx op3 = gen_reg_rtx (SImode);
10679 rtx op4 = gen_reg_rtx (SImode);
10680 rtx op5 = gen_reg_rtx (SImode);
10682 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10683 op2, op3, op4, op5));
10687 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10696 ;; Load the load/store multiple patterns
10697 (include "ldmstm.md")
10698 ;; Load the FPA co-processor patterns
10700 ;; Load the Maverick co-processor patterns
10701 (include "cirrus.md")
10702 ;; Vector bits common to IWMMXT and Neon
10703 (include "vec-common.md")
10704 ;; Load the Intel Wireless Multimedia Extension patterns
10705 (include "iwmmxt.md")
10706 ;; Load the VFP co-processor patterns
10708 ;; Thumb-2 patterns
10709 (include "thumb2.md")
10711 (include "neon.md")
10712 ;; Synchronization Primitives
10713 (include "sync.md")