1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no longer used.
;; NOTE(review): the constant below is spelled `UNPSEC_COS' -- a typo for
;; `UNSPEC_COS'.  Harmless while unused, but should be fixed together with
;; any references.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from a
101 ; given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
106 (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
110 ;; UNSPEC_VOLATILE Usage:
113 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
115 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
116 ; instruction epilogue sequence that isn't expanded
117 ; into normal RTL. Used for both normal and sibcall
119 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
120 ; for inlined constants.
121 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
123 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
125 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
127 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
129 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
131 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
133 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
134 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
135 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
136 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
137 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
138 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
139 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
141 (VUNSPEC_SYNC_COMPARE_AND_SWAP 21) ; Represent an atomic compare swap.
142 (VUNSPEC_SYNC_LOCK 22) ; Represent a sync_lock_test_and_set.
143 (VUNSPEC_SYNC_OP 23) ; Represent a sync_<op>
144 (VUNSPEC_SYNC_NEW_OP 24) ; Represent a sync_new_<op>
145 (VUNSPEC_SYNC_OLD_OP 25) ; Represent a sync_old_<op>
149 ;;---------------------------------------------------------------------------
152 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
153 ; generating ARM code.  This is used to control the length of some insn
154 ; patterns that share the same RTL in both ARM and Thumb code.
155 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
157 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
158 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
160 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
161 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
163 ;; Operand number of an input operand that is shifted.  Zero if the
164 ;; given instruction does not shift one of its input operands.
165 (define_attr "shift" "" (const_int 0))
167 ; Floating Point Unit.  If we only have floating point emulation, then there
168 ; is no point in scheduling the floating point insns.  (Well, for best
169 ; performance we should try and group them together).
170 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
171 (const (symbol_ref "arm_fpu_attr")))
; Attributes of the synchronization (atomic) loop patterns.  Each records an
; operand number, or "none" when the insn is not a sync pattern;
; "sync_memory" is read by the "length" attribute below via
; arm_sync_loop_insns.  The remaining roles (result, required/new value,
; scratch temporaries t1/t2) are presumably consumed by the sync output
; routines -- TODO confirm against the backend C code.
173 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
174 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
175 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
180 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
181 (const_string "none"))
183 ; LENGTH of an instruction (in bytes)
184 (define_attr "length" ""
185 (cond [(not (eq_attr "sync_memory" "none"))
186 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
189 ; The architecture which supports the instruction (or alternative).
190 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
191 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode.  "v6"
192 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
193 ; arm_arch6.  This attribute is used to compute attribute "enabled";
194 ; use value "any" to enable an alternative in all cases.
195 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6"
196 (const_string "any"))
198 (define_attr "arch_enabled" "no,yes"
199 (cond [(eq_attr "arch" "any")
202 (and (eq_attr "arch" "a")
203 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
206 (and (eq_attr "arch" "t")
207 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
210 (and (eq_attr "arch" "t1")
211 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
214 (and (eq_attr "arch" "t2")
215 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
218 (and (eq_attr "arch" "32")
219 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
222 (and (eq_attr "arch" "v6")
223 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
226 (and (eq_attr "arch" "nov6")
227 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
228 (const_string "yes")]
229 (const_string "no")))
231 ; Allows an insn to disable certain alternatives for reasons other than
; architecture support (compare "arch_enabled"; both are combined into the
; "enabled" attribute).
233 (define_attr "insn_enabled" "no,yes"
234 (const_string "yes"))
236 ; Enable all alternatives that are both arch_enabled and insn_enabled.
237 (define_attr "enabled" "no,yes"
238 (if_then_else (eq_attr "insn_enabled" "yes")
239 (if_then_else (eq_attr "arch_enabled" "yes")
242 (const_string "no")))
244 ; POOL_RANGE is how far away from a constant pool entry that this insn
245 ; can be placed.  If the distance is zero, then this insn will never
246 ; reference the pool.
247 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
248 ; before its address.
249 (define_attr "arm_pool_range" "" (const_int 0))
250 (define_attr "thumb2_pool_range" "" (const_int 0))
251 (define_attr "arm_neg_pool_range" "" (const_int 0))
252 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Generic forms: select the Thumb-2 range when compiling for Thumb, the ARM
; range otherwise.  (NOTE(review): "is_thumb" is also true for Thumb-1;
; presumably Thumb-1 patterns never set these attributes -- confirm.)
254 (define_attr "pool_range" ""
255 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
256 (attr "arm_pool_range")))
257 (define_attr "neg_pool_range" ""
258 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
259 (attr "arm_neg_pool_range")))
261 ; An assembler sequence may clobber the condition codes without us knowing.
262 ; If such an insn references the pool, then we have no way of knowing how,
263 ; so use the most conservative value for pool_range.
264 (define_asm_attributes
265 [(set_attr "conds" "clob")
266 (set_attr "length" "4")            ; true length is unknown; assume one insn
267 (set_attr "pool_range" "250")])    ; conservative pool distance
269 ;; The instruction used to implement a particular pattern. This
270 ;; information is used by pipeline descriptions to provide accurate
271 ;; scheduling information.
274 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
275 (const_string "other"))
277 ; TYPE attribute is used to detect floating point instructions which, if
278 ; running on a co-processor can run in parallel with other, basic instructions
279 ; If write-buffer scheduling is enabled then it can also be used in the
280 ; scheduling of writes.
282 ; Classification of each insn
283 ; Note: vfp.md has different meanings for some of these, and some further
284 ; types as well. See that file for details.
285 ; alu any alu instruction that doesn't hit memory or fp
286 ; regs or have a shifted source operand
287 ; alu_shift any data instruction that doesn't hit memory or fp
288 ; regs, but has a source operand shifted by a constant
289 ; alu_shift_reg any data instruction that doesn't hit memory or fp
290 ; regs, but has a source operand shifted by a register value
291 ; mult a multiply instruction
292 ; block blockage insn, this blocks all functional units
293 ; float a floating point arithmetic operation (subject to expansion)
294 ; fdivd DFmode floating point division
295 ; fdivs SFmode floating point division
296 ; fmul Floating point multiply
297 ; ffmul Fast floating point multiply
298 ; farith Floating point arithmetic (4 cycle)
299 ; ffarith Fast floating point arithmetic (2 cycle)
300 ; float_em a floating point arithmetic operation that is normally emulated
301 ; even on a machine with an fpa.
302 ; f_load a floating point load from memory
303 ; f_store a floating point store to memory
304 ; f_load[sd] single/double load from memory
305 ; f_store[sd] single/double store to memory
306 ; f_flag a transfer of co-processor flags to the CPSR
307 ; f_mem_r a transfer of a floating point register to a real reg via mem
308 ; r_mem_f the reverse of f_mem_r
309 ; f_2_r fast transfer float to arm (no memory needed)
310 ; r_2_f fast transfer arm to float
311 ; f_cvt convert floating<->integral
313 ; call a subroutine call
314 ; load_byte load byte(s) from memory to arm registers
315 ; load1 load 1 word from memory to arm registers
316 ; load2 load 2 words from memory to arm registers
317 ; load3 load 3 words from memory to arm registers
318 ; load4 load 4 words from memory to arm registers
319 ; store1 store 1 word to memory from arm registers
320 ; store2 store 2 words
321 ; store3 store 3 words
322 ; store4 store 4 (or more) words
323 ; Additions for Cirrus Maverick co-processor:
324 ; mav_farith Floating point arithmetic (4 cycle)
325 ; mav_dmult Double multiplies (7 cycle)
329 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
331 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
332 (const_string "mult")
333 (const_string "alu")))
335 ; Load scheduling, set from the arm_ld_sched variable, which is
336 ; initialized by arm_option_override ().
337 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
339 ;; Classification of NEON instructions for scheduling purposes.
340 ;; Do not set this attribute and the "type" attribute together in
341 ;; any one instruction pattern.
342 (define_attr "neon_type"
353 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
354 neon_mul_qqq_8_16_32_ddd_32,\
355 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
356 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
358 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
359 neon_mla_qqq_32_qqd_32_scalar,\
360 neon_mul_ddd_16_scalar_32_16_long_scalar,\
361 neon_mul_qqd_32_scalar,\
362 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
367 neon_vqshl_vrshl_vqrshl_qqq,\
369 neon_fp_vadd_ddd_vabs_dd,\
370 neon_fp_vadd_qqq_vabs_qq,\
376 neon_fp_vmla_ddd_scalar,\
377 neon_fp_vmla_qqq_scalar,\
378 neon_fp_vrecps_vrsqrts_ddd,\
379 neon_fp_vrecps_vrsqrts_qqq,\
387 neon_vld2_2_regs_vld1_vld2_all_lanes,\
390 neon_vst1_1_2_regs_vst2_2_regs,\
392 neon_vst2_4_regs_vst3_vst4,\
394 neon_vld1_vld2_lane,\
395 neon_vld3_vld4_lane,\
396 neon_vst1_vst2_lane,\
397 neon_vst3_vst4_lane,\
398 neon_vld3_vld4_all_lanes,\
406 (const_string "none"))
408 ; condition codes: this one is used by final_prescan_insn to speed up
409 ; conditionalizing instructions. It saves having to scan the rtl to see if
410 ; it uses or alters the condition codes.
412 ; USE means that the condition codes are used by the insn in the process of
413 ; outputting code, this means (at present) that we can't use the insn in
416 ; SET means that the purpose of the insn is to set the condition codes in a
417 ; well defined manner.
419 ; CLOB means that the condition codes are altered in an undefined manner, if
420 ; they are altered at all
422 ; UNCONDITIONAL means the instruction can not be conditionally executed and
423 ; that the instruction does not use or alter the condition codes.
425 ; NOCOND means that the instruction does not use or alter the condition
426 ; codes but can be converted into a conditionally executed instruction.
428 (define_attr "conds" "use,set,clob,unconditional,nocond"
430 (ior (eq_attr "is_thumb1" "yes")
431 (eq_attr "type" "call"))
432 (const_string "clob")
433 (if_then_else (eq_attr "neon_type" "none")
434 (const_string "nocond")
435 (const_string "unconditional"))))
437 ; Predicable means that the insn can be conditionally executed based on
438 ; an automatically added predicate (additional patterns are generated by
439 ; gen...).  We default to 'no' because no Thumb patterns match this rule
440 ; and not all ARM patterns do.
441 (define_attr "predicable" "no,yes" (const_string "no")) ; default: not predicable
443 ; Only model the write buffer for ARM6 and ARM7.  Earlier processors don't
444 ; have one.  Later ones, such as StrongARM, have write-back caches, so don't
445 ; suffer blockages enough to warrant modelling this (and it can adversely
446 ; affect the schedule).
447 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf"))) ; per tuned CPU
449 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
450 ; to stall the processor. Used with model_wbuf above.
451 (define_attr "write_conflict" "no,yes"
452 (if_then_else (eq_attr "type"
453 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
455 (const_string "no")))
457 ; Classify the insns into those that take one cycle and those that take more
458 ; than one on the main cpu execution unit.
; (The floating-point types count as "single" here -- presumably because they
; are issued to the co-processor and do not occupy the core.  TODO confirm.)
459 (define_attr "core_cycles" "single,multi"
460 (if_then_else (eq_attr "type"
461 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
462 (const_string "single")
463 (const_string "multi")))
465 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
466 ;; distant label.  Only applicable to Thumb code.
;; (BL is used because its branch range is much larger than that of the
;; Thumb B instructions.)
467 (define_attr "far_jump" "yes,no" (const_string "no"))
470 ;; The number of machine instructions this pattern expands to.
471 ;; Used for Thumb-2 conditional execution.
472 (define_attr "ce_count" "" (const_int 1)) ; default: a single instruction
474 ;;---------------------------------------------------------------------------
477 (include "iterators.md")
479 ;;---------------------------------------------------------------------------
482 (include "predicates.md")
483 (include "constraints.md")
485 ;;---------------------------------------------------------------------------
486 ;; Pipeline descriptions
488 ;; Processor type. This is created automatically from arm-cores.def.
489 (include "arm-tune.md")
491 (define_attr "tune_cortexr4" "yes,no"
493 (eq_attr "tune" "cortexr4,cortexr4f")
495 (const_string "no"))))
497 ;; True if the generic scheduling description should be used.
499 (define_attr "generic_sched" "yes,no"
501 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
502 (eq_attr "tune_cortexr4" "yes"))
504 (const_string "yes"))))
506 (define_attr "generic_vfp" "yes,no"
508 (and (eq_attr "fpu" "vfp")
509 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
510 (eq_attr "tune_cortexr4" "no"))
512 (const_string "no"))))
514 (include "arm-generic.md")
515 (include "arm926ejs.md")
516 (include "arm1020e.md")
517 (include "arm1026ejs.md")
518 (include "arm1136jfs.md")
520 (include "fa606te.md")
521 (include "fa626te.md")
522 (include "fmp626.md")
523 (include "fa726te.md")
524 (include "cortex-a5.md")
525 (include "cortex-a8.md")
526 (include "cortex-a9.md")
527 (include "cortex-r4.md")
528 (include "cortex-r4f.md")
529 (include "cortex-m4.md")
530 (include "cortex-m4-fpu.md")
534 ;;---------------------------------------------------------------------------
539 ;; Note: For DImode insns, there is normally no reason why operands should
540 ;; not be in the same register, what we don't want is for something being
541 ;; written to partially overlap something that is an input.
542 ;; Cirrus 64bit additions should not be split because we have a native
543 ;; 64bit addition instructions.
545 (define_expand "adddi3"
547 [(set (match_operand:DI 0 "s_register_operand" "")
548 (plus:DI (match_operand:DI 1 "s_register_operand" "")
549 (match_operand:DI 2 "s_register_operand" "")))
550 (clobber (reg:CC CC_REGNUM))])]
553 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
555 if (!cirrus_fp_register (operands[0], DImode))
556 operands[0] = force_reg (DImode, operands[0]);
557 if (!cirrus_fp_register (operands[1], DImode))
558 operands[1] = force_reg (DImode, operands[1]);
559 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
565 if (GET_CODE (operands[1]) != REG)
566 operands[1] = force_reg (DImode, operands[1]);
567 if (GET_CODE (operands[2]) != REG)
568 operands[2] = force_reg (DImode, operands[2]);
573 (define_insn "*thumb1_adddi3"
574 [(set (match_operand:DI 0 "register_operand" "=l")
575 (plus:DI (match_operand:DI 1 "register_operand" "%0")
576 (match_operand:DI 2 "register_operand" "l")))
577 (clobber (reg:CC CC_REGNUM))
580 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
581 [(set_attr "length" "4")]
584 (define_insn_and_split "*arm_adddi3"
585 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
586 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
587 (match_operand:DI 2 "s_register_operand" "r, 0")))
588 (clobber (reg:CC CC_REGNUM))]
589 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
591 "TARGET_32BIT && reload_completed
592 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
593 [(parallel [(set (reg:CC_C CC_REGNUM)
594 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
596 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
597 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
598 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
601 operands[3] = gen_highpart (SImode, operands[0]);
602 operands[0] = gen_lowpart (SImode, operands[0]);
603 operands[4] = gen_highpart (SImode, operands[1]);
604 operands[1] = gen_lowpart (SImode, operands[1]);
605 operands[5] = gen_highpart (SImode, operands[2]);
606 operands[2] = gen_lowpart (SImode, operands[2]);
608 [(set_attr "conds" "clob")
609 (set_attr "length" "8")]
612 (define_insn_and_split "*adddi_sesidi_di"
613 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
614 (plus:DI (sign_extend:DI
615 (match_operand:SI 2 "s_register_operand" "r,r"))
616 (match_operand:DI 1 "s_register_operand" "0,r")))
617 (clobber (reg:CC CC_REGNUM))]
618 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
620 "TARGET_32BIT && reload_completed"
621 [(parallel [(set (reg:CC_C CC_REGNUM)
622 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
624 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
625 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
628 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
631 operands[3] = gen_highpart (SImode, operands[0]);
632 operands[0] = gen_lowpart (SImode, operands[0]);
633 operands[4] = gen_highpart (SImode, operands[1]);
634 operands[1] = gen_lowpart (SImode, operands[1]);
635 operands[2] = gen_lowpart (SImode, operands[2]);
637 [(set_attr "conds" "clob")
638 (set_attr "length" "8")]
641 (define_insn_and_split "*adddi_zesidi_di"
642 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
643 (plus:DI (zero_extend:DI
644 (match_operand:SI 2 "s_register_operand" "r,r"))
645 (match_operand:DI 1 "s_register_operand" "0,r")))
646 (clobber (reg:CC CC_REGNUM))]
647 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
649 "TARGET_32BIT && reload_completed"
650 [(parallel [(set (reg:CC_C CC_REGNUM)
651 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
653 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
654 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
655 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
658 operands[3] = gen_highpart (SImode, operands[0]);
659 operands[0] = gen_lowpart (SImode, operands[0]);
660 operands[4] = gen_highpart (SImode, operands[1]);
661 operands[1] = gen_lowpart (SImode, operands[1]);
662 operands[2] = gen_lowpart (SImode, operands[2]);
664 [(set_attr "conds" "clob")
665 (set_attr "length" "8")]
668 (define_expand "addsi3"
669 [(set (match_operand:SI 0 "s_register_operand" "")
670 (plus:SI (match_operand:SI 1 "s_register_operand" "")
671 (match_operand:SI 2 "reg_or_int_operand" "")))]
674 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
676 arm_split_constant (PLUS, SImode, NULL_RTX,
677 INTVAL (operands[2]), operands[0], operands[1],
678 optimize && can_create_pseudo_p ());
684 ; If there is a scratch available, this will be faster than synthesizing the
687 [(match_scratch:SI 3 "r")
688 (set (match_operand:SI 0 "arm_general_register_operand" "")
689 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
690 (match_operand:SI 2 "const_int_operand" "")))]
692 !(const_ok_for_arm (INTVAL (operands[2]))
693 || const_ok_for_arm (-INTVAL (operands[2])))
694 && const_ok_for_arm (~INTVAL (operands[2]))"
695 [(set (match_dup 3) (match_dup 2))
696 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
700 ;; The r/r/k alternative is required when reloading the address
701 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
702 ;; put the duplicated register first, and not try the commutative version.
703 (define_insn_and_split "*arm_addsi3"
704 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
705 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
706 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
716 && GET_CODE (operands[2]) == CONST_INT
717 && !(const_ok_for_arm (INTVAL (operands[2]))
718 || const_ok_for_arm (-INTVAL (operands[2])))
719 && (reload_completed || !arm_eliminable_register (operands[1]))"
720 [(clobber (const_int 0))]
722 arm_split_constant (PLUS, SImode, curr_insn,
723 INTVAL (operands[2]), operands[0],
727 [(set_attr "length" "4,4,4,4,4,16")
728 (set_attr "predicable" "yes")]
731 (define_insn_and_split "*thumb1_addsi3"
732 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
733 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
734 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
737 static const char * const asms[] =
739 \"add\\t%0, %0, %2\",
740 \"sub\\t%0, %0, #%n2\",
741 \"add\\t%0, %1, %2\",
742 \"add\\t%0, %0, %2\",
743 \"add\\t%0, %0, %2\",
744 \"add\\t%0, %1, %2\",
745 \"add\\t%0, %1, %2\",
750 if ((which_alternative == 2 || which_alternative == 6)
751 && GET_CODE (operands[2]) == CONST_INT
752 && INTVAL (operands[2]) < 0)
753 return \"sub\\t%0, %1, #%n2\";
754 return asms[which_alternative];
756 "&& reload_completed && CONST_INT_P (operands[2])
757 && ((operands[1] != stack_pointer_rtx
758 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
759 || (operands[1] == stack_pointer_rtx
760 && INTVAL (operands[2]) > 1020))"
761 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
762 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
764 HOST_WIDE_INT offset = INTVAL (operands[2]);
765 if (operands[1] == stack_pointer_rtx)
771 else if (offset < -255)
774 operands[3] = GEN_INT (offset);
775 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
777 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
780 ;; Reloading and elimination of the frame pointer can
781 ;; sometimes cause this optimization to be missed.
783 [(set (match_operand:SI 0 "arm_general_register_operand" "")
784 (match_operand:SI 1 "const_int_operand" ""))
786 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
788 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
789 && (INTVAL (operands[1]) & 3) == 0"
790 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
794 (define_insn "*addsi3_compare0"
795 [(set (reg:CC_NOOV CC_REGNUM)
797 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
798 (match_operand:SI 2 "arm_add_operand" "rI,L"))
800 (set (match_operand:SI 0 "s_register_operand" "=r,r")
801 (plus:SI (match_dup 1) (match_dup 2)))]
805 sub%.\\t%0, %1, #%n2"
806 [(set_attr "conds" "set")]
809 (define_insn "*addsi3_compare0_scratch"
810 [(set (reg:CC_NOOV CC_REGNUM)
812 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
813 (match_operand:SI 1 "arm_add_operand" "rI,L"))
819 [(set_attr "conds" "set")]
822 (define_insn "*compare_negsi_si"
823 [(set (reg:CC_Z CC_REGNUM)
825 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
826 (match_operand:SI 1 "s_register_operand" "r")))]
829 [(set_attr "conds" "set")]
832 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
833 ;; addend is a constant.
834 (define_insn "*cmpsi2_addneg"
835 [(set (reg:CC CC_REGNUM)
837 (match_operand:SI 1 "s_register_operand" "r,r")
838 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
839 (set (match_operand:SI 0 "s_register_operand" "=r,r")
840 (plus:SI (match_dup 1)
841 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
842 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
845 sub%.\\t%0, %1, #%n3"
846 [(set_attr "conds" "set")]
849 ;; Convert the sequence
851 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
855 ;; bcs dest ((unsigned)rn >= 1)
856 ;; similarly for the beq variant using bcc.
857 ;; This is a common looping idiom (while (n--))
859 [(set (match_operand:SI 0 "arm_general_register_operand" "")
860 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
862 (set (match_operand 2 "cc_register" "")
863 (compare (match_dup 0) (const_int -1)))
865 (if_then_else (match_operator 3 "equality_operator"
866 [(match_dup 2) (const_int 0)])
867 (match_operand 4 "" "")
868 (match_operand 5 "" "")))]
869 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
873 (match_dup 1) (const_int 1)))
874 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
876 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
879 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
880 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
883 operands[2], const0_rtx);"
886 ;; The next four insns work because they compare the result with one of
887 ;; the operands, and we know that the use of the condition code is
888 ;; either GEU or LTU, so we can use the carry flag from the addition
889 ;; instead of doing the compare a second time.
890 (define_insn "*addsi3_compare_op1"
891 [(set (reg:CC_C CC_REGNUM)
893 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
894 (match_operand:SI 2 "arm_add_operand" "rI,L"))
896 (set (match_operand:SI 0 "s_register_operand" "=r,r")
897 (plus:SI (match_dup 1) (match_dup 2)))]
901 sub%.\\t%0, %1, #%n2"
902 [(set_attr "conds" "set")]
905 (define_insn "*addsi3_compare_op2"
906 [(set (reg:CC_C CC_REGNUM)
908 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
909 (match_operand:SI 2 "arm_add_operand" "rI,L"))
911 (set (match_operand:SI 0 "s_register_operand" "=r,r")
912 (plus:SI (match_dup 1) (match_dup 2)))]
916 sub%.\\t%0, %1, #%n2"
917 [(set_attr "conds" "set")]
920 (define_insn "*compare_addsi2_op0"
921 [(set (reg:CC_C CC_REGNUM)
923 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
924 (match_operand:SI 1 "arm_add_operand" "rI,L"))
930 [(set_attr "conds" "set")]
933 (define_insn "*compare_addsi2_op1"
934 [(set (reg:CC_C CC_REGNUM)
936 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
937 (match_operand:SI 1 "arm_add_operand" "rI,L"))
943 [(set_attr "conds" "set")]
946 (define_insn "*addsi3_carryin_<optab>"
947 [(set (match_operand:SI 0 "s_register_operand" "=r")
948 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
949 (match_operand:SI 2 "arm_rhs_operand" "rI"))
950 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
953 [(set_attr "conds" "use")]
956 (define_insn "*addsi3_carryin_alt2_<optab>"
957 [(set (match_operand:SI 0 "s_register_operand" "=r")
958 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
959 (match_operand:SI 1 "s_register_operand" "%r"))
960 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
963 [(set_attr "conds" "use")]
966 (define_insn "*addsi3_carryin_shift_<optab>"
967 [(set (match_operand:SI 0 "s_register_operand" "=r")
969 (match_operator:SI 2 "shift_operator"
970 [(match_operand:SI 3 "s_register_operand" "r")
971 (match_operand:SI 4 "reg_or_int_operand" "rM")])
972 (match_operand:SI 1 "s_register_operand" "r"))
973 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
975 "adc%?\\t%0, %1, %3%S2"
976 [(set_attr "conds" "use")
977 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
978 (const_string "alu_shift")
979 (const_string "alu_shift_reg")))]
982 (define_expand "incscc"
983 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
984 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
985 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
986 (match_operand:SI 1 "s_register_operand" "0,?r")))]
991 (define_insn "*arm_incscc"
992 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
993 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
994 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
995 (match_operand:SI 1 "s_register_operand" "0,?r")))]
999 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1000 [(set_attr "conds" "use")
1001 (set_attr "length" "4,8")]
1004 ; Transform ((x << y) - 1) to ~(~(x - 1) << y) where x is a constant.
1006 [(set (match_operand:SI 0 "s_register_operand" "")
1007 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1008 (match_operand:SI 2 "s_register_operand" ""))
1010 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1012 [(set (match_dup 3) (match_dup 1))
1013 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1015 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1018 (define_expand "addsf3"
1019 [(set (match_operand:SF 0 "s_register_operand" "")
1020 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1021 (match_operand:SF 2 "arm_float_add_operand" "")))]
1022 "TARGET_32BIT && TARGET_HARD_FLOAT"
1025 && !cirrus_fp_register (operands[2], SFmode))
1026 operands[2] = force_reg (SFmode, operands[2]);
1029 (define_expand "adddf3"
1030 [(set (match_operand:DF 0 "s_register_operand" "")
1031 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1032 (match_operand:DF 2 "arm_float_add_operand" "")))]
1033 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1036 && !cirrus_fp_register (operands[2], DFmode))
1037 operands[2] = force_reg (DFmode, operands[2]);
1040 (define_expand "subdi3"
1042 [(set (match_operand:DI 0 "s_register_operand" "")
1043 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1044 (match_operand:DI 2 "s_register_operand" "")))
1045 (clobber (reg:CC CC_REGNUM))])]
1048 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1050 && cirrus_fp_register (operands[0], DImode)
1051 && cirrus_fp_register (operands[1], DImode))
1053 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1059 if (GET_CODE (operands[1]) != REG)
1060 operands[1] = force_reg (DImode, operands[1]);
1061 if (GET_CODE (operands[2]) != REG)
1062 operands[2] = force_reg (DImode, operands[2]);
;; 64-bit subtraction for 32-bit (ARM/Thumb-2) targets, excluding Neon:
;; SUBS on the low words sets the borrow, SBC on the high words consumes
;; it.  Condition codes are clobbered (conds "clob"); two instructions,
;; length 8.  Earlyclobber "&" outputs with "0" ties describe the legal
;; input/output register overlaps per alternative.
1067 (define_insn "*arm_subdi3"
1068 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1069 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1070 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1071 (clobber (reg:CC CC_REGNUM))]
1072 "TARGET_32BIT && !TARGET_NEON"
1073 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1074 [(set_attr "conds" "clob")
1075 (set_attr "length" "8")]
;; Thumb-1 64-bit subtraction: operand 0 is tied to operand 1 ("0"
;; constraint) and restricted to low registers ("l"); sub/sbc pair,
;; two 16-bit instructions, length 4.  CC is clobbered.
;; NOTE(review): the insn condition line is not visible in this chunk —
;; presumably "TARGET_THUMB1" given the name and "l" constraints; confirm.
1078 (define_insn "*thumb_subdi3"
1079 [(set (match_operand:DI 0 "register_operand" "=l")
1080 (minus:DI (match_operand:DI 1 "register_operand" "0")
1081 (match_operand:DI 2 "register_operand" "l")))
1082 (clobber (reg:CC CC_REGNUM))]
1084 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1085 [(set_attr "length" "4")]
1088 (define_insn "*subdi_di_zesidi"
1089 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1090 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1092 (match_operand:SI 2 "s_register_operand" "r,r"))))
1093 (clobber (reg:CC CC_REGNUM))]
1095 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1096 [(set_attr "conds" "clob")
1097 (set_attr "length" "8")]
1100 (define_insn "*subdi_di_sesidi"
1101 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1102 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1104 (match_operand:SI 2 "s_register_operand" "r,r"))))
1105 (clobber (reg:CC CC_REGNUM))]
1107 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1108 [(set_attr "conds" "clob")
1109 (set_attr "length" "8")]
1112 (define_insn "*subdi_zesidi_di"
1113 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1114 (minus:DI (zero_extend:DI
1115 (match_operand:SI 2 "s_register_operand" "r,r"))
1116 (match_operand:DI 1 "s_register_operand" "0,r")))
1117 (clobber (reg:CC CC_REGNUM))]
1119 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1120 [(set_attr "conds" "clob")
1121 (set_attr "length" "8")]
1124 (define_insn "*subdi_sesidi_di"
1125 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1126 (minus:DI (sign_extend:DI
1127 (match_operand:SI 2 "s_register_operand" "r,r"))
1128 (match_operand:DI 1 "s_register_operand" "0,r")))
1129 (clobber (reg:CC CC_REGNUM))]
1131 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1132 [(set_attr "conds" "clob")
1133 (set_attr "length" "8")]
1136 (define_insn "*subdi_zesidi_zesidi"
1137 [(set (match_operand:DI 0 "s_register_operand" "=r")
1138 (minus:DI (zero_extend:DI
1139 (match_operand:SI 1 "s_register_operand" "r"))
1141 (match_operand:SI 2 "s_register_operand" "r"))))
1142 (clobber (reg:CC CC_REGNUM))]
1144 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1145 [(set_attr "conds" "clob")
1146 (set_attr "length" "8")]
1149 (define_expand "subsi3"
1150 [(set (match_operand:SI 0 "s_register_operand" "")
1151 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1152 (match_operand:SI 2 "s_register_operand" "")))]
1155 if (GET_CODE (operands[1]) == CONST_INT)
1159 arm_split_constant (MINUS, SImode, NULL_RTX,
1160 INTVAL (operands[1]), operands[0],
1161 operands[2], optimize && can_create_pseudo_p ());
1164 else /* TARGET_THUMB1 */
1165 operands[1] = force_reg (SImode, operands[1]);
1170 (define_insn "thumb1_subsi3_insn"
1171 [(set (match_operand:SI 0 "register_operand" "=l")
1172 (minus:SI (match_operand:SI 1 "register_operand" "l")
1173 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1176 [(set_attr "length" "2")
1177 (set_attr "conds" "set")])
1179 ; ??? Check Thumb-2 split length
1180 (define_insn_and_split "*arm_subsi3_insn"
1181 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1182 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1183 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1191 "&& ((GET_CODE (operands[1]) == CONST_INT
1192 && !const_ok_for_arm (INTVAL (operands[1])))
1193 || (GET_CODE (operands[2]) == CONST_INT
1194 && !const_ok_for_arm (INTVAL (operands[2]))))"
1195 [(clobber (const_int 0))]
1197 arm_split_constant (MINUS, SImode, curr_insn,
1198 INTVAL (operands[1]), operands[0], operands[2], 0);
1201 [(set_attr "length" "4,4,4,16,16")
1202 (set_attr "predicable" "yes")]
1206 [(match_scratch:SI 3 "r")
1207 (set (match_operand:SI 0 "arm_general_register_operand" "")
1208 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1209 (match_operand:SI 2 "arm_general_register_operand" "")))]
1211 && !const_ok_for_arm (INTVAL (operands[1]))
1212 && const_ok_for_arm (~INTVAL (operands[1]))"
1213 [(set (match_dup 3) (match_dup 1))
1214 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1218 (define_insn "*subsi3_compare0"
1219 [(set (reg:CC_NOOV CC_REGNUM)
1221 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1222 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1224 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1225 (minus:SI (match_dup 1) (match_dup 2)))]
1230 [(set_attr "conds" "set")]
1233 (define_insn "*subsi3_compare"
1234 [(set (reg:CC CC_REGNUM)
1235 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1236 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1237 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1238 (minus:SI (match_dup 1) (match_dup 2)))]
1243 [(set_attr "conds" "set")]
1246 (define_expand "decscc"
1247 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1248 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1249 (match_operator:SI 2 "arm_comparison_operator"
1250 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1255 (define_insn "*arm_decscc"
1256 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1257 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1258 (match_operator:SI 2 "arm_comparison_operator"
1259 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1263 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1264 [(set_attr "conds" "use")
1265 (set_attr "length" "*,8")]
1268 (define_expand "subsf3"
1269 [(set (match_operand:SF 0 "s_register_operand" "")
1270 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1271 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1272 "TARGET_32BIT && TARGET_HARD_FLOAT"
1274 if (TARGET_MAVERICK)
1276 if (!cirrus_fp_register (operands[1], SFmode))
1277 operands[1] = force_reg (SFmode, operands[1]);
1278 if (!cirrus_fp_register (operands[2], SFmode))
1279 operands[2] = force_reg (SFmode, operands[2]);
1283 (define_expand "subdf3"
1284 [(set (match_operand:DF 0 "s_register_operand" "")
1285 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1286 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1287 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1289 if (TARGET_MAVERICK)
1291 if (!cirrus_fp_register (operands[1], DFmode))
1292 operands[1] = force_reg (DFmode, operands[1]);
1293 if (!cirrus_fp_register (operands[2], DFmode))
1294 operands[2] = force_reg (DFmode, operands[2]);
1299 ;; Multiplication insns
1301 (define_expand "mulsi3"
1302 [(set (match_operand:SI 0 "s_register_operand" "")
1303 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1304 (match_operand:SI 1 "s_register_operand" "")))]
1309 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; Pre-v6 32-bit multiply.  The "&" earlyclobber on operand 0 combined
;; with the "%0" tie on operand 1 prevents operand 0 from being
;; allocated the same register as operand 2 (the pre-v6 MUL restriction
;; that the destination differ from the first source).  "%" makes the
;; multiply operands commutative for register allocation.
1310 (define_insn "*arm_mulsi3"
1311 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1312 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1313 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1314 "TARGET_32BIT && !arm_arch6"
1315 "mul%?\\t%0, %2, %1"
1316 [(set_attr "insn" "mul")
1317 (set_attr "predicable" "yes")]
1320 (define_insn "*arm_mulsi3_v6"
1321 [(set (match_operand:SI 0 "s_register_operand" "=r")
1322 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1323 (match_operand:SI 2 "s_register_operand" "r")))]
1324 "TARGET_32BIT && arm_arch6"
1325 "mul%?\\t%0, %1, %2"
1326 [(set_attr "insn" "mul")
1327 (set_attr "predicable" "yes")]
1330 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1331 ; 1 and 2 are the same, because reload will make operand 0 match
1332 ; operand 1 without realizing that this conflicts with operand 2. We fix
1333 ; this by adding another alternative to match this case, and then `reload'
1334 ; it ourselves. This alternative must come first.
;; Thumb-1 pre-v6 multiply.  Alternatives 0 and 1 first copy operand 1
;; into operand 0 and then multiply (mov+mul, length 4); alternative 2
;; ties operand 1 to operand 0 and emits a bare mul (length 2).  The
;; third alternative exists to catch reload making operand 0 match
;; operand 1 when operands 1 and 2 are the same register.
1335 (define_insn "*thumb_mulsi3"
1336 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1337 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1338 (match_operand:SI 2 "register_operand" "l,l,l")))]
1339 "TARGET_THUMB1 && !arm_arch6"
1341 if (which_alternative < 2)
1342 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1344 return \"mul\\t%0, %2\";
1346 [(set_attr "length" "4,4,2")
1347 (set_attr "insn" "mul")]
1350 (define_insn "*thumb_mulsi3_v6"
1351 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1352 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1353 (match_operand:SI 2 "register_operand" "l,0,0")))]
1354 "TARGET_THUMB1 && arm_arch6"
1359 [(set_attr "length" "2")
1360 (set_attr "insn" "mul")]
1363 (define_insn "*mulsi3_compare0"
1364 [(set (reg:CC_NOOV CC_REGNUM)
1365 (compare:CC_NOOV (mult:SI
1366 (match_operand:SI 2 "s_register_operand" "r,r")
1367 (match_operand:SI 1 "s_register_operand" "%0,r"))
1369 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1370 (mult:SI (match_dup 2) (match_dup 1)))]
1371 "TARGET_ARM && !arm_arch6"
1372 "mul%.\\t%0, %2, %1"
1373 [(set_attr "conds" "set")
1374 (set_attr "insn" "muls")]
1377 (define_insn "*mulsi3_compare0_v6"
1378 [(set (reg:CC_NOOV CC_REGNUM)
1379 (compare:CC_NOOV (mult:SI
1380 (match_operand:SI 2 "s_register_operand" "r")
1381 (match_operand:SI 1 "s_register_operand" "r"))
1383 (set (match_operand:SI 0 "s_register_operand" "=r")
1384 (mult:SI (match_dup 2) (match_dup 1)))]
1385 "TARGET_ARM && arm_arch6 && optimize_size"
1386 "mul%.\\t%0, %2, %1"
1387 [(set_attr "conds" "set")
1388 (set_attr "insn" "muls")]
1391 (define_insn "*mulsi_compare0_scratch"
1392 [(set (reg:CC_NOOV CC_REGNUM)
1393 (compare:CC_NOOV (mult:SI
1394 (match_operand:SI 2 "s_register_operand" "r,r")
1395 (match_operand:SI 1 "s_register_operand" "%0,r"))
1397 (clobber (match_scratch:SI 0 "=&r,&r"))]
1398 "TARGET_ARM && !arm_arch6"
1399 "mul%.\\t%0, %2, %1"
1400 [(set_attr "conds" "set")
1401 (set_attr "insn" "muls")]
1404 (define_insn "*mulsi_compare0_scratch_v6"
1405 [(set (reg:CC_NOOV CC_REGNUM)
1406 (compare:CC_NOOV (mult:SI
1407 (match_operand:SI 2 "s_register_operand" "r")
1408 (match_operand:SI 1 "s_register_operand" "r"))
1410 (clobber (match_scratch:SI 0 "=r"))]
1411 "TARGET_ARM && arm_arch6 && optimize_size"
1412 "mul%.\\t%0, %2, %1"
1413 [(set_attr "conds" "set")
1414 (set_attr "insn" "muls")]
1417 ;; Unnamed templates to match MLA instruction.
1419 (define_insn "*mulsi3addsi"
1420 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1422 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1423 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1424 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1425 "TARGET_32BIT && !arm_arch6"
1426 "mla%?\\t%0, %2, %1, %3"
1427 [(set_attr "insn" "mla")
1428 (set_attr "predicable" "yes")]
1431 (define_insn "*mulsi3addsi_v6"
1432 [(set (match_operand:SI 0 "s_register_operand" "=r")
1434 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1435 (match_operand:SI 1 "s_register_operand" "r"))
1436 (match_operand:SI 3 "s_register_operand" "r")))]
1437 "TARGET_32BIT && arm_arch6"
1438 "mla%?\\t%0, %2, %1, %3"
1439 [(set_attr "insn" "mla")
1440 (set_attr "predicable" "yes")]
;; Flag-setting multiply-accumulate (MLAS) comparing the result against
;; zero (CC_NOOV).  The "&" earlyclobber outputs with "%0"/"0" ties
;; express the pre-v6 restriction on register overlap, mirroring
;; *mulsi3addsi_compare0_scratch below.
;; FIX: the insn condition previously read "TARGET_ARM && arm_arch6",
;; which duplicated the coverage of *mulsi3addsi_compare0_v6 below and
;; left pre-v6 cores without any flag-setting MLA pattern.  It must be
;; !arm_arch6, consistent with every other non-v6 multiply pattern here
;; (*mulsi3_compare0, *mulsi_compare0_scratch, *mulsi3addsi,
;; *mulsi3addsi_compare0_scratch).
1443 (define_insn "*mulsi3addsi_compare0"
1444 [(set (reg:CC_NOOV CC_REGNUM)
1447 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1448 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1449 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1451 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1452 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1454 "TARGET_ARM && !arm_arch6"
1455 "mla%.\\t%0, %2, %1, %3"
1456 [(set_attr "conds" "set")
1457 (set_attr "insn" "mlas")]
1460 (define_insn "*mulsi3addsi_compare0_v6"
1461 [(set (reg:CC_NOOV CC_REGNUM)
1464 (match_operand:SI 2 "s_register_operand" "r")
1465 (match_operand:SI 1 "s_register_operand" "r"))
1466 (match_operand:SI 3 "s_register_operand" "r"))
1468 (set (match_operand:SI 0 "s_register_operand" "=r")
1469 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1471 "TARGET_ARM && arm_arch6 && optimize_size"
1472 "mla%.\\t%0, %2, %1, %3"
1473 [(set_attr "conds" "set")
1474 (set_attr "insn" "mlas")]
1477 (define_insn "*mulsi3addsi_compare0_scratch"
1478 [(set (reg:CC_NOOV CC_REGNUM)
1481 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1482 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1483 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1485 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1486 "TARGET_ARM && !arm_arch6"
1487 "mla%.\\t%0, %2, %1, %3"
1488 [(set_attr "conds" "set")
1489 (set_attr "insn" "mlas")]
1492 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1493 [(set (reg:CC_NOOV CC_REGNUM)
1496 (match_operand:SI 2 "s_register_operand" "r")
1497 (match_operand:SI 1 "s_register_operand" "r"))
1498 (match_operand:SI 3 "s_register_operand" "r"))
1500 (clobber (match_scratch:SI 0 "=r"))]
1501 "TARGET_ARM && arm_arch6 && optimize_size"
1502 "mla%.\\t%0, %2, %1, %3"
1503 [(set_attr "conds" "set")
1504 (set_attr "insn" "mlas")]
1507 (define_insn "*mulsi3subsi"
1508 [(set (match_operand:SI 0 "s_register_operand" "=r")
1510 (match_operand:SI 3 "s_register_operand" "r")
1511 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1512 (match_operand:SI 1 "s_register_operand" "r"))))]
1513 "TARGET_32BIT && arm_arch_thumb2"
1514 "mls%?\\t%0, %2, %1, %3"
1515 [(set_attr "insn" "mla")
1516 (set_attr "predicable" "yes")]
1519 (define_expand "maddsidi4"
1520 [(set (match_operand:DI 0 "s_register_operand" "")
1523 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1524 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1525 (match_operand:DI 3 "s_register_operand" "")))]
1526 "TARGET_32BIT && arm_arch3m"
1529 (define_insn "*mulsidi3adddi"
1530 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1533 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1534 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1535 (match_operand:DI 1 "s_register_operand" "0")))]
1536 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1537 "smlal%?\\t%Q0, %R0, %3, %2"
1538 [(set_attr "insn" "smlal")
1539 (set_attr "predicable" "yes")]
1542 (define_insn "*mulsidi3adddi_v6"
1543 [(set (match_operand:DI 0 "s_register_operand" "=r")
1546 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1547 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1548 (match_operand:DI 1 "s_register_operand" "0")))]
1549 "TARGET_32BIT && arm_arch6"
1550 "smlal%?\\t%Q0, %R0, %3, %2"
1551 [(set_attr "insn" "smlal")
1552 (set_attr "predicable" "yes")]
1555 ;; 32x32->64 widening multiply.
1556 ;; As with mulsi3, the only difference between the v3-5 and v6+
1557 ;; versions of these patterns is the requirement that the output not
1558 ;; overlap the inputs, but that still means we have to have a named
1559 ;; expander and two different starred insns.
1561 (define_expand "mulsidi3"
1562 [(set (match_operand:DI 0 "s_register_operand" "")
1564 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1565 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1566 "TARGET_32BIT && arm_arch3m"
1570 (define_insn "*mulsidi3_nov6"
1571 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1573 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1574 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1575 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1576 "smull%?\\t%Q0, %R0, %1, %2"
1577 [(set_attr "insn" "smull")
1578 (set_attr "predicable" "yes")]
;; v6 signed 32x32->64 widening multiply: SMULL writes the low half of
;; the product to %Q0 and the high half to %R0.  Plain "=r" output (no
;; earlyclobber) — per the block comment above, on arm_arch6 the output
;; is allowed to overlap the inputs, unlike the _nov6 variant.
1581 (define_insn "*mulsidi3_v6"
1582 [(set (match_operand:DI 0 "s_register_operand" "=r")
1584 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1585 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1586 "TARGET_32BIT && arm_arch6"
1587 "smull%?\\t%Q0, %R0, %1, %2"
1588 [(set_attr "insn" "smull")
1589 (set_attr "predicable" "yes")]
1592 (define_expand "umulsidi3"
1593 [(set (match_operand:DI 0 "s_register_operand" "")
1595 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1596 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1597 "TARGET_32BIT && arm_arch3m"
1601 (define_insn "*umulsidi3_nov6"
1602 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1604 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1605 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1606 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1607 "umull%?\\t%Q0, %R0, %1, %2"
1608 [(set_attr "insn" "umull")
1609 (set_attr "predicable" "yes")]
1612 (define_insn "*umulsidi3_v6"
1613 [(set (match_operand:DI 0 "s_register_operand" "=r")
1615 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1616 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1617 "TARGET_32BIT && arm_arch6"
1618 "umull%?\\t%Q0, %R0, %1, %2"
1619 [(set_attr "insn" "umull")
1620 (set_attr "predicable" "yes")]
1623 (define_expand "umaddsidi4"
1624 [(set (match_operand:DI 0 "s_register_operand" "")
1627 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1628 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1629 (match_operand:DI 3 "s_register_operand" "")))]
1630 "TARGET_32BIT && arm_arch3m"
1633 (define_insn "*umulsidi3adddi"
1634 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1637 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1638 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1639 (match_operand:DI 1 "s_register_operand" "0")))]
1640 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1641 "umlal%?\\t%Q0, %R0, %3, %2"
1642 [(set_attr "insn" "umlal")
1643 (set_attr "predicable" "yes")]
1646 (define_insn "*umulsidi3adddi_v6"
1647 [(set (match_operand:DI 0 "s_register_operand" "=r")
1650 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1651 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1652 (match_operand:DI 1 "s_register_operand" "0")))]
1653 "TARGET_32BIT && arm_arch6"
1654 "umlal%?\\t%Q0, %R0, %3, %2"
1655 [(set_attr "insn" "umlal")
1656 (set_attr "predicable" "yes")]
1659 (define_expand "smulsi3_highpart"
1661 [(set (match_operand:SI 0 "s_register_operand" "")
1665 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1666 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1668 (clobber (match_scratch:SI 3 ""))])]
1669 "TARGET_32BIT && arm_arch3m"
1673 (define_insn "*smulsi3_highpart_nov6"
1674 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1678 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1679 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1681 (clobber (match_scratch:SI 3 "=&r,&r"))]
1682 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1683 "smull%?\\t%3, %0, %2, %1"
1684 [(set_attr "insn" "smull")
1685 (set_attr "predicable" "yes")]
1688 (define_insn "*smulsi3_highpart_v6"
1689 [(set (match_operand:SI 0 "s_register_operand" "=r")
1693 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1694 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1696 (clobber (match_scratch:SI 3 "=r"))]
1697 "TARGET_32BIT && arm_arch6"
1698 "smull%?\\t%3, %0, %2, %1"
1699 [(set_attr "insn" "smull")
1700 (set_attr "predicable" "yes")]
1703 (define_expand "umulsi3_highpart"
1705 [(set (match_operand:SI 0 "s_register_operand" "")
1709 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1710 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1712 (clobber (match_scratch:SI 3 ""))])]
1713 "TARGET_32BIT && arm_arch3m"
1717 (define_insn "*umulsi3_highpart_nov6"
1718 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1722 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1723 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1725 (clobber (match_scratch:SI 3 "=&r,&r"))]
1726 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1727 "umull%?\\t%3, %0, %2, %1"
1728 [(set_attr "insn" "umull")
1729 (set_attr "predicable" "yes")]
1732 (define_insn "*umulsi3_highpart_v6"
1733 [(set (match_operand:SI 0 "s_register_operand" "=r")
1737 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1738 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1740 (clobber (match_scratch:SI 3 "=r"))]
1741 "TARGET_32BIT && arm_arch6"
1742 "umull%?\\t%3, %0, %2, %1"
1743 [(set_attr "insn" "umull")
1744 (set_attr "predicable" "yes")]
1747 (define_insn "mulhisi3"
1748 [(set (match_operand:SI 0 "s_register_operand" "=r")
1749 (mult:SI (sign_extend:SI
1750 (match_operand:HI 1 "s_register_operand" "%r"))
1752 (match_operand:HI 2 "s_register_operand" "r"))))]
1753 "TARGET_DSP_MULTIPLY"
1754 "smulbb%?\\t%0, %1, %2"
1755 [(set_attr "insn" "smulxy")
1756 (set_attr "predicable" "yes")]
1759 (define_insn "*mulhisi3tb"
1760 [(set (match_operand:SI 0 "s_register_operand" "=r")
1761 (mult:SI (ashiftrt:SI
1762 (match_operand:SI 1 "s_register_operand" "r")
1765 (match_operand:HI 2 "s_register_operand" "r"))))]
1766 "TARGET_DSP_MULTIPLY"
1767 "smultb%?\\t%0, %1, %2"
1768 [(set_attr "insn" "smulxy")
1769 (set_attr "predicable" "yes")]
1772 (define_insn "*mulhisi3bt"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1774 (mult:SI (sign_extend:SI
1775 (match_operand:HI 1 "s_register_operand" "r"))
1777 (match_operand:SI 2 "s_register_operand" "r")
1779 "TARGET_DSP_MULTIPLY"
1780 "smulbt%?\\t%0, %1, %2"
1781 [(set_attr "insn" "smulxy")
1782 (set_attr "predicable" "yes")]
1785 (define_insn "*mulhisi3tt"
1786 [(set (match_operand:SI 0 "s_register_operand" "=r")
1787 (mult:SI (ashiftrt:SI
1788 (match_operand:SI 1 "s_register_operand" "r")
1791 (match_operand:SI 2 "s_register_operand" "r")
1793 "TARGET_DSP_MULTIPLY"
1794 "smultt%?\\t%0, %1, %2"
1795 [(set_attr "insn" "smulxy")
1796 (set_attr "predicable" "yes")]
;; 16x16+32 multiply-accumulate: sign-extend the HImode operands 1 and
;; 2, multiply, and add operand 3 (SMLABB — bottom x bottom halfwords).
;; Named pattern, available when TARGET_DSP_MULTIPLY.
1799 (define_insn "maddhisi4"
1800 [(set (match_operand:SI 0 "s_register_operand" "=r")
1801 (plus:SI (mult:SI (sign_extend:SI
1802 (match_operand:HI 1 "s_register_operand" "r"))
1804 (match_operand:HI 2 "s_register_operand" "r")))
1805 (match_operand:SI 3 "s_register_operand" "r")))]
1806 "TARGET_DSP_MULTIPLY"
1807 "smlabb%?\\t%0, %1, %2, %3"
1808 [(set_attr "insn" "smlaxy")
1809 (set_attr "predicable" "yes")]
1812 (define_insn "*maddhidi4"
1813 [(set (match_operand:DI 0 "s_register_operand" "=r")
1815 (mult:DI (sign_extend:DI
1816 (match_operand:HI 1 "s_register_operand" "r"))
1818 (match_operand:HI 2 "s_register_operand" "r")))
1819 (match_operand:DI 3 "s_register_operand" "0")))]
1820 "TARGET_DSP_MULTIPLY"
1821 "smlalbb%?\\t%Q0, %R0, %1, %2"
1822 [(set_attr "insn" "smlalxy")
1823 (set_attr "predicable" "yes")])
1825 (define_expand "mulsf3"
1826 [(set (match_operand:SF 0 "s_register_operand" "")
1827 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1828 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1829 "TARGET_32BIT && TARGET_HARD_FLOAT"
1832 && !cirrus_fp_register (operands[2], SFmode))
1833 operands[2] = force_reg (SFmode, operands[2]);
1836 (define_expand "muldf3"
1837 [(set (match_operand:DF 0 "s_register_operand" "")
1838 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1839 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1840 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1843 && !cirrus_fp_register (operands[2], DFmode))
1844 operands[2] = force_reg (DFmode, operands[2]);
1849 (define_expand "divsf3"
1850 [(set (match_operand:SF 0 "s_register_operand" "")
1851 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1852 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1853 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1856 (define_expand "divdf3"
1857 [(set (match_operand:DF 0 "s_register_operand" "")
1858 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1859 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1860 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1865 (define_expand "modsf3"
1866 [(set (match_operand:SF 0 "s_register_operand" "")
1867 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1868 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1869 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1872 (define_expand "moddf3"
1873 [(set (match_operand:DF 0 "s_register_operand" "")
1874 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1875 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1876 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1879 ;; Boolean and,ior,xor insns
1881 ;; Split up double word logical operations
1883 ;; Split up simple DImode logical operations. Simply perform the logical
1884 ;; operation on the upper and lower halves of the registers.
1886 [(set (match_operand:DI 0 "s_register_operand" "")
1887 (match_operator:DI 6 "logical_binary_operator"
1888 [(match_operand:DI 1 "s_register_operand" "")
1889 (match_operand:DI 2 "s_register_operand" "")]))]
1890 "TARGET_32BIT && reload_completed
1891 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1892 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1893 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1894 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1897 operands[3] = gen_highpart (SImode, operands[0]);
1898 operands[0] = gen_lowpart (SImode, operands[0]);
1899 operands[4] = gen_highpart (SImode, operands[1]);
1900 operands[1] = gen_lowpart (SImode, operands[1]);
1901 operands[5] = gen_highpart (SImode, operands[2]);
1902 operands[2] = gen_lowpart (SImode, operands[2]);
1907 [(set (match_operand:DI 0 "s_register_operand" "")
1908 (match_operator:DI 6 "logical_binary_operator"
1909 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1910 (match_operand:DI 1 "s_register_operand" "")]))]
1911 "TARGET_32BIT && reload_completed"
1912 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1913 (set (match_dup 3) (match_op_dup:SI 6
1914 [(ashiftrt:SI (match_dup 2) (const_int 31))
1918 operands[3] = gen_highpart (SImode, operands[0]);
1919 operands[0] = gen_lowpart (SImode, operands[0]);
1920 operands[4] = gen_highpart (SImode, operands[1]);
1921 operands[1] = gen_lowpart (SImode, operands[1]);
1922 operands[5] = gen_highpart (SImode, operands[2]);
1923 operands[2] = gen_lowpart (SImode, operands[2]);
1927 ;; The zero extend of operand 2 means we can just copy the high part of
1928 ;; operand1 into operand0.
1930 [(set (match_operand:DI 0 "s_register_operand" "")
1932 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1933 (match_operand:DI 1 "s_register_operand" "")))]
1934 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1935 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1936 (set (match_dup 3) (match_dup 4))]
1939 operands[4] = gen_highpart (SImode, operands[1]);
1940 operands[3] = gen_highpart (SImode, operands[0]);
1941 operands[0] = gen_lowpart (SImode, operands[0]);
1942 operands[1] = gen_lowpart (SImode, operands[1]);
1946 ;; The zero extend of operand 2 means we can just copy the high part of
1947 ;; operand1 into operand0.
1949 [(set (match_operand:DI 0 "s_register_operand" "")
1951 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1952 (match_operand:DI 1 "s_register_operand" "")))]
1953 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1954 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1955 (set (match_dup 3) (match_dup 4))]
1958 operands[4] = gen_highpart (SImode, operands[1]);
1959 operands[3] = gen_highpart (SImode, operands[0]);
1960 operands[0] = gen_lowpart (SImode, operands[0]);
1961 operands[1] = gen_lowpart (SImode, operands[1]);
1965 (define_expand "anddi3"
1966 [(set (match_operand:DI 0 "s_register_operand" "")
1967 (and:DI (match_operand:DI 1 "s_register_operand" "")
1968 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
;; DImode AND kept as one insn until split into two SImode ANDs
;; (length 8); excluded when iWMMXt or Neon handle DImode logic ops
;; themselves.  "%0" tie / "&" earlyclobber describe the permitted
;; overlap of operand 0 with operand 1.
;; NOTE(review): the output-template line is not visible in this chunk —
;; presumably "#" so the generic DImode logical splitter fires; confirm.
1973 (define_insn "*anddi3_insn"
1974 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1975 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1976 (match_operand:DI 2 "s_register_operand" "r,r")))]
1977 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1979 [(set_attr "length" "8")]
1982 (define_insn_and_split "*anddi_zesidi_di"
1983 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1984 (and:DI (zero_extend:DI
1985 (match_operand:SI 2 "s_register_operand" "r,r"))
1986 (match_operand:DI 1 "s_register_operand" "0,r")))]
1989 "TARGET_32BIT && reload_completed"
1990 ; The zero extend of operand 2 clears the high word of the output
1992 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1993 (set (match_dup 3) (const_int 0))]
1996 operands[3] = gen_highpart (SImode, operands[0]);
1997 operands[0] = gen_lowpart (SImode, operands[0]);
1998 operands[1] = gen_lowpart (SImode, operands[1]);
2000 [(set_attr "length" "8")]
2003 (define_insn "*anddi_sesdi_di"
2004 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2005 (and:DI (sign_extend:DI
2006 (match_operand:SI 2 "s_register_operand" "r,r"))
2007 (match_operand:DI 1 "s_register_operand" "0,r")))]
2010 [(set_attr "length" "8")]
;; 32-bit AND expander.  For CONST_INT operand 2:
;;  * AND with 255 on ARMv6+ is rewritten as a QImode zero-extend;
;;  * otherwise (32-bit targets) arm_split_constant emits an optimal
;;    instruction sequence for the constant.
;; On Thumb-1: non-constants are forced into registers; an inverted
;; constant below 256 uses BIC; a low-bit mask (1<<i)-1 uses extzv; an
;; inverted low-bit mask uses a logical-right/left shift pair; anything
;; else is forced into a register.
;; NOTE(review): several interior lines (braces, else-arms) are elided in
;; this excerpt.
2013 (define_expand "andsi3"
2014 [(set (match_operand:SI 0 "s_register_operand" "")
2015 (and:SI (match_operand:SI 1 "s_register_operand" "")
2016 (match_operand:SI 2 "reg_or_int_operand" "")))]
2021 if (GET_CODE (operands[2]) == CONST_INT)
2023 if (INTVAL (operands[2]) == 255 && arm_arch6)
2025 operands[1] = convert_to_mode (QImode, operands[1], 1);
2026 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2030 arm_split_constant (AND, SImode, NULL_RTX,
2031 INTVAL (operands[2]), operands[0],
2033 optimize && can_create_pseudo_p ());
2038 else /* TARGET_THUMB1 */
2040 if (GET_CODE (operands[2]) != CONST_INT)
2042 rtx tmp = force_reg (SImode, operands[2]);
2043 if (rtx_equal_p (operands[0], operands[1]))
2047 operands[2] = operands[1];
2055 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2057 operands[2] = force_reg (SImode,
2058 GEN_INT (~INTVAL (operands[2])));
2060 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2065 for (i = 9; i <= 31; i++)
2067 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2069 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2073 else if ((((HOST_WIDE_INT) 1) << i) - 1
2074 == ~INTVAL (operands[2]))
2076 rtx shift = GEN_INT (i);
2077 rtx reg = gen_reg_rtx (SImode);
2079 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2080 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2086 operands[2] = force_reg (SImode, operands[2]);
;; 32-bit AND insn with a post-reload split for constants that are not
;; encodable directly (alternative rI) or as BIC of the inverse
;; (alternative K); such constants go back through arm_split_constant.
2092 ; ??? Check split length for Thumb-2
2093 (define_insn_and_split "*arm_andsi3_insn"
2094 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2095 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2096 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2100 bic%?\\t%0, %1, #%B2
2103 && GET_CODE (operands[2]) == CONST_INT
2104 && !(const_ok_for_arm (INTVAL (operands[2]))
2105 || const_ok_for_arm (~INTVAL (operands[2])))"
2106 [(clobber (const_int 0))]
2108 arm_split_constant (AND, SImode, curr_insn,
2109 INTVAL (operands[2]), operands[0], operands[1], 0);
2112 [(set_attr "length" "4,4,16")
2113 (set_attr "predicable" "yes")]
;; Thumb-1 register-register AND; destination is tied to operand 1 and
;; the flags are set as a side effect (conds "set").
2116 (define_insn "*thumb1_andsi3_insn"
2117 [(set (match_operand:SI 0 "register_operand" "=l")
2118 (and:SI (match_operand:SI 1 "register_operand" "%0")
2119 (match_operand:SI 2 "register_operand" "l")))]
2122 [(set_attr "length" "2")
2123 (set_attr "conds" "set")])
;; AND that also compares the result against zero (ANDS / BICS with the
;; inverted constant for the K alternative), keeping the result.
2125 (define_insn "*andsi3_compare0"
2126 [(set (reg:CC_NOOV CC_REGNUM)
2128 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2129 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2131 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2132 (and:SI (match_dup 1) (match_dup 2)))]
2136 bic%.\\t%0, %1, #%B2"
2137 [(set_attr "conds" "set")]
;; As above but the AND result itself is dead: only the flags are wanted.
;; The scratch (=X,r) is needed solely for the BIC alternative.
2140 (define_insn "*andsi3_compare0_scratch"
2141 [(set (reg:CC_NOOV CC_REGNUM)
2143 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2144 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2146 (clobber (match_scratch:SI 2 "=X,r"))]
2150 bic%.\\t%2, %0, #%B1"
2151 [(set_attr "conds" "set")]
;; Compare a bit-field against zero without needing the extracted value:
;; emitted as TST with the field mask ((1 << width) - 1) << start, built
;; in the output statement below.
2154 (define_insn "*zeroextractsi_compare0_scratch"
2155 [(set (reg:CC_NOOV CC_REGNUM)
2156 (compare:CC_NOOV (zero_extract:SI
2157 (match_operand:SI 0 "s_register_operand" "r")
2158 (match_operand 1 "const_int_operand" "n")
2159 (match_operand 2 "const_int_operand" "n"))
2162 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2163 && INTVAL (operands[1]) > 0
2164 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2165 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2167 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2168 << INTVAL (operands[2]));
2169 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2172 [(set_attr "conds" "set")]
;; Set dest = (bit-field != 0).  Splits into an AND-with-mask that both
;; sets the flags and writes dest, followed by a conditional that leaves
;; dest (zero) when EQ and stores 1 otherwise.
2175 (define_insn_and_split "*ne_zeroextractsi"
2176 [(set (match_operand:SI 0 "s_register_operand" "=r")
2177 (ne:SI (zero_extract:SI
2178 (match_operand:SI 1 "s_register_operand" "r")
2179 (match_operand:SI 2 "const_int_operand" "n")
2180 (match_operand:SI 3 "const_int_operand" "n"))
2182 (clobber (reg:CC CC_REGNUM))]
2184 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2185 && INTVAL (operands[2]) > 0
2186 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2187 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2190 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2191 && INTVAL (operands[2]) > 0
2192 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2193 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2194 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2195 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2197 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2199 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2200 (match_dup 0) (const_int 1)))]
2202 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2203 << INTVAL (operands[3]));
2205 [(set_attr "conds" "clob")
2206 (set (attr "length")
2207 (if_then_else (eq_attr "is_thumb" "yes")
;; As above, but for a bit-field that can be tested by shifting it up to
;; the top of the register (ASHIFT by 32 - width sets the flags).
2212 (define_insn_and_split "*ne_zeroextractsi_shifted"
2213 [(set (match_operand:SI 0 "s_register_operand" "=r")
2214 (ne:SI (zero_extract:SI
2215 (match_operand:SI 1 "s_register_operand" "r")
2216 (match_operand:SI 2 "const_int_operand" "n")
2219 (clobber (reg:CC CC_REGNUM))]
2223 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2224 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2226 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2228 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2229 (match_dup 0) (const_int 1)))]
2231 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2233 [(set_attr "conds" "clob")
2234 (set_attr "length" "8")]
;; dest = (bit-field != 0) ? operand4 : <else-arm not visible here>.
;; Split: AND-with-mask sets flags and dest; when EQ dest keeps the zero
;; AND result, otherwise it is replaced by operand 4.  operand 4 must not
;; overlap dest (reg_overlap_mentioned_p check).
2237 (define_insn_and_split "*ite_ne_zeroextractsi"
2238 [(set (match_operand:SI 0 "s_register_operand" "=r")
2239 (if_then_else:SI (ne (zero_extract:SI
2240 (match_operand:SI 1 "s_register_operand" "r")
2241 (match_operand:SI 2 "const_int_operand" "n")
2242 (match_operand:SI 3 "const_int_operand" "n"))
2244 (match_operand:SI 4 "arm_not_operand" "rIK")
2246 (clobber (reg:CC CC_REGNUM))]
2248 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2249 && INTVAL (operands[2]) > 0
2250 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2251 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2252 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2255 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2256 && INTVAL (operands[2]) > 0
2257 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2258 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2259 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2260 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2261 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2263 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2265 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2266 (match_dup 0) (match_dup 4)))]
2268 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2269 << INTVAL (operands[3]));
2271 [(set_attr "conds" "clob")
2272 (set_attr "length" "8")]
;; Shifted variant of the above: the field is tested by ASHIFT to the top
;; of the register instead of an AND mask.  ARM-only.
2275 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2276 [(set (match_operand:SI 0 "s_register_operand" "=r")
2277 (if_then_else:SI (ne (zero_extract:SI
2278 (match_operand:SI 1 "s_register_operand" "r")
2279 (match_operand:SI 2 "const_int_operand" "n")
2282 (match_operand:SI 3 "arm_not_operand" "rIK")
2284 (clobber (reg:CC CC_REGNUM))]
2285 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2287 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2288 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2289 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2291 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2293 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2294 (match_dup 0) (match_dup 3)))]
2296 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2298 [(set_attr "conds" "clob")
2299 (set_attr "length" "8")]
;; Rewrite an unsigned bit-field extract as a left shift (to place the
;; field at the top) followed by a logical right shift, via the scratch
;; in operand 4.  New shift counts: left = 32 - width - start,
;; right = 32 - width.
2303 [(set (match_operand:SI 0 "s_register_operand" "")
2304 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2305 (match_operand:SI 2 "const_int_operand" "")
2306 (match_operand:SI 3 "const_int_operand" "")))
2307 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2309 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2310 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2312 HOST_WIDE_INT temp = INTVAL (operands[2]);
2314 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2315 operands[3] = GEN_INT (32 - temp);
2319 ;; ??? Thumb-2 has bitfield insert/extract instructions -- use them here?
;; Same shift-pair rewrite, but for a zero_extract that feeds a shiftable
;; operator: the second shift is folded into the operator's first operand.
2321 [(set (match_operand:SI 0 "s_register_operand" "")
2322 (match_operator:SI 1 "shiftable_operator"
2323 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2324 (match_operand:SI 3 "const_int_operand" "")
2325 (match_operand:SI 4 "const_int_operand" ""))
2326 (match_operand:SI 5 "s_register_operand" "")]))
2327 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2329 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2332 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2335 HOST_WIDE_INT temp = INTVAL (operands[3]);
2337 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2338 operands[4] = GEN_INT (32 - temp);
;; Signed bit-field extract as a left shift followed by an arithmetic
;; right shift; no scratch needed since operand 0 can hold the
;; intermediate value.
2343 [(set (match_operand:SI 0 "s_register_operand" "")
2344 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2345 (match_operand:SI 2 "const_int_operand" "")
2346 (match_operand:SI 3 "const_int_operand" "")))]
2348 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2349 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2351 HOST_WIDE_INT temp = INTVAL (operands[2]);
2353 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2354 operands[3] = GEN_INT (32 - temp);
;; sign_extract feeding a shiftable operator: as above, with the
;; arithmetic right shift folded into the operator's operand.
2359 [(set (match_operand:SI 0 "s_register_operand" "")
2360 (match_operator:SI 1 "shiftable_operator"
2361 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2362 (match_operand:SI 3 "const_int_operand" "")
2363 (match_operand:SI 4 "const_int_operand" ""))
2364 (match_operand:SI 5 "s_register_operand" "")]))
2365 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2367 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2370 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2373 HOST_WIDE_INT temp = INTVAL (operands[3]);
2375 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2376 operands[4] = GEN_INT (32 - temp);
2380 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2381 ;;; represented by the bitfield, then this will produce incorrect results.
2382 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2383 ;;; which have a real bit-field insert instruction, the truncation happens
2384 ;;; in the bit-field insert instruction itself. Since arm does not have a
2385 ;;; bit-field insert instruction, we would have to emit code here to truncate
2386 ;;; the value before we insert. This loses some of the advantage of having
2387 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Strategy, as visible below:
;;  * Thumb-2: BFC via insv_zero for an all-zero field, a single ORR when
;;    the field is all-ones and encodable, otherwise BFI via insv_t2.
;;  * ARM: constant values use an AND-with-~mask / ORR-with-value pair;
;;    fields at bit 0 or ending at bit 31 with awkward masks use
;;    shift/rotate tricks; the general case masks operand 3, shifts both
;;    mask and value into place, then BIC + ORR.
;; A fresh subtarget is used when operand 0 is a SUBREG, with a final
;; move (possibly through gen_lowpart) back to the real target.
2389 (define_expand "insv"
2390 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2391 (match_operand:SI 1 "general_operand" "")
2392 (match_operand:SI 2 "general_operand" ""))
2393 (match_operand:SI 3 "reg_or_int_operand" ""))]
2394 "TARGET_ARM || arm_arch_thumb2"
2397 int start_bit = INTVAL (operands[2]);
2398 int width = INTVAL (operands[1]);
2399 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2400 rtx target, subtarget;
2402 if (arm_arch_thumb2)
2404 bool use_bfi = TRUE;
2406 if (GET_CODE (operands[3]) == CONST_INT)
2408 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2412 emit_insn (gen_insv_zero (operands[0], operands[1],
2417 /* See if the set can be done with a single orr instruction. */
2418 if (val == mask && const_ok_for_arm (val << start_bit))
2424 if (GET_CODE (operands[3]) != REG)
2425 operands[3] = force_reg (SImode, operands[3]);
2427 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2433 target = copy_rtx (operands[0]);
2434 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2435 subreg as the final target. */
2436 if (GET_CODE (target) == SUBREG)
2438 subtarget = gen_reg_rtx (SImode);
2439 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2440 < GET_MODE_SIZE (SImode))
2441 target = SUBREG_REG (target);
2446 if (GET_CODE (operands[3]) == CONST_INT)
2448 /* Since we are inserting a known constant, we may be able to
2449 reduce the number of bits that we have to clear so that
2450 the mask becomes simple. */
2451 /* ??? This code does not check to see if the new mask is actually
2452 simpler. It may not be. */
2453 rtx op1 = gen_reg_rtx (SImode);
2454 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2455 start of this pattern. */
2456 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2457 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2459 emit_insn (gen_andsi3 (op1, operands[0],
2460 gen_int_mode (~mask2, SImode)));
2461 emit_insn (gen_iorsi3 (subtarget, op1,
2462 gen_int_mode (op3_value << start_bit, SImode)));
2464 else if (start_bit == 0
2465 && !(const_ok_for_arm (mask)
2466 || const_ok_for_arm (~mask)))
2468 /* A Trick, since we are setting the bottom bits in the word,
2469 we can shift operand[3] up, operand[0] down, OR them together
2470 and rotate the result back again. This takes 3 insns, and
2471 the third might be mergeable into another op. */
2472 /* The shift up copes with the possibility that operand[3] is
2473 wider than the bitfield. */
2474 rtx op0 = gen_reg_rtx (SImode);
2475 rtx op1 = gen_reg_rtx (SImode);
2477 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2478 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2479 emit_insn (gen_iorsi3 (op1, op1, op0));
2480 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2482 else if ((width + start_bit == 32)
2483 && !(const_ok_for_arm (mask)
2484 || const_ok_for_arm (~mask)))
2486 /* Similar trick, but slightly less efficient. */
2488 rtx op0 = gen_reg_rtx (SImode);
2489 rtx op1 = gen_reg_rtx (SImode);
2491 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2492 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2493 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2494 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2498 rtx op0 = gen_int_mode (mask, SImode);
2499 rtx op1 = gen_reg_rtx (SImode);
2500 rtx op2 = gen_reg_rtx (SImode);
2502 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2504 rtx tmp = gen_reg_rtx (SImode);
2506 emit_insn (gen_movsi (tmp, op0));
2510 /* Mask out any bits in operand[3] that are not needed. */
2511 emit_insn (gen_andsi3 (op1, operands[3], op0));
2513 if (GET_CODE (op0) == CONST_INT
2514 && (const_ok_for_arm (mask << start_bit)
2515 || const_ok_for_arm (~(mask << start_bit))))
2517 op0 = gen_int_mode (~(mask << start_bit), SImode);
2518 emit_insn (gen_andsi3 (op2, operands[0], op0));
2522 if (GET_CODE (op0) == CONST_INT)
2524 rtx tmp = gen_reg_rtx (SImode);
2526 emit_insn (gen_movsi (tmp, op0));
2531 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2533 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2537 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2539 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2542 if (subtarget != target)
2544 /* If TARGET is still a SUBREG, then it must be wider than a word,
2545 so we must be careful only to set the subword we were asked to. */
2546 if (GET_CODE (target) == SUBREG)
2547 emit_move_insn (target, subtarget)
2549 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Store zero into a bit-field.  NOTE(review): the insn condition and
;; output template are on lines not visible in this excerpt (presumably
;; Thumb-2 BFC -- confirm against the full file).
2556 (define_insn "insv_zero"
2557 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2558 (match_operand:SI 1 "const_int_operand" "M")
2559 (match_operand:SI 2 "const_int_operand" "M"))
2563 [(set_attr "length" "4")
2564 (set_attr "predicable" "yes")]
;; Thumb-2 bit-field insert: BFI dest, value, lsb (%2), width (%1).
2567 (define_insn "insv_t2"
2568 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2569 (match_operand:SI 1 "const_int_operand" "M")
2570 (match_operand:SI 2 "const_int_operand" "M"))
2571 (match_operand:SI 3 "s_register_operand" "r"))]
2573 "bfi%?\t%0, %3, %2, %1"
2574 [(set_attr "length" "4")
2575 (set_attr "predicable" "yes")]
2578 ; constants for op 2 will never be given to these patterns.
;; 64-bit AND-NOT: after reload, split into two SImode BIC-style
;; operations on the low and high words.  Excluded for values living in
;; Neon or iWMMXt registers.
2579 (define_insn_and_split "*anddi_notdi_di"
2580 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2581 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2582 (match_operand:DI 2 "s_register_operand" "r,0")))]
2585 "TARGET_32BIT && reload_completed
2586 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2587 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2588 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2589 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2592 operands[3] = gen_highpart (SImode, operands[0]);
2593 operands[0] = gen_lowpart (SImode, operands[0]);
2594 operands[4] = gen_highpart (SImode, operands[1]);
2595 operands[1] = gen_lowpart (SImode, operands[1]);
2596 operands[5] = gen_highpart (SImode, operands[2]);
2597 operands[2] = gen_lowpart (SImode, operands[2]);
2599 [(set_attr "length" "8")
2600 (set_attr "predicable" "yes")]
;; AND with an inverted zero-extended value: BIC on the low word; the
;; (not (zero_extend ...)) high word is all ones, so the high word of
;; operand 1 is copied across unchanged in the split.
2603 (define_insn_and_split "*anddi_notzesidi_di"
2604 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2605 (and:DI (not:DI (zero_extend:DI
2606 (match_operand:SI 2 "s_register_operand" "r,r")))
2607 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2610 bic%?\\t%Q0, %Q1, %2
2612 ; (not (zero_extend ...)) allows us to just copy the high word from
2613 ; operand1 to operand0.
2616 && operands[0] != operands[1]"
2617 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2618 (set (match_dup 3) (match_dup 4))]
2621 operands[3] = gen_highpart (SImode, operands[0]);
2622 operands[0] = gen_lowpart (SImode, operands[0]);
2623 operands[4] = gen_highpart (SImode, operands[1]);
2624 operands[1] = gen_lowpart (SImode, operands[1]);
2626 [(set_attr "length" "4,8")
2627 (set_attr "predicable" "yes")]
;; AND with an inverted sign-extended value: BIC on the low word; the
;; high word uses the replicated sign bit (ashiftrt by 31) of operand 2.
2630 (define_insn_and_split "*anddi_notsesidi_di"
2631 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2632 (and:DI (not:DI (sign_extend:DI
2633 (match_operand:SI 2 "s_register_operand" "r,r")))
2634 (match_operand:DI 1 "s_register_operand" "0,r")))]
2637 "TARGET_32BIT && reload_completed"
2638 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2639 (set (match_dup 3) (and:SI (not:SI
2640 (ashiftrt:SI (match_dup 2) (const_int 31)))
2644 operands[3] = gen_highpart (SImode, operands[0]);
2645 operands[0] = gen_lowpart (SImode, operands[0]);
2646 operands[4] = gen_highpart (SImode, operands[1]);
2647 operands[1] = gen_lowpart (SImode, operands[1]);
2649 [(set_attr "length" "8")
2650 (set_attr "predicable" "yes")]
;; Basic SImode AND-NOT: a single BIC.
2653 (define_insn "andsi_notsi_si"
2654 [(set (match_operand:SI 0 "s_register_operand" "=r")
2655 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2656 (match_operand:SI 1 "s_register_operand" "r")))]
2658 "bic%?\\t%0, %1, %2"
2659 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC; the destination is tied to operand 2 ('0' constraint)
;; and the flags are set as a side effect.
2662 (define_insn "thumb1_bicsi3"
2663 [(set (match_operand:SI 0 "register_operand" "=l")
2664 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2665 (match_operand:SI 2 "register_operand" "0")))]
2668 [(set_attr "length" "2")
2669 (set_attr "conds" "set")])
;; BIC with a shifted second operand; the type attribute distinguishes
;; immediate shift counts from register shift counts for scheduling.
2671 (define_insn "andsi_not_shiftsi_si"
2672 [(set (match_operand:SI 0 "s_register_operand" "=r")
2673 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2674 [(match_operand:SI 2 "s_register_operand" "r")
2675 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2676 (match_operand:SI 1 "s_register_operand" "r")))]
2678 "bic%?\\t%0, %1, %2%S4"
2679 [(set_attr "predicable" "yes")
2680 (set_attr "shift" "2")
2681 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2682 (const_string "alu_shift")
2683 (const_string "alu_shift_reg")))]
;; BICS: AND-NOT that also sets the condition codes, keeping the result.
2686 (define_insn "*andsi_notsi_si_compare0"
2687 [(set (reg:CC_NOOV CC_REGNUM)
2689 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2690 (match_operand:SI 1 "s_register_operand" "r"))
2692 (set (match_operand:SI 0 "s_register_operand" "=r")
2693 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2695 "bic%.\\t%0, %1, %2"
2696 [(set_attr "conds" "set")]
;; BICS where only the flags are wanted; the result goes to a scratch.
2699 (define_insn "*andsi_notsi_si_compare0_scratch"
2700 [(set (reg:CC_NOOV CC_REGNUM)
2702 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2703 (match_operand:SI 1 "s_register_operand" "r"))
2705 (clobber (match_scratch:SI 0 "=r"))]
2707 "bic%.\\t%0, %1, %2"
2708 [(set_attr "conds" "set")]
;; 64-bit inclusive-OR expander; operand 2's predicate (neon_logic_op2)
;; additionally admits Neon logic immediates.
2711 (define_expand "iordi3"
2712 [(set (match_operand:DI 0 "s_register_operand" "")
2713 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2714 (match_operand:DI 2 "neon_logic_op2" "")))]
;; Core-register DImode ORR (length 8 => two 32-bit instructions; the
;; output template is on a line not visible in this excerpt).
2719 (define_insn "*iordi3_insn"
2720 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2721 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2722 (match_operand:DI 2 "s_register_operand" "r,r")))]
2723 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2725 [(set_attr "length" "8")
2726 (set_attr "predicable" "yes")]
;; IOR with a zero-extended SImode operand: ORR on the low word; first
;; alternative (tied registers) is a single instruction, the second
;; (length 8) also handles the high word.
2729 (define_insn "*iordi_zesidi_di"
2730 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2731 (ior:DI (zero_extend:DI
2732 (match_operand:SI 2 "s_register_operand" "r,r"))
2733 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2736 orr%?\\t%Q0, %Q1, %2
2738 [(set_attr "length" "4,8")
2739 (set_attr "predicable" "yes")]
;; IOR with a sign-extended SImode operand (two-instruction sequence;
;; template not visible in this excerpt).
2742 (define_insn "*iordi_sesidi_di"
2743 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2744 (ior:DI (sign_extend:DI
2745 (match_operand:SI 2 "s_register_operand" "r,r"))
2746 (match_operand:DI 1 "s_register_operand" "0,r")))]
2749 [(set_attr "length" "8")
2750 (set_attr "predicable" "yes")]
;; 32-bit IOR expander: CONST_INT operands on 32-bit targets go through
;; arm_split_constant; Thumb-1 forces the constant into a register.
2753 (define_expand "iorsi3"
2754 [(set (match_operand:SI 0 "s_register_operand" "")
2755 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2756 (match_operand:SI 2 "reg_or_int_operand" "")))]
2759 if (GET_CODE (operands[2]) == CONST_INT)
2763 arm_split_constant (IOR, SImode, NULL_RTX,
2764 INTVAL (operands[2]), operands[0], operands[1],
2765 optimize && can_create_pseudo_p ());
2768 else /* TARGET_THUMB1 */
2770 rtx tmp = force_reg (SImode, operands[2]);
2771 if (rtx_equal_p (operands[0], operands[1]))
2775 operands[2] = operands[1];
;; ORR insn: direct (rI), Thumb-2 ORN of the inverse (K), or a constant
;; split through arm_split_constant (length 16 alternative).
2783 (define_insn_and_split "*iorsi3_insn"
2784 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2785 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2786 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2790 orn%?\\t%0, %1, #%B2
2793 && GET_CODE (operands[2]) == CONST_INT
2794 && !(const_ok_for_arm (INTVAL (operands[2]))
2795 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2796 [(clobber (const_int 0))]
2798 arm_split_constant (IOR, SImode, curr_insn,
2799 INTVAL (operands[2]), operands[0], operands[1], 0);
2802 [(set_attr "length" "4,4,16")
2803 (set_attr "arch" "32,t2,32")
2804 (set_attr "predicable" "yes")])
;; Thumb-1 register-register ORR; destination tied to operand 1, flags
;; set as a side effect.
2806 (define_insn "*thumb1_iorsi3_insn"
2807 [(set (match_operand:SI 0 "register_operand" "=l")
2808 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2809 (match_operand:SI 2 "register_operand" "l")))]
2812 [(set_attr "length" "2")
2813 (set_attr "conds" "set")])
;; Peephole: a constant that is only encodable in inverted form (i.e.
;; loadable with MVN) is first moved into a scratch register, then a
;; register-register ORR is used.
2816 [(match_scratch:SI 3 "r")
2817 (set (match_operand:SI 0 "arm_general_register_operand" "")
2818 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2819 (match_operand:SI 2 "const_int_operand" "")))]
2821 && !const_ok_for_arm (INTVAL (operands[2]))
2822 && const_ok_for_arm (~INTVAL (operands[2]))"
2823 [(set (match_dup 3) (match_dup 2))
2824 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: inclusive-OR that also sets the condition codes, keeping the
;; result.
2828 (define_insn "*iorsi3_compare0"
2829 [(set (reg:CC_NOOV CC_REGNUM)
2830 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2831 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2833 (set (match_operand:SI 0 "s_register_operand" "=r")
2834 (ior:SI (match_dup 1) (match_dup 2)))]
2836 "orr%.\\t%0, %1, %2"
2837 [(set_attr "conds" "set")]
;; ORRS where only the flags are wanted; the result goes to a scratch.
2840 (define_insn "*iorsi3_compare0_scratch"
2841 [(set (reg:CC_NOOV CC_REGNUM)
2842 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2843 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2845 (clobber (match_scratch:SI 0 "=r"))]
2847 "orr%.\\t%0, %1, %2"
2848 [(set_attr "conds" "set")]
;; 64-bit exclusive-OR expander (register operands only, unlike iordi3:
;; Neon has no XOR-immediate form).
2851 (define_expand "xordi3"
2852 [(set (match_operand:DI 0 "s_register_operand" "")
2853 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2854 (match_operand:DI 2 "s_register_operand" "")))]
;; Core-register DImode EOR (length 8 => two 32-bit instructions; the
;; output template is on a line not visible in this excerpt).
2859 (define_insn "*xordi3_insn"
2860 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2861 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2862 (match_operand:DI 2 "s_register_operand" "r,r")))]
2863 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2865 [(set_attr "length" "8")
2866 (set_attr "predicable" "yes")]
;; XOR with a zero-extended SImode operand: EOR on the low word; the
;; second alternative (length 8) also handles the high word.
2869 (define_insn "*xordi_zesidi_di"
2870 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2871 (xor:DI (zero_extend:DI
2872 (match_operand:SI 2 "s_register_operand" "r,r"))
2873 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2876 eor%?\\t%Q0, %Q1, %2
2878 [(set_attr "length" "4,8")
2879 (set_attr "predicable" "yes")]
;; XOR with a sign-extended SImode operand (two-instruction sequence;
;; template not visible in this excerpt).
2882 (define_insn "*xordi_sesidi_di"
2883 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2884 (xor:DI (sign_extend:DI
2885 (match_operand:SI 2 "s_register_operand" "r,r"))
2886 (match_operand:DI 1 "s_register_operand" "0,r")))]
2889 [(set_attr "length" "8")
2890 (set_attr "predicable" "yes")]
;; 32-bit XOR expander: CONST_INT operands on 32-bit targets go through
;; arm_split_constant; Thumb-1 forces the constant into a register.
2893 (define_expand "xorsi3"
2894 [(set (match_operand:SI 0 "s_register_operand" "")
2895 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2896 (match_operand:SI 2 "reg_or_int_operand" "")))]
2898 "if (GET_CODE (operands[2]) == CONST_INT)
2902 arm_split_constant (XOR, SImode, NULL_RTX,
2903 INTVAL (operands[2]), operands[0], operands[1],
2904 optimize && can_create_pseudo_p ());
2907 else /* TARGET_THUMB1 */
2909 rtx tmp = force_reg (SImode, operands[2]);
2910 if (rtx_equal_p (operands[0], operands[1]))
2914 operands[2] = operands[1];
;; Plain EOR instruction.
2921 (define_insn "*arm_xorsi3"
2922 [(set (match_operand:SI 0 "s_register_operand" "=r")
2923 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2924 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2926 "eor%?\\t%0, %1, %2"
2927 [(set_attr "predicable" "yes")]
;; Thumb-1 register-register EOR; destination tied to operand 1, flags
;; set as a side effect.
2930 (define_insn "*thumb1_xorsi3_insn"
2931 [(set (match_operand:SI 0 "register_operand" "=l")
2932 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2933 (match_operand:SI 2 "register_operand" "l")))]
2936 [(set_attr "length" "2")
2937 (set_attr "conds" "set")])
;; EORS: exclusive-OR that also sets the condition codes, keeping the
;; result.
2939 (define_insn "*xorsi3_compare0"
2940 [(set (reg:CC_NOOV CC_REGNUM)
2941 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2942 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2944 (set (match_operand:SI 0 "s_register_operand" "=r")
2945 (xor:SI (match_dup 1) (match_dup 2)))]
2947 "eor%.\\t%0, %1, %2"
2948 [(set_attr "conds" "set")]
;; XOR used only for its flag result (TEQ-style; the output template is
;; on a line not visible in this excerpt).
2951 (define_insn "*xorsi3_compare0_scratch"
2952 [(set (reg:CC_NOOV CC_REGNUM)
2953 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2954 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2958 [(set_attr "conds" "set")]
2961 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2962 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; De Morgan split: (~A & ~B) | C  ==>  D = (A | B) & ~C; result = ~D,
;; using operand 4 as the scratch D.
2966 [(set (match_operand:SI 0 "s_register_operand" "")
2967 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2968 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2969 (match_operand:SI 3 "arm_rhs_operand" "")))
2970 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2972 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2973 (not:SI (match_dup 3))))
2974 (set (match_dup 0) (not:SI (match_dup 4)))]
;; The (A | B) & ~C form the split above produces: ORR followed by BIC.
2978 (define_insn "*andsi_iorsi3_notsi"
2979 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2980 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2981 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2982 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2984 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2985 [(set_attr "length" "8")
2986 (set_attr "ce_count" "2")
2987 (set_attr "predicable" "yes")]
2990 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2991 ; insns are available?
;; Four splitters that rewrite a logical op combining a bit-field
;; extract with another logical op on a shifted value.  All require the
;; outer and inner operators to match (operands 1 and 9) and the field
;; width to equal 32 - shift count; the extract is then rewritten as an
;; ashift into scratch operand 8 followed by the matching right shift.
;; Variant 1: zero_extract on the left, lshiftrt inside.
2993 [(set (match_operand:SI 0 "s_register_operand" "")
2994 (match_operator:SI 1 "logical_binary_operator"
2995 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2996 (match_operand:SI 3 "const_int_operand" "")
2997 (match_operand:SI 4 "const_int_operand" ""))
2998 (match_operator:SI 9 "logical_binary_operator"
2999 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3000 (match_operand:SI 6 "const_int_operand" ""))
3001 (match_operand:SI 7 "s_register_operand" "")])]))
3002 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3004 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3005 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3008 [(ashift:SI (match_dup 2) (match_dup 4))
3012 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3015 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 2: same as variant 1 with the operator's operands swapped
;; (zero_extract on the right).
3019 [(set (match_operand:SI 0 "s_register_operand" "")
3020 (match_operator:SI 1 "logical_binary_operator"
3021 [(match_operator:SI 9 "logical_binary_operator"
3022 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3023 (match_operand:SI 6 "const_int_operand" ""))
3024 (match_operand:SI 7 "s_register_operand" "")])
3025 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3026 (match_operand:SI 3 "const_int_operand" "")
3027 (match_operand:SI 4 "const_int_operand" ""))]))
3028 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3030 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3031 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3034 [(ashift:SI (match_dup 2) (match_dup 4))
3038 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3041 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 3: signed field (sign_extract) with ashiftrt inside.
3045 [(set (match_operand:SI 0 "s_register_operand" "")
3046 (match_operator:SI 1 "logical_binary_operator"
3047 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3048 (match_operand:SI 3 "const_int_operand" "")
3049 (match_operand:SI 4 "const_int_operand" ""))
3050 (match_operator:SI 9 "logical_binary_operator"
3051 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3052 (match_operand:SI 6 "const_int_operand" ""))
3053 (match_operand:SI 7 "s_register_operand" "")])]))
3054 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3056 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3057 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3060 [(ashift:SI (match_dup 2) (match_dup 4))
3064 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3067 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 4: signed field with the operator's operands swapped.
3071 [(set (match_operand:SI 0 "s_register_operand" "")
3072 (match_operator:SI 1 "logical_binary_operator"
3073 [(match_operator:SI 9 "logical_binary_operator"
3074 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3075 (match_operand:SI 6 "const_int_operand" ""))
3076 (match_operand:SI 7 "s_register_operand" "")])
3077 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3078 (match_operand:SI 3 "const_int_operand" "")
3079 (match_operand:SI 4 "const_int_operand" ""))]))
3080 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3082 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3083 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3086 [(ashift:SI (match_dup 2) (match_dup 4))
3090 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3093 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3097 ;; Minimum and maximum insns
;; smaxsi3: signed maximum.  The expander special-cases comparison
;; against 0 and -1, where the result can be computed without touching
;; the condition codes (see *smax_0 / *smax_m1 below).
3099 (define_expand "smaxsi3"
3101 (set (match_operand:SI 0 "s_register_operand" "")
3102 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3103 (match_operand:SI 2 "arm_rhs_operand" "")))
3104 (clobber (reg:CC CC_REGNUM))])]
3107 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3109 /* No need for a clobber of the condition code register here. */
3110 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3111 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear the low bits when x is negative — x & ~(x >> 31).
3117 (define_insn "*smax_0"
3118 [(set (match_operand:SI 0 "s_register_operand" "=r")
3119 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3122 "bic%?\\t%0, %1, %1, asr #31"
3123 [(set_attr "predicable" "yes")]
;; max(x, -1): x | (x >> 31) — all-ones when x is negative.
3126 (define_insn "*smax_m1"
3127 [(set (match_operand:SI 0 "s_register_operand" "=r")
3128 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3131 "orr%?\\t%0, %1, %1, asr #31"
3132 [(set_attr "predicable" "yes")]
;; General signed max: compare then conditionally move; clobbers CC.
3135 (define_insn "*arm_smax_insn"
3136 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3137 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3138 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3139 (clobber (reg:CC CC_REGNUM))]
3142 cmp\\t%1, %2\;movlt\\t%0, %2
3143 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3144 [(set_attr "conds" "clob")
3145 (set_attr "length" "8,12")]
;; sminsi3: signed minimum.  Mirrors smaxsi3; only min(x, 0) has a
;; CC-free special case here.
3148 (define_expand "sminsi3"
3150 (set (match_operand:SI 0 "s_register_operand" "")
3151 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3152 (match_operand:SI 2 "arm_rhs_operand" "")))
3153 (clobber (reg:CC CC_REGNUM))])]
3156 if (operands[2] == const0_rtx)
3158 /* No need for a clobber of the condition code register here. */
3159 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3160 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): x & (x >> 31) — zero when x is non-negative.
3166 (define_insn "*smin_0"
3167 [(set (match_operand:SI 0 "s_register_operand" "=r")
3168 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3171 "and%?\\t%0, %1, %1, asr #31"
3172 [(set_attr "predicable" "yes")]
;; General signed min via compare + conditional moves; clobbers CC.
3175 (define_insn "*arm_smin_insn"
3176 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3177 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3178 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3179 (clobber (reg:CC CC_REGNUM))]
3182 cmp\\t%1, %2\;movge\\t%0, %2
3183 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3184 [(set_attr "conds" "clob")
3185 (set_attr "length" "8,12")]
;; umaxsi3 / uminsi3: unsigned maximum and minimum.  The expanders have
;; no special cases; the insns use unsigned condition codes (cs/cc).
3188 (define_expand "umaxsi3"
3190 (set (match_operand:SI 0 "s_register_operand" "")
3191 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3192 (match_operand:SI 2 "arm_rhs_operand" "")))
3193 (clobber (reg:CC CC_REGNUM))])]
3198 (define_insn "*arm_umaxsi3"
3199 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3200 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3201 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3202 (clobber (reg:CC CC_REGNUM))]
3205 cmp\\t%1, %2\;movcc\\t%0, %2
3206 cmp\\t%1, %2\;movcs\\t%0, %1
3207 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3208 [(set_attr "conds" "clob")
3209 (set_attr "length" "8,8,12")]
3212 (define_expand "uminsi3"
3214 (set (match_operand:SI 0 "s_register_operand" "")
3215 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3216 (match_operand:SI 2 "arm_rhs_operand" "")))
3217 (clobber (reg:CC CC_REGNUM))])]
3222 (define_insn "*arm_uminsi3"
3223 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3224 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3225 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3226 (clobber (reg:CC CC_REGNUM))]
3229 cmp\\t%1, %2\;movcs\\t%0, %2
3230 cmp\\t%1, %2\;movcc\\t%0, %1
3231 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3232 [(set_attr "conds" "clob")
3233 (set_attr "length" "8,8,12")]
;; Store the min/max of two registers directly to memory using
;; conditional stores.  On Thumb-2 an "ite" block is emitted first
;; (see the is_thumb length attribute below).
3236 (define_insn "*store_minmaxsi"
3237 [(set (match_operand:SI 0 "memory_operand" "=m")
3238 (match_operator:SI 3 "minmax_operator"
3239 [(match_operand:SI 1 "s_register_operand" "r")
3240 (match_operand:SI 2 "s_register_operand" "r")]))
3241 (clobber (reg:CC CC_REGNUM))]
3244 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3245 operands[1], operands[2]);
3246 output_asm_insn (\"cmp\\t%1, %2\", operands);
3248 output_asm_insn (\"ite\t%d3\", operands);
3249 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3250 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3253 [(set_attr "conds" "clob")
3254 (set (attr "length")
3255 (if_then_else (eq_attr "is_thumb" "yes")
3258 (set_attr "type" "store1")]
3261 ; Reject the frame pointer in operand[1], since reloading this after
3262 ; it has been eliminated can cause carnage.
;; Fold a min/max into a following shiftable ALU operation: compute
;; cmp once, then apply the ALU op with opposite conditions for the
;; two min/max inputs.  A degenerate case (op against 0 with a
;; commutative code) is handled by the which_alternative test below.
3263 (define_insn "*minmax_arithsi"
3264 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3265 (match_operator:SI 4 "shiftable_operator"
3266 [(match_operator:SI 5 "minmax_operator"
3267 [(match_operand:SI 2 "s_register_operand" "r,r")
3268 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3269 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3270 (clobber (reg:CC CC_REGNUM))]
3271 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3274 enum rtx_code code = GET_CODE (operands[4]);
3277 if (which_alternative != 0 || operands[3] != const0_rtx
3278 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3283 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3284 operands[2], operands[3]);
3285 output_asm_insn (\"cmp\\t%2, %3\", operands);
3289 output_asm_insn (\"ite\\t%d5\", operands);
3291 output_asm_insn (\"it\\t%d5\", operands);
3293 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3295 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3298 [(set_attr "conds" "clob")
3299 (set (attr "length")
3300 (if_then_else (eq_attr "is_thumb" "yes")
3306 ;; Shift and rotation insns
;; ashldi3: DImode left shift.  Shift-by-1 uses the dedicated
;; movs/adc insn; other cases fall back unless iwmmxt/Maverick would
;; rather handle them (see inline comment below).
3308 (define_expand "ashldi3"
3309 [(set (match_operand:DI 0 "s_register_operand" "")
3310 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3311 (match_operand:SI 2 "reg_or_int_operand" "")))]
3314 if (GET_CODE (operands[2]) == CONST_INT
3316 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3318 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3321 /* Ideally we shouldn't fail here if we could know that operands[1]
3322 ends up already living in an iwmmxt register. Otherwise it's
3323 cheaper to have the alternate code being generated than moving
3324 values to iwmmxt regs and back. */
3327 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DI << 1: shift the low word setting carry, then add-with-carry the
;; high word to itself (doubling it and folding in the carried bit).
3332 (define_insn "arm_ashldi3_1bit"
3333 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3334 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3336 (clobber (reg:CC CC_REGNUM))]
3338 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3339 [(set_attr "conds" "clob")
3340 (set_attr "length" "8")]
;; ashlsi3: SImode left shift; shifts >= 32 produce constant 0.
3343 (define_expand "ashlsi3"
3344 [(set (match_operand:SI 0 "s_register_operand" "")
3345 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3346 (match_operand:SI 2 "arm_rhs_operand" "")))]
3349 if (GET_CODE (operands[2]) == CONST_INT
3350 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3352 emit_insn (gen_movsi (operands[0], const0_rtx));
3358 (define_insn "*thumb1_ashlsi3"
3359 [(set (match_operand:SI 0 "register_operand" "=l,l")
3360 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3361 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3364 [(set_attr "length" "2")
3365 (set_attr "conds" "set")])
;; ashrdi3: DImode arithmetic right shift.  Structured like ashldi3.
3367 (define_expand "ashrdi3"
3368 [(set (match_operand:DI 0 "s_register_operand" "")
3369 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3370 (match_operand:SI 2 "reg_or_int_operand" "")))]
3373 if (GET_CODE (operands[2]) == CONST_INT)
3375 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3377 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3380 /* Ideally we shouldn't fail here if we could know that operands[1]
3381 ends up already living in an iwmmxt register. Otherwise it's
3382 cheaper to have the alternate code being generated than moving
3383 values to iwmmxt regs and back. */
3386 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (arithmetic): shift the high word setting carry from its
;; low bit, then rotate the carry into the top of the low word (rrx).
3391 (define_insn "arm_ashrdi3_1bit"
3392 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3393 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3395 (clobber (reg:CC CC_REGNUM))]
3397 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3398 [(set_attr "conds" "clob")
3399 (set_attr "insn" "mov")
3400 (set_attr "length" "8")]
;; ashrsi3: shifts >= 32 are clamped to 31 (result is all sign bits).
3403 (define_expand "ashrsi3"
3404 [(set (match_operand:SI 0 "s_register_operand" "")
3405 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3406 (match_operand:SI 2 "arm_rhs_operand" "")))]
3409 if (GET_CODE (operands[2]) == CONST_INT
3410 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3411 operands[2] = GEN_INT (31);
3415 (define_insn "*thumb1_ashrsi3"
3416 [(set (match_operand:SI 0 "register_operand" "=l,l")
3417 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3418 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3421 [(set_attr "length" "2")
3422 (set_attr "conds" "set")])
;; lshrdi3: DImode logical right shift.  Structured like ashrdi3 but
;; the 1-bit case uses lsr so a zero bit enters the high word.
3424 (define_expand "lshrdi3"
3425 [(set (match_operand:DI 0 "s_register_operand" "")
3426 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3427 (match_operand:SI 2 "reg_or_int_operand" "")))]
3430 if (GET_CODE (operands[2]) == CONST_INT)
3432 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3434 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3437 /* Ideally we shouldn't fail here if we could know that operands[1]
3438 ends up already living in an iwmmxt register. Otherwise it's
3439 cheaper to have the alternate code being generated than moving
3440 values to iwmmxt regs and back. */
3443 else if (!TARGET_REALLY_IWMMXT)
3448 (define_insn "arm_lshrdi3_1bit"
3449 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3450 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3452 (clobber (reg:CC CC_REGNUM))]
3454 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3455 [(set_attr "conds" "clob")
3456 (set_attr "insn" "mov")
3457 (set_attr "length" "8")]
;; lshrsi3: shifts >= 32 produce constant 0 (unlike ashrsi3's clamp).
3460 (define_expand "lshrsi3"
3461 [(set (match_operand:SI 0 "s_register_operand" "")
3462 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3463 (match_operand:SI 2 "arm_rhs_operand" "")))]
3466 if (GET_CODE (operands[2]) == CONST_INT
3467 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3469 emit_insn (gen_movsi (operands[0], const0_rtx));
3475 (define_insn "*thumb1_lshrsi3"
3476 [(set (match_operand:SI 0 "register_operand" "=l,l")
3477 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3478 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3481 [(set_attr "length" "2")
3482 (set_attr "conds" "set")])
;; rotlsi3: ARM has no rotate-left insn, so a left rotate is expanded
;; as a right rotate by (32 - n) for constants, or by computing 32 - n
;; into a fresh register otherwise.
3484 (define_expand "rotlsi3"
3485 [(set (match_operand:SI 0 "s_register_operand" "")
3486 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3487 (match_operand:SI 2 "reg_or_int_operand" "")))]
3490 if (GET_CODE (operands[2]) == CONST_INT)
3491 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3494 rtx reg = gen_reg_rtx (SImode);
3495 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; rotrsi3: right rotate; constant amounts are reduced mod 32, and
;; Thumb-1 needs the amount in a register.
3501 (define_expand "rotrsi3"
3502 [(set (match_operand:SI 0 "s_register_operand" "")
3503 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3504 (match_operand:SI 2 "arm_rhs_operand" "")))]
3509 if (GET_CODE (operands[2]) == CONST_INT
3510 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3511 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3513 else /* TARGET_THUMB1 */
3515 if (GET_CODE (operands [2]) == CONST_INT)
3516 operands [2] = force_reg (SImode, operands[2]);
3521 (define_insn "*thumb1_rotrsi3"
3522 [(set (match_operand:SI 0 "register_operand" "=l")
3523 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3524 (match_operand:SI 2 "register_operand" "l")))]
3527 [(set_attr "length" "2")]
;; Generic SImode shift (any shift_operator); assembly is produced by
;; arm_output_shift.  The type attribute distinguishes immediate
;; (alu_shift) from register-specified (alu_shift_reg) shift amounts.
3530 (define_insn "*arm_shiftsi3"
3531 [(set (match_operand:SI 0 "s_register_operand" "=r")
3532 (match_operator:SI 3 "shift_operator"
3533 [(match_operand:SI 1 "s_register_operand" "r")
3534 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3536 "* return arm_output_shift(operands, 0);"
3537 [(set_attr "predicable" "yes")
3538 (set_attr "shift" "1")
3539 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3540 (const_string "alu_shift")
3541 (const_string "alu_shift_reg")))]
;; Same shift but also setting the condition codes from the result.
3544 (define_insn "*shiftsi3_compare0"
3545 [(set (reg:CC_NOOV CC_REGNUM)
3546 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3547 [(match_operand:SI 1 "s_register_operand" "r")
3548 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3550 (set (match_operand:SI 0 "s_register_operand" "=r")
3551 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3553 "* return arm_output_shift(operands, 1);"
3554 [(set_attr "conds" "set")
3555 (set_attr "shift" "1")
3556 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3557 (const_string "alu_shift")
3558 (const_string "alu_shift_reg")))]
;; Compare-only variant: result is discarded into a scratch register.
3561 (define_insn "*shiftsi3_compare0_scratch"
3562 [(set (reg:CC_NOOV CC_REGNUM)
3563 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3564 [(match_operand:SI 1 "s_register_operand" "r")
3565 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3567 (clobber (match_scratch:SI 0 "=r"))]
3569 "* return arm_output_shift(operands, 1);"
3570 [(set_attr "conds" "set")
3571 (set_attr "shift" "1")]
;; mvn with a shifted operand: ~(x shift n).  The "arch" attribute
;; restricts register-shift forms to ARM state (alternative 2: "a").
3574 (define_insn "*not_shiftsi"
3575 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3576 (not:SI (match_operator:SI 3 "shift_operator"
3577 [(match_operand:SI 1 "s_register_operand" "r,r")
3578 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3581 [(set_attr "predicable" "yes")
3582 (set_attr "shift" "1")
3583 (set_attr "insn" "mvn")
3584 (set_attr "arch" "32,a")
3585 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Same operation, additionally setting CC from the result (mvns).
3587 (define_insn "*not_shiftsi_compare0"
3588 [(set (reg:CC_NOOV CC_REGNUM)
3590 (not:SI (match_operator:SI 3 "shift_operator"
3591 [(match_operand:SI 1 "s_register_operand" "r,r")
3592 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3594 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3595 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3598 [(set_attr "conds" "set")
3599 (set_attr "shift" "1")
3600 (set_attr "insn" "mvn")
3601 (set_attr "arch" "32,a")
3602 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Compare-only variant: only CC is wanted, result goes to a scratch.
3604 (define_insn "*not_shiftsi_compare0_scratch"
3605 [(set (reg:CC_NOOV CC_REGNUM)
3607 (not:SI (match_operator:SI 3 "shift_operator"
3608 [(match_operand:SI 1 "s_register_operand" "r,r")
3609 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3611 (clobber (match_scratch:SI 0 "=r,r"))]
3614 [(set_attr "conds" "set")
3615 (set_attr "shift" "1")
3616 (set_attr "insn" "mvn")
3617 (set_attr "arch" "32,a")
3618 (set_attr "type" "alu_shift,alu_shift_reg")])
3620 ;; We don't really have extzv, but defining this using shifts helps
3621 ;; to reduce register pressure later on.
;; extzv: zero-extract a bitfield.  Thumb-2 gets the real ubfx insn
;; (extzv_t2 below); otherwise it is synthesized as shift-left then
;; logical-shift-right through a temporary.
3623 (define_expand "extzv"
3625 (ashift:SI (match_operand:SI 1 "register_operand" "")
3626 (match_operand:SI 2 "const_int_operand" "")))
3627 (set (match_operand:SI 0 "register_operand" "")
3628 (lshiftrt:SI (match_dup 4)
3629 (match_operand:SI 3 "const_int_operand" "")))]
3630 "TARGET_THUMB1 || arm_arch_thumb2"
3633 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3634 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3636 if (arm_arch_thumb2)
3638 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3643 operands[3] = GEN_INT (rshift);
3647 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3651 operands[2] = GEN_INT (lshift);
3652 operands[4] = gen_reg_rtx (SImode);
;; Signed bitfield extract via sbfx (Thumb-2/ARMv6T2+); the
;; define_insn header line is elided in this listing.
3657 [(set (match_operand:SI 0 "s_register_operand" "=r")
3658 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3659 (match_operand:SI 2 "const_int_operand" "M")
3660 (match_operand:SI 3 "const_int_operand" "M")))]
3662 "sbfx%?\t%0, %1, %3, %2"
3663 [(set_attr "length" "4")
3664 (set_attr "predicable" "yes")]
;; Unsigned bitfield extract via ubfx.
3667 (define_insn "extzv_t2"
3668 [(set (match_operand:SI 0 "s_register_operand" "=r")
3669 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3670 (match_operand:SI 2 "const_int_operand" "M")
3671 (match_operand:SI 3 "const_int_operand" "M")))]
3673 "ubfx%?\t%0, %1, %3, %2"
3674 [(set_attr "length" "4")
3675 (set_attr "predicable" "yes")]
3679 ;; Unary arithmetic insns
3681 (define_expand "negdi2"
3683 [(set (match_operand:DI 0 "s_register_operand" "")
3684 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3685 (clobber (reg:CC CC_REGNUM))])]
3690 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3691 ;; The first alternative allows the common case of a *full* overlap.
;; DImode negate: rsbs low word from 0, then rsc the high word.
3692 (define_insn "*arm_negdi2"
3693 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3694 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3695 (clobber (reg:CC CC_REGNUM))]
3697 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3698 [(set_attr "conds" "clob")
3699 (set_attr "length" "8")]
;; Thumb-1 has no rsc; build the negate from mov/neg/sbc.
3702 (define_insn "*thumb1_negdi2"
3703 [(set (match_operand:DI 0 "register_operand" "=&l")
3704 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3705 (clobber (reg:CC CC_REGNUM))]
3707 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3708 [(set_attr "length" "6")]
3711 (define_expand "negsi2"
3712 [(set (match_operand:SI 0 "s_register_operand" "")
3713 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; SImode negate is a single reverse-subtract from zero.
3718 (define_insn "*arm_negsi2"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r")
3720 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3722 "rsb%?\\t%0, %1, #0"
3723 [(set_attr "predicable" "yes")]
3726 (define_insn "*thumb1_negsi2"
3727 [(set (match_operand:SI 0 "register_operand" "=l")
3728 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3731 [(set_attr "length" "2")]
;; Floating-point negate expanders; the actual insns live in the
;; FPA/VFP description files.  negdf2 additionally requires
;; double-precision VFP support.
3734 (define_expand "negsf2"
3735 [(set (match_operand:SF 0 "s_register_operand" "")
3736 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3737 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3741 (define_expand "negdf2"
3742 [(set (match_operand:DF 0 "s_register_operand" "")
3743 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3744 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3747 ;; abssi2 doesn't really clobber the condition codes if a different register
3748 ;; is being set. To keep things simple, assume during rtl manipulations that
3749 ;; it does, but tell the final scan operator the truth. Similarly for
3752 (define_expand "abssi2"
3754 [(set (match_operand:SI 0 "s_register_operand" "")
3755 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3756 (clobber (match_dup 2))])]
3760 operands[2] = gen_rtx_SCRATCH (SImode);
3762 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs(x): either compare-and-conditionally-negate (in-place form) or
;; the branch-free eor/sub sequence using the sign mask x >> 31.
3765 (define_insn "*arm_abssi2"
3766 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3767 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3768 (clobber (reg:CC CC_REGNUM))]
3771 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3772 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3773 [(set_attr "conds" "clob,*")
3774 (set_attr "shift" "1")
3775 ;; predicable can't be set based on the variant, so left as no
3776 (set_attr "length" "8")]
;; Thumb-1 abs: split after reload into (x + sign) ^ sign where
;; sign = x >> 31.
3779 (define_insn_and_split "*thumb1_abssi2"
3780 [(set (match_operand:SI 0 "s_register_operand" "=l")
3781 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3782 (clobber (match_scratch:SI 2 "=&l"))]
3785 "TARGET_THUMB1 && reload_completed"
3786 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3787 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3788 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3790 [(set_attr "length" "6")]
;; -abs(x): same two strategies with the conditions/operands flipped.
3793 (define_insn "*arm_neg_abssi2"
3794 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3795 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3796 (clobber (reg:CC CC_REGNUM))]
3799 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3800 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3801 [(set_attr "conds" "clob,*")
3802 (set_attr "shift" "1")
3803 ;; predicable can't be set based on the variant, so left as no
3804 (set_attr "length" "8")]
;; Thumb-1 -abs(x): (sign - x) ^ sign where sign = x >> 31.
3807 (define_insn_and_split "*thumb1_neg_abssi2"
3808 [(set (match_operand:SI 0 "s_register_operand" "=l")
3809 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3810 (clobber (match_scratch:SI 2 "=&l"))]
3813 "TARGET_THUMB1 && reload_completed"
3814 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3815 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3816 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3818 [(set_attr "length" "6")]
;; Floating-point abs and sqrt expanders; implementations live in the
;; FPA/VFP description files.
3821 (define_expand "abssf2"
3822 [(set (match_operand:SF 0 "s_register_operand" "")
3823 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3824 "TARGET_32BIT && TARGET_HARD_FLOAT"
3827 (define_expand "absdf2"
3828 [(set (match_operand:DF 0 "s_register_operand" "")
3829 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3830 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3833 (define_expand "sqrtsf2"
3834 [(set (match_operand:SF 0 "s_register_operand" "")
3835 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3836 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3839 (define_expand "sqrtdf2"
3840 [(set (match_operand:DF 0 "s_register_operand" "")
3841 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3842 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; DImode bitwise NOT: split after reload into two SImode mvn insns on
;; the low and high halves.
3845 (define_insn_and_split "one_cmpldi2"
3846 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3847 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3850 "TARGET_32BIT && reload_completed"
3851 [(set (match_dup 0) (not:SI (match_dup 1)))
3852 (set (match_dup 2) (not:SI (match_dup 3)))]
3855 operands[2] = gen_highpart (SImode, operands[0]);
3856 operands[0] = gen_lowpart (SImode, operands[0]);
3857 operands[3] = gen_highpart (SImode, operands[1]);
3858 operands[1] = gen_lowpart (SImode, operands[1]);
3860 [(set_attr "length" "8")
3861 (set_attr "predicable" "yes")]
3864 (define_expand "one_cmplsi2"
3865 [(set (match_operand:SI 0 "s_register_operand" "")
3866 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
;; SImode NOT is a single mvn.
3871 (define_insn "*arm_one_cmplsi2"
3872 [(set (match_operand:SI 0 "s_register_operand" "=r")
3873 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3876 [(set_attr "predicable" "yes")
3877 (set_attr "insn" "mvn")]
3880 (define_insn "*thumb1_one_cmplsi2"
3881 [(set (match_operand:SI 0 "register_operand" "=l")
3882 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3885 [(set_attr "length" "2")
3886 (set_attr "insn" "mvn")]
;; mvns: NOT that also sets the condition codes from the result.
3889 (define_insn "*notsi_compare0"
3890 [(set (reg:CC_NOOV CC_REGNUM)
3891 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3893 (set (match_operand:SI 0 "s_register_operand" "=r")
3894 (not:SI (match_dup 1)))]
3897 [(set_attr "conds" "set")
3898 (set_attr "insn" "mvn")]
;; Compare-only NOT: result discarded into a scratch register.
3901 (define_insn "*notsi_compare0_scratch"
3902 [(set (reg:CC_NOOV CC_REGNUM)
3903 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3905 (clobber (match_scratch:SI 0 "=r"))]
3908 [(set_attr "conds" "set")
3909 (set_attr "insn" "mvn")]
3912 ;; Fixed <--> Floating conversion insns
;; int -> HFmode conversions go through SFmode: convert to SF, then
;; narrow SF -> HF with convert_to_mode.
3914 (define_expand "floatsihf2"
3915 [(set (match_operand:HF 0 "general_operand" "")
3916 (float:HF (match_operand:SI 1 "general_operand" "")))]
3920 rtx op1 = gen_reg_rtx (SFmode);
3921 expand_float (op1, operands[1], 0);
3922 op1 = convert_to_mode (HFmode, op1, 0);
3923 emit_move_insn (operands[0], op1);
3928 (define_expand "floatdihf2"
3929 [(set (match_operand:HF 0 "general_operand" "")
3930 (float:HF (match_operand:DI 1 "general_operand" "")))]
3934 rtx op1 = gen_reg_rtx (SFmode);
3935 expand_float (op1, operands[1], 0);
3936 op1 = convert_to_mode (HFmode, op1, 0);
3937 emit_move_insn (operands[0], op1);
;; int -> SF/DF: Maverick (Cirrus) targets use their dedicated insns;
;; otherwise the expand falls through (elided lines).
3942 (define_expand "floatsisf2"
3943 [(set (match_operand:SF 0 "s_register_operand" "")
3944 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3945 "TARGET_32BIT && TARGET_HARD_FLOAT"
3947 if (TARGET_MAVERICK)
3949 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3954 (define_expand "floatsidf2"
3955 [(set (match_operand:DF 0 "s_register_operand" "")
3956 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3957 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3959 if (TARGET_MAVERICK)
3961 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; HF -> int conversions: widen HF to SF first, then use the generic
;; float-to-fixed expander.
3966 (define_expand "fix_trunchfsi2"
3967 [(set (match_operand:SI 0 "general_operand" "")
3968 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3972 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3973 expand_fix (operands[0], op1, 0);
3978 (define_expand "fix_trunchfdi2"
3979 [(set (match_operand:DI 0 "general_operand" "")
3980 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3984 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3985 expand_fix (operands[0], op1, 0);
;; fix_truncsfsi2: SFmode -> SImode conversion (truncate toward zero).
;; On Maverick (Cirrus) targets both operands must live in Cirrus FP
;; registers before the dedicated cirrus_truncsfsi2 insn is emitted.
3990 (define_expand "fix_truncsfsi2"
3991 [(set (match_operand:SI 0 "s_register_operand" "")
3992 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3993 "TARGET_32BIT && TARGET_HARD_FLOAT"
3995 if (TARGET_MAVERICK)
3997 if (!cirrus_fp_register (operands[0], SImode))
3998 operands[0] = force_reg (SImode, operands[0]);
3999 if (!cirrus_fp_register (operands[1], SFmode))
;; Fixed copy-paste bug: the SFmode source (operand 1) was being
;; reloaded from operands[0], the SImode *destination* — a mode
;; mismatch that copied the wrong value into the conversion.
4000 operands[1] = force_reg (SFmode, operands[1]);
4001 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; fix_truncdfsi2: DFmode -> SImode conversion (truncate toward zero).
;; On Maverick (Cirrus) targets the source must live in a Cirrus FP
;; register before the dedicated cirrus_truncdfsi2 insn is emitted.
4006 (define_expand "fix_truncdfsi2"
4007 [(set (match_operand:SI 0 "s_register_operand" "")
4008 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4009 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4011 if (TARGET_MAVERICK)
4013 if (!cirrus_fp_register (operands[1], DFmode))
;; Fixed copy-paste bug: the DFmode source (operand 1) was being
;; reloaded from operands[0], the SImode *destination* — a mode
;; mismatch that copied the wrong value into the conversion.
4014 operands[1] = force_reg (DFmode, operands[1]);
4015 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; DF -> SF truncation expander; the insn lives in the FP backends.
4022 (define_expand "truncdfsf2"
4023 [(set (match_operand:SF 0 "s_register_operand" "")
4025 (match_operand:DF 1 "s_register_operand" "")))]
4026 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4030 /* DFmode -> HFmode conversions have to go through SFmode. */
4031 (define_expand "truncdfhf2"
4032 [(set (match_operand:HF 0 "general_operand" "")
4034 (match_operand:DF 1 "general_operand" "")))]
4039 op1 = convert_to_mode (SFmode, operands[1], 0);
4040 op1 = convert_to_mode (HFmode, op1, 0);
4041 emit_move_insn (operands[0], op1);
4046 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI extensions, parameterized over the QHSI mode
;; iterator; predicates/constraints/conditions come from the
;; qhs_*extenddi_* mode attributes.  Split into SImode pieces by the
;; define_splits that follow.
4048 (define_insn "zero_extend<mode>di2"
4049 [(set (match_operand:DI 0 "s_register_operand" "=r")
4050 (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4051 "<qhs_extenddi_cstr>")))]
4052 "TARGET_32BIT <qhs_zextenddi_cond>"
4054 [(set_attr "length" "8")
4055 (set_attr "ce_count" "2")
4056 (set_attr "predicable" "yes")]
4059 (define_insn "extend<mode>di2"
4060 [(set (match_operand:DI 0 "s_register_operand" "=r")
4061 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4062 "<qhs_extenddi_cstr>")))]
4063 "TARGET_32BIT <qhs_sextenddi_cond>"
4065 [(set_attr "length" "8")
4066 (set_attr "ce_count" "2")
4067 (set_attr "shift" "1")
4068 (set_attr "predicable" "yes")]
4071 ;; Splits for all extensions to DImode
;; Zero-extension split: extend (or move) into the low SImode half,
;; then set the high half to 0.  The clobber of operand 0 kills any
;; stale live range when source and destination don't overlap.
4073 [(set (match_operand:DI 0 "s_register_operand" "")
4074 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4076 [(set (match_dup 0) (match_dup 1))]
4078 rtx lo_part = gen_lowpart (SImode, operands[0]);
4079 enum machine_mode src_mode = GET_MODE (operands[1]);
4081 if (REG_P (operands[0])
4082 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4083 emit_clobber (operands[0]);
4084 if (!REG_P (lo_part) || src_mode != SImode
4085 || !rtx_equal_p (lo_part, operands[1]))
4087 if (src_mode == SImode)
4088 emit_move_insn (lo_part, operands[1]);
4090 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4091 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4092 operands[1] = lo_part;
4094 operands[0] = gen_highpart (SImode, operands[0]);
4095 operands[1] = const0_rtx;
;; Sign-extension split: same low-half handling, but the high half
;; becomes the low half arithmetically shifted right by 31.
4099 [(set (match_operand:DI 0 "s_register_operand" "")
4100 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4102 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4104 rtx lo_part = gen_lowpart (SImode, operands[0]);
4105 enum machine_mode src_mode = GET_MODE (operands[1]);
4107 if (REG_P (operands[0])
4108 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4109 emit_clobber (operands[0]);
4111 if (!REG_P (lo_part) || src_mode != SImode
4112 || !rtx_equal_p (lo_part, operands[1]))
4114 if (src_mode == SImode)
4115 emit_move_insn (lo_part, operands[1]);
4117 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4118 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4119 operands[1] = lo_part;
4121 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero extension.  Pre-ARMv4 cannot load halfwords, so a
;; memory source goes through movhi_bytes; pre-ARMv6 register sources
;; are synthesized as shift-left-16 / shift-right-16.
4124 (define_expand "zero_extendhisi2"
4125 [(set (match_operand:SI 0 "s_register_operand" "")
4126 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4129 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4131 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4134 if (!arm_arch6 && !MEM_P (operands[1]))
4136 rtx t = gen_lowpart (SImode, operands[1]);
4137 rtx tmp = gen_reg_rtx (SImode);
4138 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4139 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split for the shift-pair form on pre-v6, non-Thumb-2 targets.
4145 [(set (match_operand:SI 0 "s_register_operand" "")
4146 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4147 "!TARGET_THUMB2 && !arm_arch6"
4148 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4149 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4151 operands[2] = gen_lowpart (SImode, operands[1]);
;; Thumb-1 variant: uxth when available, otherwise ldrh; the output
;; code also works around sp-relative addresses that reload can leave
;; in a halfword load (see inline comment below).
4154 (define_insn "*thumb1_zero_extendhisi2"
4155 [(set (match_operand:SI 0 "register_operand" "=l,l")
4156 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4161 if (which_alternative == 0 && arm_arch6)
4162 return "uxth\t%0, %1";
4163 if (which_alternative == 0)
4166 mem = XEXP (operands[1], 0);
4168 if (GET_CODE (mem) == CONST)
4169 mem = XEXP (mem, 0);
4171 if (GET_CODE (mem) == PLUS)
4173 rtx a = XEXP (mem, 0);
4175 /* This can happen due to bugs in reload. */
4176 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4179 ops[0] = operands[0];
4182 output_asm_insn ("mov\t%0, %1", ops);
4184 XEXP (mem, 0) = operands[0];
4188 return "ldrh\t%0, %1";
4190 [(set_attr_alternative "length"
4191 [(if_then_else (eq_attr "is_arch6" "yes")
4192 (const_int 2) (const_int 4))
4194 (set_attr "type" "alu_shift,load_byte")]
;; ARM-state HI -> SI zero extension: v4..v5 form (ldrh for memory;
;; register form's template line is elided here) and the v6 form
;; which can use uxth for registers.
4197 (define_insn "*arm_zero_extendhisi2"
4198 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4199 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4200 "TARGET_ARM && arm_arch4 && !arm_arch6"
4204 [(set_attr "type" "alu_shift,load_byte")
4205 (set_attr "predicable" "yes")]
4208 (define_insn "*arm_zero_extendhisi2_v6"
4209 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4210 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4211 "TARGET_ARM && arm_arch6"
4215 [(set_attr "type" "alu_shift,load_byte")
4216 (set_attr "predicable" "yes")]
;; Fused zero-extend-halfword + add via uxtah.
4219 (define_insn "*arm_zero_extendhisi2addsi"
4220 [(set (match_operand:SI 0 "s_register_operand" "=r")
4221 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4222 (match_operand:SI 2 "s_register_operand" "r")))]
4224 "uxtah%?\\t%0, %2, %1"
4225 [(set_attr "type" "alu_shift")
4226 (set_attr "predicable" "yes")]
;; Expander for QImode -> SImode zero extension.  Without v6, a
;; register source is widened with AND #255 on ARM, or with a 24-bit
;; shift-left/shift-right pair otherwise; memory sources fall through
;; to the ldrb-based insn patterns.
4229 (define_expand "zero_extendqisi2"
4230 [(set (match_operand:SI 0 "s_register_operand" "")
4231 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4234 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4236 emit_insn (gen_andsi3 (operands[0],
4237 gen_lowpart (SImode, operands[1]),
4241 if (!arm_arch6 && !MEM_P (operands[1]))
4243 rtx t = gen_lowpart (SImode, operands[1]);
4244 rtx tmp = gen_reg_rtx (SImode);
4245 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4246 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split (its "(define_split" opening line is not visible in this
;; extract): rewrites a register QI zero-extend as an ashift/lshiftrt
;; by 24 via an SImode subreg of the source.
4252 [(set (match_operand:SI 0 "s_register_operand" "")
4253 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4255 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4256 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4258 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4261 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; QImode -> SImode zero-extend insns.  Thumb-1 and ARM each have a
;; pre-v6 form (shift pair / ldrb) and a v6 form (uxtb available), plus
;; an ARM fused zero-extend-plus-add (uxtab).
;; NOTE(review): asm template lines for several alternatives are
;; missing from this extract.
4266 (define_insn "*thumb1_zero_extendqisi2"
4267 [(set (match_operand:SI 0 "register_operand" "=l,l")
4268 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4269 "TARGET_THUMB1 && !arm_arch6"
4273 [(set_attr "length" "4,2")
4274 (set_attr "type" "alu_shift,load_byte")
4275 (set_attr "pool_range" "*,32")]
4278 (define_insn "*thumb1_zero_extendqisi2_v6"
4279 [(set (match_operand:SI 0 "register_operand" "=l,l")
4280 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4281 "TARGET_THUMB1 && arm_arch6"
4285 [(set_attr "length" "2")
4286 (set_attr "type" "alu_shift,load_byte")]
4289 (define_insn "*arm_zero_extendqisi2"
4290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4291 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4292 "TARGET_ARM && !arm_arch6"
4295 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4296 [(set_attr "length" "8,4")
4297 (set_attr "type" "alu_shift,load_byte")
4298 (set_attr "predicable" "yes")]
4301 (define_insn "*arm_zero_extendqisi2_v6"
4302 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4303 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4304 "TARGET_ARM && arm_arch6"
4307 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4308 [(set_attr "type" "alu_shift,load_byte")
4309 (set_attr "predicable" "yes")]
;; (plus (zero_extend:SI QI) SI) -> single uxtab.  The condition line
;; (presumably an INT-SIMD / arch6 test) is missing from this extract.
4312 (define_insn "*arm_zero_extendqisi2addsi"
4313 [(set (match_operand:SI 0 "s_register_operand" "=r")
4314 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4315 (match_operand:SI 2 "s_register_operand" "r")))]
4317 "uxtab%?\\t%0, %2, %1"
4318 [(set_attr "predicable" "yes")
4319 (set_attr "insn" "xtab")
4320 (set_attr "type" "alu_shift")]
;; Splits for extracting the low byte of an SImode value via a QImode
;; subreg: byte 0 on little-endian, byte 3 on big-endian.  Both rewrite
;; the operation as copy-then-AND-255 using the clobbered scratch.
;; (The "(define_split" opening lines are not visible in this extract.)
4324 [(set (match_operand:SI 0 "s_register_operand" "")
4325 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4326 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4327 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4328 [(set (match_dup 2) (match_dup 1))
4329 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Big-endian counterpart: subreg byte index 3 is the low byte.
4334 [(set (match_operand:SI 0 "s_register_operand" "")
4335 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4336 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4337 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4338 [(set (match_dup 2) (match_dup 1))
4339 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split an IOR/XOR of a masked shifted value with a narrow value into
;; shift-op followed by zero_extend, when the AND mask exactly covers
;; the bits the shifted narrow mode can occupy.
4345 [(set (match_operand:SI 0 "s_register_operand" "")
4346 (ior_xor:SI (and:SI (ashift:SI
4347 (match_operand:SI 1 "s_register_operand" "")
4348 (match_operand:SI 2 "const_int_operand" ""))
4349 (match_operand:SI 3 "const_int_operand" ""))
4351 (match_operator 5 "subreg_lowpart_operator"
4352 [(match_operand:SI 4 "s_register_operand" "")]))))]
4354 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4355 == (GET_MODE_MASK (GET_MODE (operands[5]))
4356 & (GET_MODE_MASK (GET_MODE (operands[5]))
4357 << (INTVAL (operands[2])))))"
4358 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4360 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4361 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Z mode).  The asm template line is missing from this extract;
;; presumably a TST with #255 -- confirm against the full file.
4364 (define_insn "*compareqi_eq0"
4365 [(set (reg:CC_Z CC_REGNUM)
4366 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4370 [(set_attr "conds" "set")]
;; Expander for HImode -> SImode sign extension.  Thumb-1 goes through
;; thumb1_extendhisi2; pre-v4 ARM loads via extendhisi2_mem; pre-v6
;; register sources use a 16-bit shift-left/arith-shift-right pair.
4373 (define_expand "extendhisi2"
4374 [(set (match_operand:SI 0 "s_register_operand" "")
4375 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4380 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4383 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4385 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4389 if (!arm_arch6 && !MEM_P (operands[1]))
4391 rtx t = gen_lowpart (SImode, operands[1]);
4392 rtx tmp = gen_reg_rtx (SImode);
4393 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4394 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split a register-to-register sign-extend (with scratch) into the
;; 16-bit shift pair.  (Opening "(define_split" line not visible here.)
4401 [(set (match_operand:SI 0 "register_operand" "")
4402 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4403 (clobber (match_scratch:SI 2 ""))])]
4405 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4406 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4408 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4411 ;; We used to have an early-clobber on the scratch register here.
4412 ;; However, there's a bug somewhere in reload which means that this
4413 ;; can be partially ignored during spill allocation if the memory
4414 ;; address also needs reloading; this causes us to die later on when
4415 ;; we try to verify the operands. Fortunately, we don't really need
4416 ;; the early-clobber: we can always use operand 0 if operand 2
4417 ;; overlaps the address.
;; Thumb-1 sign-extend of HImode.  Register alternative uses sxth (v6)
;; or a shift pair; the memory alternative hand-classifies the address
;; (label, label+offset, reg+reg, SP-relative...) because Thumb-1
;; ldrsh only accepts reg+reg addressing.  Scratch operand 2 is used
;; to materialise an index register when needed.
4418 (define_insn "thumb1_extendhisi2"
4419 [(set (match_operand:SI 0 "register_operand" "=l,l")
4420 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4421 (clobber (match_scratch:SI 2 "=X,l"))]
4428 if (which_alternative == 0 && !arm_arch6)
4430 if (which_alternative == 0)
4431 return \"sxth\\t%0, %1\";
4433 mem = XEXP (operands[1], 0);
4435 /* This code used to try to use 'V', and fix the address only if it was
4436 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4437 range of QImode offsets, and offsettable_address_p does a QImode
4440 if (GET_CODE (mem) == CONST)
4441 mem = XEXP (mem, 0);
4443 if (GET_CODE (mem) == LABEL_REF)
4444 return \"ldr\\t%0, %1\";
4446 if (GET_CODE (mem) == PLUS)
4448 rtx a = XEXP (mem, 0);
4449 rtx b = XEXP (mem, 1);
4451 if (GET_CODE (a) == LABEL_REF
4452 && GET_CODE (b) == CONST_INT)
4453 return \"ldr\\t%0, %1\";
4455 if (GET_CODE (b) == REG)
4456 return \"ldrsh\\t%0, %1\";
4464 ops[2] = const0_rtx;
4467 gcc_assert (GET_CODE (ops[1]) == REG);
4469 ops[0] = operands[0];
4470 if (reg_mentioned_p (operands[2], ops[1]))
4473 ops[3] = operands[2];
4474 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4477 [(set_attr_alternative "length"
4478 [(if_then_else (eq_attr "is_arch6" "yes")
4479 (const_int 2) (const_int 4))
4481 (set_attr "type" "alu_shift,load_byte")
4482 (set_attr "pool_range" "*,1020")]
4485 ;; This pattern will only be used when ldsh is not available
;; Sign-extending HImode load for targets without ldrsh (pre-v4):
;; load the two bytes separately, shift the byte holding the sign into
;; the top, arithmetic-shift down, and OR in the other byte.  The
;; byte roles (operands 4/5) swap with endianness.
4486 (define_expand "extendhisi2_mem"
4487 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4489 (zero_extend:SI (match_dup 7)))
4490 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4491 (set (match_operand:SI 0 "" "")
4492 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4497 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4499 mem1 = change_address (operands[1], QImode, addr);
4500 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4501 operands[0] = gen_lowpart (SImode, operands[0]);
4503 operands[2] = gen_reg_rtx (SImode);
4504 operands[3] = gen_reg_rtx (SImode);
4505 operands[6] = gen_reg_rtx (SImode);
4508 if (BYTES_BIG_ENDIAN)
4510 operands[4] = operands[2];
4511 operands[5] = operands[3];
4515 operands[4] = operands[3];
4516 operands[5] = operands[2];
;; Split a register HI sign-extend into the 16-bit shift pair.
;; (Opening "(define_split" line not visible in this extract.)
4522 [(set (match_operand:SI 0 "register_operand" "")
4523 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4525 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4526 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4528 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM/Thumb-2 HImode sign-extend insns: pre-v6 (ldrsh / shift pair),
;; v6 (sxth), and fused sign-extend-plus-add (sxtah).
;; NOTE(review): asm templates for the first two and the condition of
;; the third are missing from this extract.
4531 (define_insn "*arm_extendhisi2"
4532 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4533 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4534 "TARGET_ARM && arm_arch4 && !arm_arch6"
4538 [(set_attr "length" "8,4")
4539 (set_attr "type" "alu_shift,load_byte")
4540 (set_attr "predicable" "yes")
4541 (set_attr "pool_range" "*,256")
4542 (set_attr "neg_pool_range" "*,244")]
;; ??? Check Thumb-2 pool range
4546 (define_insn "*arm_extendhisi2_v6"
4547 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4548 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4549 "TARGET_32BIT && arm_arch6"
4553 [(set_attr "type" "alu_shift,load_byte")
4554 (set_attr "predicable" "yes")
4555 (set_attr "pool_range" "*,256")
4556 (set_attr "neg_pool_range" "*,244")]
4559 (define_insn "*arm_extendhisi2addsi"
4560 [(set (match_operand:SI 0 "s_register_operand" "=r")
4561 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4562 (match_operand:SI 2 "s_register_operand" "r")))]
4564 "sxtah%?\\t%0, %2, %1"
;; QImode -> HImode sign extension.  With v4 and a memory source this
;; emits a direct ldrsb set; otherwise it is built from a 24-bit
;; shift-left/arith-shift-right pair on SImode lowparts.
4567 (define_expand "extendqihi2"
4569 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4571 (set (match_operand:HI 0 "s_register_operand" "")
4572 (ashiftrt:SI (match_dup 2)
4577 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4579 emit_insn (gen_rtx_SET (VOIDmode,
4581 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4584 if (!s_register_operand (operands[1], QImode))
4585 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4586 operands[0] = gen_lowpart (SImode, operands[0]);
4587 operands[1] = gen_lowpart (SImode, operands[1]);
4588 operands[2] = gen_reg_rtx (SImode);
;; Direct ldrsb form; 'Uq' restricts the address to what ldrsb accepts.
4592 (define_insn "*arm_extendqihi_insn"
4593 [(set (match_operand:HI 0 "s_register_operand" "=r")
4594 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4595 "TARGET_ARM && arm_arch4"
4596 "ldr%(sb%)\\t%0, %1"
4597 [(set_attr "type" "load_byte")
4598 (set_attr "predicable" "yes")
4599 (set_attr "pool_range" "256")
4600 (set_attr "neg_pool_range" "244")]
;; QImode -> SImode sign extension: copy memory to a register when
;; ldrsb is unavailable (pre-v4); pre-v6 register sources use the
;; 24-bit shift pair.
4603 (define_expand "extendqisi2"
4604 [(set (match_operand:SI 0 "s_register_operand" "")
4605 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4608 if (!arm_arch4 && MEM_P (operands[1]))
4609 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4611 if (!arm_arch6 && !MEM_P (operands[1]))
4613 rtx t = gen_lowpart (SImode, operands[1]);
4614 rtx tmp = gen_reg_rtx (SImode);
4615 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4616 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split a register QI sign-extend into the 24-bit shift pair.
;; (Opening "(define_split" line not visible in this extract.)
4622 [(set (match_operand:SI 0 "register_operand" "")
4623 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4625 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4626 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4628 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARM QImode sign-extend insns: pre-v6 (ldrsb / shift pair), v6
;; (sxtb), and fused sign-extend-plus-add (sxtab).
;; NOTE(review): asm templates of the first two and the condition of
;; the third are missing from this extract.
4631 (define_insn "*arm_extendqisi"
4632 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4633 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4634 "TARGET_ARM && arm_arch4 && !arm_arch6"
4638 [(set_attr "length" "8,4")
4639 (set_attr "type" "alu_shift,load_byte")
4640 (set_attr "predicable" "yes")
4641 (set_attr "pool_range" "*,256")
4642 (set_attr "neg_pool_range" "*,244")]
4645 (define_insn "*arm_extendqisi_v6"
4646 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4648 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4649 "TARGET_ARM && arm_arch6"
4653 [(set_attr "type" "alu_shift,load_byte")
4654 (set_attr "predicable" "yes")
4655 (set_attr "pool_range" "*,256")
4656 (set_attr "neg_pool_range" "*,244")]
4659 (define_insn "*arm_extendqisi2addsi"
4660 [(set (match_operand:SI 0 "s_register_operand" "=r")
4661 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4662 (match_operand:SI 2 "s_register_operand" "r")))]
4664 "sxtab%?\\t%0, %2, %1"
4665 [(set_attr "type" "alu_shift")
4666 (set_attr "insn" "xtab")
4667 (set_attr "predicable" "yes")]
;; Post-reload Thumb-1 split: ldrsb only takes reg+reg addresses, so
;; rewrite other address shapes by loading a component into operand 0
;; first, handling the case where the destination overlaps the address.
;; (Opening "(define_split" line not visible in this extract.)
4671 [(set (match_operand:SI 0 "register_operand" "")
4672 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4673 "TARGET_THUMB1 && reload_completed"
4674 [(set (match_dup 0) (match_dup 2))
4675 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4677 rtx addr = XEXP (operands[1], 0);
4679 if (GET_CODE (addr) == CONST)
4680 addr = XEXP (addr, 0);
4682 if (GET_CODE (addr) == PLUS
4683 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4684 /* No split necessary.  */
4687 if (GET_CODE (addr) == PLUS
4688 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4691 if (reg_overlap_mentioned_p (operands[0], addr))
4693 rtx t = gen_lowpart (QImode, operands[0]);
4694 emit_move_insn (t, operands[1]);
4695 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4701 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4702 operands[2] = const0_rtx;
4704 else if (GET_CODE (addr) != PLUS)
4706 else if (REG_P (XEXP (addr, 0)))
4708 operands[2] = XEXP (addr, 1);
4709 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4713 operands[2] = XEXP (addr, 0);
4714 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4717 operands[3] = change_address (operands[1], QImode, addr);
;; Peephole: fold "rD += C; rZ = 0; rX = sext(mem[rD + rZ])" into
;; "rZ = C; rX = sext(mem[rD + rZ])" when the intermediate registers
;; die, recovering the reg+reg ldrsb form.
4721 [(set (match_operand:SI 0 "register_operand" "")
4722 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4723 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4724 (set (match_operand:SI 3 "register_operand" "")
4725 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4727 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4728 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4729 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4730 && (peep2_reg_dead_p (3, operands[0])
4731 || rtx_equal_p (operands[0], operands[3]))
4732 && (peep2_reg_dead_p (3, operands[2])
4733 || rtx_equal_p (operands[2], operands[3]))"
4734 [(set (match_dup 2) (match_dup 1))
4735 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4737 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4738 operands[4] = change_address (operands[4], QImode, addr);
;; Thumb-1 QImode sign-extend insn: sxtb for registers on v6, ldrsb
;; for reg+reg memory; other address shapes are handled by the output
;; code (partially missing from this extract).
4741 (define_insn "thumb1_extendqisi2"
4742 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4743 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4748 if (which_alternative == 0 && arm_arch6)
4749 return "sxtb\\t%0, %1";
4750 if (which_alternative == 0)
4753 addr = XEXP (operands[1], 0);
4754 if (GET_CODE (addr) == PLUS
4755 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4756 return "ldrsb\\t%0, %1";
4760 [(set_attr_alternative "length"
4761 [(if_then_else (eq_attr "is_arch6" "yes")
4762 (const_int 2) (const_int 4))
4764 (if_then_else (eq_attr "is_arch6" "yes")
4765 (const_int 4) (const_int 6))])
4766 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; SFmode -> DFmode extension, only for hard-float targets with
;; double-precision hardware.
4769 (define_expand "extendsfdf2"
4770 [(set (match_operand:DF 0 "s_register_operand" "")
4771 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4772 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HFmode -> DFmode conversions have to go through SFmode.
4777 (define_expand "extendhfdf2"
4778 [(set (match_operand:DF 0 "general_operand" "")
4779 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4784 op1 = convert_to_mode (SFmode, operands[1], 0);
4785 op1 = convert_to_mode (DFmode, op1, 0);
4786 emit_insn (gen_movdf (operands[0], op1));
4791 ;; Move insns (including loads and stores)
4793 ;; XXX Just some ideas about movti.
4794 ;; I don't think these are a good idea on the arm, there just aren't enough registers.
4796 ;;(define_expand "loadti"
4797 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4798 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4801 ;;(define_expand "storeti"
4802 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4803 ;; (match_operand:TI 1 "s_register_operand" ""))]
4806 ;;(define_expand "movti"
4807 ;; [(set (match_operand:TI 0 "general_operand" "")
4808 ;; (match_operand:TI 1 "general_operand" ""))]
4814 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4815 ;; operands[1] = copy_to_reg (operands[1]);
4816 ;; if (GET_CODE (operands[0]) == MEM)
4817 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4818 ;; else if (GET_CODE (operands[1]) == MEM)
4819 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4823 ;; emit_insn (insn);
4827 ;; Recognize garbage generated above.
4830 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4831 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4835 ;; register mem = (which_alternative < 3);
4836 ;; register const char *template;
4838 ;; operands[mem] = XEXP (operands[mem], 0);
4839 ;; switch (which_alternative)
4841 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4842 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4843 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4844 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4845 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4846 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4848 ;; output_asm_insn (template, operands);
;; DImode move expander: before reload, force the source into a
;; register unless the destination is one (no mem-to-mem moves).
4852 (define_expand "movdi"
4853 [(set (match_operand:DI 0 "general_operand" "")
4854 (match_operand:DI 1 "general_operand" ""))]
4857 if (can_create_pseudo_p ())
4859 if (GET_CODE (operands[0]) != REG)
4860 operands[1] = force_reg (DImode, operands[1]);
;; Core-register DImode move (excluded when VFP/Maverick handle DI).
;; Constant alternatives Da/Db/Dc are emitted as '#' and split later;
;; loads/stores go through output_move_double.
4865 (define_insn "*arm_movdi"
4866 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4867 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4869 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4871 && ( register_operand (operands[0], DImode)
4872 || register_operand (operands[1], DImode))"
4874 switch (which_alternative)
4881 return output_move_double (operands);
4884 [(set_attr "length" "8,12,16,8,8")
4885 (set_attr "type" "*,*,*,load2,store2")
4886 (set_attr "arm_pool_range" "*,*,*,1020,*")
4887 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4888 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4889 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Post-reload splits for 64-bit (ANY64) moves.
;; 1) Cheap double constants: synthesise each 32-bit half with
;;    arm_split_constant when the inline cost is acceptable.
;; (Opening "(define_split" line not visible in this extract.)
4893 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4894 (match_operand:ANY64 1 "const_double_operand" ""))]
4897 && (arm_const_double_inline_cost (operands[1])
4898 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4901 arm_split_constant (SET, SImode, curr_insn,
4902 INTVAL (gen_lowpart (SImode, operands[1])),
4903 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4904 arm_split_constant (SET, SImode, curr_insn,
4905 INTVAL (gen_highpart_mode (SImode,
4906 GET_MODE (operands[0]),
4908 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
; If optimizing for size, or if we have load delay slots, then
; we want to split the constant into two separate operations.
; In both cases this may split a trivial part into a single data op
; leaving a single complex constant to load.  We can also get longer
; offsets in a LDR which means we get better chances of sharing the pool
; entries.  Finally, we can normally do a better job of scheduling
; LDR instructions than we can with LDM.
; This pattern will only match if the one above did not.
;; 2) Remaining double constants: split into two SImode sets by parts.
4922 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4923 (match_operand:ANY64 1 "const_double_operand" ""))]
4924 "TARGET_ARM && reload_completed
4925 && arm_const_double_by_parts (operands[1])"
4926 [(set (match_dup 0) (match_dup 1))
4927 (set (match_dup 2) (match_dup 3))]
4929 operands[2] = gen_highpart (SImode, operands[0]);
4930 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4932 operands[0] = gen_lowpart (SImode, operands[0]);
4933 operands[1] = gen_lowpart (SImode, operands[1]);
;; 3) Register-to-register: two SImode moves, ordered so a partial
;;    overlap between source and destination is not clobbered.
4938 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4939 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4940 "TARGET_EITHER && reload_completed"
4941 [(set (match_dup 0) (match_dup 1))
4942 (set (match_dup 2) (match_dup 3))]
4944 operands[2] = gen_highpart (SImode, operands[0]);
4945 operands[3] = gen_highpart (SImode, operands[1]);
4946 operands[0] = gen_lowpart (SImode, operands[0]);
4947 operands[1] = gen_lowpart (SImode, operands[1]);
4949 /* Handle a partial overlap.  */
4950 if (rtx_equal_p (operands[0], operands[3]))
4952 rtx tmp0 = operands[0];
4953 rtx tmp1 = operands[1];
4955 operands[0] = operands[2];
4956 operands[1] = operands[3];
;; We can't actually do base+index doubleword loads if the index and
;; destination overlap.  Split here so that we at least have chance to
;; 4) base+index DImode load whose destination overlaps both address
;;    registers: compute the address into the low destination reg first.
4967 [(set (match_operand:DI 0 "s_register_operand" "")
4968 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4969 (match_operand:SI 2 "s_register_operand" ""))))]
4971 && reg_overlap_mentioned_p (operands[0], operands[1])
4972 && reg_overlap_mentioned_p (operands[0], operands[2])"
4974 (plus:SI (match_dup 1)
4977 (mem:DI (match_dup 4)))]
4979 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4983 ;;; ??? This should have alternatives for constants.
4984 ;;; ??? This was originally identical to the movdf_insn pattern.
4985 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4986 ;;; thumb_reorg with a memory reference.
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move: register pairs via add/mov pairs (ordered to
;; survive overlap), small constants via mov/neg/asr, memory via
;; ldmia/stmia or an explicit str pair.
4987 (define_insn "*thumb1_movdi_insn"
4988 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4989 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4991 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4992 && ( register_operand (operands[0], DImode)
4993 || register_operand (operands[1], DImode))"
4996 switch (which_alternative)
5000 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5001 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5002 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5004 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5006 operands[1] = GEN_INT (- INTVAL (operands[1]));
5007 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5009 return \"ldmia\\t%1, {%0, %H0}\";
5011 return \"stmia\\t%0, {%1, %H1}\";
5013 return thumb_load_double_from_address (operands);
5015 operands[2] = gen_rtx_MEM (SImode,
5016 plus_constant (XEXP (operands[0], 0), 4));
5017 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5020 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5021 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5022 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5025 [(set_attr "length" "4,4,6,2,2,6,4,4")
5026 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5027 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5028 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles, in order: forcing mem destinations'
;; sources into registers; synthesising non-immediate constants with
;; arm_split_constant; movw/movt symbol pairs; Thumb-1 legitimisation;
;; section-anchor offsets; TLS references; and PIC legitimisation.
5031 (define_expand "movsi"
5032 [(set (match_operand:SI 0 "general_operand" "")
5033 (match_operand:SI 1 "general_operand" ""))]
5037 rtx base, offset, tmp;
5041 /* Everything except mem = const or mem = mem can be done easily.  */
5042 if (GET_CODE (operands[0]) == MEM)
5043 operands[1] = force_reg (SImode, operands[1]);
5044 if (arm_general_register_operand (operands[0], SImode)
5045 && GET_CODE (operands[1]) == CONST_INT
5046 && !(const_ok_for_arm (INTVAL (operands[1]))
5047 || const_ok_for_arm (~INTVAL (operands[1]))))
5049 arm_split_constant (SET, SImode, NULL_RTX,
5050 INTVAL (operands[1]), operands[0], NULL_RTX,
5051 optimize && can_create_pseudo_p ());
5055 if (TARGET_USE_MOVT && !target_word_relocations
5056 && GET_CODE (operands[1]) == SYMBOL_REF
5057 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5059 arm_emit_movpair (operands[0], operands[1]);
5063 else /* TARGET_THUMB1...  */
5065 if (can_create_pseudo_p ())
5067 if (GET_CODE (operands[0]) != REG)
5068 operands[1] = force_reg (SImode, operands[1]);
5072 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5074 split_const (operands[1], &base, &offset);
5075 if (GET_CODE (base) == SYMBOL_REF
5076 && !offset_within_block_p (base, INTVAL (offset)))
5078 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5079 emit_move_insn (tmp, base);
5080 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5085 /* Recognize the case where operand[1] is a reference to thread-local
5086 data and load its address to a register.  */
5087 if (arm_tls_referenced_p (operands[1]))
5089 rtx tmp = operands[1];
5092 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5094 addend = XEXP (XEXP (tmp, 0), 1);
5095 tmp = XEXP (XEXP (tmp, 0), 0);
5098 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5099 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5101 tmp = legitimize_tls_address (tmp,
5102 !can_create_pseudo_p () ? operands[0] : 0);
5105 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5106 tmp = force_operand (tmp, operands[0]);
5111 && (CONSTANT_P (operands[1])
5112 || symbol_mentioned_p (operands[1])
5113 || label_mentioned_p (operands[1])))
5114 operands[1] = legitimize_pic_address (operands[1], SImode,
5115 (!can_create_pseudo_p ()
5122 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5123 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5124 ;; so this does not matter.
;; movt writes the upper 16 bits of a register; modelled as lo_sum
;; (see the note above this pattern in the full file about the ARM
;; HIGH/LO_SUM convention being inverted).
5125 (define_insn "*arm_movt"
5126 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5127 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5128 (match_operand:SI 2 "general_operand" "i")))]
5130 "movt%?\t%0, #:upper16:%c2"
5131 [(set_attr "predicable" "yes")
5132 (set_attr "length" "4")]
;; Basic ARM SImode move: mov/mvn/movw immediates, ldr/str for memory.
;; Asm template lines are missing from this extract.
5135 (define_insn "*arm_movsi_insn"
5136 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5137 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5138 "TARGET_ARM && ! TARGET_IWMMXT
5139 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5140 && ( register_operand (operands[0], SImode)
5141 || register_operand (operands[1], SImode))"
5149 [(set_attr "type" "*,*,*,*,load1,store1")
5150 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5151 (set_attr "predicable" "yes")
5152 (set_attr "pool_range" "*,*,*,*,4096,*")
5153 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split non-immediate SI constants through arm_split_constant.
;; (Opening "(define_split" line not visible in this extract.)
5157 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5158 (match_operand:SI 1 "const_int_operand" ""))]
5160 && (!(const_ok_for_arm (INTVAL (operands[1]))
5161 || const_ok_for_arm (~INTVAL (operands[1]))))"
5162 [(clobber (const_int 0))]
5164 arm_split_constant (SET, SImode, NULL_RTX,
5165 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move; lo registers plus hi/stack via *l*h*k.
;; Asm templates are missing from this extract.
5170 (define_insn "*thumb1_movsi_insn"
5171 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5172 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5174 && ( register_operand (operands[0], SImode)
5175 || register_operand (operands[1], SImode))"
5186 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5187 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5188 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5189 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Split a 'J' constant (negative 8-bit range) into mov + neg.
;; (Opening "(define_split" line not visible in this extract.)
5192 [(set (match_operand:SI 0 "register_operand" "")
5193 (match_operand:SI 1 "const_int_operand" ""))]
5194 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5195 [(set (match_dup 2) (match_dup 1))
5196 (set (match_dup 0) (neg:SI (match_dup 2)))]
5199 operands[1] = GEN_INT (- INTVAL (operands[1]));
5200 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Split a 'K' constant (shifted 8-bit) into mov + lsl, finding the
;; smallest left shift that reconstructs the value.
5205 [(set (match_operand:SI 0 "register_operand" "")
5206 (match_operand:SI 1 "const_int_operand" ""))]
5207 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5208 [(set (match_dup 2) (match_dup 1))
5209 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5212 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5213 unsigned HOST_WIDE_INT mask = 0xff;
5216 for (i = 0; i < 25; i++)
5217 if ((val & (mask << i)) == val)
5220 /* Don't split if the shift is zero.  */
5224 operands[1] = GEN_INT (val >> i);
5225 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5226 operands[3] = GEN_INT (i);
5230 ;; When generating pic, we need to load the symbol offset into a register.
5231 ;; So that the optimizer does not confuse this with a normal symbol load
5232 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5233 ;; since that is the only type of relocation we can use.
5235 ;; Wrap calculation of the whole PIC address in a single pattern for the
5236 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5237 ;; a PIC address involves two loads from memory, so we want to CSE it
5238 ;; as often as possible.
5239 ;; This pattern will be split into one of the pic_load_addr_* patterns
5240 ;; and a move after GCSE optimizations.
5242 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; Whole PIC address computation kept as one pattern so PRE/HOIST can
;; CSE it; split after GCSE into pic_load_addr_* plus a GOT load.
5243 (define_expand "calculate_pic_address"
5244 [(set (match_operand:SI 0 "register_operand" "")
5245 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5246 (unspec:SI [(match_operand:SI 2 "" "")]
;; Split calculate_pic_address into pic_load_addr_* and a move.
5253 [(set (match_operand:SI 0 "register_operand" "")
5254 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5255 (unspec:SI [(match_operand:SI 2 "" "")]
5258 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5259 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5260 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5263 ;; The rather odd constraints on the following are to force reload to leave
5264 ;; the insn alone, and to force the minipool generation pass to then move
5265 ;; the GOT symbol to memory.
;; Load a GOT symbol's constant-pool entry.  The "mX" constraint is
;; deliberately odd: it makes reload leave the insn alone so the
;; minipool pass can place the symbol (see comment in the full file).
5267 (define_insn "pic_load_addr_32bit"
5268 [(set (match_operand:SI 0 "s_register_operand" "=r")
5269 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5270 "TARGET_32BIT && flag_pic"
5272 [(set_attr "type" "load1")
5273 (set_attr "pool_range" "4096")
5274 (set (attr "neg_pool_range")
5275 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: lo register destination, 1024-byte pool range.
5280 (define_insn "pic_load_addr_thumb1"
5281 [(set (match_operand:SI 0 "s_register_operand" "=l")
5282 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5283 "TARGET_THUMB1 && flag_pic"
5285 [(set_attr "type" "load1")
5286 (set (attr "pool_range") (const_int 1024))]
;; Add PC to the PIC base (Thumb form: pc reads as .+4); emits the
;; LPICn label reload of the literal refers back to.
5289 (define_insn "pic_add_dot_plus_four"
5290 [(set (match_operand:SI 0 "register_operand" "=r")
5291 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5293 (match_operand 2 "" "")]
5297 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5298 INTVAL (operands[2]));
5299 return \"add\\t%0, %|pc\";
5301 [(set_attr "length" "2")]
;; ARM form: pc reads as .+8.
5304 (define_insn "pic_add_dot_plus_eight"
5305 [(set (match_operand:SI 0 "register_operand" "=r")
5306 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5308 (match_operand 2 "" "")]
5312 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5313 INTVAL (operands[2]));
5314 return \"add%?\\t%0, %|pc, %1\";
5316 [(set_attr "predicable" "yes")]
;; Combined PC-relative load used for TLS: ldr rD, [pc, rN].
5319 (define_insn "tls_load_dot_plus_eight"
5320 [(set (match_operand:SI 0 "register_operand" "=r")
5321 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5323 (match_operand 2 "" "")]
5327 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5328 INTVAL (operands[2]));
5329 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5331 [(set_attr "predicable" "yes")]
5334 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5335 ;; followed by a load. These sequences can be crunched down to
5336 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole: crunch pic_add_dot_plus_eight followed by a load through
;; the result into a single tls_load_dot_plus_eight when the
;; intermediate register is dead.  (The "(define_peephole2" opening
;; line and replacement tail are not visible in this extract.)
5339 [(set (match_operand:SI 0 "register_operand" "")
5340 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5342 (match_operand 1 "" "")]
5344 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5345 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5347 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load a PIC offset through the GOT base register.
5354 (define_insn "pic_offset_arm"
5355 [(set (match_operand:SI 0 "register_operand" "=r")
5356 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5357 (unspec:SI [(match_operand:SI 2 "" "X")]
5358 UNSPEC_PIC_OFFSET))))]
5359 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5360 "ldr%?\\t%0, [%1,%2]"
5361 [(set_attr "type" "load1")]
;; Reload the PIC register after a longjmp, using r3 as scratch.
5364 (define_expand "builtin_setjmp_receiver"
5365 [(label_ref (match_operand 0 "" ""))]
5369 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5371 if (arm_pic_register != INVALID_REGNUM)
5372 arm_load_pic_register (1UL << 3);
5376 ;; If copying one reg to another we can set the condition codes according to
5377 ;; its value. Such a move is common after a return from subroutine and the
5378 ;; result is being tested against zero.
;; Move a register while setting the condition codes from its value
;; (common after a subroutine return tested against zero).  Asm
;; template lines are missing from this extract.
5380 (define_insn "*movsi_compare0"
5381 [(set (reg:CC CC_REGNUM)
5382 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5384 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5390 [(set_attr "conds" "set")]
5393 ;; Subroutine to store a half word from a register into memory.
5394 ;; Operand 0 is the source register (HImode)
5395 ;; Operand 1 is the destination address in a register (SImode)
5397 ;; In both this routine and the next, we must be careful not to spill
5398 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5399 ;; can generate unrecognizable rtl.
;; Store an HImode register (operand 0) to memory (operand 1) as two
;; QImode stores, little-endian order: low byte first, then the high
;; byte (value >> 8) at address+1.  A reg+large_const address is forced
;; into a register first to avoid unrecognizable PLUS rtl.
5401 (define_expand "storehi"
5402   [;; store the low byte
5403    (set (match_operand 1 "" "") (match_dup 3))
5404    ;; extract the high byte
5406 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5407    ;; store the high byte
5408    (set (match_dup 4) (match_dup 5))]
5412     rtx op1 = operands[1];
5413     rtx addr = XEXP (op1, 0);
5414     enum rtx_code code = GET_CODE (addr);
5416     if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5418       op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5420     operands[4] = adjust_address (op1, QImode, 1);
5421     operands[1] = adjust_address (operands[1], QImode, 0);
5422     operands[3] = gen_lowpart (QImode, operands[0]);
5423     operands[0] = gen_lowpart (SImode, operands[0]);
5424     operands[2] = gen_reg_rtx (SImode);
5425     operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of storehi: the byte at address+1 (operand 4) gets
;; the shifted half, the byte at the base address gets the other half.
;; Same reg+large_const address precaution as storehi.
5429 (define_expand "storehi_bigend"
5430   [(set (match_dup 4) (match_dup 3))
5432 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5433    (set (match_operand 1 "" "") (match_dup 5))]
5437     rtx op1 = operands[1];
5438     rtx addr = XEXP (op1, 0);
5439     enum rtx_code code = GET_CODE (addr);
5441     if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5443       op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5445     operands[4] = adjust_address (op1, QImode, 1);
5446     operands[1] = adjust_address (operands[1], QImode, 0);
5447     operands[3] = gen_lowpart (QImode, operands[0]);
5448     operands[0] = gen_lowpart (SImode, operands[0]);
5449     operands[2] = gen_reg_rtx (SImode);
5450     operands[5] = gen_lowpart (QImode, operands[2]);
5454 ;; Subroutine to store a half word integer constant into memory.
;; Store a CONST_INT halfword to memory as two byte stores.  The two
;; byte values are loaded into SImode registers; when both bytes of the
;; constant are equal, the same register is reused for both stores.
;; Byte order follows BYTES_BIG_ENDIAN.
5455 (define_expand "storeinthi"
5456   [(set (match_operand 0 "" "")
5457 (match_operand 1 "" ""))
5458    (set (match_dup 3) (match_dup 2))]
5462     HOST_WIDE_INT value = INTVAL (operands[1]);
5463     rtx addr = XEXP (operands[0], 0);
5464     rtx op0 = operands[0];
5465     enum rtx_code code = GET_CODE (addr);
5467     if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5469       op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5471     operands[1] = gen_reg_rtx (SImode);
5472     if (BYTES_BIG_ENDIAN)
5474 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5475 if ((value & 255) == ((value >> 8) & 255))
5476   operands[2] = operands[1];
5479     operands[2] = gen_reg_rtx (SImode);
5480     emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5485 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5486 if ((value & 255) == ((value >> 8) & 255))
5487   operands[2] = operands[1];
5490     operands[2] = gen_reg_rtx (SImode);
5491     emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5495     operands[3] = adjust_address (op0, QImode, 1);
5496     operands[0] = adjust_address (operands[0], QImode, 0);
5497     operands[2] = gen_lowpart (QImode, operands[2]);
5498     operands[1] = gen_lowpart (QImode, operands[1]);
;; On ARMv4+ a halfword can be stored with a single strh; just force the
;; source into a register if it is not one already.
5502 (define_expand "storehi_single_op"
5503   [(set (match_operand:HI 0 "memory_operand" "")
5504 (match_operand:HI 1 "general_operand" ""))]
5505   "TARGET_32BIT && arm_arch4"
5507   if (!s_register_operand (operands[1], HImode))
5508     operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; General HImode move expander.  Dispatches on target (ARM / Thumb-2 /
;; Thumb-1) and operand kinds: stores go through storehi_single_op /
;; storeinthi / storehi(_bigend); constants are massaged into a form
;; loadable in one instruction; pre-ARMv4 loads are synthesized from
;; aligned SImode loads or byte loads.  NOTE(review): several lines
;; (else branches, closing braces) are missing from this extract.
5512 (define_expand "movhi"
5513   [(set (match_operand:HI 0 "general_operand" "")
5514 (match_operand:HI 1 "general_operand" ""))]
5519   if (can_create_pseudo_p ())
5521       if (GET_CODE (operands[0]) == MEM)
5525       emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5528   if (GET_CODE (operands[1]) == CONST_INT)
5529     emit_insn (gen_storeinthi (operands[0], operands[1]));
5532       if (GET_CODE (operands[1]) == MEM)
5533 operands[1] = force_reg (HImode, operands[1]);
5534       if (BYTES_BIG_ENDIAN)
5535 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5537 emit_insn (gen_storehi (operands[1], operands[0]));
5541       /* Sign extend a constant, and keep it in an SImode reg.  */
5542       else if (GET_CODE (operands[1]) == CONST_INT)
5544   rtx reg = gen_reg_rtx (SImode);
5545   HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5547   /* If the constant is already valid, leave it alone.  */
5548   if (!const_ok_for_arm (val))
5550       /* If setting all the top bits will make the constant
5551  loadable in a single instruction, then set them.
5552  Otherwise, sign extend the number.  */
5554       if (const_ok_for_arm (~(val | ~0xffff)))
5556       else if (val & 0x8000)
5560   emit_insn (gen_movsi (reg, GEN_INT (val)));
5561   operands[1] = gen_lowpart (HImode, reg);
5563       else if (arm_arch4 && optimize && can_create_pseudo_p ()
5564        && GET_CODE (operands[1]) == MEM)
5566   rtx reg = gen_reg_rtx (SImode);
5568   emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5569   operands[1] = gen_lowpart (HImode, reg);
5571       else if (!arm_arch4)
5573   if (GET_CODE (operands[1]) == MEM)
5576       rtx offset = const0_rtx;
5577       rtx reg = gen_reg_rtx (SImode);
5579       if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5580    || (GET_CODE (base) == PLUS
5581        && (GET_CODE (offset = XEXP (base, 1))
5583        && ((INTVAL(offset) & 1) != 1)
5584        && GET_CODE (base = XEXP (base, 0)) == REG))
5585   && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
;; Pre-ARMv4 has no ldrh: load the containing aligned word and, if the
;; halfword is in the upper half, shift it down by 16.
5589   new_rtx = widen_memory_access (operands[1], SImode,
5590          ((INTVAL (offset) & ~3)
5591   - INTVAL (offset)));
5592   emit_insn (gen_movsi (reg, new_rtx));
5593   if (((INTVAL (offset) & 2) != 0)
5594       ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5596       rtx reg2 = gen_reg_rtx (SImode);
5598       emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5603 emit_insn (gen_movhi_bytes (reg, operands[1]));
5605       operands[1] = gen_lowpart (HImode, reg);
5609   /* Handle loading a large integer during reload.  */
5610   else if (GET_CODE (operands[1]) == CONST_INT
5611    && !const_ok_for_arm (INTVAL (operands[1]))
5612    && !const_ok_for_arm (~INTVAL (operands[1])))
5614       /* Writing a constant to memory needs a scratch, which should
5615  be handled with SECONDARY_RELOADs.  */
5616       gcc_assert (GET_CODE (operands[0]) == REG);
5618       operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5619       emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-2: everything except mem=mem and mem=const is easy.
5623   else if (TARGET_THUMB2)
5625       /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
5626       if (can_create_pseudo_p ())
5628   if (GET_CODE (operands[0]) != REG)
5629     operands[1] = force_reg (HImode, operands[1]);
5630   /* Zero extend a constant, and keep it in an SImode reg.  */
5631   else if (GET_CODE (operands[1]) == CONST_INT)
5633       rtx reg = gen_reg_rtx (SImode);
5634       HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5636       emit_insn (gen_movsi (reg, GEN_INT (val)));
5637       operands[1] = gen_lowpart (HImode, reg);
;; Thumb-1 fall-back: constants via SImode, invalid addresses copied to
;; registers, memory sources zero-extended when optimizing.
5641   else /* TARGET_THUMB1 */
5643       if (can_create_pseudo_p ())
5645           if (GET_CODE (operands[1]) == CONST_INT)
5647       rtx reg = gen_reg_rtx (SImode);
5649       emit_insn (gen_movsi (reg, operands[1]));
5650       operands[1] = gen_lowpart (HImode, reg);
5653           /* ??? We shouldn't really get invalid addresses here, but this can
5654      happen if we are passed a SP (never OK for HImode/QImode) or
5655      virtual register (also rejected as illegitimate for HImode/QImode)
5656      relative address.  */
5657           /* ??? This should perhaps be fixed elsewhere, for instance, in
5658      fixup_stack_1, by checking for other kinds of invalid addresses,
5659      e.g. a bare reference to a virtual register.  This may confuse the
5660      alpha though, which must handle this case differently.  */
5661           if (GET_CODE (operands[0]) == MEM
5662       && !memory_address_p (GET_MODE (operands[0]),
5663     XEXP (operands[0], 0)))
5665       = replace_equiv_address (operands[0],
5666        copy_to_reg (XEXP (operands[0], 0)));
5668           if (GET_CODE (operands[1]) == MEM
5669       && !memory_address_p (GET_MODE (operands[1]),
5670     XEXP (operands[1], 0)))
5672       = replace_equiv_address (operands[1],
5673        copy_to_reg (XEXP (operands[1], 0)));
5675           if (GET_CODE (operands[1]) == MEM && optimize > 0)
5677       rtx reg = gen_reg_rtx (SImode);
5679       emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5680       operands[1] = gen_lowpart (HImode, reg);
5683           if (GET_CODE (operands[0]) == MEM)
5684     operands[1] = force_reg (HImode, operands[1]);
5686       else if (GET_CODE (operands[1]) == CONST_INT
5687         && !satisfies_constraint_I (operands[1]))
5689   /* Handle loading a large integer during reload.  */
5691   /* Writing a constant to memory needs a scratch, which should
5692      be handled with SECONDARY_RELOADs.  */
5693   gcc_assert (GET_CODE (operands[0]) == REG);
5695   operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5696   emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move: reg/reg (via add #0 or mov), ldrh/strh for
;; memory, and an immediate alternative.  The load case rewrites an
;; SP-based index address through the destination register first,
;; because SP cannot be used as an ldrh base/index here.
5703 (define_insn "*thumb1_movhi_insn"
5704   [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5705 (match_operand:HI 1 "general_operand"       "l,m,l,*h,*r,I"))]
5707    && (   register_operand (operands[0], HImode)
5708        || register_operand (operands[1], HImode))"
5710   switch (which_alternative)
5712     case 0: return \"add %0, %1, #0\";
5713     case 2: return \"strh %1, %0\";
5714     case 3: return \"mov %0, %1\";
5715     case 4: return \"mov %0, %1\";
5716     case 5: return \"mov %0, %1\";
5717     default: gcc_unreachable ();
5719       /* The stack pointer can end up being taken as an index register.
5720           Catch this case here and deal with it.  */
5721       if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5722   && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5723   && REGNO    (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5726           ops[0] = operands[0];
5727           ops[1] = XEXP (XEXP (operands[1], 0), 0);
5729           output_asm_insn (\"mov %0, %1\", ops);
5731           XEXP (XEXP (operands[1], 0), 0) = operands[0];
5734       return \"ldrh %0, %1\";
5736   [(set_attr "length" "2,4,2,2,2,2")
5737    (set_attr "type" "*,load1,store1,*,*,*")
5738    (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Load a halfword on targets without ldrh: two zero-extending byte
;; loads at addr and addr+1, combined with (high << 8) | low.  Operands
;; 4/5 select which byte is the high part according to endianness.
5741 (define_expand "movhi_bytes"
5742   [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5744 (zero_extend:SI (match_dup 6)))
5745    (set (match_operand:SI 0 "" "")
5746 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5751     rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5753     mem1 = change_address (operands[1], QImode, addr);
5754     mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5755     operands[0] = gen_lowpart (SImode, operands[0]);
5757     operands[2] = gen_reg_rtx (SImode);
5758     operands[3] = gen_reg_rtx (SImode);
5761     if (BYTES_BIG_ENDIAN)
5763 operands[4] = operands[2];
5764 operands[5] = operands[3];
5768 operands[4] = operands[3];
5769 operands[5] = operands[2];
;; Big-endian halfword load: load the word containing the halfword,
;; rotate/shift it so the halfword lands in the low 16 bits, then take
;; the HImode lowpart.
5774 (define_expand "movhi_bigend"
5776 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5779 (ashiftrt:SI (match_dup 2) (const_int 16)))
5780    (set (match_operand:HI 0 "s_register_operand" "")
5784   operands[2] = gen_reg_rtx (SImode);
5785   operands[3] = gen_reg_rtx (SImode);
5786   operands[4] = gen_lowpart (HImode, operands[3]);
5790 ;; Pattern to recognize insn generated default case above
;; ARMv4+ HImode move: mov/mvn for register-or-immediate forms, strh and
;; ldrh for memory.  The condition rejects constants that neither
;; const_ok_for_arm nor its complement can encode.
5791 (define_insn "*movhi_insn_arch4"
5792   [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5793 (match_operand:HI 1 "general_operand"      "rI,K,r,m"))]
5796    && (GET_CODE (operands[1]) != CONST_INT
5797        || const_ok_for_arm (INTVAL (operands[1]))
5798        || const_ok_for_arm (~INTVAL (operands[1])))"
5800    mov%?\\t%0, %1\\t%@ movhi
5801    mvn%?\\t%0, #%B1\\t%@ movhi
5802    str%(h%)\\t%1, %0\\t%@ movhi
5803    ldr%(h%)\\t%0, %1\\t%@ movhi"
5804   [(set_attr "type" "*,*,store1,load1")
5805    (set_attr "predicable" "yes")
5806    (set_attr "insn" "mov,mvn,*,*")
5807    (set_attr "pool_range" "*,*,*,256")
5808    (set_attr "neg_pool_range" "*,*,*,244")]
;; Register-only HImode move (mov, or mvn of the bitwise complement for
;; 'K' constants); predicable.
5811 (define_insn "*movhi_bytes"
5812   [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5813 (match_operand:HI 1 "arm_rhs_operand"  "rI,K"))]
5816    mov%?\\t%0, %1\\t%@ movhi
5817    mvn%?\\t%0, #%B1\\t%@ movhi"
5818   [(set_attr "predicable" "yes")
5819    (set_attr "insn" "mov,mvn")]
;; Thumb halfword store with a DImode scratch (operand 2).  When the
;; address is already strict and the source is a lo register it reduces
;; to a plain movhi; other cases are acknowledged as unhandled (XXX).
5822 (define_expand "thumb_movhi_clobber"
5823   [(set (match_operand:HI     0 "memory_operand"   "")
5824 (match_operand:HI     1 "register_operand" ""))
5825    (clobber (match_operand:DI 2 "register_operand" ""))]
5828   if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5829       && REGNO (operands[1]) <= LAST_LO_REGNUM)
5831       emit_insn (gen_movhi (operands[0], operands[1]));
5834   /* XXX Fixme, need to handle other cases here as well.  */
5839 ;; We use a DImode scratch because we may occasionally need an additional
5840 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5841 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; Secondary reloads for HImode.  The DImode scratch provides two
;; registers in case the address is not offsettable (see comment above).
;; NOTE(review): the ARM/Thumb dispatch conditions are missing from this
;; extract; reload_inhi calling thumb_reload_out_hi for the Thumb path
;; matches the upstream source, but confirm against the full file.
5842 (define_expand "reload_outhi"
5843   [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5844       (match_operand:HI 1 "s_register_operand"        "r")
5845       (match_operand:DI 2 "s_register_operand"        "=&l")])]
5848     arm_reload_out_hi (operands);
5850     thumb_reload_out_hi (operands);
5855 (define_expand "reload_inhi"
5856   [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5857       (match_operand:HI 1 "arm_reload_memory_operand" "o")
5858       (match_operand:DI 2 "s_register_operand" "=&r")])]
5862     arm_reload_in_hi (operands);
5864     thumb_reload_out_hi (operands);
;; General QImode move expander.  Constants are loaded through an SImode
;; register (masked to 8 bits on Thumb so a movs immediate can be used);
;; invalid addresses are copied to registers; mem sources are
;; zero-extended when optimizing; mem=mem / mem=const are forced through
;; a register.  The tail handles large constants during reload.
5868 (define_expand "movqi"
5869   [(set (match_operand:QI 0 "general_operand" "")
5870         (match_operand:QI 1 "general_operand" ""))]
5873   /* Everything except mem = const or mem = mem can be done easily */
5875   if (can_create_pseudo_p ())
5877       if (GET_CODE (operands[1]) == CONST_INT)
5879   rtx reg = gen_reg_rtx (SImode);
5881   /* For thumb we want an unsigned immediate, then we are more likely
5882      to be able to use a movs insn.  */
5884     operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5886   emit_insn (gen_movsi (reg, operands[1]));
5887   operands[1] = gen_lowpart (QImode, reg);
5892       /* ??? We shouldn't really get invalid addresses here, but this can
5893  happen if we are passed a SP (never OK for HImode/QImode) or
5894  virtual register (also rejected as illegitimate for HImode/QImode)
5895  relative address.  */
5896       /* ??? This should perhaps be fixed elsewhere, for instance, in
5897  fixup_stack_1, by checking for other kinds of invalid addresses,
5898  e.g. a bare reference to a virtual register.  This may confuse the
5899  alpha though, which must handle this case differently.  */
5900       if (GET_CODE (operands[0]) == MEM
5901   && !memory_address_p (GET_MODE (operands[0]),
5902 XEXP (operands[0], 0)))
5904   = replace_equiv_address (operands[0],
5905    copy_to_reg (XEXP (operands[0], 0)));
5906       if (GET_CODE (operands[1]) == MEM
5907   && !memory_address_p (GET_MODE (operands[1]),
5908 XEXP (operands[1], 0)))
5910   = replace_equiv_address (operands[1],
5911    copy_to_reg (XEXP (operands[1], 0)));
5914       if (GET_CODE (operands[1]) == MEM && optimize > 0)
5916   rtx reg = gen_reg_rtx (SImode);
5918   emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5919   operands[1] = gen_lowpart (QImode, reg);
5922       if (GET_CODE (operands[0]) == MEM)
5923 operands[1] = force_reg (QImode, operands[1]);
5925   else if (TARGET_THUMB
5926    && GET_CODE (operands[1]) == CONST_INT
5927    && !satisfies_constraint_I (operands[1]))
5929       /* Handle loading a large integer during reload.  */
5931       /* Writing a constant to memory needs a scratch, which should
5932  be handled with SECONDARY_RELOADs.  */
5933       gcc_assert (GET_CODE (operands[0]) == REG);
5935       operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5936       emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM QImode move: mov/mvn for register-or-immediate, ldrb/strb for
;; memory (templates for alternatives are on lines missing from this
;; extract).
5943 (define_insn "*arm_movqi_insn"
5944   [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5945 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5947    && (   register_operand (operands[0], QImode)
5948        || register_operand (operands[1], QImode))"
5954   [(set_attr "type" "*,*,load1,store1")
5955    (set_attr "insn" "mov,mvn,*,*")
5956    (set_attr "predicable" "yes")]
;; Thumb-1 QImode move; alternative templates are likewise on lines
;; missing from this extract.
5959 (define_insn "*thumb1_movqi_insn"
5960   [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5961 (match_operand:QI 1 "general_operand"      "l, m,l,*h,*r,I"))]
5963    && (   register_operand (operands[0], QImode)
5964        || register_operand (operands[1], QImode))"
5972   [(set_attr "length" "2")
5973    (set_attr "type" "*,load1,store1,*,*,*")
5974    (set_attr "insn" "*,*,*,mov,mov,mov")
5975    (set_attr "pool_range" "*,32,*,*,*,*")
5976    (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode (half-precision float) move expander: force the source into a
;; register for stores, and on Thumb-1 for any non-register destination.
5979 (define_expand "movhf"
5980   [(set (match_operand:HF 0 "general_operand" "")
5981 (match_operand:HF 1 "general_operand" ""))]
5986       if (GET_CODE (operands[0]) == MEM)
5987 operands[1] = force_reg (HFmode, operands[1]);
5989   else /* TARGET_THUMB1 */
5991       if (can_create_pseudo_p ())
5993   if (GET_CODE (operands[0]) != REG)
5994     operands[1] = force_reg (HFmode, operands[1]);
;; 32-bit-target __fp16 move without hardware FP16: ldrh/strh for
;; memory, mov for reg-reg, and for a CONST_DOUBLE the bit pattern is
;; materialized with movw (Thumb-2-capable cores) or mov+orr of the two
;; byte halves.
6000 (define_insn "*arm32_movhf"
6001   [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6002 (match_operand:HF 1 "general_operand"    " m,r,r,F"))]
6003   "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6004    && (   s_register_operand (operands[0], HFmode)
6005        || s_register_operand (operands[1], HFmode))"
6007   switch (which_alternative)
6009     case 0: /* ARM register from memory */
6010       return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6011     case 1: /* memory from ARM register */
6012       return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6013     case 2: /* ARM register from ARM register */
6014       return \"mov%?\\t%0, %1\\t%@ __fp16\";
6015     case 3: /* ARM register from constant */
6021 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6022 bits = real_to_target (NULL, &r, HFmode);
6023 ops[0] = operands[0];
6024 ops[1] = GEN_INT (bits);
6025 ops[2] = GEN_INT (bits & 0xff00);
6026 ops[3] = GEN_INT (bits & 0x00ff);
6028 if (arm_arch_thumb2)
6029   output_asm_insn (\"movw%?\\t%0, %1\", ops);
6031     output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6038   [(set_attr "conds" "unconditional")
6039    (set_attr "type" "load1,store1,*,*")
6040    (set_attr "insn" "*,*,mov,mov")
6041    (set_attr "length" "4,4,4,8")
6042    (set_attr "predicable" "yes")]
;; Thumb-1 __fp16 move.  A load whose address is a label reference (or
;; label+offset CONST) is a constant-pool entry and uses a word ldr;
;; other loads use ldrh, stores strh, and the rest plain mov.
6045 (define_insn "*thumb1_movhf"
6046   [(set (match_operand:HF     0 "nonimmediate_operand" "=l,l,m,*r,*h")
6047 (match_operand:HF     1 "general_operand"      "l,mF,l,*h,*r"))]
6049    && (   s_register_operand (operands[0], HFmode)
6050        || s_register_operand (operands[1], HFmode))"
6052   switch (which_alternative)
6057       gcc_assert (GET_CODE(operands[1]) == MEM);
6058       addr = XEXP (operands[1], 0);
6059       if (GET_CODE (addr) == LABEL_REF
6060   || (GET_CODE (addr) == CONST
6061       && GET_CODE (XEXP (addr, 0)) == PLUS
6062       && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6063       && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6065   /* Constant pool entry.  */
6066   return \"ldr\\t%0, %1\";
6068       return \"ldrh\\t%0, %1\";
6070     case 2: return \"strh\\t%1, %0\";
6071     default: return \"mov\\t%0, %1\";
6074   [(set_attr "length" "2")
6075    (set_attr "type" "*,load1,store1,*,*")
6076    (set_attr "insn" "mov,*,*,mov,mov")
6077    (set_attr "pool_range" "*,1020,*,*,*")
6078    (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; SFmode move expander: same shape as movhf — force source to a
;; register for stores, and on Thumb-1 for non-register destinations.
6080 (define_expand "movsf"
6081   [(set (match_operand:SF 0 "general_operand" "")
6082 (match_operand:SF 1 "general_operand" ""))]
6087       if (GET_CODE (operands[0]) == MEM)
6088 operands[1] = force_reg (SFmode, operands[1]);
6090   else /* TARGET_THUMB1 */
6092       if (can_create_pseudo_p ())
6094   if (GET_CODE (operands[0]) != REG)
6095     operands[1] = force_reg (SFmode, operands[1]);
6101 ;; Transform a floating-point move of a constant into a core register into
6102 ;; an SImode operation.
6104 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6105 (match_operand:SF 1 "immediate_operand" ""))]
6108 && GET_CODE (operands[1]) == CONST_DOUBLE"
6109 [(set (match_dup 2) (match_dup 3))]
6111 operands[2] = gen_lowpart (SImode, operands[0]);
6112 operands[3] = gen_lowpart (SImode, operands[1]);
6113 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move in core registers: mov / ldr / str (first
;; alternative's template is on a line missing from this extract).
6118 (define_insn "*arm_movsf_soft_insn"
6119   [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6120 (match_operand:SF 1 "general_operand"  "r,mE,r"))]
6122    && TARGET_SOFT_FLOAT
6123    && (GET_CODE (operands[0]) != MEM
6124        || register_operand (operands[1], SFmode))"
6127    ldr%?\\t%0, %1\\t%@ float
6128    str%?\\t%1, %0\\t%@ float"
6129   [(set_attr "predicable" "yes")
6130    (set_attr "type" "*,load1,store1")
6131    (set_attr "insn" "mov,*,*")
6132    (set_attr "pool_range" "*,4096,*")
6133    (set_attr "arm_neg_pool_range" "*,4084,*")
6134    (set_attr "thumb2_neg_pool_range" "*,0,*")]
6137 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move; output templates are on lines missing from this
;; extract.
6138 (define_insn "*thumb1_movsf_insn"
6139   [(set (match_operand:SF     0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6140 (match_operand:SF     1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
6142    && (   register_operand (operands[0], SFmode)
6143        || register_operand (operands[1], SFmode))"
6152   [(set_attr "length" "2")
6153    (set_attr "type" "*,load1,store1,load1,store1,*,*")
6154    (set_attr "pool_range" "*,*,*,1020,*,*,*")
6155    (set_attr "insn" "*,*,*,*,*,mov,mov")
6156    (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; DFmode move expander: force source to a register for stores, and on
;; Thumb for non-register destinations.
6159 (define_expand "movdf"
6160   [(set (match_operand:DF 0 "general_operand" "")
6161 (match_operand:DF 1 "general_operand" ""))]
6166       if (GET_CODE (operands[0]) == MEM)
6167 operands[1] = force_reg (DFmode, operands[1]);
6169   else /* TARGET_THUMB */
6171       if (can_create_pseudo_p ())
6173   if (GET_CODE (operands[0]) != REG)
6174     operands[1] = force_reg (DFmode, operands[1]);
6180 ;; Reloading a df mode value stored in integer regs to memory can require a
;; Reload a DFmode value held in integer registers out to memory,
;; using SImode scratch operand 2 to materialize the address.  Simple
;; auto-inc/dec forms become a DImode move; PRE_INC bumps the base by 8
;; first; POST_DEC stores then subtracts 8 afterwards; other addresses
;; are computed into the scratch with an add.
6182 (define_expand "reload_outdf"
6183   [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6184    (match_operand:DF 1 "s_register_operand" "r")
6185    (match_operand:SI 2 "s_register_operand" "=&r")]
6189     enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6192       operands[2] = XEXP (operands[0], 0);
6193     else if (code == POST_INC || code == PRE_DEC)
6195 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6196 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6197 emit_insn (gen_movdi (operands[0], operands[1]));
6200     else if (code == PRE_INC)
6202 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6204 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6207     else if (code == POST_DEC)
6208       operands[2] = XEXP (XEXP (operands[0], 0), 0);
6210       emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6211      XEXP (XEXP (operands[0], 0), 1)));
6213     emit_insn (gen_rtx_SET (VOIDmode,
6214     replace_equiv_address (operands[0], operands[2]),
6217     if (code == POST_DEC)
6218       emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move via core register pairs; all alternatives go
;; through output_move_double (constant alternatives Da/Db/Dc have
;; lengths 8/12/16).
6224 (define_insn "*movdf_soft_insn"
6225   [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6226 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6227   "TARGET_32BIT && TARGET_SOFT_FLOAT
6228    && (   register_operand (operands[0], DFmode)
6229        || register_operand (operands[1], DFmode))"
6231   switch (which_alternative)
6238       return output_move_double (operands);
6241   [(set_attr "length" "8,12,16,8,8")
6242    (set_attr "type" "*,*,*,load2,store2")
6243    (set_attr "pool_range" "*,*,*,1020,*")
6244    (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6245    (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Thumb DFmode move as a register pair (%0/%H0).  Reg-reg copies order
;; the two halves so the overlap case (REGNO dst == REGNO src + 1)
;; doesn't clobber the not-yet-copied half; ldmia/stmia handle the
;; pointer-register forms; the store alternative emits two str's.
6248 ;;; ??? This should have alternatives for constants.
6249 ;;; ??? This was originally identical to the movdi_insn pattern.
6250 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6251 ;;; thumb_reorg with a memory reference.
6252 (define_insn "*thumb_movdf_insn"
6253   [(set (match_operand:DF     0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6254 (match_operand:DF     1 "general_operand"      "l, >,l,mF,l,*r"))]
6256    && (   register_operand (operands[0], DFmode)
6257        || register_operand (operands[1], DFmode))"
6259   switch (which_alternative)
6263       if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6264 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6265       return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6267       return \"ldmia\\t%1, {%0, %H0}\";
6269       return \"stmia\\t%0, {%1, %H1}\";
6271       return thumb_load_double_from_address (operands);
6273       operands[2] = gen_rtx_MEM (SImode,
6274  plus_constant (XEXP (operands[0], 0), 4));
6275       output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6278       if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6279 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6280       return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6283   [(set_attr "length" "4,2,2,6,4,4")
6284    (set_attr "type" "*,load2,store2,load2,store2,*")
6285    (set_attr "insn" "*,*,*,*,*,mov")
6286    (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode (FPA extended-precision) move; only for hard-float FPA
;; targets.  Stores force the source into a register.
6289 (define_expand "movxf"
6290   [(set (match_operand:XF 0 "general_operand" "")
6291 (match_operand:XF 1 "general_operand" ""))]
6292   "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6294   if (GET_CODE (operands[0]) == MEM)
6295     operands[1] = force_reg (XFmode, operands[1]);
6301 ;; load- and store-multiple insns
6302 ;; The arm can load/store any set of registers, provided that they are in
6303 ;; ascending order, but these expanders assume a contiguous set.
;; ldm expander for a contiguous run of 2..14 core registers starting at
;; operand 0, from memory operand 1; bails out (FAIL lines missing from
;; this extract) unless the operands are the fixed-register/contiguous
;; shape arm_gen_load_multiple expects.
6305 (define_expand "load_multiple"
6306   [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6307                           (match_operand:SI 1 "" ""))
6308                      (use (match_operand:SI 2 "" ""))])]
6311   HOST_WIDE_INT offset = 0;
6313   /* Support only fixed point registers.  */
6314   if (GET_CODE (operands[2]) != CONST_INT
6315       || INTVAL (operands[2]) > 14
6316       || INTVAL (operands[2]) < 2
6317       || GET_CODE (operands[1]) != MEM
6318       || GET_CODE (operands[0]) != REG
6319       || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6320       || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6324     = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6325      INTVAL (operands[2]),
6326      force_reg (SImode, XEXP (operands[1], 0)),
6327      FALSE, operands[1], &offset);
;; stm expander; mirror image of load_multiple (operand 0 is the memory
;; destination, operand 1 the first register of the contiguous run).
6330 (define_expand "store_multiple"
6331   [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6332                           (match_operand:SI 1 "" ""))
6333                      (use (match_operand:SI 2 "" ""))])]
6336   HOST_WIDE_INT offset = 0;
6338   /* Support only fixed point registers.  */
6339   if (GET_CODE (operands[2]) != CONST_INT
6340       || INTVAL (operands[2]) > 14
6341       || INTVAL (operands[2]) < 2
6342       || GET_CODE (operands[1]) != REG
6343       || GET_CODE (operands[0]) != MEM
6344       || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6345       || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6349     = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6350       INTVAL (operands[2]),
6351       force_reg (SImode, XEXP (operands[0], 0)),
6352       FALSE, operands[0], &offset);
6356 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6357 ;; We could let this apply for blocks of less than this, but it clobbers so
6358 ;; many registers that there is then probably a better way.
;; Block-move expander (operand 2 = byte count, operand 3 = alignment).
;; 32-bit targets use arm_gen_movmemqi; Thumb-1 requires word alignment
;; and at most 48 bytes, else the expander gives up.
6360 (define_expand "movmemqi"
6361   [(match_operand:BLK 0 "general_operand" "")
6362    (match_operand:BLK 1 "general_operand" "")
6363    (match_operand:SI 2 "const_int_operand" "")
6364    (match_operand:SI 3 "const_int_operand" "")]
6369       if (arm_gen_movmemqi (operands))
6373   else /* TARGET_THUMB1 */
6375       if (   INTVAL (operands[3]) != 4
6376           || INTVAL (operands[2]) > 48)
6379       thumb_expand_movmemqi (operands);
6385 ;; Thumb block-move insns
;; Thumb block-move of 12 bytes: three word copies with post-increment
;; of both pointers (operands 0/1 receive base+12) and three lo-reg
;; scratches; emitted via thumb_output_move_mem_multiple (3, ...).
6387 (define_insn "movmem12b"
6388   [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6389 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6390    (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6391 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6392    (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6393 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6394    (set (match_operand:SI 0 "register_operand" "=l")
6395 (plus:SI (match_dup 2) (const_int 12)))
6396    (set (match_operand:SI 1 "register_operand" "=l")
6397 (plus:SI (match_dup 3) (const_int 12)))
6398    (clobber (match_scratch:SI 4 "=&l"))
6399    (clobber (match_scratch:SI 5 "=&l"))
6400    (clobber (match_scratch:SI 6 "=&l"))]
6402   "* return thumb_output_move_mem_multiple (3, operands);"
6403   [(set_attr "length" "4")
6404    ; This isn't entirely accurate...  It loads as well, but in terms of
6405    ; scheduling the following insn it is better to consider it as a store
6406    (set_attr "type" "store3")]
;; Same as movmem12b but 8 bytes / two word copies / two scratches.
6409 (define_insn "movmem8b"
6410   [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6411 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6412    (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6413 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6414    (set (match_operand:SI 0 "register_operand" "=l")
6415 (plus:SI (match_dup 2) (const_int 8)))
6416    (set (match_operand:SI 1 "register_operand" "=l")
6417 (plus:SI (match_dup 3) (const_int 8)))
6418    (clobber (match_scratch:SI 4 "=&l"))
6419    (clobber (match_scratch:SI 5 "=&l"))]
6421   "* return thumb_output_move_mem_multiple (2, operands);"
6422   [(set_attr "length" "4")
6423    ; This isn't entirely accurate...  It loads as well, but in terms of
6424    ; scheduling the following insn it is better to consider it as a store
6425    (set_attr "type" "store2")]
6430 ;; Compare & branch insns
6431 ;; The range calculations are based as follows:
6432 ;; For forward branches, the address calculation returns the address of
6433 ;; the next instruction. This is 2 beyond the branch instruction.
6434 ;; For backward branches, the address calculation returns the address of
6435 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6436 ;; instruction for the shortest sequence, and 4 before the branch instruction
6437 ;; if we have to jump around an unconditional branch.
6438 ;; To the basic branch range the PC offset must be added (this is +4).
6439 ;; So for forward branches we have
6440 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6441 ;; And for backward branches we have
6442 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6444 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6445 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch expander.  32-bit targets legitimize
;; operand 2 for arm_add_operand and emit cbranch_cc; Thumb-1 prefers
;; the negated-immediate scratch pattern when it applies, else forces a
;; thumb1_cmp_operand.
6447 (define_expand "cbranchsi4"
6448   [(set (pc) (if_then_else
6449       (match_operator 0 "arm_comparison_operator"
6450        [(match_operand:SI 1 "s_register_operand" "")
6451         (match_operand:SI 2 "nonmemory_operand" "")])
6452       (label_ref (match_operand 3 "" ""))
6454   "TARGET_THUMB1 || TARGET_32BIT"
6458       if (!arm_add_operand (operands[2], SImode))
6459 operands[2] = force_reg (SImode, operands[2]);
6460       emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6464   if (thumb1_cmpneg_operand (operands[2], SImode))
6466       emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6467       operands[3], operands[0]));
6470   if (!thumb1_cmp_operand (operands[2], SImode))
6471     operands[2] = force_reg (SImode, operands[2]);
6474 ;; A pattern to recognize a special situation and optimize for it.
6475 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6476 ;; due to the available addressing modes. Hence, convert a signed comparison
6477 ;; with zero into an unsigned comparison with 127 if possible.
;; Rewrite a signed QImode compare-with-zero from memory into an
;; unsigned SImode compare against 127 on a zero-extended load
;; (GE -> LEU, LT -> GTU), per the comment above about Thumb addressing
;; modes.
6478 (define_expand "cbranchqi4"
6479   [(set (pc) (if_then_else
6480       (match_operator 0 "lt_ge_comparison_operator"
6481        [(match_operand:QI 1 "memory_operand" "")
6482         (match_operand:QI 2 "const0_operand" "")])
6483       (label_ref (match_operand 3 "" ""))
6488   xops[1] = gen_reg_rtx (SImode);
6489   emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6490   xops[2] = GEN_INT (127);
6491   xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6492     VOIDmode, xops[1], xops[2]);
6493   xops[3] = operands[3];
6494   emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode compare-and-branch: delegate straight to cbranch_cc on
;; hard-float 32-bit targets.
6498 (define_expand "cbranchsf4"
6499   [(set (pc) (if_then_else
6500       (match_operator 0 "arm_comparison_operator"
6501        [(match_operand:SF 1 "s_register_operand" "")
6502         (match_operand:SF 2 "arm_float_compare_operand" "")])
6503       (label_ref (match_operand 3 "" ""))
6505   "TARGET_32BIT && TARGET_HARD_FLOAT"
6506   "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6507    operands[3])); DONE;"
;; DFmode variant; additionally requires double-precision FP hardware
;; (!TARGET_VFP_SINGLE).
6510 (define_expand "cbranchdf4"
6511   [(set (pc) (if_then_else
6512       (match_operator 0 "arm_comparison_operator"
6513        [(match_operand:DF 1 "s_register_operand" "")
6514         (match_operand:DF 2 "arm_float_compare_operand" "")])
6515       (label_ref (match_operand 3 "" ""))
6517   "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6518   "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6519    operands[3])); DONE;"
;; DImode conditional branch.  Comparisons that arm_gen_compare_reg cannot
;; handle directly are first rewritten with swapped operands (see below).
6522 (define_expand "cbranchdi4"
6523 [(set (pc) (if_then_else
6524 (match_operator 0 "arm_comparison_operator"
6525 [(match_operand:DI 1 "cmpdi_operand" "")
6526 (match_operand:DI 2 "cmpdi_operand" "")])
6527 (label_ref (match_operand 3 "" ""))
6531 rtx swap = NULL_RTX;
6532 enum rtx_code code = GET_CODE (operands[0]);
6534 /* We should not have two constants. */
6535 gcc_assert (GET_MODE (operands[1]) == DImode
6536 || GET_MODE (operands[2]) == DImode);
6538 /* Flip unimplemented DImode comparisons to a form that
6539 arm_gen_compare_reg can handle. */
6543 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6545 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6547 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6549 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6554 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6557 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6563 (define_insn "cbranchsi4_insn"
6564 [(set (pc) (if_then_else
6565 (match_operator 0 "arm_comparison_operator"
6566 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6567 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6568 (label_ref (match_operand 3 "" ""))
6572 rtx t = cfun->machine->thumb1_cc_insn;
/* Emit the "cmp" only when the flags do not already reflect a compare of
   these same operands (tracked in cfun->machine->thumb1_cc_*).  */
6575 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6576 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6578 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6580 if (!noov_comparison_operator (operands[0], VOIDmode))
6583 else if (cfun->machine->thumb1_cc_mode != CCmode)
6588 output_asm_insn ("cmp\t%1, %2", operands);
6589 cfun->machine->thumb1_cc_insn = insn;
6590 cfun->machine->thumb1_cc_op0 = operands[1];
6591 cfun->machine->thumb1_cc_op1 = operands[2];
6592 cfun->machine->thumb1_cc_mode = CCmode;
6595 /* Ensure we emit the right type of condition code on the jump. */
6596 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
/* Short branches use a single "b<cond>"; longer ranges invert the
   condition and branch around an unconditional (or far) jump.  */
6599 switch (get_attr_length (insn))
6601 case 4: return \"b%d0\\t%l3\";
6602 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6603 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6606 [(set (attr "far_jump")
6608 (eq_attr "length" "8")
6609 (const_string "yes")
6610 (const_string "no")))
6611 (set (attr "length")
6613 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6614 (le (minus (match_dup 3) (pc)) (const_int 256)))
6617 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6618 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Comparison against a negatable immediate: the "add %0, %1, #%n2" below
;; adds the negation of operand 2 into a scratch register to set the flags.
6623 (define_insn "cbranchsi4_scratch"
6624 [(set (pc) (if_then_else
6625 (match_operator 4 "arm_comparison_operator"
6626 [(match_operand:SI 1 "s_register_operand" "l,0")
6627 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6628 (label_ref (match_operand 3 "" ""))
6630 (clobber (match_scratch:SI 0 "=l,l"))]
6633 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6635 switch (get_attr_length (insn))
6637 case 4: return \"b%d4\\t%l3\";
6638 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6639 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6642 [(set (attr "far_jump")
6644 (eq_attr "length" "8")
6645 (const_string "yes")
6646 (const_string "no")))
6647 (set (attr "length")
6649 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6650 (le (minus (match_dup 3) (pc)) (const_int 256)))
6653 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6654 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6659 ;; Two peepholes to generate subtract of 0 instead of a move if the
6660 ;; condition codes will be useful.
6662 [(set (match_operand:SI 0 "low_register_operand" "")
6663 (match_operand:SI 1 "low_register_operand" ""))
6665 (if_then_else (match_operator 2 "arm_comparison_operator"
6666 [(match_dup 1) (const_int 0)])
6667 (label_ref (match_operand 3 "" ""))
; The subtract of zero sets the condition codes for the following branch.
6670 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6672 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6673 (label_ref (match_dup 3))
6677 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6678 ;; merge cases like this because the op1 is a hard register in
6679 ;; arm_class_likely_spilled_p.
6681 [(set (match_operand:SI 0 "low_register_operand" "")
6682 (match_operand:SI 1 "low_register_operand" ""))
6684 (if_then_else (match_operator 2 "arm_comparison_operator"
6685 [(match_dup 0) (const_int 0)])
6686 (label_ref (match_operand 3 "" ""))
6689 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6691 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6692 (label_ref (match_dup 3))
;; Branch on r1 {==,!=} -r2, implemented with CMN, which compares operand 1
;; against the negation of operand 2.
6696 (define_insn "*negated_cbranchsi4"
6699 (match_operator 0 "equality_operator"
6700 [(match_operand:SI 1 "s_register_operand" "l")
6701 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6702 (label_ref (match_operand 3 "" ""))
6706 output_asm_insn (\"cmn\\t%1, %2\", operands);
6707 switch (get_attr_length (insn))
6709 case 4: return \"b%d0\\t%l3\";
6710 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6711 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6714 [(set (attr "far_jump")
6716 (eq_attr "length" "8")
6717 (const_string "yes")
6718 (const_string "no")))
6719 (set (attr "length")
6721 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6722 (le (minus (match_dup 3) (pc)) (const_int 256)))
6725 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6726 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single extracted bit: the LSL by (31 - bitpos) below moves
;; the selected bit of operand 1 into bit 31 of the scratch register.
6731 (define_insn "*tbit_cbranch"
6734 (match_operator 0 "equality_operator"
6735 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6737 (match_operand:SI 2 "const_int_operand" "i"))
6739 (label_ref (match_operand 3 "" ""))
6741 (clobber (match_scratch:SI 4 "=l"))]
6746 op[0] = operands[4];
6747 op[1] = operands[1];
6748 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6750 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6751 switch (get_attr_length (insn))
6753 case 4: return \"b%d0\\t%l3\";
6754 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6755 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6758 [(set (attr "far_jump")
6760 (eq_attr "length" "8")
6761 (const_string "yes")
6762 (const_string "no")))
6763 (set (attr "length")
6765 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6766 (le (minus (match_dup 3) (pc)) (const_int 256)))
6769 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6770 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Like *tbit_cbranch, but tests the low-order bits: the LSL by
;; (32 - width) discards the bits above the extracted field.
6775 (define_insn "*tlobits_cbranch"
6778 (match_operator 0 "equality_operator"
6779 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6780 (match_operand:SI 2 "const_int_operand" "i")
6783 (label_ref (match_operand 3 "" ""))
6785 (clobber (match_scratch:SI 4 "=l"))]
6790 op[0] = operands[4];
6791 op[1] = operands[1];
6792 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6794 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6795 switch (get_attr_length (insn))
6797 case 4: return \"b%d0\\t%l3\";
6798 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6799 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6802 [(set (attr "far_jump")
6804 (eq_attr "length" "8")
6805 (const_string "yes")
6806 (const_string "no")))
6807 (set (attr "length")
6809 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6810 (le (minus (match_dup 3) (pc)) (const_int 256)))
6813 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6814 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the AND of two registers, using TST so no result register is
;; needed.
6819 (define_insn "*tstsi3_cbranch"
6822 (match_operator 3 "equality_operator"
6823 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6824 (match_operand:SI 1 "s_register_operand" "l"))
6826 (label_ref (match_operand 2 "" ""))
6831 output_asm_insn (\"tst\\t%0, %1\", operands);
6832 switch (get_attr_length (insn))
6834 case 4: return \"b%d3\\t%l2\";
6835 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6836 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6839 [(set (attr "far_jump")
6841 (eq_attr "length" "8")
6842 (const_string "yes")
6843 (const_string "no")))
6844 (set (attr "length")
6846 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6847 (le (minus (match_dup 2) (pc)) (const_int 256)))
6850 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6851 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Combined decrement and branch: store operand 2 minus one into operand 0
;; and branch.  The branch condition is formed against the pre-decrement
;; value (hence the comparison with const1_rtx below).
6856 (define_insn "*cbranchne_decr1"
6858 (if_then_else (match_operator 3 "equality_operator"
6859 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6861 (label_ref (match_operand 4 "" ""))
6863 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6864 (plus:SI (match_dup 2) (const_int -1)))
6865 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6870 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6872 VOIDmode, operands[2], const1_rtx);
6873 cond[1] = operands[4];
6875 if (which_alternative == 0)
6876 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6877 else if (which_alternative == 1)
6879 /* We must provide an alternative for a hi reg because reload
6880 cannot handle output reloads on a jump instruction, but we
6881 can't subtract into that. Fortunately a mov from lo to hi
6882 does not clobber the condition codes. */
6883 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6884 output_asm_insn (\"mov\\t%0, %1\", operands);
6888 /* Similarly, but the target is memory. */
6889 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6890 output_asm_insn (\"str\\t%1, %0\", operands);
6893 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6896 output_asm_insn (\"b%d0\\t%l1\", cond);
6899 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6900 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6902 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6903 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6907 [(set (attr "far_jump")
6909 (ior (and (eq (symbol_ref ("which_alternative"))
6911 (eq_attr "length" "8"))
6912 (eq_attr "length" "10"))
6913 (const_string "yes")
6914 (const_string "no")))
6915 (set_attr_alternative "length"
6919 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6920 (le (minus (match_dup 4) (pc)) (const_int 256)))
6923 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6924 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6929 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6930 (le (minus (match_dup 4) (pc)) (const_int 256)))
6933 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6934 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6939 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6940 (le (minus (match_dup 4) (pc)) (const_int 256)))
6943 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6944 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6949 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6950 (le (minus (match_dup 4) (pc)) (const_int 256)))
6953 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6954 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Add two operands, store the sum, and branch on the result.  Negative
;; immediates are emitted as a SUB of the absolute value (#%n2).
6959 (define_insn "*addsi3_cbranch"
6962 (match_operator 4 "arm_comparison_operator"
6964 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6965 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6967 (label_ref (match_operand 5 "" ""))
6970 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6971 (plus:SI (match_dup 2) (match_dup 3)))
6972 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6974 && (GET_CODE (operands[4]) == EQ
6975 || GET_CODE (operands[4]) == NE
6976 || GET_CODE (operands[4]) == GE
6977 || GET_CODE (operands[4]) == LT)"
6982 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6983 cond[1] = operands[2];
6984 cond[2] = operands[3];
6986 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6987 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6989 output_asm_insn (\"add\\t%0, %1, %2\", cond);
/* Alternatives 2-3 copy the result to a high register; 4-5 store it to
   memory (reload cannot handle output reloads on a jump insn).  */
6991 if (which_alternative >= 2
6992 && which_alternative < 4)
6993 output_asm_insn (\"mov\\t%0, %1\", operands);
6994 else if (which_alternative >= 4)
6995 output_asm_insn (\"str\\t%1, %0\", operands);
6997 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7000 return \"b%d4\\t%l5\";
7002 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7004 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7008 [(set (attr "far_jump")
7010 (ior (and (lt (symbol_ref ("which_alternative"))
7012 (eq_attr "length" "8"))
7013 (eq_attr "length" "10"))
7014 (const_string "yes")
7015 (const_string "no")))
7016 (set (attr "length")
7018 (lt (symbol_ref ("which_alternative"))
7021 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7022 (le (minus (match_dup 5) (pc)) (const_int 256)))
7025 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7026 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7030 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7031 (le (minus (match_dup 5) (pc)) (const_int 256)))
7034 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7035 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; As above, but the sum is discarded (scratch only); small immediates can
;; be handled with CMP/CMN directly instead of an ADD/SUB.
7040 (define_insn "*addsi3_cbranch_scratch"
7043 (match_operator 3 "arm_comparison_operator"
7045 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7046 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7048 (label_ref (match_operand 4 "" ""))
7050 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7052 && (GET_CODE (operands[3]) == EQ
7053 || GET_CODE (operands[3]) == NE
7054 || GET_CODE (operands[3]) == GE
7055 || GET_CODE (operands[3]) == LT)"
7058 switch (which_alternative)
7061 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7064 output_asm_insn (\"cmn\t%1, %2\", operands);
7067 if (INTVAL (operands[2]) < 0)
7068 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7070 output_asm_insn (\"add\t%0, %1, %2\", operands);
7073 if (INTVAL (operands[2]) < 0)
7074 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7076 output_asm_insn (\"add\t%0, %0, %2\", operands);
7080 switch (get_attr_length (insn))
7083 return \"b%d3\\t%l4\";
7085 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7087 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7091 [(set (attr "far_jump")
7093 (eq_attr "length" "8")
7094 (const_string "yes")
7095 (const_string "no")))
7096 (set (attr "length")
7098 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7099 (le (minus (match_dup 4) (pc)) (const_int 256)))
7102 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7103 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7109 ;; Comparison and test insns
7111 (define_insn "*arm_cmpsi_insn"
7112 [(set (reg:CC CC_REGNUM)
7113 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7114 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7119 [(set_attr "conds" "set")]
;; Compare a register with a shifted register.
7122 (define_insn "*cmpsi_shiftsi"
7123 [(set (reg:CC CC_REGNUM)
7124 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7125 (match_operator:SI 3 "shift_operator"
7126 [(match_operand:SI 1 "s_register_operand" "r,r")
7127 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7130 [(set_attr "conds" "set")
7131 (set_attr "shift" "1")
7132 (set_attr "arch" "32,a")
7133 (set_attr "type" "alu_shift,alu_shift_reg")])
;; As above with the shifted operand first; the CC_SWP condition-code mode
;; records that the comparison operands are swapped.
7135 (define_insn "*cmpsi_shiftsi_swp"
7136 [(set (reg:CC_SWP CC_REGNUM)
7137 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7138 [(match_operand:SI 1 "s_register_operand" "r,r")
7139 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7140 (match_operand:SI 0 "s_register_operand" "r,r")))]
7143 [(set_attr "conds" "set")
7144 (set_attr "shift" "1")
7145 (set_attr "arch" "32,a")
7146 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Compare against the negation of a shifted operand; only the Z flag
;; (CC_Z mode) is meaningful afterwards.
7148 (define_insn "*arm_cmpsi_negshiftsi_si"
7149 [(set (reg:CC_Z CC_REGNUM)
7151 (neg:SI (match_operator:SI 1 "shift_operator"
7152 [(match_operand:SI 2 "s_register_operand" "r")
7153 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7154 (match_operand:SI 0 "s_register_operand" "r")))]
7157 [(set_attr "conds" "set")
7158 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7159 (const_string "alu_shift")
7160 (const_string "alu_shift_reg")))]
7163 ;; DImode comparisons. The generic code generates branches that
7164 ;; if-conversion cannot reduce to a conditional compare, so we do
;; "cmp" the low words, then SBCS of the high words into a scratch: the
;; carry chains the two word comparisons into one 64-bit compare.
7167 (define_insn "*arm_cmpdi_insn"
7168 [(set (reg:CC_NCV CC_REGNUM)
7169 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7170 (match_operand:DI 1 "arm_di_operand" "rDi")))
7171 (clobber (match_scratch:SI 2 "=r"))]
7172 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7173 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7174 [(set_attr "conds" "set")
7175 (set_attr "length" "8")]
;; Unsigned 64-bit compare: compare the high words first, then the low
;; words only if the high words were equal ("cmpeq").
7178 (define_insn "*arm_cmpdi_unsigned"
7179 [(set (reg:CC_CZ CC_REGNUM)
7180 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7181 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7183 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7184 [(set_attr "conds" "set")
7185 (set_attr "length" "8")]
;; Compare a 64-bit value against zero by ORring its two halves together,
;; which sets the Z flag.
7188 (define_insn "*arm_cmpdi_zero"
7189 [(set (reg:CC_Z CC_REGNUM)
7190 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7192 (clobber (match_scratch:SI 1 "=r"))]
7194 "orr%.\\t%1, %Q0, %R0"
7195 [(set_attr "conds" "set")]
7198 (define_insn "*thumb_cmpdi_zero"
7199 [(set (reg:CC_Z CC_REGNUM)
7200 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7202 (clobber (match_scratch:SI 1 "=l"))]
7204 "orr\\t%1, %Q0, %R0"
7205 [(set_attr "conds" "set")
7206 (set_attr "length" "2")]
7209 ;; Cirrus SF compare instruction
7210 (define_insn "*cirrus_cmpsf"
7211 [(set (reg:CCFP CC_REGNUM)
7212 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7213 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7214 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7215 "cfcmps%?\\tr15, %V0, %V1"
7216 [(set_attr "type" "mav_farith")
7217 (set_attr "cirrus" "compare")]
7220 ;; Cirrus DF compare instruction
7221 (define_insn "*cirrus_cmpdf"
7222 [(set (reg:CCFP CC_REGNUM)
7223 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7224 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7225 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7226 "cfcmpd%?\\tr15, %V0, %V1"
7227 [(set_attr "type" "mav_farith")
7228 (set_attr "cirrus" "compare")]
;; Cirrus DI compare instruction
7231 (define_insn "*cirrus_cmpdi"
7232 [(set (reg:CC CC_REGNUM)
7233 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7234 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7235 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7236 "cfcmp64%?\\tr15, %V0, %V1"
7237 [(set_attr "type" "mav_farith")
7238 (set_attr "cirrus" "compare")]
7241 ; This insn allows redundant compares to be removed by cse, nothing should
7242 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7243 ; is deleted later on. The match_dup will match the mode here, so that
7244 ; mode changes of the condition codes aren't lost by this even though we don't
7245 ; specify what they are.
7247 (define_insn "*deleted_compare"
7248 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7250 "\\t%@ deleted compare" ; emits only an assembler comment
7251 [(set_attr "conds" "set")
7252 (set_attr "length" "0")]
7256 ;; Conditional branch insns
;; Materialise the comparison with arm_gen_compare_reg, then branch on the
;; resulting CC register compared against zero.
7258 (define_expand "cbranch_cc"
7260 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7261 (match_operand 2 "" "")])
7262 (label_ref (match_operand 3 "" ""))
7265 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7266 operands[1], operands[2]);
7267 operands[2] = const0_rtx;"
7271 ;; Patterns to match conditional branch insns.
7274 (define_insn "*arm_cond_branch"
7276 (if_then_else (match_operator 1 "arm_comparison_operator"
7277 [(match_operand 2 "cc_register" "") (const_int 0)])
7278 (label_ref (match_operand 0 "" ""))
/* arm_ccfsm_state 1 or 2: conditional-execution conversion is in
   progress -- NOTE(review): see arm_final_prescan_insn; confirm.  */
7282 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7284 arm_ccfsm_state += 2;
7287 return \"b%d1\\t%l0\";
7289 [(set_attr "conds" "use")
7290 (set_attr "type" "branch")]
;; As above but the branch is taken when the condition is false ("%D1").
7293 (define_insn "*arm_cond_branch_reversed"
7295 (if_then_else (match_operator 1 "arm_comparison_operator"
7296 [(match_operand 2 "cc_register" "") (const_int 0)])
7298 (label_ref (match_operand 0 "" ""))))]
7301 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7303 arm_ccfsm_state += 2;
7306 return \"b%D1\\t%l0\";
7308 [(set_attr "conds" "use")
7309 (set_attr "type" "branch")]
;; Store-condition: the comparison goes through arm_gen_compare_reg and
;; operand 3 becomes (const_int 0) for the CC-relative patterns below.
7316 (define_expand "cstore_cc"
7317 [(set (match_operand:SI 0 "s_register_operand" "")
7318 (match_operator:SI 1 "" [(match_operand 2 "" "")
7319 (match_operand 3 "" "")]))]
7321 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7322 operands[2], operands[3]);
7323 operands[3] = const0_rtx;"
;; Store 0 or 1 according to the condition in the CC register.
7326 (define_insn "*mov_scc"
7327 [(set (match_operand:SI 0 "s_register_operand" "=r")
7328 (match_operator:SI 1 "arm_comparison_operator"
7329 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7331 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7332 [(set_attr "conds" "use")
7333 (set_attr "insn" "mov")
7334 (set_attr "length" "8")]
;; Negated variant: stores 0 or -1 (MVN #0 == -1).
7337 (define_insn "*mov_negscc"
7338 [(set (match_operand:SI 0 "s_register_operand" "=r")
7339 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7340 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7342 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7343 [(set_attr "conds" "use")
7344 (set_attr "insn" "mov")
7345 (set_attr "length" "8")]
7348 (define_insn "*mov_notscc"
7349 [(set (match_operand:SI 0 "s_register_operand" "=r")
7350 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7351 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7353 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7354 [(set_attr "conds" "use")
7355 (set_attr "insn" "mov")
7356 (set_attr "length" "8")]
;; cstore: on 32-bit targets delegate to cstore_cc; for Thumb-1 each
;; comparison code is open-coded below with add/sub/shift/adc sequences.
7359 (define_expand "cstoresi4"
7360 [(set (match_operand:SI 0 "s_register_operand" "")
7361 (match_operator:SI 1 "arm_comparison_operator"
7362 [(match_operand:SI 2 "s_register_operand" "")
7363 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7364 "TARGET_32BIT || TARGET_THUMB1"
7366 rtx op3, scratch, scratch2;
7370 if (!arm_add_operand (operands[3], SImode))
7371 operands[3] = force_reg (SImode, operands[3]);
7372 emit_insn (gen_cstore_cc (operands[0], operands[1],
7373 operands[2], operands[3]));
/* Thumb-1: comparisons with zero have cheaper special-case sequences.  */
7377 if (operands[3] == const0_rtx)
7379 switch (GET_CODE (operands[1]))
7382 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7386 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7390 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7391 NULL_RTX, 0, OPTAB_WIDEN);
7392 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7393 NULL_RTX, 0, OPTAB_WIDEN);
7394 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7395 operands[0], 1, OPTAB_WIDEN);
7399 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7401 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7402 NULL_RTX, 1, OPTAB_WIDEN);
7406 scratch = expand_binop (SImode, ashr_optab, operands[2],
7407 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7408 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7409 NULL_RTX, 0, OPTAB_WIDEN);
7410 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7414 /* LT is handled by generic code. No need for unsigned with 0. */
/* Non-zero comparand: reduce EQ/NE to a compare-with-zero of the
   difference; order comparisons use sign-bit arithmetic below.  */
7421 switch (GET_CODE (operands[1]))
7424 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7425 NULL_RTX, 0, OPTAB_WIDEN);
7426 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7430 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7431 NULL_RTX, 0, OPTAB_WIDEN);
7432 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7436 op3 = force_reg (SImode, operands[3]);
7438 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7439 NULL_RTX, 1, OPTAB_WIDEN);
7440 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7441 NULL_RTX, 0, OPTAB_WIDEN);
7442 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7448 if (!thumb1_cmp_operand (op3, SImode))
7449 op3 = force_reg (SImode, op3);
7450 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7451 NULL_RTX, 0, OPTAB_WIDEN);
7452 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7453 NULL_RTX, 1, OPTAB_WIDEN);
7454 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7459 op3 = force_reg (SImode, operands[3]);
7460 scratch = force_reg (SImode, const0_rtx);
7461 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7467 if (!thumb1_cmp_operand (op3, SImode))
7468 op3 = force_reg (SImode, op3);
7469 scratch = force_reg (SImode, const0_rtx);
7470 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7476 if (!thumb1_cmp_operand (op3, SImode))
7477 op3 = force_reg (SImode, op3);
7478 scratch = gen_reg_rtx (SImode);
7479 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7483 op3 = force_reg (SImode, operands[3]);
7484 scratch = gen_reg_rtx (SImode);
7485 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7488 /* No good sequences for GT, LT. */
;; Floating-point and DImode cstore expanders, all funnelled through the
;; generic cstore_cc expander above.
7495 (define_expand "cstoresf4"
7496 [(set (match_operand:SI 0 "s_register_operand" "")
7497 (match_operator:SI 1 "arm_comparison_operator"
7498 [(match_operand:SF 2 "s_register_operand" "")
7499 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7500 "TARGET_32BIT && TARGET_HARD_FLOAT"
7501 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7502 operands[2], operands[3])); DONE;"
7505 (define_expand "cstoredf4"
7506 [(set (match_operand:SI 0 "s_register_operand" "")
7507 (match_operator:SI 1 "arm_comparison_operator"
7508 [(match_operand:DF 2 "s_register_operand" "")
7509 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7510 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7511 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7512 operands[2], operands[3])); DONE;"
;; DImode cstore: flip comparisons arm_gen_compare_reg cannot handle,
;; mirroring the cbranchdi4 expander.
7515 (define_expand "cstoredi4"
7516 [(set (match_operand:SI 0 "s_register_operand" "")
7517 (match_operator:SI 1 "arm_comparison_operator"
7518 [(match_operand:DI 2 "cmpdi_operand" "")
7519 (match_operand:DI 3 "cmpdi_operand" "")]))]
7522 rtx swap = NULL_RTX;
7523 enum rtx_code code = GET_CODE (operands[1]);
7525 /* We should not have two constants. */
7526 gcc_assert (GET_MODE (operands[2]) == DImode
7527 || GET_MODE (operands[3]) == DImode);
7529 /* Flip unimplemented DImode comparisons to a form that
7530 arm_gen_compare_reg can handle. */
7534 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7536 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7538 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7540 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7545 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7548 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; Thumb-1 helper patterns used by the cstoresi4 expansion.
7554 (define_expand "cstoresi_eq0_thumb1"
7556 [(set (match_operand:SI 0 "s_register_operand" "")
7557 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7559 (clobber (match_dup:SI 2))])]
7561 "operands[2] = gen_reg_rtx (SImode);"
7564 (define_expand "cstoresi_ne0_thumb1"
7566 [(set (match_operand:SI 0 "s_register_operand" "")
7567 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7569 (clobber (match_dup:SI 2))])]
7571 "operands[2] = gen_reg_rtx (SImode);"
;; (x == 0) computed via NEG/ADC -- the ADC picks up the carry produced by
;; negating x.  NOTE(review): carry is set only for x == 0; confirm.
7574 (define_insn "*cstoresi_eq0_thumb1_insn"
7575 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7576 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7578 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7581 neg\\t%0, %1\;adc\\t%0, %0, %1
7582 neg\\t%2, %1\;adc\\t%0, %1, %2"
7583 [(set_attr "length" "4")]
7586 (define_insn "*cstoresi_ne0_thumb1_insn"
7587 [(set (match_operand:SI 0 "s_register_operand" "=l")
7588 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7590 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7592 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7593 [(set_attr "length" "4")]
7596 ;; Used as part of the expansion of thumb ltu and gtu sequences
7597 (define_insn "cstoresi_nltu_thumb1"
7598 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7599 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7600 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7602 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7603 [(set_attr "length" "4")]
;; ltu as 0/1: computed as the negation of cstoresi_nltu_thumb1's 0/-1 form.
7606 (define_insn_and_split "cstoresi_ltu_thumb1"
7607 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7608 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7609 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7614 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7615 (set (match_dup 0) (neg:SI (match_dup 3)))]
7616 "operands[3] = gen_reg_rtx (SImode);"
7617 [(set_attr "length" "4")]
7620 ;; Used as part of the expansion of thumb les sequence.
7621 (define_insn "thumb1_addsi3_addgeu"
7622 [(set (match_operand:SI 0 "s_register_operand" "=l")
7623 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7624 (match_operand:SI 2 "s_register_operand" "l"))
7625 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7626 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7628 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7629 [(set_attr "length" "4")]
7633 ;; Conditional move insns
;; movMODEcc expanders: build the CC-register comparison with
;; arm_gen_compare_reg, then rewrite operand 1 to test it against zero.
7635 (define_expand "movsicc"
7636 [(set (match_operand:SI 0 "s_register_operand" "")
7637 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7638 (match_operand:SI 2 "arm_not_operand" "")
7639 (match_operand:SI 3 "arm_not_operand" "")))]
7643 enum rtx_code code = GET_CODE (operands[1]);
7646 if (code == UNEQ || code == LTGT)
7649 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7650 XEXP (operands[1], 1));
7651 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7655 (define_expand "movsfcc"
7656 [(set (match_operand:SF 0 "s_register_operand" "")
7657 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7658 (match_operand:SF 2 "s_register_operand" "")
7659 (match_operand:SF 3 "nonmemory_operand" "")))]
7660 "TARGET_32BIT && TARGET_HARD_FLOAT"
7663 enum rtx_code code = GET_CODE (operands[1]);
7666 if (code == UNEQ || code == LTGT)
7669 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7670 Otherwise, ensure it is a valid FP add operand */
7671 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7672 || (!arm_float_add_operand (operands[3], SFmode)))
7673 operands[3] = force_reg (SFmode, operands[3]);
7675 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7676 XEXP (operands[1], 1));
7677 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7681 (define_expand "movdfcc"
7682 [(set (match_operand:DF 0 "s_register_operand" "")
7683 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7684 (match_operand:DF 2 "s_register_operand" "")
7685 (match_operand:DF 3 "arm_float_add_operand" "")))]
7686 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7689 enum rtx_code code = GET_CODE (operands[1]);
7692 if (code == UNEQ || code == LTGT)
7695 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7696 XEXP (operands[1], 1));
7697 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; Conditional move: per the "length" attribute, alternatives 0-3 need one
;; conditional MOV/MVN; alternatives 4-7 need both arms (8 bytes).
7701 (define_insn "*movsicc_insn"
7702 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7704 (match_operator 3 "arm_comparison_operator"
7705 [(match_operand 4 "cc_register" "") (const_int 0)])
7706 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7707 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7714 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7715 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7716 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7717 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7718 [(set_attr "length" "4,4,4,4,8,8,8,8")
7719 (set_attr "conds" "use")
7720 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
;; Soft-float SFmode conditional move: the value lives in core registers,
;; so a conditional MOV suffices.
7723 (define_insn "*movsfcc_soft_insn"
7724 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7725 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7726 [(match_operand 4 "cc_register" "") (const_int 0)])
7727 (match_operand:SF 1 "s_register_operand" "0,r")
7728 (match_operand:SF 2 "s_register_operand" "r,0")))]
7729 "TARGET_ARM && TARGET_SOFT_FLOAT"
7733 [(set_attr "conds" "use")
7734 (set_attr "insn" "mov")]
;; Unconditional jump patterns.  *arm_jump cooperates with the ccfsm
;; conditional-execution state machine (states 1/2 mean the branch is
;; being converted into conditional execution, so the state is bumped
;; and -- on a missing line -- the branch is elided).  *thumb_jump uses
;; a short "b" when the target is within -2044..+2048 bytes, otherwise
;; a 4-byte far jump via "bl".
7738 ;; Jump and linkage insns
7740 (define_expand "jump"
7742 (label_ref (match_operand 0 "" "")))]
7747 (define_insn "*arm_jump"
7749 (label_ref (match_operand 0 "" "")))]
7753 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7755 arm_ccfsm_state += 2;
7758 return \"b%?\\t%l0\";
7761 [(set_attr "predicable" "yes")]
7764 (define_insn "*thumb_jump"
7766 (label_ref (match_operand 0 "" "")))]
7769 if (get_attr_length (insn) == 2)
7771 return \"bl\\t%l0\\t%@ far jump\";
7773 [(set (attr "far_jump")
7775 (eq_attr "length" "4")
7776 (const_string "yes")
7777 (const_string "no")))
7778 (set (attr "length")
7780 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7781 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; call: expander for void calls.  Normalises a NULL operand 2 to
;; const0_rtx, forces the callee address into a register when a long
;; call is required (arm_is_long_call_p on the SYMBOL_REF's decl),
;; then emits via gen_call_internal / arm_emit_call_insn.
;; call_internal is the pure pattern the expander instantiates.
7786 (define_expand "call"
7787 [(parallel [(call (match_operand 0 "memory_operand" "")
7788 (match_operand 1 "general_operand" ""))
7789 (use (match_operand 2 "" ""))
7790 (clobber (reg:SI LR_REGNUM))])]
7796 /* In an untyped call, we can get NULL for operand 2. */
7797 if (operands[2] == NULL_RTX)
7798 operands[2] = const0_rtx;
7800 /* Decide if we should generate indirect calls by loading the
7801 32-bit address of the callee into a register before performing the
7803 callee = XEXP (operands[0], 0);
7804 if (GET_CODE (callee) == SYMBOL_REF
7805 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7807 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7809 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7810 arm_emit_call_insn (pat, XEXP (operands[0], 0));
7815 (define_expand "call_internal"
7816 [(parallel [(call (match_operand 0 "memory_operand" "")
7817 (match_operand 1 "general_operand" ""))
7818 (use (match_operand 2 "" ""))
7819 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call insns for ARM state.  *call_reg_armv5 (template on a
;; missing line; presumably blx) is used on armv5+; *call_reg_arm and
;; *call_mem fall back to output_call/output_call_mem sequences on
;; pre-v5 cores -- worst-case length 12 as noted below.  *call_mem is
;; deliberately not used on armv5+ because "ldr pc, ..." is not seen
;; as a call by some branch predictors (PR40887).
7821 (define_insn "*call_reg_armv5"
7822 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7823 (match_operand 1 "" ""))
7824 (use (match_operand 2 "" ""))
7825 (clobber (reg:SI LR_REGNUM))]
7826 "TARGET_ARM && arm_arch5"
7828 [(set_attr "type" "call")]
7831 (define_insn "*call_reg_arm"
7832 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7833 (match_operand 1 "" ""))
7834 (use (match_operand 2 "" ""))
7835 (clobber (reg:SI LR_REGNUM))]
7836 "TARGET_ARM && !arm_arch5"
7838 return output_call (operands);
7840 ;; length is worst case, normally it is only two
7841 [(set_attr "length" "12")
7842 (set_attr "type" "call")]
7846 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7847 ;; considered a function call by the branch predictor of some cores (PR40887).
7848 ;; Falls back to blx rN (*call_reg_armv5).
7850 (define_insn "*call_mem"
7851 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7852 (match_operand 1 "" ""))
7853 (use (match_operand 2 "" ""))
7854 (clobber (reg:SI LR_REGNUM))]
7855 "TARGET_ARM && !arm_arch5"
7857 return output_call_mem (operands);
7859 [(set_attr "length" "12")
7860 (set_attr "type" "call")]
;; Thumb-1 indirect calls.  On armv5+ a 2-byte blx is used (template on
;; a missing line).  Pre-v5, interworking-aware calls go through the
;; __interwork*_call_via_rN library thunks; which thunk depends on
;; whether r7 or r11 is the frame pointer, and operand 1 == const0_rtx
;; distinguishes the no-argument-registers case.
7863 (define_insn "*call_reg_thumb1_v5"
7864 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7865 (match_operand 1 "" ""))
7866 (use (match_operand 2 "" ""))
7867 (clobber (reg:SI LR_REGNUM))]
7868 "TARGET_THUMB1 && arm_arch5"
7870 [(set_attr "length" "2")
7871 (set_attr "type" "call")]
7874 (define_insn "*call_reg_thumb1"
7875 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7876 (match_operand 1 "" ""))
7877 (use (match_operand 2 "" ""))
7878 (clobber (reg:SI LR_REGNUM))]
7879 "TARGET_THUMB1 && !arm_arch5"
7882 if (!TARGET_CALLER_INTERWORKING)
7883 return thumb_call_via_reg (operands[0]);
7884 else if (operands[1] == const0_rtx)
7885 return \"bl\\t%__interwork_call_via_%0\";
7886 else if (frame_pointer_needed)
7887 return \"bl\\t%__interwork_r7_call_via_%0\";
7889 return \"bl\\t%__interwork_r11_call_via_%0\";
7891 [(set_attr "type" "call")]
;; call_value: expander for calls returning a value (operand 0 is the
;; result).  Mirrors "call": normalises a NULL use operand, forces the
;; callee into a register for long calls, then emits through
;; gen_call_value_internal / arm_emit_call_insn.
7894 (define_expand "call_value"
7895 [(parallel [(set (match_operand 0 "" "")
7896 (call (match_operand 1 "memory_operand" "")
7897 (match_operand 2 "general_operand" "")))
7898 (use (match_operand 3 "" ""))
7899 (clobber (reg:SI LR_REGNUM))])]
7905 /* In an untyped call, we can get NULL for operand 3. */
7906 if (operands[3] == 0)
7907 operands[3] = const0_rtx;
7909 /* Decide if we should generate indirect calls by loading the
7910 32-bit address of the callee into a register before performing the
7912 callee = XEXP (operands[1], 0);
7913 if (GET_CODE (callee) == SYMBOL_REF
7914 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7916 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7918 pat = gen_call_value_internal (operands[0], operands[1],
7919 operands[2], operands[3]);
7920 arm_emit_call_insn (pat, XEXP (operands[1], 0));
7925 (define_expand "call_value_internal"
7926 [(parallel [(set (match_operand 0 "" "")
7927 (call (match_operand 1 "memory_operand" "")
7928 (match_operand 2 "general_operand" "")))
7929 (use (match_operand 3 "" ""))
7930 (clobber (reg:SI LR_REGNUM))])])
;; call_value counterparts of the indirect-call insns above; operand
;; numbering is shifted by one because operand 0 is the result, so the
;; output routines are handed &operands[1].  *call_value_mem also
;; requires a non-constant address (constant addresses are handled by
;; the symbol patterns further down).
7932 (define_insn "*call_value_reg_armv5"
7933 [(set (match_operand 0 "" "")
7934 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7935 (match_operand 2 "" "")))
7936 (use (match_operand 3 "" ""))
7937 (clobber (reg:SI LR_REGNUM))]
7938 "TARGET_ARM && arm_arch5"
7940 [(set_attr "type" "call")]
7943 (define_insn "*call_value_reg_arm"
7944 [(set (match_operand 0 "" "")
7945 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7946 (match_operand 2 "" "")))
7947 (use (match_operand 3 "" ""))
7948 (clobber (reg:SI LR_REGNUM))]
7949 "TARGET_ARM && !arm_arch5"
7951 return output_call (&operands[1]);
7953 [(set_attr "length" "12")
7954 (set_attr "type" "call")]
7957 ;; Note: see *call_mem
7959 (define_insn "*call_value_mem"
7960 [(set (match_operand 0 "" "")
7961 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7962 (match_operand 2 "" "")))
7963 (use (match_operand 3 "" ""))
7964 (clobber (reg:SI LR_REGNUM))]
7965 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7967 return output_call_mem (&operands[1]);
7969 [(set_attr "length" "12")
7970 (set_attr "type" "call")]
;; Thumb-1 call_value insns; same interworking-thunk selection as
;; *call_reg_thumb1, with operands shifted by one for the result.
7973 (define_insn "*call_value_reg_thumb1_v5"
7974 [(set (match_operand 0 "" "")
7975 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7976 (match_operand 2 "" "")))
7977 (use (match_operand 3 "" ""))
7978 (clobber (reg:SI LR_REGNUM))]
7979 "TARGET_THUMB1 && arm_arch5"
7981 [(set_attr "length" "2")
7982 (set_attr "type" "call")]
7985 (define_insn "*call_value_reg_thumb1"
7986 [(set (match_operand 0 "" "")
7987 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7988 (match_operand 2 "" "")))
7989 (use (match_operand 3 "" ""))
7990 (clobber (reg:SI LR_REGNUM))]
7991 "TARGET_THUMB1 && !arm_arch5"
7994 if (!TARGET_CALLER_INTERWORKING)
7995 return thumb_call_via_reg (operands[1]);
7996 else if (operands[2] == const0_rtx)
7997 return \"bl\\t%__interwork_call_via_%1\";
7998 else if (frame_pointer_needed)
7999 return \"bl\\t%__interwork_r7_call_via_%1\";
8001 return \"bl\\t%__interwork_r11_call_via_%1\";
8003 [(set_attr "type" "call")]
;; Direct calls to SYMBOL_REFs.  The %a output modifier prints the
;; operand as an address (no '#'), and NEED_PLT_RELOC appends "(PLT)"
;; for shared-library calls.  Only short (non-long-call) symbols match;
;; long calls were already forced into registers by the expanders.
;; *call_insn / *call_value_insn are the Thumb counterparts (their
;; condition lines are partly missing from this copy).
8006 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8007 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8009 (define_insn "*call_symbol"
8010 [(call (mem:SI (match_operand:SI 0 "" ""))
8011 (match_operand 1 "" ""))
8012 (use (match_operand 2 "" ""))
8013 (clobber (reg:SI LR_REGNUM))]
8015 && (GET_CODE (operands[0]) == SYMBOL_REF)
8016 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8019 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8021 [(set_attr "type" "call")]
8024 (define_insn "*call_value_symbol"
8025 [(set (match_operand 0 "" "")
8026 (call (mem:SI (match_operand:SI 1 "" ""))
8027 (match_operand:SI 2 "" "")))
8028 (use (match_operand 3 "" ""))
8029 (clobber (reg:SI LR_REGNUM))]
8031 && (GET_CODE (operands[1]) == SYMBOL_REF)
8032 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8035 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8037 [(set_attr "type" "call")]
8040 (define_insn "*call_insn"
8041 [(call (mem:SI (match_operand:SI 0 "" ""))
8042 (match_operand:SI 1 "" ""))
8043 (use (match_operand 2 "" ""))
8044 (clobber (reg:SI LR_REGNUM))]
8046 && GET_CODE (operands[0]) == SYMBOL_REF
8047 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8049 [(set_attr "length" "4")
8050 (set_attr "type" "call")]
8053 (define_insn "*call_value_insn"
8054 [(set (match_operand 0 "" "")
8055 (call (mem:SI (match_operand 1 "" ""))
8056 (match_operand 2 "" "")))
8057 (use (match_operand 3 "" ""))
8058 (clobber (reg:SI LR_REGNUM))]
8060 && GET_CODE (operands[1]) == SYMBOL_REF
8061 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8063 [(set_attr "length" "4")
8064 (set_attr "type" "call")]
;; Sibling (tail) calls, 32-bit only.  The expanders normalise a NULL
;; use operand; the insns emit a plain branch "b" to the symbol (with
;; "(PLT)" when NEED_PLT_RELOC), reusing the caller's return address.
8067 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8068 (define_expand "sibcall"
8069 [(parallel [(call (match_operand 0 "memory_operand" "")
8070 (match_operand 1 "general_operand" ""))
8072 (use (match_operand 2 "" ""))])]
8076 if (operands[2] == NULL_RTX)
8077 operands[2] = const0_rtx;
8081 (define_expand "sibcall_value"
8082 [(parallel [(set (match_operand 0 "" "")
8083 (call (match_operand 1 "memory_operand" "")
8084 (match_operand 2 "general_operand" "")))
8086 (use (match_operand 3 "" ""))])]
8090 if (operands[3] == NULL_RTX)
8091 operands[3] = const0_rtx;
8095 (define_insn "*sibcall_insn"
8096 [(call (mem:SI (match_operand:SI 0 "" "X"))
8097 (match_operand 1 "" ""))
8099 (use (match_operand 2 "" ""))]
8100 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8102 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8104 [(set_attr "type" "call")]
8107 (define_insn "*sibcall_value_insn"
8108 [(set (match_operand 0 "" "")
8109 (call (mem:SI (match_operand:SI 1 "" "X"))
8110 (match_operand 2 "" "")))
8112 (use (match_operand 3 "" ""))]
8113 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8115 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8117 [(set_attr "type" "call")]
;; Function return patterns.  *arm_return elides the return when the
;; ccfsm state machine has already folded it into conditional execution
;; (state 2); otherwise output_return_instruction emits the epilogue
;; sequence.  *cond_return / *cond_return_inverted handle a return on
;; the taken / fall-through arm of a conditional branch respectively
;; (note the final TRUE vs FALSE argument).  Type "load1" reflects that
;; the return often loads pc from the stack.
8120 (define_expand "return"
8122 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8125 ;; Often the return insn will be the same as loading from memory, so set attr
8126 (define_insn "*arm_return"
8128 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8131 if (arm_ccfsm_state == 2)
8133 arm_ccfsm_state += 2;
8136 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8138 [(set_attr "type" "load1")
8139 (set_attr "length" "12")
8140 (set_attr "predicable" "yes")]
8143 (define_insn "*cond_return"
8145 (if_then_else (match_operator 0 "arm_comparison_operator"
8146 [(match_operand 1 "cc_register" "") (const_int 0)])
8149 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8152 if (arm_ccfsm_state == 2)
8154 arm_ccfsm_state += 2;
8157 return output_return_instruction (operands[0], TRUE, FALSE);
8159 [(set_attr "conds" "use")
8160 (set_attr "length" "12")
8161 (set_attr "type" "load1")]
8164 (define_insn "*cond_return_inverted"
8166 (if_then_else (match_operator 0 "arm_comparison_operator"
8167 [(match_operand 1 "cc_register" "") (const_int 0)])
8170 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8173 if (arm_ccfsm_state == 2)
8175 arm_ccfsm_state += 2;
8178 return output_return_instruction (operands[0], TRUE, TRUE);
8180 [(set_attr "conds" "use")
8181 (set_attr "length" "12")
8182 (set_attr "type" "load1")]
;; return_addr_mask: materialise the mask to apply to a return address
;; -- 0x03fffffc when running on a 26-bit-PC architecture, else (on a
;; missing line, presumably) all-ones.  *check_arch2 performs the
;; runtime probe: "teq pc, pc" sets Z only in 32-bit mode, with a
;; leading "teq r0, r0" as padding/flag setup.
8185 ;; Generate a sequence of instructions to determine if the processor is
8186 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8189 (define_expand "return_addr_mask"
8191 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8193 (set (match_operand:SI 0 "s_register_operand" "")
8194 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8196 (const_int 67108860)))] ; 0x03fffffc
8199 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8202 (define_insn "*check_arch2"
8203 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8204 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8207 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8208 [(set_attr "length" "8")
8209 (set_attr "conds" "set")]
;; untyped_call: call a function of unknown prototype (__builtin_apply).
;; Builds a parallel of result registers from operand 2, widening r0 to
;; TImode since up to four core registers may carry the return value;
;; after the call, each result register is stored into the result block
;; (operand 1).  r0..r3 are stored with a store-multiple (write-back
;; form on Thumb), others with single moves.  A blockage insn stops the
;; optimizer from reordering around the stores.
8212 ;; Call subroutine returning any type.
8214 (define_expand "untyped_call"
8215 [(parallel [(call (match_operand 0 "" "")
8217 (match_operand 1 "" "")
8218 (match_operand 2 "" "")])]
8223 rtx par = gen_rtx_PARALLEL (VOIDmode,
8224 rtvec_alloc (XVECLEN (operands[2], 0)));
8225 rtx addr = gen_reg_rtx (Pmode);
8229 emit_move_insn (addr, XEXP (operands[1], 0));
8230 mem = change_address (operands[1], BLKmode, addr);
8232 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8234 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8236 /* Default code only uses r0 as a return value, but we could
8237 be using anything up to 4 registers. */
8238 if (REGNO (src) == R0_REGNUM)
8239 src = gen_rtx_REG (TImode, R0_REGNUM);
8241 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8243 size += GET_MODE_SIZE (GET_MODE (src));
8246 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8251 for (i = 0; i < XVECLEN (par, 0); i++)
8253 HOST_WIDE_INT offset = 0;
8254 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8257 emit_move_insn (addr, plus_constant (addr, size));
8259 mem = change_address (mem, GET_MODE (reg), NULL);
8260 if (REGNO (reg) == R0_REGNUM)
8262 /* On thumb we have to use a write-back instruction. */
8263 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8264 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8265 size = TARGET_ARM ? 16 : 0;
8269 emit_move_insn (mem, reg);
8270 size = GET_MODE_SIZE (GET_MODE (reg));
8274 /* The optimizer does not know that the call sets the function value
8275 registers we stored in the result block. We avoid problems by
8276 claiming that all hard registers are used and clobbered at this
8278 emit_insn (gen_blockage ());
;; untyped_return: the inverse of untyped_call (__builtin_return).
;; Reloads each return-value register from the result block (load-
;; multiple with write-back on Thumb for r0..r3), emits USEs so the
;; loads are not deleted as dead, then expands a naked return.
8284 (define_expand "untyped_return"
8285 [(match_operand:BLK 0 "memory_operand" "")
8286 (match_operand 1 "" "")]
8291 rtx addr = gen_reg_rtx (Pmode);
8295 emit_move_insn (addr, XEXP (operands[0], 0));
8296 mem = change_address (operands[0], BLKmode, addr);
8298 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8300 HOST_WIDE_INT offset = 0;
8301 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8304 emit_move_insn (addr, plus_constant (addr, size));
8306 mem = change_address (mem, GET_MODE (reg), NULL);
8307 if (REGNO (reg) == R0_REGNUM)
8309 /* On thumb we have to use a write-back instruction. */
8310 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8311 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8312 size = TARGET_ARM ? 16 : 0;
8316 emit_move_insn (reg, mem);
8317 size = GET_MODE_SIZE (GET_MODE (reg));
8321 /* Emit USE insns before the return. */
8322 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8323 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8325 /* Construct the return. */
8326 expand_naked_return ();
;; blockage: zero-length scheduling barrier.  As an unspec_volatile it
;; is treated as using/clobbering all hard registers and all memory,
;; so no insn may be moved across it.
8332 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8333 ;; all of memory. This blocks insns from being moved across this point.
8335 (define_insn "blockage"
8336 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8339 [(set_attr "length" "0")
8340 (set_attr "type" "block")]
;; casesi: expand a switch dispatch.  A non-zero lower bound is first
;; subtracted from the index, then the appropriate per-ISA internal
;; pattern (ARM, Thumb-1 PIC, Thumb-2 PIC or plain Thumb-2) is chosen
;; and emitted with the range, table label and default label.
8343 (define_expand "casesi"
8344 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8345 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8346 (match_operand:SI 2 "const_int_operand" "") ; total range
8347 (match_operand:SI 3 "" "") ; table label
8348 (match_operand:SI 4 "" "")] ; Out of range label
8349 "TARGET_32BIT || optimize_size || flag_pic"
8352 enum insn_code code;
8353 if (operands[1] != const0_rtx)
8355 rtx reg = gen_reg_rtx (SImode);
8357 emit_insn (gen_addsi3 (reg, operands[0],
8358 GEN_INT (-INTVAL (operands[1]))));
8363 code = CODE_FOR_arm_casesi_internal;
8364 else if (TARGET_THUMB1)
8365 code = CODE_FOR_thumb1_casesi_internal_pic;
8367 code = CODE_FOR_thumb2_casesi_internal_pic;
8369 code = CODE_FOR_thumb2_casesi_internal;
8371 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8372 operands[2] = force_reg (SImode, operands[2]);
8374 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8375 operands[3], operands[4]));
;; arm_casesi_internal: ARM-state jump table dispatch.  Compares the
;; index against the range; in range, either adds pc by index*4 (the
;; first template, for the flag_pic-style inline table -- its guard is
;; on a missing line) or loads pc from the table, else branches to the
;; default label.  The USE of the table label only marks this as a
;; CASESI insn for flow analysis.
8380 ;; The USE in this pattern is needed to tell flow analysis that this is
8381 ;; a CASESI insn. It has no other purpose.
8382 (define_insn "arm_casesi_internal"
8383 [(parallel [(set (pc)
8385 (leu (match_operand:SI 0 "s_register_operand" "r")
8386 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8387 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8388 (label_ref (match_operand 2 "" ""))))
8389 (label_ref (match_operand 3 "" ""))))
8390 (clobber (reg:CC CC_REGNUM))
8391 (use (label_ref (match_dup 2)))])]
8395 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8396 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8398 [(set_attr "conds" "clob")
8399 (set_attr "length" "12")]
;; Thumb-1 PIC casesi: branch to the default label when the index
;; exceeds the bound (GTU test via cbranchsi4), copy the index into r0
;; (fixed register used by the dispatch helper), then emit the
;; dispatch insn, whose output is produced by thumb1_output_casesi and
;; which clobbers ip and lr.
8402 (define_expand "thumb1_casesi_internal_pic"
8403 [(match_operand:SI 0 "s_register_operand" "")
8404 (match_operand:SI 1 "thumb1_cmp_operand" "")
8405 (match_operand 2 "" "")
8406 (match_operand 3 "" "")]
8410 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8411 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8413 reg0 = gen_rtx_REG (SImode, 0);
8414 emit_move_insn (reg0, operands[0]);
8415 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8420 (define_insn "thumb1_casesi_dispatch"
8421 [(parallel [(set (pc) (unspec [(reg:SI 0)
8422 (label_ref (match_operand 0 "" ""))
8423 ;; (label_ref (match_operand 1 "" ""))
8425 UNSPEC_THUMB1_CASESI))
8426 (clobber (reg:SI IP_REGNUM))
8427 (clobber (reg:SI LR_REGNUM))])]
8429 "* return thumb1_output_casesi(operands);"
8430 [(set_attr "length" "4")]
;; Indirect jumps.  The expander handles Thumb-2's lack of "mov pc,
;; reg" by setting the address's low bit and (on a missing line) using
;; bx.  *arm_indirect_jump and *load_indirect_jump deliberately avoid
;; BX so the insn stays predicable; the Thumb-1 variant's template is
;; on a missing line.
8433 (define_expand "indirect_jump"
8435 (match_operand:SI 0 "s_register_operand" ""))]
8438 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8439 address and use bx. */
8443 tmp = gen_reg_rtx (SImode);
8444 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8450 ;; NB Never uses BX.
8451 (define_insn "*arm_indirect_jump"
8453 (match_operand:SI 0 "s_register_operand" "r"))]
8455 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8456 [(set_attr "predicable" "yes")]
8459 (define_insn "*load_indirect_jump"
8461 (match_operand:SI 0 "memory_operand" "m"))]
8463 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8464 [(set_attr "type" "load1")
8465 (set_attr "pool_range" "4096")
8466 (set_attr "neg_pool_range" "4084")
8467 (set_attr "predicable" "yes")]
8470 ;; NB Never uses BX.
8471 (define_insn "*thumb1_indirect_jump"
8473 (match_operand:SI 0 "register_operand" "l*r"))]
8476 [(set_attr "conds" "clob")
8477 (set_attr "length" "2")]
;; NOTE(review): tail of a pattern whose define_insn header is missing
;; from this copy -- from the templates this is presumably the "nop"
;; pattern: "mov r0, r0" under unified asm / ARM, "mov r8, r8" as the
;; canonical Thumb-1 nop, with length depending on is_thumb.  Confirm
;; against a pristine arm.md.
8487 if (TARGET_UNIFIED_ASM)
8490 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8491 return \"mov\\tr8, r8\";
8493 [(set (attr "length")
8494 (if_then_else (eq_attr "is_thumb" "yes")
;; *arith_shiftsi: fold a shift into the second operand of a shiftable
;; ALU op (e.g. add r0, r2, r4, lsl #n).  Alternative 2 (register
;; shift amount) is ARM-state only ("a") and is disabled when operator
;; 3 is MULT -- see the in-pattern comment about multiply-accumulate.
8500 ;; Patterns to allow combination of arithmetic, cond code and shifts
8502 (define_insn "*arith_shiftsi"
8503 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8504 (match_operator:SI 1 "shiftable_operator"
8505 [(match_operator:SI 3 "shift_operator"
8506 [(match_operand:SI 4 "s_register_operand" "r,r")
8507 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8508 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8510 "%i1%?\\t%0, %2, %4%S3"
8511 [(set_attr "predicable" "yes")
8512 (set_attr "shift" "4")
8513 (set_attr "arch" "32,a")
8514 ;; We have to make sure to disable the second alternative if
8515 ;; the shift_operator is MULT, since otherwise the insn will
8516 ;; also match a multiply_accumulate pattern and validate_change
8517 ;; will allow a replacement of the constant with a register
8518 ;; despite the checks done in shift_operator.
8519 (set_attr_alternative "insn_enabled"
8520 [(const_string "yes")
8522 (match_operand:SI 3 "mult_operator" "")
8523 (const_string "no") (const_string "yes"))])
8524 (set_attr "type" "alu_shift,alu_shift_reg")])
;; NOTE(review): the "(define_split" header line is missing from this
;; copy.  This split rewrites op1(op2(shift(r4,r5), r6), r7) using the
;; scratch in operand 8: first compute the inner shifted operation into
;; the scratch, then apply the outer operator.
8527 [(set (match_operand:SI 0 "s_register_operand" "")
8528 (match_operator:SI 1 "shiftable_operator"
8529 [(match_operator:SI 2 "shiftable_operator"
8530 [(match_operator:SI 3 "shift_operator"
8531 [(match_operand:SI 4 "s_register_operand" "")
8532 (match_operand:SI 5 "reg_or_int_operand" "")])
8533 (match_operand:SI 6 "s_register_operand" "")])
8534 (match_operand:SI 7 "arm_rhs_operand" "")]))
8535 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8538 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8541 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variants of *arith_shiftsi: the "%." template emits the
;; S-suffixed form so the CC_NOOV comparison against zero comes for
;; free.  The _scratch variant matches when only the flags are needed
;; and the arithmetic result is discarded into a scratch.
8544 (define_insn "*arith_shiftsi_compare0"
8545 [(set (reg:CC_NOOV CC_REGNUM)
8547 (match_operator:SI 1 "shiftable_operator"
8548 [(match_operator:SI 3 "shift_operator"
8549 [(match_operand:SI 4 "s_register_operand" "r,r")
8550 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8551 (match_operand:SI 2 "s_register_operand" "r,r")])
8553 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8554 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8557 "%i1%.\\t%0, %2, %4%S3"
8558 [(set_attr "conds" "set")
8559 (set_attr "shift" "4")
8560 (set_attr "arch" "32,a")
8561 (set_attr "type" "alu_shift,alu_shift_reg")])
8563 (define_insn "*arith_shiftsi_compare0_scratch"
8564 [(set (reg:CC_NOOV CC_REGNUM)
8566 (match_operator:SI 1 "shiftable_operator"
8567 [(match_operator:SI 3 "shift_operator"
8568 [(match_operand:SI 4 "s_register_operand" "r,r")
8569 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8570 (match_operand:SI 2 "s_register_operand" "r,r")])
8572 (clobber (match_scratch:SI 0 "=r,r"))]
8574 "%i1%.\\t%0, %2, %4%S3"
8575 [(set_attr "conds" "set")
8576 (set_attr "shift" "4")
8577 (set_attr "arch" "32,a")
8578 (set_attr "type" "alu_shift,alu_shift_reg")])
;; SUB with a shifted second operand (sub r0, r1, r3, <shift> r4/#n),
;; plus its flag-setting and flags-only (scratch destination)
;; variants, mirroring the arith_shiftsi trio above.
8580 (define_insn "*sub_shiftsi"
8581 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8582 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8583 (match_operator:SI 2 "shift_operator"
8584 [(match_operand:SI 3 "s_register_operand" "r,r")
8585 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8587 "sub%?\\t%0, %1, %3%S2"
8588 [(set_attr "predicable" "yes")
8589 (set_attr "shift" "3")
8590 (set_attr "arch" "32,a")
8591 (set_attr "type" "alu_shift,alu_shift_reg")])
8593 (define_insn "*sub_shiftsi_compare0"
8594 [(set (reg:CC_NOOV CC_REGNUM)
8596 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8597 (match_operator:SI 2 "shift_operator"
8598 [(match_operand:SI 3 "s_register_operand" "r,r")
8599 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8601 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8602 (minus:SI (match_dup 1)
8603 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8605 "sub%.\\t%0, %1, %3%S2"
8606 [(set_attr "conds" "set")
8607 (set_attr "shift" "3")
8608 (set_attr "arch" "32,a")
8609 (set_attr "type" "alu_shift,alu_shift_reg")])
8611 (define_insn "*sub_shiftsi_compare0_scratch"
8612 [(set (reg:CC_NOOV CC_REGNUM)
8614 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8615 (match_operator:SI 2 "shift_operator"
8616 [(match_operand:SI 3 "s_register_operand" "r,r")
8617 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8619 (clobber (match_scratch:SI 0 "=r,r"))]
8621 "sub%.\\t%0, %1, %3%S2"
8622 [(set_attr "conds" "set")
8623 (set_attr "shift" "3")
8624 (set_attr "arch" "32,a")
8625 (set_attr "type" "alu_shift,alu_shift_reg")])
;; AND/IOR of a store-condition-code value with a register, using the
;; already-set flags: *and_scc zeroes the result when the condition is
;; false, otherwise ANDs in #1; *ior_scc ORs in #1 when the condition
;; holds (two-insn form when the destination differs from operand 1).
8628 (define_insn "*and_scc"
8629 [(set (match_operand:SI 0 "s_register_operand" "=r")
8630 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8631 [(match_operand 3 "cc_register" "") (const_int 0)])
8632 (match_operand:SI 2 "s_register_operand" "r")))]
8634 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8635 [(set_attr "conds" "use")
8636 (set_attr "insn" "mov")
8637 (set_attr "length" "8")]
8640 (define_insn "*ior_scc"
8641 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8642 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8643 [(match_operand 3 "cc_register" "") (const_int 0)])
8644 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8648 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8649 [(set_attr "conds" "use")
8650 (set_attr "length" "4,8")]
;; Splitters feeding *compare_scc (order matters, per the original
;; comment).  LT x,0 -> logical shift right by 31; GE x,0 -> NOT then
;; shift; EQ/NE against a constant or add-operand -> compare plus
;; cond_exec fixup of the 0/1 result.
;; NOTE(review): the "(define_split" header lines themselves are
;; missing from this copy -- only the pattern bodies survive.
8653 ; A series of splitters for the compare_scc pattern below. Note that
8654 ; order is important.
8656 [(set (match_operand:SI 0 "s_register_operand" "")
8657 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8659 (clobber (reg:CC CC_REGNUM))]
8660 "TARGET_32BIT && reload_completed"
8661 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8664 [(set (match_operand:SI 0 "s_register_operand" "")
8665 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8667 (clobber (reg:CC CC_REGNUM))]
8668 "TARGET_32BIT && reload_completed"
8669 [(set (match_dup 0) (not:SI (match_dup 1)))
8670 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8673 [(set (match_operand:SI 0 "s_register_operand" "")
8674 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8676 (clobber (reg:CC CC_REGNUM))]
8677 "TARGET_32BIT && reload_completed"
8679 [(set (reg:CC CC_REGNUM)
8680 (compare:CC (const_int 1) (match_dup 1)))
8682 (minus:SI (const_int 1) (match_dup 1)))])
8683 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8684 (set (match_dup 0) (const_int 0)))])
8687 [(set (match_operand:SI 0 "s_register_operand" "")
8688 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8689 (match_operand:SI 2 "const_int_operand" "")))
8690 (clobber (reg:CC CC_REGNUM))]
8691 "TARGET_32BIT && reload_completed"
8693 [(set (reg:CC CC_REGNUM)
8694 (compare:CC (match_dup 1) (match_dup 2)))
8695 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8696 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8697 (set (match_dup 0) (const_int 1)))]
8699 operands[3] = GEN_INT (-INTVAL (operands[2]));
8703 [(set (match_operand:SI 0 "s_register_operand" "")
8704 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8705 (match_operand:SI 2 "arm_add_operand" "")))
8706 (clobber (reg:CC CC_REGNUM))]
8707 "TARGET_32BIT && reload_completed"
8709 [(set (reg:CC_NOOV CC_REGNUM)
8710 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8712 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8713 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8714 (set (match_dup 0) (const_int 1)))])
;; *compare_scc: store the 0/1 value of a comparison.  After reload it
;; splits into a compare followed by two cond_exec moves (operand 4 is
;; the reversed condition for the 0, operand 5 the original for the 1).
;; FP comparisons must use reverse_condition_maybe_unordered so NaNs
;; are handled correctly when reversing.
8716 (define_insn_and_split "*compare_scc"
8717 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8718 (match_operator:SI 1 "arm_comparison_operator"
8719 [(match_operand:SI 2 "s_register_operand" "r,r")
8720 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8721 (clobber (reg:CC CC_REGNUM))]
8724 "&& reload_completed"
8725 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8726 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8727 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8730 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8731 operands[2], operands[3]);
8732 enum rtx_code rc = GET_CODE (operands[1]);
8734 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8736 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8737 if (mode == CCFPmode || mode == CCFPEmode)
8738 rc = reverse_condition_maybe_unordered (rc);
8740 rc = reverse_condition (rc);
8741 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; Peephole that replaces the cond_exec 0/1 sequence from the
;; compare_scc splitters with a branch-free subtract / reverse-
;; subtract-with-carry (geu) sequence using scratch operand 3.
;; NOTE(review): the "(define_peephole2" header line is missing from
;; this copy (inferred from the trailing match_scratch).
8744 ;; Attempt to improve the sequence generated by the compare_scc splitters
8745 ;; not to use conditional execution.
8747 [(set (reg:CC CC_REGNUM)
8748 (compare:CC (match_operand:SI 1 "register_operand" "")
8749 (match_operand:SI 2 "arm_rhs_operand" "")))
8750 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8751 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8752 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8753 (set (match_dup 0) (const_int 1)))
8754 (match_scratch:SI 3 "r")]
8756 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8758 [(set (reg:CC CC_REGNUM)
8759 (compare:CC (const_int 0) (match_dup 3)))
8760 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8762 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8763 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
;; *cond_move: conditional move keyed on an equality test of an
;; already-computed condition (operand 3 is EQ or NE wrapping the real
;; comparison in operand 4).  which_alternative tracks which input is
;; tied to the destination so at most the necessary predicated MOVs
;; are emitted (1 or 2 instructions, hence lengths 4,4,8).
8765 (define_insn "*cond_move"
8766 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8767 (if_then_else:SI (match_operator 3 "equality_operator"
8768 [(match_operator 4 "arm_comparison_operator"
8769 [(match_operand 5 "cc_register" "") (const_int 0)])
8771 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8772 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8775 if (GET_CODE (operands[3]) == NE)
8777 if (which_alternative != 1)
8778 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8779 if (which_alternative != 0)
8780 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8783 if (which_alternative != 0)
8784 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8785 if (which_alternative != 1)
8786 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8789 [(set_attr "conds" "use")
8790 (set_attr "insn" "mov")
8791 (set_attr "length" "4,4,8")]
;; *cond_arith: apply a shiftable operator to (comparison-result,
;; operand 1).  Special case: op(LT x,0) uses "x, lsr #31" directly
;; with no compare.  Otherwise compare, then AND zeroes / MINUS
;; negates on the false path before the conditional "op ..., #1".
;; *cond_sub is the dedicated MINUS form: subtract 1 when the
;; condition holds.  Both clobber the flags ("clob").
8794 (define_insn "*cond_arith"
8795 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8796 (match_operator:SI 5 "shiftable_operator"
8797 [(match_operator:SI 4 "arm_comparison_operator"
8798 [(match_operand:SI 2 "s_register_operand" "r,r")
8799 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8800 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8801 (clobber (reg:CC CC_REGNUM))]
8804 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8805 return \"%i5\\t%0, %1, %2, lsr #31\";
8807 output_asm_insn (\"cmp\\t%2, %3\", operands);
8808 if (GET_CODE (operands[5]) == AND)
8809 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8810 else if (GET_CODE (operands[5]) == MINUS)
8811 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8812 else if (which_alternative != 0)
8813 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8814 return \"%i5%d4\\t%0, %1, #1\";
8816 [(set_attr "conds" "clob")
8817 (set_attr "length" "12")]
8820 (define_insn "*cond_sub"
8821 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8822 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8823 (match_operator:SI 4 "arm_comparison_operator"
8824 [(match_operand:SI 2 "s_register_operand" "r,r")
8825 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8826 (clobber (reg:CC CC_REGNUM))]
8829 output_asm_insn (\"cmp\\t%2, %3\", operands);
8830 if (which_alternative != 0)
8831 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8832 return \"sub%d4\\t%0, %1, #1\";
8834 [(set_attr "conds" "clob")
8835 (set_attr "length" "8,12")]
;; *cmp_ite0: combine two comparisons whose if-then-else collapses to
;; a single dominant condition-code register.  Emits cmp/cmn pairs
;; (cmn when the L constraint negated the immediate), choosing operand
;; order by whether condition 5 dominates condition 4.
8838 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
8839 (define_insn "*cmp_ite0"
8840 [(set (match_operand 6 "dominant_cc_register" "")
8843 (match_operator 4 "arm_comparison_operator"
8844 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8845 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8846 (match_operator:SI 5 "arm_comparison_operator"
8847 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8848 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8854 static const char * const opcodes[4][2] =
8856 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8857 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8858 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8859 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8860 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8861 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8862 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8863 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8866 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8868 return opcodes[which_alternative][swap];
8870 [(set_attr "conds" "set")
8871 (set_attr "length" "8")]
;; *cmp_ite1: as *cmp_ite0 but for the variant where the dominance
;; test is against the REVERSED first condition, hence the %D5
;; (inverted-condition) forms in the second column of templates.
8874 (define_insn "*cmp_ite1"
8875 [(set (match_operand 6 "dominant_cc_register" "")
8878 (match_operator 4 "arm_comparison_operator"
8879 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8880 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8881 (match_operator:SI 5 "arm_comparison_operator"
8882 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8883 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8889 static const char * const opcodes[4][2] =
8891 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8892 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8893 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8894 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8895 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8896 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8897 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8898 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8901 comparison_dominates_p (GET_CODE (operands[5]),
8902 reverse_condition (GET_CODE (operands[4])));
8904 return opcodes[which_alternative][swap];
8906 [(set_attr "conds" "set")
8907 (set_attr "length" "8")]
8910 (define_insn "*cmp_and"
8911 [(set (match_operand 6 "dominant_cc_register" "")
8914 (match_operator 4 "arm_comparison_operator"
8915 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8916 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8917 (match_operator:SI 5 "arm_comparison_operator"
8918 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8919 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8924 static const char *const opcodes[4][2] =
8926 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8927 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8928 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8929 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8930 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8931 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8932 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8933 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8936 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8938 return opcodes[which_alternative][swap];
8940 [(set_attr "conds" "set")
8941 (set_attr "predicable" "no")
8942 (set_attr "length" "8")]
8945 (define_insn "*cmp_ior"
8946 [(set (match_operand 6 "dominant_cc_register" "")
8949 (match_operator 4 "arm_comparison_operator"
8950 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8951 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8952 (match_operator:SI 5 "arm_comparison_operator"
8953 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8954 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8959 static const char *const opcodes[4][2] =
8961 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8962 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8963 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8964 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8965 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8966 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8967 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8968 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8971 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8973 return opcodes[which_alternative][swap];
8976 [(set_attr "conds" "set")
8977 (set_attr "length" "8")]
;; *ior_scc_scc: IOR of two store-condition (scc) results into a register.
;; After reload this splits into a single compare in the dominance CC mode
;; chosen by arm_select_dominance_cc_mode (operand 7 is that CC register,
;; created in the preparation code) followed by (set %0 (ne cc 0)).
8980 (define_insn_and_split "*ior_scc_scc"
8981 [(set (match_operand:SI 0 "s_register_operand" "=r")
8982 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8983 [(match_operand:SI 1 "s_register_operand" "r")
8984 (match_operand:SI 2 "arm_add_operand" "rIL")])
8985 (match_operator:SI 6 "arm_comparison_operator"
8986 [(match_operand:SI 4 "s_register_operand" "r")
8987 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8988 (clobber (reg:CC CC_REGNUM))]
8990 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8993 "TARGET_ARM && reload_completed"
8997 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8998 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9000 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9002 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9005 [(set_attr "conds" "clob")
9006 (set_attr "length" "16")])
9008 ; If the above pattern is followed by a CMP insn, then the compare is
9009 ; redundant, since we can rework the conditional instruction that follows.
;; *ior_scc_scc_cmp: as *ior_scc_scc but the IOR result also feeds a
;; comparison; the split keeps the dominant CC register (operand 0) set and
;; stores the scc value into operand 7, avoiding the redundant compare.
9010 (define_insn_and_split "*ior_scc_scc_cmp"
9011 [(set (match_operand 0 "dominant_cc_register" "")
9012 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9013 [(match_operand:SI 1 "s_register_operand" "r")
9014 (match_operand:SI 2 "arm_add_operand" "rIL")])
9015 (match_operator:SI 6 "arm_comparison_operator"
9016 [(match_operand:SI 4 "s_register_operand" "r")
9017 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9019 (set (match_operand:SI 7 "s_register_operand" "=r")
9020 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9021 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9024 "TARGET_ARM && reload_completed"
9028 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9029 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9031 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9033 [(set_attr "conds" "set")
9034 (set_attr "length" "16")])
;; *and_scc_scc: AND analogue of *ior_scc_scc (uses DOM_CC_X_AND_Y).
9036 (define_insn_and_split "*and_scc_scc"
9037 [(set (match_operand:SI 0 "s_register_operand" "=r")
9038 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9039 [(match_operand:SI 1 "s_register_operand" "r")
9040 (match_operand:SI 2 "arm_add_operand" "rIL")])
9041 (match_operator:SI 6 "arm_comparison_operator"
9042 [(match_operand:SI 4 "s_register_operand" "r")
9043 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9044 (clobber (reg:CC CC_REGNUM))]
9046 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9049 "TARGET_ARM && reload_completed
9050 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9055 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9056 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9058 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9060 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9063 [(set_attr "conds" "clob")
9064 (set_attr "length" "16")])
9066 ; If the above pattern is followed by a CMP insn, then the compare is
9067 ; redundant, since we can rework the conditional instruction that follows.
;; *and_scc_scc_cmp: AND analogue of *ior_scc_scc_cmp.
9068 (define_insn_and_split "*and_scc_scc_cmp"
9069 [(set (match_operand 0 "dominant_cc_register" "")
9070 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9071 [(match_operand:SI 1 "s_register_operand" "r")
9072 (match_operand:SI 2 "arm_add_operand" "rIL")])
9073 (match_operator:SI 6 "arm_comparison_operator"
9074 [(match_operand:SI 4 "s_register_operand" "r")
9075 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9077 (set (match_operand:SI 7 "s_register_operand" "=r")
9078 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9079 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9082 "TARGET_ARM && reload_completed"
9086 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9087 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9089 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9091 [(set_attr "conds" "set")
9092 (set_attr "length" "16")])
9094 ;; If there is no dominance in the comparison, then we can still save an
9095 ;; instruction in the AND case, since we can know that the second compare
9096 ;; need only zero the value if false (if true, then the value is already
;; *and_scc_scc_nodom: AND of two sccs when neither comparison dominates.
;; Split: compute the first scc into operand 0, then compare via operands
;; 7/8 (a CC register and COMPARE rtx built in the preparation code with
;; SELECT_CC_MODE / gen_rtx_COMPARE), and conditionally zero the result
;; through an if_then_else on the second comparison.
9098 (define_insn_and_split "*and_scc_scc_nodom"
9099 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9100 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9101 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9102 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9103 (match_operator:SI 6 "arm_comparison_operator"
9104 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9105 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9106 (clobber (reg:CC CC_REGNUM))]
9108 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9111 "TARGET_ARM && reload_completed"
9112 [(parallel [(set (match_dup 0)
9113 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9114 (clobber (reg:CC CC_REGNUM))])
9115 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9117 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9120 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9121 operands[4], operands[5]),
9123 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9125 [(set_attr "conds" "clob")
9126 (set_attr "length" "20")])
;; NOTE(review): the define_split header lines for the two patterns below
;; appear to have been elided.  What remains matches splits that rewrite a
;; CC_NOOV compare of (ior (and X 1) (comparison ...)) against zero -- in
;; either operand order -- using scratch operand 4 to hold the IOR result
;; before re-testing its low bit.
9129 [(set (reg:CC_NOOV CC_REGNUM)
9130 (compare:CC_NOOV (ior:SI
9131 (and:SI (match_operand:SI 0 "s_register_operand" "")
9133 (match_operator:SI 1 "arm_comparison_operator"
9134 [(match_operand:SI 2 "s_register_operand" "")
9135 (match_operand:SI 3 "arm_add_operand" "")]))
9137 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9140 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9142 (set (reg:CC_NOOV CC_REGNUM)
9143 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9148 [(set (reg:CC_NOOV CC_REGNUM)
9149 (compare:CC_NOOV (ior:SI
9150 (match_operator:SI 1 "arm_comparison_operator"
9151 [(match_operand:SI 2 "s_register_operand" "")
9152 (match_operand:SI 3 "arm_add_operand" "")])
9153 (and:SI (match_operand:SI 0 "s_register_operand" "")
9156 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9159 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9161 (set (reg:CC_NOOV CC_REGNUM)
9162 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9165 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: store -(comparison), i.e. all-ones when true, zero when false.
;; LT x,#0 becomes a single asr #31; NE becomes subs/mvnne; the general
;; case is cmp + mov%D3 #0 + mvn%d3 #0 (CC clobbered).
9167 (define_insn "*negscc"
9168 [(set (match_operand:SI 0 "s_register_operand" "=r")
9169 (neg:SI (match_operator 3 "arm_comparison_operator"
9170 [(match_operand:SI 1 "s_register_operand" "r")
9171 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9172 (clobber (reg:CC CC_REGNUM))]
9175 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9176 return \"mov\\t%0, %1, asr #31\";
9178 if (GET_CODE (operands[3]) == NE)
9179 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9181 output_asm_insn (\"cmp\\t%1, %2\", operands);
9182 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9183 return \"mvn%d3\\t%0, #0\";
9185 [(set_attr "conds" "clob")
9186 (set_attr "length" "12")]
;; movcond: conditional select of operand 1 (true) or 2 (false) on
;; comparison 5 of operands 3/4, clobbering CC.  LT/GE against zero with a
;; register arm and the other arm zero-or-tied use and/bic/ands/bics with
;; an asr #31 or #32 mask; otherwise emit cmp (cmn when the immediate is
;; only representable negated, via %n) plus one or two conditional movs
;; depending on which operand is tied to the destination.
9189 (define_insn "movcond"
9190 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9192 (match_operator 5 "arm_comparison_operator"
9193 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9194 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9195 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9196 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9197 (clobber (reg:CC CC_REGNUM))]
9200 if (GET_CODE (operands[5]) == LT
9201 && (operands[4] == const0_rtx))
9203 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9205 if (operands[2] == const0_rtx)
9206 return \"and\\t%0, %1, %3, asr #31\";
9207 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9209 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9211 if (operands[1] == const0_rtx)
9212 return \"bic\\t%0, %2, %3, asr #31\";
9213 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9215 /* The only case that falls through to here is when both ops 1 & 2
9219 if (GET_CODE (operands[5]) == GE
9220 && (operands[4] == const0_rtx))
9222 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9224 if (operands[2] == const0_rtx)
9225 return \"bic\\t%0, %1, %3, asr #31\";
9226 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9228 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9230 if (operands[1] == const0_rtx)
9231 return \"and\\t%0, %2, %3, asr #31\";
9232 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9234 /* The only case that falls through to here is when both ops 1 & 2
9237 if (GET_CODE (operands[4]) == CONST_INT
9238 && !const_ok_for_arm (INTVAL (operands[4])))
9239 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9241 output_asm_insn (\"cmp\\t%3, %4\", operands);
9242 if (which_alternative != 0)
9243 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9244 if (which_alternative != 1)
9245 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9248 [(set_attr "conds" "clob")
9249 (set_attr "length" "8,8,12")]
9252 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; *ifcompare_plus_move: (plus %2 %3) on the true arm vs. a move of %1 on
;; the false arm, with the comparison still to be performed (CC clobbered).
9254 (define_insn "*ifcompare_plus_move"
9255 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9256 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9257 [(match_operand:SI 4 "s_register_operand" "r,r")
9258 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9260 (match_operand:SI 2 "s_register_operand" "r,r")
9261 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9262 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9263 (clobber (reg:CC CC_REGNUM))]
9266 [(set_attr "conds" "clob")
9267 (set_attr "length" "8,12")]
;; *if_plus_move: same selection with the flags already set (operand 5 is
;; a cc register): add/sub%d4, plus mov%D4 when %0 is not tied to %1.
9270 (define_insn "*if_plus_move"
9271 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9273 (match_operator 4 "arm_comparison_operator"
9274 [(match_operand 5 "cc_register" "") (const_int 0)])
9276 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9277 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9278 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9282 sub%d4\\t%0, %2, #%n3
9283 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9284 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9285 [(set_attr "conds" "use")
9286 (set_attr "length" "4,4,8,8")
9287 (set_attr "type" "*,*,*,*")]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the arms
;; swapped (move on true, plus on false); CC clobbered.
9290 (define_insn "*ifcompare_move_plus"
9291 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9292 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9293 [(match_operand:SI 4 "s_register_operand" "r,r")
9294 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9295 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9297 (match_operand:SI 2 "s_register_operand" "r,r")
9298 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9299 (clobber (reg:CC CC_REGNUM))]
9302 [(set_attr "conds" "clob")
9303 (set_attr "length" "8,12")]
;; *if_move_plus: mirror of *if_plus_move -- arithmetic on the inverse
;; condition (%D4), optional mov%d4 for the untied alternatives.
9306 (define_insn "*if_move_plus"
9307 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9309 (match_operator 4 "arm_comparison_operator"
9310 [(match_operand 5 "cc_register" "") (const_int 0)])
9311 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9313 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9314 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9318 sub%D4\\t%0, %2, #%n3
9319 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9320 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9321 [(set_attr "conds" "use")
9322 (set_attr "length" "4,4,8,8")
9323 (set_attr "type" "*,*,*,*")]
;; *ifcompare_arith_arith: select between two shiftable-operator results
;; (operators 8 and 7) on comparison 9; the compare is still to be done
;; (CC clobbered).
9326 (define_insn "*ifcompare_arith_arith"
9327 [(set (match_operand:SI 0 "s_register_operand" "=r")
9328 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9329 [(match_operand:SI 5 "s_register_operand" "r")
9330 (match_operand:SI 6 "arm_add_operand" "rIL")])
9331 (match_operator:SI 8 "shiftable_operator"
9332 [(match_operand:SI 1 "s_register_operand" "r")
9333 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9334 (match_operator:SI 7 "shiftable_operator"
9335 [(match_operand:SI 3 "s_register_operand" "r")
9336 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9337 (clobber (reg:CC CC_REGNUM))]
9340 [(set_attr "conds" "clob")
9341 (set_attr "length" "12")]
;; *if_arith_arith: flags already set -- emit %I6%d5 then %I7%D5
;; (%I prints the operator's mnemonic).
9344 (define_insn "*if_arith_arith"
9345 [(set (match_operand:SI 0 "s_register_operand" "=r")
9346 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9347 [(match_operand 8 "cc_register" "") (const_int 0)])
9348 (match_operator:SI 6 "shiftable_operator"
9349 [(match_operand:SI 1 "s_register_operand" "r")
9350 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9351 (match_operator:SI 7 "shiftable_operator"
9352 [(match_operand:SI 3 "s_register_operand" "r")
9353 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9355 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9356 [(set_attr "conds" "use")
9357 (set_attr "length" "8")]
;; *ifcompare_arith_move: arithmetic result when the comparison holds, a
;; plain move otherwise; CC clobbered.  For LT/GE against zero with the
;; identity operand tied appropriately, a two-insn and/bic asr #31 mask
;; sequence is used; otherwise cmp/cmn then %I7%d6 with optional mov%D6.
9360 (define_insn "*ifcompare_arith_move"
9361 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9362 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9363 [(match_operand:SI 2 "s_register_operand" "r,r")
9364 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9365 (match_operator:SI 7 "shiftable_operator"
9366 [(match_operand:SI 4 "s_register_operand" "r,r")
9367 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9368 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9369 (clobber (reg:CC CC_REGNUM))]
9372 /* If we have an operation where (op x 0) is the identity operation and
9373 the conditional operator is LT or GE and we are comparing against zero and
9374 everything is in registers then we can do this in two instructions. */
9375 if (operands[3] == const0_rtx
9376 && GET_CODE (operands[7]) != AND
9377 && GET_CODE (operands[5]) == REG
9378 && GET_CODE (operands[1]) == REG
9379 && REGNO (operands[1]) == REGNO (operands[4])
9380 && REGNO (operands[4]) != REGNO (operands[0]))
9382 if (GET_CODE (operands[6]) == LT)
9383 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9384 else if (GET_CODE (operands[6]) == GE)
9385 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9387 if (GET_CODE (operands[3]) == CONST_INT
9388 && !const_ok_for_arm (INTVAL (operands[3])))
9389 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9391 output_asm_insn (\"cmp\\t%2, %3\", operands);
9392 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9393 if (which_alternative != 0)
9394 return \"mov%D6\\t%0, %1\";
9397 [(set_attr "conds" "clob")
9398 (set_attr "length" "8,12")]
;; *if_arith_move: flags already set -- %I5%d4 plus mov%D4 when %0 is not
;; tied to %1.
9401 (define_insn "*if_arith_move"
9402 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9403 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9404 [(match_operand 6 "cc_register" "") (const_int 0)])
9405 (match_operator:SI 5 "shiftable_operator"
9406 [(match_operand:SI 2 "s_register_operand" "r,r")
9407 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9408 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9412 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9413 [(set_attr "conds" "use")
9414 (set_attr "length" "4,8")
9415 (set_attr "type" "*,*")]
;; *ifcompare_move_arith: mirror of *ifcompare_arith_move -- move on the
;; true arm, arithmetic on the false arm; the asr-mask special cases are
;; therefore keyed the opposite way (GE -> and, LT -> bic).
9418 (define_insn "*ifcompare_move_arith"
9419 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9420 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9421 [(match_operand:SI 4 "s_register_operand" "r,r")
9422 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9423 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9424 (match_operator:SI 7 "shiftable_operator"
9425 [(match_operand:SI 2 "s_register_operand" "r,r")
9426 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9427 (clobber (reg:CC CC_REGNUM))]
9430 /* If we have an operation where (op x 0) is the identity operation and
9431 the conditional operator is LT or GE and we are comparing against zero and
9432 everything is in registers then we can do this in two instructions */
9433 if (operands[5] == const0_rtx
9434 && GET_CODE (operands[7]) != AND
9435 && GET_CODE (operands[3]) == REG
9436 && GET_CODE (operands[1]) == REG
9437 && REGNO (operands[1]) == REGNO (operands[2])
9438 && REGNO (operands[2]) != REGNO (operands[0]))
9440 if (GET_CODE (operands[6]) == GE)
9441 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9442 else if (GET_CODE (operands[6]) == LT)
9443 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9446 if (GET_CODE (operands[5]) == CONST_INT
9447 && !const_ok_for_arm (INTVAL (operands[5])))
9448 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9450 output_asm_insn (\"cmp\\t%4, %5\", operands);
9452 if (which_alternative != 0)
9453 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9454 return \"%I7%D6\\t%0, %2, %3\";
9456 [(set_attr "conds" "clob")
9457 (set_attr "length" "8,12")]
;; *if_move_arith: flags already set -- %I5%D4 plus optional mov%d4.
9460 (define_insn "*if_move_arith"
9461 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9463 (match_operator 4 "arm_comparison_operator"
9464 [(match_operand 6 "cc_register" "") (const_int 0)])
9465 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9466 (match_operator:SI 5 "shiftable_operator"
9467 [(match_operand:SI 2 "s_register_operand" "r,r")
9468 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9472 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9473 [(set_attr "conds" "use")
9474 (set_attr "length" "4,8")
9475 (set_attr "type" "*,*")]
;; *ifcompare_move_not: select operand 1 (true) or (not operand 2) (false);
;; comparison still to be done, CC clobbered.
9478 (define_insn "*ifcompare_move_not"
9479 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9481 (match_operator 5 "arm_comparison_operator"
9482 [(match_operand:SI 3 "s_register_operand" "r,r")
9483 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9484 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9486 (match_operand:SI 2 "s_register_operand" "r,r"))))
9487 (clobber (reg:CC CC_REGNUM))]
9490 [(set_attr "conds" "clob")
9491 (set_attr "length" "8,12")]
;; *if_move_not: flags already set -- mvn%D4 for the false arm, with
;; mov%d4 / mvn%d4 (K = invertible immediate, printed via %B1) for true.
9494 (define_insn "*if_move_not"
9495 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9497 (match_operator 4 "arm_comparison_operator"
9498 [(match_operand 3 "cc_register" "") (const_int 0)])
9499 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9500 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9504 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9505 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9506 [(set_attr "conds" "use")
9507 (set_attr "insn" "mvn")
9508 (set_attr "length" "4,8,8")]
;; *ifcompare_not_move: mirror -- (not operand 2) on the true arm.
9511 (define_insn "*ifcompare_not_move"
9512 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9514 (match_operator 5 "arm_comparison_operator"
9515 [(match_operand:SI 3 "s_register_operand" "r,r")
9516 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9518 (match_operand:SI 2 "s_register_operand" "r,r"))
9519 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9520 (clobber (reg:CC CC_REGNUM))]
9523 [(set_attr "conds" "clob")
9524 (set_attr "length" "8,12")]
;; *if_not_move: flags already set -- mvn%d4 on true, mov/mvn%D4 on false.
9527 (define_insn "*if_not_move"
9528 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9530 (match_operator 4 "arm_comparison_operator"
9531 [(match_operand 3 "cc_register" "") (const_int 0)])
9532 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9533 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9537 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9538 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9539 [(set_attr "conds" "use")
9540 (set_attr "insn" "mvn")
9541 (set_attr "length" "4,8,8")]
;; *ifcompare_shift_move: shifted value (shift_operator 7) on the true arm,
;; plain value on the false arm; comparison still to be done, CC clobbered.
9544 (define_insn "*ifcompare_shift_move"
9545 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9547 (match_operator 6 "arm_comparison_operator"
9548 [(match_operand:SI 4 "s_register_operand" "r,r")
9549 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9550 (match_operator:SI 7 "shift_operator"
9551 [(match_operand:SI 2 "s_register_operand" "r,r")
9552 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9553 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9554 (clobber (reg:CC CC_REGNUM))]
9557 [(set_attr "conds" "clob")
9558 (set_attr "length" "8,12")]
;; *if_shift_move: flags already set -- mov%d5 %2%S4 (%S prints the shift),
;; with mov%D5/mvn%D5 for the untied/K alternatives; "type" is alu_shift
;; only when the shift amount is a constant.
9561 (define_insn "*if_shift_move"
9562 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9564 (match_operator 5 "arm_comparison_operator"
9565 [(match_operand 6 "cc_register" "") (const_int 0)])
9566 (match_operator:SI 4 "shift_operator"
9567 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9568 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9569 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9573 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9574 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9575 [(set_attr "conds" "use")
9576 (set_attr "shift" "2")
9577 (set_attr "length" "4,8,8")
9578 (set_attr "insn" "mov")
9579 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9580 (const_string "alu_shift")
9581 (const_string "alu_shift_reg")))]
;; *ifcompare_move_shift: mirror -- the shifted value is on the false arm.
9584 (define_insn "*ifcompare_move_shift"
9585 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9587 (match_operator 6 "arm_comparison_operator"
9588 [(match_operand:SI 4 "s_register_operand" "r,r")
9589 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9590 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9591 (match_operator:SI 7 "shift_operator"
9592 [(match_operand:SI 2 "s_register_operand" "r,r")
9593 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9594 (clobber (reg:CC CC_REGNUM))]
9597 [(set_attr "conds" "clob")
9598 (set_attr "length" "8,12")]
;; *if_move_shift: flags already set -- mirror of *if_shift_move with the
;; condition senses (%d5/%D5) exchanged.
9601 (define_insn "*if_move_shift"
9602 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9604 (match_operator 5 "arm_comparison_operator"
9605 [(match_operand 6 "cc_register" "") (const_int 0)])
9606 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9607 (match_operator:SI 4 "shift_operator"
9608 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9609 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9613 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9614 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9615 [(set_attr "conds" "use")
9616 (set_attr "shift" "2")
9617 (set_attr "length" "4,8,8")
9618 (set_attr "insn" "mov")
9619 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9620 (const_string "alu_shift")
9621 (const_string "alu_shift_reg")))]
;; *ifcompare_shift_shift: two shifted values selected by a fresh
;; comparison (CC clobbered).
9624 (define_insn "*ifcompare_shift_shift"
9625 [(set (match_operand:SI 0 "s_register_operand" "=r")
9627 (match_operator 7 "arm_comparison_operator"
9628 [(match_operand:SI 5 "s_register_operand" "r")
9629 (match_operand:SI 6 "arm_add_operand" "rIL")])
9630 (match_operator:SI 8 "shift_operator"
9631 [(match_operand:SI 1 "s_register_operand" "r")
9632 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9633 (match_operator:SI 9 "shift_operator"
9634 [(match_operand:SI 3 "s_register_operand" "r")
9635 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9636 (clobber (reg:CC CC_REGNUM))]
9639 [(set_attr "conds" "clob")
9640 (set_attr "length" "12")]
;; *if_shift_shift: flags already set -- mov%d5 %1%S6 then mov%D5 %3%S7;
;; "type" is alu_shift only when both shift amounts are constants.
9643 (define_insn "*if_shift_shift"
9644 [(set (match_operand:SI 0 "s_register_operand" "=r")
9646 (match_operator 5 "arm_comparison_operator"
9647 [(match_operand 8 "cc_register" "") (const_int 0)])
9648 (match_operator:SI 6 "shift_operator"
9649 [(match_operand:SI 1 "s_register_operand" "r")
9650 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9651 (match_operator:SI 7 "shift_operator"
9652 [(match_operand:SI 3 "s_register_operand" "r")
9653 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9655 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9656 [(set_attr "conds" "use")
9657 (set_attr "shift" "1")
9658 (set_attr "length" "8")
9659 (set_attr "insn" "mov")
9660 (set (attr "type") (if_then_else
9661 (and (match_operand 2 "const_int_operand" "")
9662 (match_operand 4 "const_int_operand" ""))
9663 (const_string "alu_shift")
9664 (const_string "alu_shift_reg")))]
;; *ifcompare_not_arith: (not %1) on the true arm vs. a shiftable op on
;; the false arm; comparison still to be done, CC clobbered.
9667 (define_insn "*ifcompare_not_arith"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r")
9670 (match_operator 6 "arm_comparison_operator"
9671 [(match_operand:SI 4 "s_register_operand" "r")
9672 (match_operand:SI 5 "arm_add_operand" "rIL")])
9673 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9674 (match_operator:SI 7 "shiftable_operator"
9675 [(match_operand:SI 2 "s_register_operand" "r")
9676 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9677 (clobber (reg:CC CC_REGNUM))]
9680 [(set_attr "conds" "clob")
9681 (set_attr "length" "12")]
;; *if_not_arith: flags already set -- mvn%d5 then %I6%D5.
9684 (define_insn "*if_not_arith"
9685 [(set (match_operand:SI 0 "s_register_operand" "=r")
9687 (match_operator 5 "arm_comparison_operator"
9688 [(match_operand 4 "cc_register" "") (const_int 0)])
9689 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9690 (match_operator:SI 6 "shiftable_operator"
9691 [(match_operand:SI 2 "s_register_operand" "r")
9692 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9694 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9695 [(set_attr "conds" "use")
9696 (set_attr "insn" "mvn")
9697 (set_attr "length" "8")]
;; *ifcompare_arith_not: mirror -- arith on true, (not %1) on false.
9700 (define_insn "*ifcompare_arith_not"
9701 [(set (match_operand:SI 0 "s_register_operand" "=r")
9703 (match_operator 6 "arm_comparison_operator"
9704 [(match_operand:SI 4 "s_register_operand" "r")
9705 (match_operand:SI 5 "arm_add_operand" "rIL")])
9706 (match_operator:SI 7 "shiftable_operator"
9707 [(match_operand:SI 2 "s_register_operand" "r")
9708 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9709 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9710 (clobber (reg:CC CC_REGNUM))]
9713 [(set_attr "conds" "clob")
9714 (set_attr "length" "12")]
;; *if_arith_not: flags already set -- mvn%D5 then %I6%d5.
9717 (define_insn "*if_arith_not"
9718 [(set (match_operand:SI 0 "s_register_operand" "=r")
9720 (match_operator 5 "arm_comparison_operator"
9721 [(match_operand 4 "cc_register" "") (const_int 0)])
9722 (match_operator:SI 6 "shiftable_operator"
9723 [(match_operand:SI 2 "s_register_operand" "r")
9724 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9725 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9727 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9728 [(set_attr "conds" "use")
9729 (set_attr "insn" "mvn")
9730 (set_attr "length" "8")]
;; *ifcompare_neg_move: (neg %2) on the true arm vs. a move; comparison
;; still to be done, CC clobbered.
9733 (define_insn "*ifcompare_neg_move"
9734 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9736 (match_operator 5 "arm_comparison_operator"
9737 [(match_operand:SI 3 "s_register_operand" "r,r")
9738 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9739 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9740 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9741 (clobber (reg:CC CC_REGNUM))]
9744 [(set_attr "conds" "clob")
9745 (set_attr "length" "8,12")]
;; *if_neg_move: flags already set -- rsb%d4 %2, #0 with optional
;; mov%D4/mvn%D4 for the untied/K alternatives.
9748 (define_insn "*if_neg_move"
9749 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9751 (match_operator 4 "arm_comparison_operator"
9752 [(match_operand 3 "cc_register" "") (const_int 0)])
9753 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9754 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9758 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9759 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9760 [(set_attr "conds" "use")
9761 (set_attr "length" "4,8,8")]
;; *ifcompare_move_neg: mirror -- the negation sits on the false arm.
9764 (define_insn "*ifcompare_move_neg"
9765 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9767 (match_operator 5 "arm_comparison_operator"
9768 [(match_operand:SI 3 "s_register_operand" "r,r")
9769 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9770 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9771 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9772 (clobber (reg:CC CC_REGNUM))]
9775 [(set_attr "conds" "clob")
9776 (set_attr "length" "8,12")]
;; *if_move_neg: flags already set -- mirror of *if_neg_move with the
;; condition senses (%d4/%D4) exchanged.
9779 (define_insn "*if_move_neg"
9780 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9782 (match_operator 4 "arm_comparison_operator"
9783 [(match_operand 3 "cc_register" "") (const_int 0)])
9784 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9785 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9789 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9790 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9791 [(set_attr "conds" "use")
9792 (set_attr "length" "4,8,8")]
;; *arith_adjacentmem: apply a shiftable operator to two adjacent memory
;; words.  The C body orders {%0, scratch %4} by register number for an
;; ldm, extracts any constant base offsets (val1/val2), then emits
;; ldmib/ldmia/ldmda -- adjusting the base with output_add_immediate when
;; the offset is representable, or falling back to two ldr's when it is
;; out of add-immediate range -- and finally the %I3 arithmetic on the
;; loaded pair.
;; NOTE(review): several lines of this body appear to be elided here.
9795 (define_insn "*arith_adjacentmem"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r")
9797 (match_operator:SI 1 "shiftable_operator"
9798 [(match_operand:SI 2 "memory_operand" "m")
9799 (match_operand:SI 3 "memory_operand" "m")]))
9800 (clobber (match_scratch:SI 4 "=r"))]
9801 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9807 HOST_WIDE_INT val1 = 0, val2 = 0;
9809 if (REGNO (operands[0]) > REGNO (operands[4]))
9811 ldm[1] = operands[4];
9812 ldm[2] = operands[0];
9816 ldm[1] = operands[0];
9817 ldm[2] = operands[4];
9820 base_reg = XEXP (operands[2], 0);
9822 if (!REG_P (base_reg))
9824 val1 = INTVAL (XEXP (base_reg, 1));
9825 base_reg = XEXP (base_reg, 0);
9828 if (!REG_P (XEXP (operands[3], 0)))
9829 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9831 arith[0] = operands[0];
9832 arith[3] = operands[1];
9846 if (val1 !=0 && val2 != 0)
9850 if (val1 == 4 || val2 == 4)
9851 /* Other val must be 8, since we know they are adjacent and neither
9853 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9854 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9856 ldm[0] = ops[0] = operands[4];
9858 ops[2] = GEN_INT (val1);
9859 output_add_immediate (ops);
9861 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9863 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9867 /* Offset is out of range for a single add, so use two ldr. */
9870 ops[2] = GEN_INT (val1);
9871 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9873 ops[2] = GEN_INT (val2);
9874 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9880 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9882 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9887 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9889 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9891 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9894 [(set_attr "length" "12")
9895 (set_attr "predicable" "yes")
9896 (set_attr "type" "load1")]
9899 ; This pattern is never tried by combine, so do it as a peephole
;; NOTE(review): the define_peephole2 / define_split header lines for the
;; patterns below appear to have been elided.  Visible content, in order:
;; (1) a peephole fusing a register copy with the following
;; compare-against-zero into a single parallel; (2) a split that uses
;; scratch %5 (set to (not (ashiftrt %1 31))) to compute
;; (and (ge %1 0) (neg (comparison ...))); (3) a CC_Z split that replaces
;; a shifted/extended compare of a QImode memory operand with a
;; zero-extend plus a compare against the constant shifted down by 24
;; (valid because CC_Z implies only equality/unsigned tests follow).
9902 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9903 (match_operand:SI 1 "arm_general_register_operand" ""))
9904 (set (reg:CC CC_REGNUM)
9905 (compare:CC (match_dup 1) (const_int 0)))]
9907 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9908 (set (match_dup 0) (match_dup 1))])]
9913 [(set (match_operand:SI 0 "s_register_operand" "")
9914 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9916 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9917 [(match_operand:SI 3 "s_register_operand" "")
9918 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9919 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9921 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9922 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9927 ;; This split can be used because CC_Z mode implies that the following
9928 ;; branch will be an equality, or an unsigned inequality, so the sign
9929 ;; extension is not needed.
9932 [(set (reg:CC_Z CC_REGNUM)
9934 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9936 (match_operand 1 "const_int_operand" "")))
9937 (clobber (match_scratch:SI 2 ""))]
9939 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9940 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9941 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9942 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9944 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9947 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Expand the function prologue.  Dispatches to arm_expand_prologue or
;; thumb1_expand_prologue depending on the target (the guard lines are
;; not visible in this view).
9949 (define_expand "prologue"
9950 [(clobber (const_int 0))]
9953 arm_expand_prologue ();
9955 thumb1_expand_prologue ();
;; Expand the function epilogue.  For eh_return frames, keep r2 alive
;; across the epilogue via a prologue_use.  Then either expand a
;; Thumb-1 epilogue, emit a plain return insn when one suffices
;; (USE_RETURN_INSN), or emit a volatile epilogue unspec that is
;; rendered later by *epilogue_insns.
9960 (define_expand "epilogue"
9961 [(clobber (const_int 0))]
9964 if (crtl->calls_eh_return)
9965 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9967 thumb1_expand_epilogue ();
9968 else if (USE_RETURN_INSN (FALSE))
9970 emit_jump_insn (gen_return ());
9973 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9975 gen_rtx_RETURN (VOIDmode)),
9981 ;; Note - although unspec_volatile's USE all hard registers,
9982 ;; USEs are ignored after reload has completed. Thus we need
9983 ;; to add an unspec of the link register to ensure that flow
9984 ;; does not think that it is unused by the sibcall branch that
9985 ;; will replace the standard function epilogue.
;; Output the epilogue preceding a sibling call.  When a single return
;; instruction would suffice, emit it via output_return_instruction;
;; otherwise fall back to the full arm_output_epilogue sequence.
9986 (define_insn "sibcall_epilogue"
9987 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9988 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9991 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9992 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9993 return arm_output_epilogue (next_nonnote_insn (insn));
9995 ;; Length is absolute worst case
9996 [(set_attr "length" "44")
9997 (set_attr "type" "block")
9998 ;; We don't clobber the conditions, but the potential length of this
9999 ;; operation is sufficient to make conditionalizing the sequence
10000 ;; unlikely to be profitable.
10001 (set_attr "conds" "clob")]
;; Render the volatile epilogue unspec emitted by the "epilogue"
;; expander: arm_output_epilogue for 32-bit targets,
;; thumb_unexpanded_epilogue for Thumb-1.
10004 (define_insn "*epilogue_insns"
10005 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10009 return arm_output_epilogue (NULL);
10010 else /* TARGET_THUMB1 */
10011 return thumb_unexpanded_epilogue ();
10013 ; Length is absolute worst case
10014 [(set_attr "length" "44")
10015 (set_attr "type" "block")
10016 ;; We don't clobber the conditions, but the potential length of this
10017 ;; operation is sufficient to make conditionalizing the sequence
10018 ;; unlikely to be profitable.
10019 (set_attr "conds" "clob")]
;; Epilogue used when unwinding for exception handling.  Records the
;; stack-pointer adjustment (operand 1) and copies the handler address
;; (operand 2) into r2 if it is not already there.
10022 (define_expand "eh_epilogue"
10023 [(use (match_operand:SI 0 "register_operand" ""))
10024 (use (match_operand:SI 1 "register_operand" ""))
10025 (use (match_operand:SI 2 "register_operand" ""))]
10029 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10030 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10032 rtx ra = gen_rtx_REG (Pmode, 2);
10034 emit_move_insn (ra, operands[2]);
10037 /* This is a hack -- we may have crystallized the function type too
10039 cfun->machine->func_type = 0;
10043 ;; This split is only used during output to reduce the number of patterns
10044 ;; that need assembler instructions adding to them. We allowed the setting
10045 ;; of the conditions to be implicit during rtl generation so that
10046 ;; the conditional compare patterns would work. However this conflicts to
10047 ;; some extent with the conditional data operations, so we have to split them
10050 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10051 ;; conditional execution sufficient?
10054 [(set (match_operand:SI 0 "s_register_operand" "")
10055 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10056 [(match_operand 2 "" "") (match_operand 3 "" "")])
10058 (match_operand 4 "" "")))
10059 (clobber (reg:CC CC_REGNUM))]
10060 "TARGET_ARM && reload_completed"
10061 [(set (match_dup 5) (match_dup 6))
10062 (cond_exec (match_dup 7)
10063 (set (match_dup 0) (match_dup 4)))]
10066 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10067 operands[2], operands[3]);
10068 enum rtx_code rc = GET_CODE (operands[1]);
10070 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10071 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10072 if (mode == CCFPmode || mode == CCFPEmode)
10073 rc = reverse_condition_maybe_unordered (rc);
10075 rc = reverse_condition (rc);
10077 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10082 [(set (match_operand:SI 0 "s_register_operand" "")
10083 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10084 [(match_operand 2 "" "") (match_operand 3 "" "")])
10085 (match_operand 4 "" "")
10087 (clobber (reg:CC CC_REGNUM))]
10088 "TARGET_ARM && reload_completed"
10089 [(set (match_dup 5) (match_dup 6))
10090 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10091 (set (match_dup 0) (match_dup 4)))]
10094 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10095 operands[2], operands[3]);
10097 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10098 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10103 [(set (match_operand:SI 0 "s_register_operand" "")
10104 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10105 [(match_operand 2 "" "") (match_operand 3 "" "")])
10106 (match_operand 4 "" "")
10107 (match_operand 5 "" "")))
10108 (clobber (reg:CC CC_REGNUM))]
10109 "TARGET_ARM && reload_completed"
10110 [(set (match_dup 6) (match_dup 7))
10111 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10112 (set (match_dup 0) (match_dup 4)))
10113 (cond_exec (match_dup 8)
10114 (set (match_dup 0) (match_dup 5)))]
10117 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10118 operands[2], operands[3]);
10119 enum rtx_code rc = GET_CODE (operands[1]);
10121 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10122 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10123 if (mode == CCFPmode || mode == CCFPEmode)
10124 rc = reverse_condition_maybe_unordered (rc);
10126 rc = reverse_condition (rc);
10128 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10133 [(set (match_operand:SI 0 "s_register_operand" "")
10134 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10135 [(match_operand:SI 2 "s_register_operand" "")
10136 (match_operand:SI 3 "arm_add_operand" "")])
10137 (match_operand:SI 4 "arm_rhs_operand" "")
10139 (match_operand:SI 5 "s_register_operand" ""))))
10140 (clobber (reg:CC CC_REGNUM))]
10141 "TARGET_ARM && reload_completed"
10142 [(set (match_dup 6) (match_dup 7))
10143 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10144 (set (match_dup 0) (match_dup 4)))
10145 (cond_exec (match_dup 8)
10146 (set (match_dup 0) (not:SI (match_dup 5))))]
10149 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10150 operands[2], operands[3]);
10151 enum rtx_code rc = GET_CODE (operands[1]);
10153 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10154 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10155 if (mode == CCFPmode || mode == CCFPEmode)
10156 rc = reverse_condition_maybe_unordered (rc);
10158 rc = reverse_condition (rc);
10160 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional move whose false arm is a bitwise complement:
;; mvn%D4 %0, %2, preceded by mov%d4 %0, %1 in the non-matching
;; alternative (hence lengths 4,8).
10164 (define_insn "*cond_move_not"
10165 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10166 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10167 [(match_operand 3 "cc_register" "") (const_int 0)])
10168 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10170 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10174 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10175 [(set_attr "conds" "use")
10176 (set_attr "insn" "mvn")
10177 (set_attr "length" "4,8")]
10180 ;; The next two patterns occur when an AND operation is followed by a
10181 ;; scc insn sequence
;; Sign-extract a single bit (position given by operand 2): AND with
;; the bit mask setting the flags, then MVNNE turns a non-zero result
;; into -1, giving 0 or -1.  Clobbers the condition codes.
10183 (define_insn "*sign_extract_onebit"
10184 [(set (match_operand:SI 0 "s_register_operand" "=r")
10185 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10187 (match_operand:SI 2 "const_int_operand" "n")))
10188 (clobber (reg:CC CC_REGNUM))]
10191 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10192 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10193 return \"mvnne\\t%0, #0\";
10195 [(set_attr "conds" "clob")
10196 (set_attr "length" "8")]
;; Inverted form of the one-bit sign extract: TST the bit, then the
;; result is -1 when the bit is clear (mvneq) and 0 when it is set
;; (movne).  Clobbers the condition codes.
10199 (define_insn "*not_signextract_onebit"
10200 [(set (match_operand:SI 0 "s_register_operand" "=r")
10202 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10204 (match_operand:SI 2 "const_int_operand" "n"))))
10205 (clobber (reg:CC CC_REGNUM))]
10208 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10209 output_asm_insn (\"tst\\t%1, %2\", operands);
10210 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10211 return \"movne\\t%0, #0\";
10213 [(set_attr "conds" "clob")
10214 (set_attr "length" "12")]
10216 ;; ??? The above patterns need auditing for Thumb-2
10218 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10219 ;; expressions. For simplicity, the first register is also in the unspec
;; Store multiple core registers to the stack.  A single register is
;; stored with a pre-decrement STR on ARM (faster on StrongARM);
;; otherwise the register list for an stmfd/push is built up in a
;; pattern buffer from the parallel's elements.
10221 (define_insn "*push_multi"
10222 [(match_parallel 2 "multi_register_push"
10223 [(set (match_operand:BLK 0 "memory_operand" "=m")
10224 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10225 UNSPEC_PUSH_MULT))])]
10229 int num_saves = XVECLEN (operands[2], 0);
10231 /* For the StrongARM at least it is faster to
10232 use STR to store only a single register.
10233 In Thumb mode always use push, and the assembler will pick
10234 something appropriate. */
10235 if (num_saves == 1 && TARGET_ARM)
10236 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10243 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10245 strcpy (pattern, \"push\\t{%1\");
10247 for (i = 1; i < num_saves; i++)
10249 strcat (pattern, \", %|\");
10251 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10254 strcat (pattern, \"}\");
10255 output_asm_insn (pattern, operands);
10260 [(set_attr "type" "store4")]
;; Ties two stack registers together through a blockage memory
;; reference so the scheduler cannot move stack accesses across it.
;; Length 0 — presumably emits no code (output template not visible
;; in this view).
10263 (define_insn "stack_tie"
10264 [(set (mem:BLK (scratch))
10265 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10266 (match_operand:SI 1 "s_register_operand" "rk")]
10270 [(set_attr "length" "0")]
10273 ;; Similarly for the floating point registers
;; Push multiple FPA floating-point registers with a single SFMFD,
;; the register count taken from the parallel's length.
10274 (define_insn "*push_fp_multi"
10275 [(match_parallel 2 "multi_register_push"
10276 [(set (match_operand:BLK 0 "memory_operand" "=m")
10277 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10278 UNSPEC_PUSH_MULT))])]
10279 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10284 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10285 output_asm_insn (pattern, operands);
10288 [(set_attr "type" "f_store")]
10291 ;; Special patterns for dealing with the constant pool
;; Align the output (constant pool) to a 32-bit boundary.
10293 (define_insn "align_4"
10294 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10297 assemble_align (32);
;; Align the output (constant pool) to a 64-bit boundary.
10302 (define_insn "align_8"
10303 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10306 assemble_align (64);
;; Mark the end of a constant pool: clear the making_const_table flag.
10311 (define_insn "consttable_end"
10312 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10315 making_const_table = FALSE;
;; Emit a 1-byte constant pool entry, padded with three zero bytes to
;; keep the pool word-aligned (total length 4).
10320 (define_insn "consttable_1"
10321 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10324 making_const_table = TRUE;
10325 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10326 assemble_zeros (3);
10329 [(set_attr "length" "4")]
;; Emit a 2-byte constant pool entry (never a float — see the assert),
;; padded with two zero bytes to keep the pool word-aligned.
10332 (define_insn "consttable_2"
10333 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10336 making_const_table = TRUE;
10337 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10338 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10339 assemble_zeros (2);
10342 [(set_attr "length" "4")]
;; Emit a 4-byte constant pool entry.  HFmode constants go through
;; arm_emit_fp16_const, other floats through assemble_real, and
;; everything else is emitted as an integer — first stripping a
;; wrapping HIGH, whose low bits a paired LO_SUM would ignore anyway.
10345 (define_insn "consttable_4"
10346 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10350 rtx x = operands[0];
10351 making_const_table = TRUE;
10352 switch (GET_MODE_CLASS (GET_MODE (x)))
10355 if (GET_MODE (x) == HFmode)
10356 arm_emit_fp16_const (x);
10360 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10361 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10365 /* XXX: Sometimes gcc does something really dumb and ends up with
10366 a HIGH in a constant pool entry, usually because it's trying to
10367 load into a VFP register. We know this will always be used in
10368 combination with a LO_SUM which ignores the high bits, so just
10369 strip off the HIGH. */
10370 if (GET_CODE (x) == HIGH)
10372 assemble_integer (x, 4, BITS_PER_WORD, 1);
10373 mark_symbol_refs_as_used (x);
10378 [(set_attr "length" "4")]
;; Emit an 8-byte constant pool entry: floats via assemble_real,
;; anything else via assemble_integer.
10381 (define_insn "consttable_8"
10382 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10386 making_const_table = TRUE;
10387 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10392 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10393 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10397 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10402 [(set_attr "length" "8")]
;; Emit a 16-byte constant pool entry: floats via assemble_real,
;; anything else via assemble_integer.
10405 (define_insn "consttable_16"
10406 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10410 making_const_table = TRUE;
10411 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10416 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10417 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10421 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10426 [(set_attr "length" "16")]
10429 ;; Miscellaneous Thumb patterns
;; Expand an indirect jump through a dispatch table.  In the visible
;; arm (guard line not shown here) the table label's address is added
;; to the index register so the table can hold relative entries.
10431 (define_expand "tablejump"
10432 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10433 (use (label_ref (match_operand 1 "" "")))])]
10438 /* Hopefully, CSE will eliminate this copy. */
10439 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10440 rtx reg2 = gen_reg_rtx (SImode);
10442 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10443 operands[0] = reg2;
10448 ;; NB never uses BX.
;; Thumb-1 table jump: set pc from a register (2-byte encoding; the
;; output template is not visible in this view).
10449 (define_insn "*thumb1_tablejump"
10450 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10451 (use (label_ref (match_operand 1 "" "")))]
10454 [(set_attr "length" "2")]
10457 ;; V5 Instructions,
;; Count leading zeros: single CLZ instruction, available on
;; 32-bit ARMv5+ targets.
10459 (define_insn "clzsi2"
10460 [(set (match_operand:SI 0 "s_register_operand" "=r")
10461 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10462 "TARGET_32BIT && arm_arch5"
10464 [(set_attr "predicable" "yes")
10465 (set_attr "insn" "clz")])
;; Bit reversal (RBIT) expressed as an unspec, on Thumb-2-capable
;; architectures.  Note the "insn" attribute reuses "clz" — presumably
;; the two are scheduled identically; verify against the attr tables.
10467 (define_insn "rbitsi2"
10468 [(set (match_operand:SI 0 "s_register_operand" "=r")
10469 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10470 "TARGET_32BIT && arm_arch_thumb2"
10472 [(set_attr "predicable" "yes")
10473 (set_attr "insn" "clz")])
;; Count trailing zeros synthesized as ctz(x) = clz(rbit(x)).
10475 (define_expand "ctzsi2"
10476 [(set (match_operand:SI 0 "s_register_operand" "")
10477 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10478 "TARGET_32BIT && arm_arch_thumb2"
10481 rtx tmp = gen_reg_rtx (SImode);
10482 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10483 emit_insn (gen_clzsi2 (operands[0], tmp));
10489 ;; V5E instructions.
;; Memory prefetch on ARMv5E+ (output template not visible here —
;; presumably PLD).  Operands 1 and 2 are the standard rw/locality
;; hints of the prefetch RTL pattern.
10491 (define_insn "prefetch"
10492 [(prefetch (match_operand:SI 0 "address_operand" "p")
10493 (match_operand:SI 1 "" "")
10494 (match_operand:SI 2 "" ""))]
10495 "TARGET_32BIT && arm_arch5e"
10498 ;; General predication pattern
10501 [(match_operator 0 "arm_comparison_operator"
10502 [(match_operand 1 "cc_register" "")
;; Artificial use that keeps operand 0 live through the prologue;
;; emits only an assembly comment, so its length is 0.
10508 (define_insn "prologue_use"
10509 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10511 "%@ %0 needed for prologue"
10512 [(set_attr "length" "0")]
10516 ;; Patterns for exception handling
;; Set the return address for an exception-handler return; dispatches
;; to the ARM or Thumb variant of the insn below.
10518 (define_expand "eh_return"
10519 [(use (match_operand 0 "general_operand" ""))]
10524 emit_insn (gen_arm_eh_return (operands[0]));
10526 emit_insn (gen_thumb_eh_return (operands[0]));
10531 ;; We can't expand this before we know where the link register is stored.
;; ARM eh_return: must be split after reload, once the link register's
;; save slot is known; arm_set_return_address stores operand 0 there
;; using operand 1 as a scratch.
10532 (define_insn_and_split "arm_eh_return"
10533 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10535 (clobber (match_scratch:SI 1 "=&r"))]
10538 "&& reload_completed"
10542 arm_set_return_address (operands[0], operands[1]);
;; Thumb eh_return: as arm_eh_return, but restricted to low registers
;; ("l" constraints) and using thumb_set_return_address.
10547 (define_insn_and_split "thumb_eh_return"
10548 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10550 (clobber (match_scratch:SI 1 "=&l"))]
10553 "&& reload_completed"
10557 thumb_set_return_address (operands[0], operands[1]);
;; Read the TLS thread pointer directly from the CP15 thread-ID
;; register (c13, c0, 3).
10565 (define_insn "load_tp_hard"
10566 [(set (match_operand:SI 0 "register_operand" "=r")
10567 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10569 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10570 [(set_attr "predicable" "yes")]
10573 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Read the TLS thread pointer via the EABI helper __aeabi_read_tp.
;; Result lands in r0; clobbers lr, ip and the condition codes.
10574 (define_insn "load_tp_soft"
10575 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10576 (clobber (reg:SI LR_REGNUM))
10577 (clobber (reg:SI IP_REGNUM))
10578 (clobber (reg:CC CC_REGNUM))]
10580 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10581 [(set_attr "conds" "clob")]
;; Write a constant immediate into a bitfield of operand 0
;; (zero_extract destination).  Output template and field position
;; lines are not visible in this view — presumably a MOVT writing the
;; top halfword; verify against the full pattern.
10584 (define_insn "*arm_movtas_ze"
10585 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10588 (match_operand:SI 1 "const_int_operand" ""))]
10591 [(set_attr "predicable" "yes")
10592 (set_attr "length" "4")]
;; Byte-reverse a word (bswap) in a single instruction on 32-bit
;; ARMv6+ targets.
10595 (define_insn "*arm_rev"
10596 [(set (match_operand:SI 0 "s_register_operand" "=r")
10597 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10598 "TARGET_32BIT && arm_arch6"
10600 [(set_attr "predicable" "yes")
10601 (set_attr "length" "4")]
;; Thumb-1 byte-reverse (bswap), low registers only, 2-byte encoding.
10604 (define_insn "*thumb1_rev"
10605 [(set (match_operand:SI 0 "s_register_operand" "=l")
10606 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10607 "TARGET_THUMB1 && arm_arch6"
10609 [(set_attr "length" "2")]
;; Byte-swap sequence for pre-ARMv6 ARM (no REV instruction): an
;; eor/rotate/shift combination using operands 2 and 3 as scratch
;; temporaries.  Several pattern lines are not visible in this view.
10612 (define_expand "arm_legacy_rev"
10613 [(set (match_operand:SI 2 "s_register_operand" "")
10614 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10618 (lshiftrt:SI (match_dup 2)
10620 (set (match_operand:SI 3 "s_register_operand" "")
10621 (rotatert:SI (match_dup 1)
10624 (and:SI (match_dup 2)
10625 (const_int -65281)))
10626 (set (match_operand:SI 0 "s_register_operand" "")
10627 (xor:SI (match_dup 3)
10633 ;; Reuse temporaries to keep register pressure down.
;; Byte-swap sequence for pre-ARMv6 Thumb: shift/rotate/or steps using
;; operands 2-5 as scratch temporaries (temporaries are reused to keep
;; register pressure down, per the comment above).  Several pattern
;; lines are not visible in this view.
10634 (define_expand "thumb_legacy_rev"
10635 [(set (match_operand:SI 2 "s_register_operand" "")
10636 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10638 (set (match_operand:SI 3 "s_register_operand" "")
10639 (lshiftrt:SI (match_dup 1)
10642 (ior:SI (match_dup 3)
10644 (set (match_operand:SI 4 "s_register_operand" "")
10646 (set (match_operand:SI 5 "s_register_operand" "")
10647 (rotatert:SI (match_dup 1)
10650 (ashift:SI (match_dup 5)
10653 (lshiftrt:SI (match_dup 5)
10656 (ior:SI (match_dup 5)
10659 (rotatert:SI (match_dup 5)
10661 (set (match_operand:SI 0 "s_register_operand" "")
10662 (ior:SI (match_dup 5)
;; bswap expander.  When the single-instruction REV is unavailable
;; (guard lines not fully visible here), fall back to the legacy
;; multi-insn sequences, allocating the scratch registers they need:
;; thumb_legacy_rev takes four temporaries, arm_legacy_rev two.
10668 (define_expand "bswapsi2"
10669 [(set (match_operand:SI 0 "s_register_operand" "=r")
10670 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10671 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10675 rtx op2 = gen_reg_rtx (SImode);
10676 rtx op3 = gen_reg_rtx (SImode);
10680 rtx op4 = gen_reg_rtx (SImode);
10681 rtx op5 = gen_reg_rtx (SImode);
10683 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10684 op2, op3, op4, op5));
10688 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10697 ;; Load the load/store multiple patterns
10698 (include "ldmstm.md")
10699 ;; Load the FPA co-processor patterns
10701 ;; Load the Maverick co-processor patterns
10702 (include "cirrus.md")
10703 ;; Vector bits common to IWMMXT and Neon
10704 (include "vec-common.md")
10705 ;; Load the Intel Wireless Multimedia Extension patterns
10706 (include "iwmmxt.md")
10707 ;; Load the VFP co-processor patterns
10709 ;; Thumb-2 patterns
10710 (include "thumb2.md")
10712 (include "neon.md")
10713 ;; Synchronization Primitives
10714 (include "sync.md")