1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
107 ;; UNSPEC_VOLATILE Usage:
110 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
112 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
113 ; instruction epilogue sequence that isn't expanded
114 ; into normal RTL. Used for both normal and sibcall
116 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
117 ; for inlined constants.
118 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
120 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
122 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
124 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
126 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
128 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
130 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
131 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
132 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
133 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
134 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
135 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
136 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
141 ;;---------------------------------------------------------------------------
144 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
145 ; generating ARM code. This is used to control the length of some insn
146 ; patterns that share the same RTL in both ARM and Thumb code.
147 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
149 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
150 ; scheduling decisions for the load unit and the multiplier.
151 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
153 ; IS_XSCALE is set to 'yes' when compiling for XScale.
154 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
156 ;; Operand number of an input operand that is shifted. Zero if the
157 ;; given instruction does not shift one of its input operands.
158 (define_attr "shift" "" (const_int 0))
160 ; Floating Point Unit. If we only have floating point emulation, then there
161 ; is no point in scheduling the floating point insns. (Well, for best
162 ; performance we should try and group them together).
163 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
164 (const (symbol_ref "arm_fpu_attr")))
166 ; LENGTH of an instruction (in bytes)
167 (define_attr "length" "" (const_int 4))
169 ; POOL_RANGE is how far away from a constant pool entry that this insn
170 ; can be placed. If the distance is zero, then this insn will never
171 ; reference the pool.
172 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
173 ; before its address.
174 (define_attr "pool_range" "" (const_int 0))
175 (define_attr "neg_pool_range" "" (const_int 0))
177 ; An assembler sequence may clobber the condition codes without us knowing.
178 ; If such an insn references the pool, then we have no way of knowing how,
179 ; so use the most conservative value for pool_range.
180 (define_asm_attributes
181 [(set_attr "conds" "clob")
182 (set_attr "length" "4")
183 (set_attr "pool_range" "250")])
185 ;; The instruction used to implement a particular pattern. This
186 ;; information is used by pipeline descriptions to provide accurate
187 ;; scheduling information.
190 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
191 (const_string "other"))
193 ; TYPE attribute is used to detect floating point instructions which, if
194 ; running on a co-processor can run in parallel with other, basic instructions
195 ; If write-buffer scheduling is enabled then it can also be used in the
196 ; scheduling of writes.
198 ; Classification of each insn
199 ; Note: vfp.md has different meanings for some of these, and some further
200 ; types as well. See that file for details.
201 ; alu any alu instruction that doesn't hit memory or fp
202 ; regs or have a shifted source operand
203 ; alu_shift any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a constant
205 ; alu_shift_reg any data instruction that doesn't hit memory or fp
206 ; regs, but has a source operand shifted by a register value
207 ; mult a multiply instruction
208 ; block blockage insn, this blocks all functional units
209 ; float a floating point arithmetic operation (subject to expansion)
210 ; fdivd DFmode floating point division
211 ; fdivs SFmode floating point division
212 ; fmul Floating point multiply
213 ; ffmul Fast floating point multiply
214 ; farith Floating point arithmetic (4 cycle)
215 ; ffarith Fast floating point arithmetic (2 cycle)
216 ; float_em a floating point arithmetic operation that is normally emulated
217 ; even on a machine with an fpa.
218 ; f_load a floating point load from memory
219 ; f_store a floating point store to memory
220 ; f_load[sd] single/double load from memory
221 ; f_store[sd] single/double store to memory
222 ; f_flag a transfer of co-processor flags to the CPSR
223 ; f_mem_r a transfer of a floating point register to a real reg via mem
224 ; r_mem_f the reverse of f_mem_r
225 ; f_2_r fast transfer float to arm (no memory needed)
226 ; r_2_f fast transfer arm to float
227 ; f_cvt convert floating<->integral
229 ; call a subroutine call
230 ; load_byte load byte(s) from memory to arm registers
231 ; load1 load 1 word from memory to arm registers
232 ; load2 load 2 words from memory to arm registers
233 ; load3 load 3 words from memory to arm registers
234 ; load4 load 4 words from memory to arm registers
235 ; store store 1 word to memory from arm registers
236 ; store2 store 2 words
237 ; store3 store 3 words
238 ; store4 store 4 (or more) words
239 ; Additions for Cirrus Maverick co-processor:
240 ; mav_farith Floating point arithmetic (4 cycle)
241 ; mav_dmult Double multiplies (7 cycle)
245 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
247 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
248 (const_string "mult")
249 (const_string "alu")))
251 ; Load scheduling, set from the arm_ld_sched variable
252 ; initialized by arm_override_options()
253 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
255 ;; Classification of NEON instructions for scheduling purposes.
256 ;; Do not set this attribute and the "type" attribute together in
257 ;; any one instruction pattern.
258 (define_attr "neon_type"
269 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
270 neon_mul_qqq_8_16_32_ddd_32,\
271 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
272 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
274 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
275 neon_mla_qqq_32_qqd_32_scalar,\
276 neon_mul_ddd_16_scalar_32_16_long_scalar,\
277 neon_mul_qqd_32_scalar,\
278 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
283 neon_vqshl_vrshl_vqrshl_qqq,\
285 neon_fp_vadd_ddd_vabs_dd,\
286 neon_fp_vadd_qqq_vabs_qq,\
292 neon_fp_vmla_ddd_scalar,\
293 neon_fp_vmla_qqq_scalar,\
294 neon_fp_vrecps_vrsqrts_ddd,\
295 neon_fp_vrecps_vrsqrts_qqq,\
303 neon_vld2_2_regs_vld1_vld2_all_lanes,\
306 neon_vst1_1_2_regs_vst2_2_regs,\
308 neon_vst2_4_regs_vst3_vst4,\
310 neon_vld1_vld2_lane,\
311 neon_vld3_vld4_lane,\
312 neon_vst1_vst2_lane,\
313 neon_vst3_vst4_lane,\
314 neon_vld3_vld4_all_lanes,\
322 (const_string "none"))
324 ; condition codes: this one is used by final_prescan_insn to speed up
325 ; conditionalizing instructions. It saves having to scan the rtl to see if
326 ; it uses or alters the condition codes.
328 ; USE means that the condition codes are used by the insn in the process of
329 ; outputting code, this means (at present) that we can't use the insn in
332 ; SET means that the purpose of the insn is to set the condition codes in a
333 ; well defined manner.
335 ; CLOB means that the condition codes are altered in an undefined manner, if
336 ; they are altered at all
338 ; UNCONDITIONAL means the instions can not be conditionally executed.
340 ; NOCOND means that the condition codes are neither altered nor affect the
341 ; output of this insn
343 (define_attr "conds" "use,set,clob,unconditional,nocond"
344 (if_then_else (eq_attr "type" "call")
345 (const_string "clob")
346 (if_then_else (eq_attr "neon_type" "none")
347 (const_string "nocond")
348 (const_string "unconditional"))))
350 ; Predicable means that the insn can be conditionally executed based on
351 ; an automatically added predicate (additional patterns are generated by
352 ; gen...). We default to 'no' because no Thumb patterns match this rule
353 ; and not all ARM patterns do.
354 (define_attr "predicable" "no,yes" (const_string "no"))
356 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
357 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
358 ; suffer blockages enough to warrant modelling this (and it can adversely
359 ; affect the schedule).
360 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
362 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
363 ; to stall the processor. Used with model_wbuf above.
364 (define_attr "write_conflict" "no,yes"
365 (if_then_else (eq_attr "type"
366 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
368 (const_string "no")))
370 ; Classify the insns into those that take one cycle and those that take more
371 ; than one on the main cpu execution unit.
372 (define_attr "core_cycles" "single,multi"
373 (if_then_else (eq_attr "type"
374 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
375 (const_string "single")
376 (const_string "multi")))
378 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
379 ;; distant label. Only applicable to Thumb code.
380 (define_attr "far_jump" "yes,no" (const_string "no"))
383 ;; The number of machine instructions this pattern expands to.
384 ;; Used for Thumb-2 conditional execution.
385 (define_attr "ce_count" "" (const_int 1))
387 ;;---------------------------------------------------------------------------
390 ; A list of modes that are exactly 64 bits in size. We use this to expand
391 ; some splits that are the same for all modes when operating on ARM
393 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
395 ;; The integer modes up to word size
396 (define_mode_iterator QHSI [QI HI SI])
398 ;;---------------------------------------------------------------------------
401 (include "predicates.md")
402 (include "constraints.md")
404 ;;---------------------------------------------------------------------------
405 ;; Pipeline descriptions
407 ;; Processor type. This is created automatically from arm-cores.def.
408 (include "arm-tune.md")
410 (define_attr "tune_cortexr4" "yes,no"
412 (eq_attr "tune" "cortexr4,cortexr4f")
414 (const_string "no"))))
416 ;; True if the generic scheduling description should be used.
418 (define_attr "generic_sched" "yes,no"
420 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
421 (eq_attr "tune_cortexr4" "yes"))
423 (const_string "yes"))))
425 (define_attr "generic_vfp" "yes,no"
427 (and (eq_attr "fpu" "vfp")
428 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
429 (eq_attr "tune_cortexr4" "no"))
431 (const_string "no"))))
433 (include "arm-generic.md")
434 (include "arm926ejs.md")
435 (include "arm1020e.md")
436 (include "arm1026ejs.md")
437 (include "arm1136jfs.md")
438 (include "cortex-a8.md")
439 (include "cortex-a9.md")
440 (include "cortex-r4.md")
441 (include "cortex-r4f.md")
445 ;;---------------------------------------------------------------------------
450 ;; Note: For DImode insns, there is normally no reason why operands should
451 ;; not be in the same register, what we don't want is for something being
452 ;; written to partially overlap something that is an input.
453 ;; Cirrus 64bit additions should not be split because we have a native
454 ;; 64bit addition instructions.
456 (define_expand "adddi3"
458 [(set (match_operand:DI 0 "s_register_operand" "")
459 (plus:DI (match_operand:DI 1 "s_register_operand" "")
460 (match_operand:DI 2 "s_register_operand" "")))
461 (clobber (reg:CC CC_REGNUM))])]
464 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
466 if (!cirrus_fp_register (operands[0], DImode))
467 operands[0] = force_reg (DImode, operands[0]);
468 if (!cirrus_fp_register (operands[1], DImode))
469 operands[1] = force_reg (DImode, operands[1]);
470 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
476 if (GET_CODE (operands[1]) != REG)
477 operands[1] = force_reg (DImode, operands[1]);
478 if (GET_CODE (operands[2]) != REG)
479 operands[2] = force_reg (DImode, operands[2]);
484 (define_insn "*thumb1_adddi3"
485 [(set (match_operand:DI 0 "register_operand" "=l")
486 (plus:DI (match_operand:DI 1 "register_operand" "%0")
487 (match_operand:DI 2 "register_operand" "l")))
488 (clobber (reg:CC CC_REGNUM))
491 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
492 [(set_attr "length" "4")]
495 (define_insn_and_split "*arm_adddi3"
496 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
497 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
498 (match_operand:DI 2 "s_register_operand" "r, 0")))
499 (clobber (reg:CC CC_REGNUM))]
500 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
502 "TARGET_32BIT && reload_completed"
503 [(parallel [(set (reg:CC_C CC_REGNUM)
504 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
506 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
507 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
508 (plus:SI (match_dup 4) (match_dup 5))))]
511 operands[3] = gen_highpart (SImode, operands[0]);
512 operands[0] = gen_lowpart (SImode, operands[0]);
513 operands[4] = gen_highpart (SImode, operands[1]);
514 operands[1] = gen_lowpart (SImode, operands[1]);
515 operands[5] = gen_highpart (SImode, operands[2]);
516 operands[2] = gen_lowpart (SImode, operands[2]);
518 [(set_attr "conds" "clob")
519 (set_attr "length" "8")]
522 (define_insn_and_split "*adddi_sesidi_di"
523 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
524 (plus:DI (sign_extend:DI
525 (match_operand:SI 2 "s_register_operand" "r,r"))
526 (match_operand:DI 1 "s_register_operand" "0,r")))
527 (clobber (reg:CC CC_REGNUM))]
528 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
530 "TARGET_32BIT && reload_completed"
531 [(parallel [(set (reg:CC_C CC_REGNUM)
532 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
534 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
535 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
536 (plus:SI (ashiftrt:SI (match_dup 2)
541 operands[3] = gen_highpart (SImode, operands[0]);
542 operands[0] = gen_lowpart (SImode, operands[0]);
543 operands[4] = gen_highpart (SImode, operands[1]);
544 operands[1] = gen_lowpart (SImode, operands[1]);
545 operands[2] = gen_lowpart (SImode, operands[2]);
547 [(set_attr "conds" "clob")
548 (set_attr "length" "8")]
551 (define_insn_and_split "*adddi_zesidi_di"
552 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
553 (plus:DI (zero_extend:DI
554 (match_operand:SI 2 "s_register_operand" "r,r"))
555 (match_operand:DI 1 "s_register_operand" "0,r")))
556 (clobber (reg:CC CC_REGNUM))]
557 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
559 "TARGET_32BIT && reload_completed"
560 [(parallel [(set (reg:CC_C CC_REGNUM)
561 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
563 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
564 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
565 (plus:SI (match_dup 4) (const_int 0))))]
568 operands[3] = gen_highpart (SImode, operands[0]);
569 operands[0] = gen_lowpart (SImode, operands[0]);
570 operands[4] = gen_highpart (SImode, operands[1]);
571 operands[1] = gen_lowpart (SImode, operands[1]);
572 operands[2] = gen_lowpart (SImode, operands[2]);
574 [(set_attr "conds" "clob")
575 (set_attr "length" "8")]
578 (define_expand "addsi3"
579 [(set (match_operand:SI 0 "s_register_operand" "")
580 (plus:SI (match_operand:SI 1 "s_register_operand" "")
581 (match_operand:SI 2 "reg_or_int_operand" "")))]
584 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
586 arm_split_constant (PLUS, SImode, NULL_RTX,
587 INTVAL (operands[2]), operands[0], operands[1],
588 optimize && can_create_pseudo_p ());
594 ; If there is a scratch available, this will be faster than synthesizing the
597 [(match_scratch:SI 3 "r")
598 (set (match_operand:SI 0 "arm_general_register_operand" "")
599 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
600 (match_operand:SI 2 "const_int_operand" "")))]
602 !(const_ok_for_arm (INTVAL (operands[2]))
603 || const_ok_for_arm (-INTVAL (operands[2])))
604 && const_ok_for_arm (~INTVAL (operands[2]))"
605 [(set (match_dup 3) (match_dup 2))
606 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
610 ;; The r/r/k alternative is required when reloading the address
611 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
612 ;; put the duplicated register first, and not try the commutative version.
613 (define_insn_and_split "*arm_addsi3"
614 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
615 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
616 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
626 && GET_CODE (operands[2]) == CONST_INT
627 && !(const_ok_for_arm (INTVAL (operands[2]))
628 || const_ok_for_arm (-INTVAL (operands[2])))
629 && (reload_completed || !arm_eliminable_register (operands[1]))"
630 [(clobber (const_int 0))]
632 arm_split_constant (PLUS, SImode, curr_insn,
633 INTVAL (operands[2]), operands[0],
637 [(set_attr "length" "4,4,4,4,4,16")
638 (set_attr "predicable" "yes")]
641 ;; Register group 'k' is a single register group containing only the stack
642 ;; register. Trying to reload it will always fail catastrophically,
643 ;; so never allow those alternatives to match if reloading is needed.
645 (define_insn_and_split "*thumb1_addsi3"
646 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
647 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
648 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
651 static const char * const asms[] =
653 \"add\\t%0, %0, %2\",
654 \"sub\\t%0, %0, #%n2\",
655 \"add\\t%0, %1, %2\",
656 \"add\\t%0, %0, %2\",
657 \"add\\t%0, %0, %2\",
658 \"add\\t%0, %1, %2\",
659 \"add\\t%0, %1, %2\",
663 if ((which_alternative == 2 || which_alternative == 6)
664 && GET_CODE (operands[2]) == CONST_INT
665 && INTVAL (operands[2]) < 0)
666 return \"sub\\t%0, %1, #%n2\";
667 return asms[which_alternative];
669 "&& reload_completed && CONST_INT_P (operands[2])
670 && operands[1] != stack_pointer_rtx
671 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
672 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
673 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
675 HOST_WIDE_INT offset = INTVAL (operands[2]);
678 else if (offset < -255)
681 operands[3] = GEN_INT (offset);
682 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
684 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
687 ;; Reloading and elimination of the frame pointer can
688 ;; sometimes cause this optimization to be missed.
690 [(set (match_operand:SI 0 "arm_general_register_operand" "")
691 (match_operand:SI 1 "const_int_operand" ""))
693 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
695 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
696 && (INTVAL (operands[1]) & 3) == 0"
697 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
701 ;; ??? Make Thumb-2 variants which prefer low regs
702 (define_insn "*addsi3_compare0"
703 [(set (reg:CC_NOOV CC_REGNUM)
705 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
706 (match_operand:SI 2 "arm_add_operand" "rI,L"))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1) (match_dup 2)))]
713 sub%.\\t%0, %1, #%n2"
714 [(set_attr "conds" "set")]
717 (define_insn "*addsi3_compare0_scratch"
718 [(set (reg:CC_NOOV CC_REGNUM)
720 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
721 (match_operand:SI 1 "arm_add_operand" "rI,L"))
727 [(set_attr "conds" "set")]
730 (define_insn "*compare_negsi_si"
731 [(set (reg:CC_Z CC_REGNUM)
733 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
734 (match_operand:SI 1 "s_register_operand" "r")))]
737 [(set_attr "conds" "set")]
740 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
741 ;; addend is a constant.
742 (define_insn "*cmpsi2_addneg"
743 [(set (reg:CC CC_REGNUM)
745 (match_operand:SI 1 "s_register_operand" "r,r")
746 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
747 (set (match_operand:SI 0 "s_register_operand" "=r,r")
748 (plus:SI (match_dup 1)
749 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
750 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
753 add%.\\t%0, %1, #%n2"
754 [(set_attr "conds" "set")]
757 ;; Convert the sequence
759 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
763 ;; bcs dest ((unsigned)rn >= 1)
764 ;; similarly for the beq variant using bcc.
765 ;; This is a common looping idiom (while (n--))
767 [(set (match_operand:SI 0 "arm_general_register_operand" "")
768 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
770 (set (match_operand 2 "cc_register" "")
771 (compare (match_dup 0) (const_int -1)))
773 (if_then_else (match_operator 3 "equality_operator"
774 [(match_dup 2) (const_int 0)])
775 (match_operand 4 "" "")
776 (match_operand 5 "" "")))]
777 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
781 (match_dup 1) (const_int 1)))
782 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
784 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
787 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
788 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
791 operands[2], const0_rtx);"
794 ;; The next four insns work because they compare the result with one of
795 ;; the operands, and we know that the use of the condition code is
796 ;; either GEU or LTU, so we can use the carry flag from the addition
797 ;; instead of doing the compare a second time.
798 (define_insn "*addsi3_compare_op1"
799 [(set (reg:CC_C CC_REGNUM)
801 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
802 (match_operand:SI 2 "arm_add_operand" "rI,L"))
804 (set (match_operand:SI 0 "s_register_operand" "=r,r")
805 (plus:SI (match_dup 1) (match_dup 2)))]
809 sub%.\\t%0, %1, #%n2"
810 [(set_attr "conds" "set")]
813 (define_insn "*addsi3_compare_op2"
814 [(set (reg:CC_C CC_REGNUM)
816 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
817 (match_operand:SI 2 "arm_add_operand" "rI,L"))
819 (set (match_operand:SI 0 "s_register_operand" "=r,r")
820 (plus:SI (match_dup 1) (match_dup 2)))]
824 sub%.\\t%0, %1, #%n2"
825 [(set_attr "conds" "set")]
828 (define_insn "*compare_addsi2_op0"
829 [(set (reg:CC_C CC_REGNUM)
831 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
832 (match_operand:SI 1 "arm_add_operand" "rI,L"))
838 [(set_attr "conds" "set")]
841 (define_insn "*compare_addsi2_op1"
842 [(set (reg:CC_C CC_REGNUM)
844 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
845 (match_operand:SI 1 "arm_add_operand" "rI,L"))
851 [(set_attr "conds" "set")]
854 (define_insn "*addsi3_carryin"
855 [(set (match_operand:SI 0 "s_register_operand" "=r")
856 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
857 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
858 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
861 [(set_attr "conds" "use")]
864 (define_insn "*addsi3_carryin_shift"
865 [(set (match_operand:SI 0 "s_register_operand" "=r")
866 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
868 (match_operator:SI 2 "shift_operator"
869 [(match_operand:SI 3 "s_register_operand" "r")
870 (match_operand:SI 4 "reg_or_int_operand" "rM")])
871 (match_operand:SI 1 "s_register_operand" "r"))))]
873 "adc%?\\t%0, %1, %3%S2"
874 [(set_attr "conds" "use")
875 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
876 (const_string "alu_shift")
877 (const_string "alu_shift_reg")))]
880 (define_insn "*addsi3_carryin_alt1"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
883 (match_operand:SI 2 "arm_rhs_operand" "rI"))
884 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
887 [(set_attr "conds" "use")]
890 (define_insn "*addsi3_carryin_alt2"
891 [(set (match_operand:SI 0 "s_register_operand" "=r")
892 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
893 (match_operand:SI 1 "s_register_operand" "r"))
894 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
897 [(set_attr "conds" "use")]
900 (define_insn "*addsi3_carryin_alt3"
901 [(set (match_operand:SI 0 "s_register_operand" "=r")
902 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
903 (match_operand:SI 2 "arm_rhs_operand" "rI"))
904 (match_operand:SI 1 "s_register_operand" "r")))]
907 [(set_attr "conds" "use")]
910 (define_expand "incscc"
911 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
912 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
913 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
914 (match_operand:SI 1 "s_register_operand" "0,?r")))]
919 (define_insn "*arm_incscc"
920 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
921 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
922 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
923 (match_operand:SI 1 "s_register_operand" "0,?r")))]
927 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
928 [(set_attr "conds" "use")
929 (set_attr "length" "4,8")]
932 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
934 [(set (match_operand:SI 0 "s_register_operand" "")
935 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
936 (match_operand:SI 2 "s_register_operand" ""))
938 (clobber (match_operand:SI 3 "s_register_operand" ""))]
940 [(set (match_dup 3) (match_dup 1))
941 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
943 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
946 (define_expand "addsf3"
947 [(set (match_operand:SF 0 "s_register_operand" "")
948 (plus:SF (match_operand:SF 1 "s_register_operand" "")
949 (match_operand:SF 2 "arm_float_add_operand" "")))]
950 "TARGET_32BIT && TARGET_HARD_FLOAT"
953 && !cirrus_fp_register (operands[2], SFmode))
954 operands[2] = force_reg (SFmode, operands[2]);
957 (define_expand "adddf3"
958 [(set (match_operand:DF 0 "s_register_operand" "")
959 (plus:DF (match_operand:DF 1 "s_register_operand" "")
960 (match_operand:DF 2 "arm_float_add_operand" "")))]
961 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
964 && !cirrus_fp_register (operands[2], DFmode))
965 operands[2] = force_reg (DFmode, operands[2]);
968 (define_expand "subdi3"
970 [(set (match_operand:DI 0 "s_register_operand" "")
971 (minus:DI (match_operand:DI 1 "s_register_operand" "")
972 (match_operand:DI 2 "s_register_operand" "")))
973 (clobber (reg:CC CC_REGNUM))])]
976 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
978 && cirrus_fp_register (operands[0], DImode)
979 && cirrus_fp_register (operands[1], DImode))
981 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
987 if (GET_CODE (operands[1]) != REG)
988 operands[1] = force_reg (DImode, operands[1]);
989 if (GET_CODE (operands[2]) != REG)
990 operands[2] = force_reg (DImode, operands[2]);
995 (define_insn "*arm_subdi3"
996 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
997 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
998 (match_operand:DI 2 "s_register_operand" "r,0,0")))
999 (clobber (reg:CC CC_REGNUM))]
1001 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1002 [(set_attr "conds" "clob")
1003 (set_attr "length" "8")]
1006 (define_insn "*thumb_subdi3"
1007 [(set (match_operand:DI 0 "register_operand" "=l")
1008 (minus:DI (match_operand:DI 1 "register_operand" "0")
1009 (match_operand:DI 2 "register_operand" "l")))
1010 (clobber (reg:CC CC_REGNUM))]
1012 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1013 [(set_attr "length" "4")]
1016 (define_insn "*subdi_di_zesidi"
1017 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1018 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1020 (match_operand:SI 2 "s_register_operand" "r,r"))))
1021 (clobber (reg:CC CC_REGNUM))]
1023 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1024 [(set_attr "conds" "clob")
1025 (set_attr "length" "8")]
1028 (define_insn "*subdi_di_sesidi"
1029 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1030 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1032 (match_operand:SI 2 "s_register_operand" "r,r"))))
1033 (clobber (reg:CC CC_REGNUM))]
1035 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1036 [(set_attr "conds" "clob")
1037 (set_attr "length" "8")]
1040 (define_insn "*subdi_zesidi_di"
1041 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1042 (minus:DI (zero_extend:DI
1043 (match_operand:SI 2 "s_register_operand" "r,r"))
1044 (match_operand:DI 1 "s_register_operand" "0,r")))
1045 (clobber (reg:CC CC_REGNUM))]
1047 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1048 [(set_attr "conds" "clob")
1049 (set_attr "length" "8")]
1052 (define_insn "*subdi_sesidi_di"
1053 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1054 (minus:DI (sign_extend:DI
1055 (match_operand:SI 2 "s_register_operand" "r,r"))
1056 (match_operand:DI 1 "s_register_operand" "0,r")))
1057 (clobber (reg:CC CC_REGNUM))]
1059 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1060 [(set_attr "conds" "clob")
1061 (set_attr "length" "8")]
1064 (define_insn "*subdi_zesidi_zesidi"
1065 [(set (match_operand:DI 0 "s_register_operand" "=r")
1066 (minus:DI (zero_extend:DI
1067 (match_operand:SI 1 "s_register_operand" "r"))
1069 (match_operand:SI 2 "s_register_operand" "r"))))
1070 (clobber (reg:CC CC_REGNUM))]
1072 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1073 [(set_attr "conds" "clob")
1074 (set_attr "length" "8")]
1077 (define_expand "subsi3"
1078 [(set (match_operand:SI 0 "s_register_operand" "")
1079 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1080 (match_operand:SI 2 "s_register_operand" "")))]
1083 if (GET_CODE (operands[1]) == CONST_INT)
1087 arm_split_constant (MINUS, SImode, NULL_RTX,
1088 INTVAL (operands[1]), operands[0],
1089 operands[2], optimize && can_create_pseudo_p ());
1092 else /* TARGET_THUMB1 */
1093 operands[1] = force_reg (SImode, operands[1]);
1098 (define_insn "*thumb1_subsi3_insn"
1099 [(set (match_operand:SI 0 "register_operand" "=l")
1100 (minus:SI (match_operand:SI 1 "register_operand" "l")
1101 (match_operand:SI 2 "register_operand" "l")))]
1104 [(set_attr "length" "2")]
1107 ; ??? Check Thumb-2 split length
1108 (define_insn_and_split "*arm_subsi3_insn"
1109 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1110 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1111 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1118 && GET_CODE (operands[1]) == CONST_INT
1119 && !const_ok_for_arm (INTVAL (operands[1]))"
1120 [(clobber (const_int 0))]
1122 arm_split_constant (MINUS, SImode, curr_insn,
1123 INTVAL (operands[1]), operands[0], operands[2], 0);
1126 [(set_attr "length" "4,4,16")
1127 (set_attr "predicable" "yes")]
1131 [(match_scratch:SI 3 "r")
1132 (set (match_operand:SI 0 "arm_general_register_operand" "")
1133 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1134 (match_operand:SI 2 "arm_general_register_operand" "")))]
1136 && !const_ok_for_arm (INTVAL (operands[1]))
1137 && const_ok_for_arm (~INTVAL (operands[1]))"
1138 [(set (match_dup 3) (match_dup 1))
1139 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1143 (define_insn "*subsi3_compare0"
1144 [(set (reg:CC_NOOV CC_REGNUM)
1146 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1147 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1149 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1150 (minus:SI (match_dup 1) (match_dup 2)))]
1155 [(set_attr "conds" "set")]
1158 (define_expand "decscc"
1159 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1160 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1161 (match_operator:SI 2 "arm_comparison_operator"
1162 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1167 (define_insn "*arm_decscc"
1168 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1169 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1170 (match_operator:SI 2 "arm_comparison_operator"
1171 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1175 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1176 [(set_attr "conds" "use")
1177 (set_attr "length" "*,8")]
1180 (define_expand "subsf3"
1181 [(set (match_operand:SF 0 "s_register_operand" "")
1182 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1183 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1184 "TARGET_32BIT && TARGET_HARD_FLOAT"
1186 if (TARGET_MAVERICK)
1188 if (!cirrus_fp_register (operands[1], SFmode))
1189 operands[1] = force_reg (SFmode, operands[1]);
1190 if (!cirrus_fp_register (operands[2], SFmode))
1191 operands[2] = force_reg (SFmode, operands[2]);
1195 (define_expand "subdf3"
1196 [(set (match_operand:DF 0 "s_register_operand" "")
1197 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1198 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1199 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1201 if (TARGET_MAVERICK)
1203 if (!cirrus_fp_register (operands[1], DFmode))
1204 operands[1] = force_reg (DFmode, operands[1]);
1205 if (!cirrus_fp_register (operands[2], DFmode))
1206 operands[2] = force_reg (DFmode, operands[2]);
1211 ;; Multiplication insns
1213 (define_expand "mulsi3"
1214 [(set (match_operand:SI 0 "s_register_operand" "")
1215 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1216 (match_operand:SI 1 "s_register_operand" "")))]
1221 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1222 (define_insn "*arm_mulsi3"
1223 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1224 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1225 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1226 "TARGET_32BIT && !arm_arch6"
1227 "mul%?\\t%0, %2, %1"
1228 [(set_attr "insn" "mul")
1229 (set_attr "predicable" "yes")]
1232 (define_insn "*arm_mulsi3_v6"
1233 [(set (match_operand:SI 0 "s_register_operand" "=r")
1234 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1235 (match_operand:SI 2 "s_register_operand" "r")))]
1236 "TARGET_32BIT && arm_arch6"
1237 "mul%?\\t%0, %1, %2"
1238 [(set_attr "insn" "mul")
1239 (set_attr "predicable" "yes")]
1242 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1243 ; 1 and 2; are the same, because reload will make operand 0 match
1244 ; operand 1 without realizing that this conflicts with operand 2. We fix
1245 ; this by adding another alternative to match this case, and then `reload'
1246 ; it ourselves. This alternative must come first.
1247 (define_insn "*thumb_mulsi3"
1248 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1249 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1250 (match_operand:SI 2 "register_operand" "l,l,l")))]
1251 "TARGET_THUMB1 && !arm_arch6"
1253 if (which_alternative < 2)
1254 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1256 return \"mul\\t%0, %2\";
1258 [(set_attr "length" "4,4,2")
1259 (set_attr "insn" "mul")]
1262 (define_insn "*thumb_mulsi3_v6"
1263 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1264 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1265 (match_operand:SI 2 "register_operand" "l,0,0")))]
1266 "TARGET_THUMB1 && arm_arch6"
1271 [(set_attr "length" "2")
1272 (set_attr "insn" "mul")]
1275 (define_insn "*mulsi3_compare0"
1276 [(set (reg:CC_NOOV CC_REGNUM)
1277 (compare:CC_NOOV (mult:SI
1278 (match_operand:SI 2 "s_register_operand" "r,r")
1279 (match_operand:SI 1 "s_register_operand" "%0,r"))
1281 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1282 (mult:SI (match_dup 2) (match_dup 1)))]
1283 "TARGET_ARM && !arm_arch6"
1284 "mul%.\\t%0, %2, %1"
1285 [(set_attr "conds" "set")
1286 (set_attr "insn" "muls")]
1289 (define_insn "*mulsi3_compare0_v6"
1290 [(set (reg:CC_NOOV CC_REGNUM)
1291 (compare:CC_NOOV (mult:SI
1292 (match_operand:SI 2 "s_register_operand" "r")
1293 (match_operand:SI 1 "s_register_operand" "r"))
1295 (set (match_operand:SI 0 "s_register_operand" "=r")
1296 (mult:SI (match_dup 2) (match_dup 1)))]
1297 "TARGET_ARM && arm_arch6 && optimize_size"
1298 "mul%.\\t%0, %2, %1"
1299 [(set_attr "conds" "set")
1300 (set_attr "insn" "muls")]
1303 (define_insn "*mulsi_compare0_scratch"
1304 [(set (reg:CC_NOOV CC_REGNUM)
1305 (compare:CC_NOOV (mult:SI
1306 (match_operand:SI 2 "s_register_operand" "r,r")
1307 (match_operand:SI 1 "s_register_operand" "%0,r"))
1309 (clobber (match_scratch:SI 0 "=&r,&r"))]
1310 "TARGET_ARM && !arm_arch6"
1311 "mul%.\\t%0, %2, %1"
1312 [(set_attr "conds" "set")
1313 (set_attr "insn" "muls")]
1316 (define_insn "*mulsi_compare0_scratch_v6"
1317 [(set (reg:CC_NOOV CC_REGNUM)
1318 (compare:CC_NOOV (mult:SI
1319 (match_operand:SI 2 "s_register_operand" "r")
1320 (match_operand:SI 1 "s_register_operand" "r"))
1322 (clobber (match_scratch:SI 0 "=r"))]
1323 "TARGET_ARM && arm_arch6 && optimize_size"
1324 "mul%.\\t%0, %2, %1"
1325 [(set_attr "conds" "set")
1326 (set_attr "insn" "muls")]
1329 ;; Unnamed templates to match MLA instruction.
1331 (define_insn "*mulsi3addsi"
1332 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1334 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1335 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1336 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1337 "TARGET_32BIT && !arm_arch6"
1338 "mla%?\\t%0, %2, %1, %3"
1339 [(set_attr "insn" "mla")
1340 (set_attr "predicable" "yes")]
1343 (define_insn "*mulsi3addsi_v6"
1344 [(set (match_operand:SI 0 "s_register_operand" "=r")
1346 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1347 (match_operand:SI 1 "s_register_operand" "r"))
1348 (match_operand:SI 3 "s_register_operand" "r")))]
1349 "TARGET_32BIT && arm_arch6"
1350 "mla%?\\t%0, %2, %1, %3"
1351 [(set_attr "insn" "mla")
1352 (set_attr "predicable" "yes")]
1355 (define_insn "*mulsi3addsi_compare0"
1356 [(set (reg:CC_NOOV CC_REGNUM)
1359 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1360 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1361 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1363 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1364 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1366 "TARGET_ARM && arm_arch6"
1367 "mla%.\\t%0, %2, %1, %3"
1368 [(set_attr "conds" "set")
1369 (set_attr "insn" "mlas")]
1372 (define_insn "*mulsi3addsi_compare0_v6"
1373 [(set (reg:CC_NOOV CC_REGNUM)
1376 (match_operand:SI 2 "s_register_operand" "r")
1377 (match_operand:SI 1 "s_register_operand" "r"))
1378 (match_operand:SI 3 "s_register_operand" "r"))
1380 (set (match_operand:SI 0 "s_register_operand" "=r")
1381 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1383 "TARGET_ARM && arm_arch6 && optimize_size"
1384 "mla%.\\t%0, %2, %1, %3"
1385 [(set_attr "conds" "set")
1386 (set_attr "insn" "mlas")]
1389 (define_insn "*mulsi3addsi_compare0_scratch"
1390 [(set (reg:CC_NOOV CC_REGNUM)
1393 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1394 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1395 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1397 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1398 "TARGET_ARM && !arm_arch6"
1399 "mla%.\\t%0, %2, %1, %3"
1400 [(set_attr "conds" "set")
1401 (set_attr "insn" "mlas")]
1404 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1405 [(set (reg:CC_NOOV CC_REGNUM)
1408 (match_operand:SI 2 "s_register_operand" "r")
1409 (match_operand:SI 1 "s_register_operand" "r"))
1410 (match_operand:SI 3 "s_register_operand" "r"))
1412 (clobber (match_scratch:SI 0 "=r"))]
1413 "TARGET_ARM && arm_arch6 && optimize_size"
1414 "mla%.\\t%0, %2, %1, %3"
1415 [(set_attr "conds" "set")
1416 (set_attr "insn" "mlas")]
1419 (define_insn "*mulsi3subsi"
1420 [(set (match_operand:SI 0 "s_register_operand" "=r")
1422 (match_operand:SI 3 "s_register_operand" "r")
1423 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1424 (match_operand:SI 1 "s_register_operand" "r"))))]
1425 "TARGET_32BIT && arm_arch_thumb2"
1426 "mls%?\\t%0, %2, %1, %3"
1427 [(set_attr "insn" "mla")
1428 (set_attr "predicable" "yes")]
1431 ;; Unnamed template to match long long multiply-accumulate (smlal)
1433 (define_insn "*mulsidi3adddi"
1434 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1437 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1438 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1439 (match_operand:DI 1 "s_register_operand" "0")))]
1440 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1441 "smlal%?\\t%Q0, %R0, %3, %2"
1442 [(set_attr "insn" "smlal")
1443 (set_attr "predicable" "yes")]
1446 (define_insn "*mulsidi3adddi_v6"
1447 [(set (match_operand:DI 0 "s_register_operand" "=r")
1450 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1451 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1452 (match_operand:DI 1 "s_register_operand" "0")))]
1453 "TARGET_32BIT && arm_arch6"
1454 "smlal%?\\t%Q0, %R0, %3, %2"
1455 [(set_attr "insn" "smlal")
1456 (set_attr "predicable" "yes")]
1459 ;; 32x32->64 widening multiply.
1460 ;; As with mulsi3, the only difference between the v3-5 and v6+
1461 ;; versions of these patterns is the requirement that the output not
1462 ;; overlap the inputs, but that still means we have to have a named
1463 ;; expander and two different starred insns.
1465 (define_expand "mulsidi3"
1466 [(set (match_operand:DI 0 "s_register_operand" "")
1468 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1469 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1470 "TARGET_32BIT && arm_arch3m"
1474 (define_insn "*mulsidi3_nov6"
1475 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1477 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1478 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1479 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1480 "smull%?\\t%Q0, %R0, %1, %2"
1481 [(set_attr "insn" "smull")
1482 (set_attr "predicable" "yes")]
1485 (define_insn "*mulsidi3_v6"
1486 [(set (match_operand:DI 0 "s_register_operand" "=r")
1488 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1489 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1490 "TARGET_32BIT && arm_arch6"
1491 "smull%?\\t%Q0, %R0, %1, %2"
1492 [(set_attr "insn" "smull")
1493 (set_attr "predicable" "yes")]
1496 (define_expand "umulsidi3"
1497 [(set (match_operand:DI 0 "s_register_operand" "")
1499 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1500 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1501 "TARGET_32BIT && arm_arch3m"
1505 (define_insn "*umulsidi3_nov6"
1506 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1508 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1509 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1510 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1511 "umull%?\\t%Q0, %R0, %1, %2"
1512 [(set_attr "insn" "umull")
1513 (set_attr "predicable" "yes")]
1516 (define_insn "*umulsidi3_v6"
1517 [(set (match_operand:DI 0 "s_register_operand" "=r")
1519 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1520 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1521 "TARGET_32BIT && arm_arch6"
1522 "umull%?\\t%Q0, %R0, %1, %2"
1523 [(set_attr "insn" "umull")
1524 (set_attr "predicable" "yes")]
1527 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1529 (define_insn "*umulsidi3adddi"
1530 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1533 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1534 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1535 (match_operand:DI 1 "s_register_operand" "0")))]
1536 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1537 "umlal%?\\t%Q0, %R0, %3, %2"
1538 [(set_attr "insn" "umlal")
1539 (set_attr "predicable" "yes")]
1542 (define_insn "*umulsidi3adddi_v6"
1543 [(set (match_operand:DI 0 "s_register_operand" "=r")
1546 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1547 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1548 (match_operand:DI 1 "s_register_operand" "0")))]
1549 "TARGET_32BIT && arm_arch6"
1550 "umlal%?\\t%Q0, %R0, %3, %2"
1551 [(set_attr "insn" "umlal")
1552 (set_attr "predicable" "yes")]
1555 (define_expand "smulsi3_highpart"
1557 [(set (match_operand:SI 0 "s_register_operand" "")
1561 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1562 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1564 (clobber (match_scratch:SI 3 ""))])]
1565 "TARGET_32BIT && arm_arch3m"
1569 (define_insn "*smulsi3_highpart_nov6"
1570 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1574 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1575 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1577 (clobber (match_scratch:SI 3 "=&r,&r"))]
1578 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1579 "smull%?\\t%3, %0, %2, %1"
1580 [(set_attr "insn" "smull")
1581 (set_attr "predicable" "yes")]
1584 (define_insn "*smulsi3_highpart_v6"
1585 [(set (match_operand:SI 0 "s_register_operand" "=r")
1589 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1590 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1592 (clobber (match_scratch:SI 3 "=r"))]
1593 "TARGET_32BIT && arm_arch6"
1594 "smull%?\\t%3, %0, %2, %1"
1595 [(set_attr "insn" "smull")
1596 (set_attr "predicable" "yes")]
1599 (define_expand "umulsi3_highpart"
1601 [(set (match_operand:SI 0 "s_register_operand" "")
1605 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1606 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1608 (clobber (match_scratch:SI 3 ""))])]
1609 "TARGET_32BIT && arm_arch3m"
1613 (define_insn "*umulsi3_highpart_nov6"
1614 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1618 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1619 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1621 (clobber (match_scratch:SI 3 "=&r,&r"))]
1622 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1623 "umull%?\\t%3, %0, %2, %1"
1624 [(set_attr "insn" "umull")
1625 (set_attr "predicable" "yes")]
1628 (define_insn "*umulsi3_highpart_v6"
1629 [(set (match_operand:SI 0 "s_register_operand" "=r")
1633 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1634 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1636 (clobber (match_scratch:SI 3 "=r"))]
1637 "TARGET_32BIT && arm_arch6"
1638 "umull%?\\t%3, %0, %2, %1"
1639 [(set_attr "insn" "umull")
1640 (set_attr "predicable" "yes")]
1643 (define_insn "mulhisi3"
1644 [(set (match_operand:SI 0 "s_register_operand" "=r")
1645 (mult:SI (sign_extend:SI
1646 (match_operand:HI 1 "s_register_operand" "%r"))
1648 (match_operand:HI 2 "s_register_operand" "r"))))]
1649 "TARGET_DSP_MULTIPLY"
1650 "smulbb%?\\t%0, %1, %2"
1651 [(set_attr "insn" "smulxy")
1652 (set_attr "predicable" "yes")]
1655 (define_insn "*mulhisi3tb"
1656 [(set (match_operand:SI 0 "s_register_operand" "=r")
1657 (mult:SI (ashiftrt:SI
1658 (match_operand:SI 1 "s_register_operand" "r")
1661 (match_operand:HI 2 "s_register_operand" "r"))))]
1662 "TARGET_DSP_MULTIPLY"
1663 "smultb%?\\t%0, %1, %2"
1664 [(set_attr "insn" "smulxy")
1665 (set_attr "predicable" "yes")]
1668 (define_insn "*mulhisi3bt"
1669 [(set (match_operand:SI 0 "s_register_operand" "=r")
1670 (mult:SI (sign_extend:SI
1671 (match_operand:HI 1 "s_register_operand" "r"))
1673 (match_operand:SI 2 "s_register_operand" "r")
1675 "TARGET_DSP_MULTIPLY"
1676 "smulbt%?\\t%0, %1, %2"
1677 [(set_attr "insn" "smulxy")
1678 (set_attr "predicable" "yes")]
1681 (define_insn "*mulhisi3tt"
1682 [(set (match_operand:SI 0 "s_register_operand" "=r")
1683 (mult:SI (ashiftrt:SI
1684 (match_operand:SI 1 "s_register_operand" "r")
1687 (match_operand:SI 2 "s_register_operand" "r")
1689 "TARGET_DSP_MULTIPLY"
1690 "smultt%?\\t%0, %1, %2"
1691 [(set_attr "insn" "smulxy")
1692 (set_attr "predicable" "yes")]
1695 (define_insn "*mulhisi3addsi"
1696 [(set (match_operand:SI 0 "s_register_operand" "=r")
1697 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1698 (mult:SI (sign_extend:SI
1699 (match_operand:HI 2 "s_register_operand" "%r"))
1701 (match_operand:HI 3 "s_register_operand" "r")))))]
1702 "TARGET_DSP_MULTIPLY"
1703 "smlabb%?\\t%0, %2, %3, %1"
1704 [(set_attr "insn" "smlaxy")
1705 (set_attr "predicable" "yes")]
1708 (define_insn "*mulhidi3adddi"
1709 [(set (match_operand:DI 0 "s_register_operand" "=r")
1711 (match_operand:DI 1 "s_register_operand" "0")
1712 (mult:DI (sign_extend:DI
1713 (match_operand:HI 2 "s_register_operand" "%r"))
1715 (match_operand:HI 3 "s_register_operand" "r")))))]
1716 "TARGET_DSP_MULTIPLY"
1717 "smlalbb%?\\t%Q0, %R0, %2, %3"
1718 [(set_attr "insn" "smlalxy")
1719 (set_attr "predicable" "yes")])
1721 (define_expand "mulsf3"
1722 [(set (match_operand:SF 0 "s_register_operand" "")
1723 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1724 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1725 "TARGET_32BIT && TARGET_HARD_FLOAT"
1728 && !cirrus_fp_register (operands[2], SFmode))
1729 operands[2] = force_reg (SFmode, operands[2]);
1732 (define_expand "muldf3"
1733 [(set (match_operand:DF 0 "s_register_operand" "")
1734 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1735 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1736 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1739 && !cirrus_fp_register (operands[2], DFmode))
1740 operands[2] = force_reg (DFmode, operands[2]);
1745 (define_expand "divsf3"
1746 [(set (match_operand:SF 0 "s_register_operand" "")
1747 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1748 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1749 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1752 (define_expand "divdf3"
1753 [(set (match_operand:DF 0 "s_register_operand" "")
1754 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1755 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1756 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1761 (define_expand "modsf3"
1762 [(set (match_operand:SF 0 "s_register_operand" "")
1763 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1764 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1765 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1768 (define_expand "moddf3"
1769 [(set (match_operand:DF 0 "s_register_operand" "")
1770 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1771 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1772 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1775 ;; Boolean and,ior,xor insns
1777 ;; Split up double word logical operations
1779 ;; Split up simple DImode logical operations. Simply perform the logical
1780 ;; operation on the upper and lower halves of the registers.
1782 [(set (match_operand:DI 0 "s_register_operand" "")
1783 (match_operator:DI 6 "logical_binary_operator"
1784 [(match_operand:DI 1 "s_register_operand" "")
1785 (match_operand:DI 2 "s_register_operand" "")]))]
1786 "TARGET_32BIT && reload_completed
1787 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1788 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1789 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1792 operands[3] = gen_highpart (SImode, operands[0]);
1793 operands[0] = gen_lowpart (SImode, operands[0]);
1794 operands[4] = gen_highpart (SImode, operands[1]);
1795 operands[1] = gen_lowpart (SImode, operands[1]);
1796 operands[5] = gen_highpart (SImode, operands[2]);
1797 operands[2] = gen_lowpart (SImode, operands[2]);
1802 [(set (match_operand:DI 0 "s_register_operand" "")
1803 (match_operator:DI 6 "logical_binary_operator"
1804 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1805 (match_operand:DI 1 "s_register_operand" "")]))]
1806 "TARGET_32BIT && reload_completed"
1807 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1808 (set (match_dup 3) (match_op_dup:SI 6
1809 [(ashiftrt:SI (match_dup 2) (const_int 31))
1813 operands[3] = gen_highpart (SImode, operands[0]);
1814 operands[0] = gen_lowpart (SImode, operands[0]);
1815 operands[4] = gen_highpart (SImode, operands[1]);
1816 operands[1] = gen_lowpart (SImode, operands[1]);
1817 operands[5] = gen_highpart (SImode, operands[2]);
1818 operands[2] = gen_lowpart (SImode, operands[2]);
1822 ;; The zero extend of operand 2 means we can just copy the high part of
1823 ;; operand1 into operand0.
1825 [(set (match_operand:DI 0 "s_register_operand" "")
1827 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1828 (match_operand:DI 1 "s_register_operand" "")))]
1829 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1830 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1831 (set (match_dup 3) (match_dup 4))]
1834 operands[4] = gen_highpart (SImode, operands[1]);
1835 operands[3] = gen_highpart (SImode, operands[0]);
1836 operands[0] = gen_lowpart (SImode, operands[0]);
1837 operands[1] = gen_lowpart (SImode, operands[1]);
1841 ;; The zero extend of operand 2 means we can just copy the high part of
1842 ;; operand1 into operand0.
1844 [(set (match_operand:DI 0 "s_register_operand" "")
1846 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1847 (match_operand:DI 1 "s_register_operand" "")))]
1848 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1849 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1850 (set (match_dup 3) (match_dup 4))]
1853 operands[4] = gen_highpart (SImode, operands[1]);
1854 operands[3] = gen_highpart (SImode, operands[0]);
1855 operands[0] = gen_lowpart (SImode, operands[0]);
1856 operands[1] = gen_lowpart (SImode, operands[1]);
1860 (define_insn "anddi3"
1861 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1862 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1863 (match_operand:DI 2 "s_register_operand" "r,r")))]
1864 "TARGET_32BIT && ! TARGET_IWMMXT"
1866 [(set_attr "length" "8")]
1869 (define_insn_and_split "*anddi_zesidi_di"
1870 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1871 (and:DI (zero_extend:DI
1872 (match_operand:SI 2 "s_register_operand" "r,r"))
1873 (match_operand:DI 1 "s_register_operand" "0,r")))]
1876 "TARGET_32BIT && reload_completed"
1877 ; The zero extend of operand 2 clears the high word of the output
1879 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1880 (set (match_dup 3) (const_int 0))]
1883 operands[3] = gen_highpart (SImode, operands[0]);
1884 operands[0] = gen_lowpart (SImode, operands[0]);
1885 operands[1] = gen_lowpart (SImode, operands[1]);
1887 [(set_attr "length" "8")]
1890 (define_insn "*anddi_sesdi_di"
1891 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1892 (and:DI (sign_extend:DI
1893 (match_operand:SI 2 "s_register_operand" "r,r"))
1894 (match_operand:DI 1 "s_register_operand" "0,r")))]
1897 [(set_attr "length" "8")]
1900 (define_expand "andsi3"
1901 [(set (match_operand:SI 0 "s_register_operand" "")
1902 (and:SI (match_operand:SI 1 "s_register_operand" "")
1903 (match_operand:SI 2 "reg_or_int_operand" "")))]
1908 if (GET_CODE (operands[2]) == CONST_INT)
1910 arm_split_constant (AND, SImode, NULL_RTX,
1911 INTVAL (operands[2]), operands[0],
1912 operands[1], optimize && can_create_pseudo_p ());
1917 else /* TARGET_THUMB1 */
1919 if (GET_CODE (operands[2]) != CONST_INT)
1921 rtx tmp = force_reg (SImode, operands[2]);
1922 if (rtx_equal_p (operands[0], operands[1]))
1926 operands[2] = operands[1];
1934 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1936 operands[2] = force_reg (SImode,
1937 GEN_INT (~INTVAL (operands[2])));
1939 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1944 for (i = 9; i <= 31; i++)
1946 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1948 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1952 else if ((((HOST_WIDE_INT) 1) << i) - 1
1953 == ~INTVAL (operands[2]))
1955 rtx shift = GEN_INT (i);
1956 rtx reg = gen_reg_rtx (SImode);
1958 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1959 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1965 operands[2] = force_reg (SImode, operands[2]);
1971 ; ??? Check split length for Thumb-2
1972 (define_insn_and_split "*arm_andsi3_insn"
1973 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1974 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1975 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1979 bic%?\\t%0, %1, #%B2
1982 && GET_CODE (operands[2]) == CONST_INT
1983 && !(const_ok_for_arm (INTVAL (operands[2]))
1984 || const_ok_for_arm (~INTVAL (operands[2])))"
1985 [(clobber (const_int 0))]
1987 arm_split_constant (AND, SImode, curr_insn,
1988 INTVAL (operands[2]), operands[0], operands[1], 0);
1991 [(set_attr "length" "4,4,16")
1992 (set_attr "predicable" "yes")]
1995 (define_insn "*thumb1_andsi3_insn"
1996 [(set (match_operand:SI 0 "register_operand" "=l")
1997 (and:SI (match_operand:SI 1 "register_operand" "%0")
1998 (match_operand:SI 2 "register_operand" "l")))]
2001 [(set_attr "length" "2")]
2004 (define_insn "*andsi3_compare0"
2005 [(set (reg:CC_NOOV CC_REGNUM)
2007 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2008 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2010 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2011 (and:SI (match_dup 1) (match_dup 2)))]
2015 bic%.\\t%0, %1, #%B2"
2016 [(set_attr "conds" "set")]
2019 (define_insn "*andsi3_compare0_scratch"
2020 [(set (reg:CC_NOOV CC_REGNUM)
2022 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2023 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2025 (clobber (match_scratch:SI 2 "=X,r"))]
2029 bic%.\\t%2, %0, #%B1"
2030 [(set_attr "conds" "set")]
2033 (define_insn "*zeroextractsi_compare0_scratch"
2034 [(set (reg:CC_NOOV CC_REGNUM)
2035 (compare:CC_NOOV (zero_extract:SI
2036 (match_operand:SI 0 "s_register_operand" "r")
2037 (match_operand 1 "const_int_operand" "n")
2038 (match_operand 2 "const_int_operand" "n"))
2041 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2042 && INTVAL (operands[1]) > 0
2043 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2044 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2046 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2047 << INTVAL (operands[2]));
2048 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2051 [(set_attr "conds" "set")]
2054 (define_insn_and_split "*ne_zeroextractsi"
2055 [(set (match_operand:SI 0 "s_register_operand" "=r")
2056 (ne:SI (zero_extract:SI
2057 (match_operand:SI 1 "s_register_operand" "r")
2058 (match_operand:SI 2 "const_int_operand" "n")
2059 (match_operand:SI 3 "const_int_operand" "n"))
2061 (clobber (reg:CC CC_REGNUM))]
2063 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2064 && INTVAL (operands[2]) > 0
2065 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2066 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2069 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2070 && INTVAL (operands[2]) > 0
2071 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2072 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2073 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2076 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2078 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079 (match_dup 0) (const_int 1)))]
2081 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2082 << INTVAL (operands[3]));
2084 [(set_attr "conds" "clob")
2085 (set (attr "length")
2086 (if_then_else (eq_attr "is_thumb" "yes")
2091 (define_insn_and_split "*ne_zeroextractsi_shifted"
2092 [(set (match_operand:SI 0 "s_register_operand" "=r")
2093 (ne:SI (zero_extract:SI
2094 (match_operand:SI 1 "s_register_operand" "r")
2095 (match_operand:SI 2 "const_int_operand" "n")
2098 (clobber (reg:CC CC_REGNUM))]
2102 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2103 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2105 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2107 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2108 (match_dup 0) (const_int 1)))]
2110 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2112 [(set_attr "conds" "clob")
2113 (set_attr "length" "8")]
2116 (define_insn_and_split "*ite_ne_zeroextractsi"
2117 [(set (match_operand:SI 0 "s_register_operand" "=r")
2118 (if_then_else:SI (ne (zero_extract:SI
2119 (match_operand:SI 1 "s_register_operand" "r")
2120 (match_operand:SI 2 "const_int_operand" "n")
2121 (match_operand:SI 3 "const_int_operand" "n"))
2123 (match_operand:SI 4 "arm_not_operand" "rIK")
2125 (clobber (reg:CC CC_REGNUM))]
2127 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2128 && INTVAL (operands[2]) > 0
2129 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2130 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2131 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2134 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2135 && INTVAL (operands[2]) > 0
2136 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2137 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2138 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2139 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2140 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2142 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2144 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2145 (match_dup 0) (match_dup 4)))]
2147 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2148 << INTVAL (operands[3]));
2150 [(set_attr "conds" "clob")
2151 (set_attr "length" "8")]
2154 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2155 [(set (match_operand:SI 0 "s_register_operand" "=r")
2156 (if_then_else:SI (ne (zero_extract:SI
2157 (match_operand:SI 1 "s_register_operand" "r")
2158 (match_operand:SI 2 "const_int_operand" "n")
2161 (match_operand:SI 3 "arm_not_operand" "rIK")
2163 (clobber (reg:CC CC_REGNUM))]
2164 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2166 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2167 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2168 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2170 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2172 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2173 (match_dup 0) (match_dup 3)))]
2175 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2177 [(set_attr "conds" "clob")
2178 (set_attr "length" "8")]
2182 [(set (match_operand:SI 0 "s_register_operand" "")
2183 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2184 (match_operand:SI 2 "const_int_operand" "")
2185 (match_operand:SI 3 "const_int_operand" "")))
2186 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2188 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2189 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2191 HOST_WIDE_INT temp = INTVAL (operands[2]);
2193 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2194 operands[3] = GEN_INT (32 - temp);
2198 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2200 [(set (match_operand:SI 0 "s_register_operand" "")
2201 (match_operator:SI 1 "shiftable_operator"
2202 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2203 (match_operand:SI 3 "const_int_operand" "")
2204 (match_operand:SI 4 "const_int_operand" ""))
2205 (match_operand:SI 5 "s_register_operand" "")]))
2206 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2208 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2211 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2214 HOST_WIDE_INT temp = INTVAL (operands[3]);
2216 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2217 operands[4] = GEN_INT (32 - temp);
2222 [(set (match_operand:SI 0 "s_register_operand" "")
2223 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2224 (match_operand:SI 2 "const_int_operand" "")
2225 (match_operand:SI 3 "const_int_operand" "")))]
2227 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2228 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2230 HOST_WIDE_INT temp = INTVAL (operands[2]);
2232 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2233 operands[3] = GEN_INT (32 - temp);
2238 [(set (match_operand:SI 0 "s_register_operand" "")
2239 (match_operator:SI 1 "shiftable_operator"
2240 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2241 (match_operand:SI 3 "const_int_operand" "")
2242 (match_operand:SI 4 "const_int_operand" ""))
2243 (match_operand:SI 5 "s_register_operand" "")]))
2244 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2246 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2249 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2252 HOST_WIDE_INT temp = INTVAL (operands[3]);
2254 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2255 operands[4] = GEN_INT (32 - temp);
2259 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2260 ;;; represented by the bitfield, then this will produce incorrect results.
2261 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2262 ;;; which have a real bit-field insert instruction, the truncation happens
2263 ;;; in the bit-field insert instruction itself. Since arm does not have a
2264 ;;; bit-field insert instruction, we would have to emit code here to truncate
2265 ;;; the value before we insert. This loses some of the advantage of having
2266 ;;; this insv pattern, so this pattern needs to be reevalutated.
2268 (define_expand "insv"
2269 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2270 (match_operand:SI 1 "general_operand" "")
2271 (match_operand:SI 2 "general_operand" ""))
2272 (match_operand:SI 3 "reg_or_int_operand" ""))]
2273 "TARGET_ARM || arm_arch_thumb2"
2276 int start_bit = INTVAL (operands[2]);
2277 int width = INTVAL (operands[1]);
2278 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2279 rtx target, subtarget;
2281 if (arm_arch_thumb2)
2283 bool use_bfi = TRUE;
2285 if (GET_CODE (operands[3]) == CONST_INT)
2287 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2291 emit_insn (gen_insv_zero (operands[0], operands[1],
2296 /* See if the set can be done with a single orr instruction. */
2297 if (val == mask && const_ok_for_arm (val << start_bit))
2303 if (GET_CODE (operands[3]) != REG)
2304 operands[3] = force_reg (SImode, operands[3]);
2306 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2312 target = copy_rtx (operands[0]);
2313 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2314 subreg as the final target. */
2315 if (GET_CODE (target) == SUBREG)
2317 subtarget = gen_reg_rtx (SImode);
2318 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2319 < GET_MODE_SIZE (SImode))
2320 target = SUBREG_REG (target);
2325 if (GET_CODE (operands[3]) == CONST_INT)
2327 /* Since we are inserting a known constant, we may be able to
2328 reduce the number of bits that we have to clear so that
2329 the mask becomes simple. */
2330 /* ??? This code does not check to see if the new mask is actually
2331 simpler. It may not be. */
2332 rtx op1 = gen_reg_rtx (SImode);
2333 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2334 start of this pattern. */
2335 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2336 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2338 emit_insn (gen_andsi3 (op1, operands[0],
2339 gen_int_mode (~mask2, SImode)));
2340 emit_insn (gen_iorsi3 (subtarget, op1,
2341 gen_int_mode (op3_value << start_bit, SImode)));
2343 else if (start_bit == 0
2344 && !(const_ok_for_arm (mask)
2345 || const_ok_for_arm (~mask)))
2347 /* A Trick, since we are setting the bottom bits in the word,
2348 we can shift operand[3] up, operand[0] down, OR them together
2349 and rotate the result back again. This takes 3 insns, and
2350 the third might be mergeable into another op. */
2351 /* The shift up copes with the possibility that operand[3] is
2352 wider than the bitfield. */
2353 rtx op0 = gen_reg_rtx (SImode);
2354 rtx op1 = gen_reg_rtx (SImode);
2356 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2357 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2358 emit_insn (gen_iorsi3 (op1, op1, op0));
2359 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2361 else if ((width + start_bit == 32)
2362 && !(const_ok_for_arm (mask)
2363 || const_ok_for_arm (~mask)))
2365 /* Similar trick, but slightly less efficient. */
2367 rtx op0 = gen_reg_rtx (SImode);
2368 rtx op1 = gen_reg_rtx (SImode);
2370 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2371 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2372 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2373 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2377 rtx op0 = gen_int_mode (mask, SImode);
2378 rtx op1 = gen_reg_rtx (SImode);
2379 rtx op2 = gen_reg_rtx (SImode);
2381 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2383 rtx tmp = gen_reg_rtx (SImode);
2385 emit_insn (gen_movsi (tmp, op0));
2389 /* Mask out any bits in operand[3] that are not needed. */
2390 emit_insn (gen_andsi3 (op1, operands[3], op0));
2392 if (GET_CODE (op0) == CONST_INT
2393 && (const_ok_for_arm (mask << start_bit)
2394 || const_ok_for_arm (~(mask << start_bit))))
2396 op0 = gen_int_mode (~(mask << start_bit), SImode);
2397 emit_insn (gen_andsi3 (op2, operands[0], op0));
2401 if (GET_CODE (op0) == CONST_INT)
2403 rtx tmp = gen_reg_rtx (SImode);
2405 emit_insn (gen_movsi (tmp, op0));
2410 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2412 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2416 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2418 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2421 if (subtarget != target)
2423 /* If TARGET is still a SUBREG, then it must be wider than a word,
2424 so we must be careful only to set the subword we were asked to. */
2425 if (GET_CODE (target) == SUBREG)
2426 emit_move_insn (target, subtarget);
2428 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2435 (define_insn "insv_zero"
2436 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2437 (match_operand:SI 1 "const_int_operand" "M")
2438 (match_operand:SI 2 "const_int_operand" "M"))
2442 [(set_attr "length" "4")
2443 (set_attr "predicable" "yes")]
2446 (define_insn "insv_t2"
2447 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2448 (match_operand:SI 1 "const_int_operand" "M")
2449 (match_operand:SI 2 "const_int_operand" "M"))
2450 (match_operand:SI 3 "s_register_operand" "r"))]
2452 "bfi%?\t%0, %3, %2, %1"
2453 [(set_attr "length" "4")
2454 (set_attr "predicable" "yes")]
2457 ; constants for op 2 will never be given to these patterns.
2458 (define_insn_and_split "*anddi_notdi_di"
2459 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2460 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2461 (match_operand:DI 2 "s_register_operand" "r,0")))]
2464 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2465 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2466 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2469 operands[3] = gen_highpart (SImode, operands[0]);
2470 operands[0] = gen_lowpart (SImode, operands[0]);
2471 operands[4] = gen_highpart (SImode, operands[1]);
2472 operands[1] = gen_lowpart (SImode, operands[1]);
2473 operands[5] = gen_highpart (SImode, operands[2]);
2474 operands[2] = gen_lowpart (SImode, operands[2]);
2476 [(set_attr "length" "8")
2477 (set_attr "predicable" "yes")]
2480 (define_insn_and_split "*anddi_notzesidi_di"
2481 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2482 (and:DI (not:DI (zero_extend:DI
2483 (match_operand:SI 2 "s_register_operand" "r,r")))
2484 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2487 bic%?\\t%Q0, %Q1, %2
2489 ; (not (zero_extend ...)) allows us to just copy the high word from
2490 ; operand1 to operand0.
2493 && operands[0] != operands[1]"
2494 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2495 (set (match_dup 3) (match_dup 4))]
2498 operands[3] = gen_highpart (SImode, operands[0]);
2499 operands[0] = gen_lowpart (SImode, operands[0]);
2500 operands[4] = gen_highpart (SImode, operands[1]);
2501 operands[1] = gen_lowpart (SImode, operands[1]);
2503 [(set_attr "length" "4,8")
2504 (set_attr "predicable" "yes")]
2507 (define_insn_and_split "*anddi_notsesidi_di"
2508 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2509 (and:DI (not:DI (sign_extend:DI
2510 (match_operand:SI 2 "s_register_operand" "r,r")))
2511 (match_operand:DI 1 "s_register_operand" "0,r")))]
2514 "TARGET_32BIT && reload_completed"
2515 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2516 (set (match_dup 3) (and:SI (not:SI
2517 (ashiftrt:SI (match_dup 2) (const_int 31)))
2521 operands[3] = gen_highpart (SImode, operands[0]);
2522 operands[0] = gen_lowpart (SImode, operands[0]);
2523 operands[4] = gen_highpart (SImode, operands[1]);
2524 operands[1] = gen_lowpart (SImode, operands[1]);
2526 [(set_attr "length" "8")
2527 (set_attr "predicable" "yes")]
2530 (define_insn "andsi_notsi_si"
2531 [(set (match_operand:SI 0 "s_register_operand" "=r")
2532 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2533 (match_operand:SI 1 "s_register_operand" "r")))]
2535 "bic%?\\t%0, %1, %2"
2536 [(set_attr "predicable" "yes")]
2539 (define_insn "bicsi3"
2540 [(set (match_operand:SI 0 "register_operand" "=l")
2541 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2542 (match_operand:SI 2 "register_operand" "0")))]
2545 [(set_attr "length" "2")]
2548 (define_insn "andsi_not_shiftsi_si"
2549 [(set (match_operand:SI 0 "s_register_operand" "=r")
2550 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2551 [(match_operand:SI 2 "s_register_operand" "r")
2552 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2553 (match_operand:SI 1 "s_register_operand" "r")))]
2555 "bic%?\\t%0, %1, %2%S4"
2556 [(set_attr "predicable" "yes")
2557 (set_attr "shift" "2")
2558 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2559 (const_string "alu_shift")
2560 (const_string "alu_shift_reg")))]
2563 (define_insn "*andsi_notsi_si_compare0"
2564 [(set (reg:CC_NOOV CC_REGNUM)
2566 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2567 (match_operand:SI 1 "s_register_operand" "r"))
2569 (set (match_operand:SI 0 "s_register_operand" "=r")
2570 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2572 "bic%.\\t%0, %1, %2"
2573 [(set_attr "conds" "set")]
2576 (define_insn "*andsi_notsi_si_compare0_scratch"
2577 [(set (reg:CC_NOOV CC_REGNUM)
2579 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2580 (match_operand:SI 1 "s_register_operand" "r"))
2582 (clobber (match_scratch:SI 0 "=r"))]
2584 "bic%.\\t%0, %1, %2"
2585 [(set_attr "conds" "set")]
2588 (define_insn "iordi3"
2589 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2590 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2591 (match_operand:DI 2 "s_register_operand" "r,r")))]
2592 "TARGET_32BIT && ! TARGET_IWMMXT"
2594 [(set_attr "length" "8")
2595 (set_attr "predicable" "yes")]
2598 (define_insn "*iordi_zesidi_di"
2599 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2600 (ior:DI (zero_extend:DI
2601 (match_operand:SI 2 "s_register_operand" "r,r"))
2602 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2605 orr%?\\t%Q0, %Q1, %2
2607 [(set_attr "length" "4,8")
2608 (set_attr "predicable" "yes")]
2611 (define_insn "*iordi_sesidi_di"
2612 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2613 (ior:DI (sign_extend:DI
2614 (match_operand:SI 2 "s_register_operand" "r,r"))
2615 (match_operand:DI 1 "s_register_operand" "0,r")))]
2618 [(set_attr "length" "8")
2619 (set_attr "predicable" "yes")]
2622 (define_expand "iorsi3"
2623 [(set (match_operand:SI 0 "s_register_operand" "")
2624 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2625 (match_operand:SI 2 "reg_or_int_operand" "")))]
2628 if (GET_CODE (operands[2]) == CONST_INT)
2632 arm_split_constant (IOR, SImode, NULL_RTX,
2633 INTVAL (operands[2]), operands[0], operands[1],
2634 optimize && can_create_pseudo_p ());
2637 else /* TARGET_THUMB1 */
2639 rtx tmp = force_reg (SImode, operands[2]);
2640 if (rtx_equal_p (operands[0], operands[1]))
2644 operands[2] = operands[1];
2652 (define_insn_and_split "*arm_iorsi3"
2653 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2654 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2655 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2661 && GET_CODE (operands[2]) == CONST_INT
2662 && !const_ok_for_arm (INTVAL (operands[2]))"
2663 [(clobber (const_int 0))]
2665 arm_split_constant (IOR, SImode, curr_insn,
2666 INTVAL (operands[2]), operands[0], operands[1], 0);
2669 [(set_attr "length" "4,16")
2670 (set_attr "predicable" "yes")]
2673 (define_insn "*thumb1_iorsi3"
2674 [(set (match_operand:SI 0 "register_operand" "=l")
2675 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2676 (match_operand:SI 2 "register_operand" "l")))]
2679 [(set_attr "length" "2")]
2683 [(match_scratch:SI 3 "r")
2684 (set (match_operand:SI 0 "arm_general_register_operand" "")
2685 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2686 (match_operand:SI 2 "const_int_operand" "")))]
2688 && !const_ok_for_arm (INTVAL (operands[2]))
2689 && const_ok_for_arm (~INTVAL (operands[2]))"
2690 [(set (match_dup 3) (match_dup 2))
2691 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2695 (define_insn "*iorsi3_compare0"
2696 [(set (reg:CC_NOOV CC_REGNUM)
2697 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2698 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2700 (set (match_operand:SI 0 "s_register_operand" "=r")
2701 (ior:SI (match_dup 1) (match_dup 2)))]
2703 "orr%.\\t%0, %1, %2"
2704 [(set_attr "conds" "set")]
2707 (define_insn "*iorsi3_compare0_scratch"
2708 [(set (reg:CC_NOOV CC_REGNUM)
2709 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2710 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2712 (clobber (match_scratch:SI 0 "=r"))]
2714 "orr%.\\t%0, %1, %2"
2715 [(set_attr "conds" "set")]
2718 (define_insn "xordi3"
2719 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2720 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2721 (match_operand:DI 2 "s_register_operand" "r,r")))]
2722 "TARGET_32BIT && !TARGET_IWMMXT"
2724 [(set_attr "length" "8")
2725 (set_attr "predicable" "yes")]
2728 (define_insn "*xordi_zesidi_di"
2729 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2730 (xor:DI (zero_extend:DI
2731 (match_operand:SI 2 "s_register_operand" "r,r"))
2732 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2735 eor%?\\t%Q0, %Q1, %2
2737 [(set_attr "length" "4,8")
2738 (set_attr "predicable" "yes")]
2741 (define_insn "*xordi_sesidi_di"
2742 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2743 (xor:DI (sign_extend:DI
2744 (match_operand:SI 2 "s_register_operand" "r,r"))
2745 (match_operand:DI 1 "s_register_operand" "0,r")))]
2748 [(set_attr "length" "8")
2749 (set_attr "predicable" "yes")]
2752 (define_expand "xorsi3"
2753 [(set (match_operand:SI 0 "s_register_operand" "")
2754 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2755 (match_operand:SI 2 "reg_or_int_operand" "")))]
2757 "if (GET_CODE (operands[2]) == CONST_INT)
2761 arm_split_constant (XOR, SImode, NULL_RTX,
2762 INTVAL (operands[2]), operands[0], operands[1],
2763 optimize && can_create_pseudo_p ());
2766 else /* TARGET_THUMB1 */
2768 rtx tmp = force_reg (SImode, operands[2]);
2769 if (rtx_equal_p (operands[0], operands[1]))
2773 operands[2] = operands[1];
2780 (define_insn "*arm_xorsi3"
2781 [(set (match_operand:SI 0 "s_register_operand" "=r")
2782 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2783 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2785 "eor%?\\t%0, %1, %2"
2786 [(set_attr "predicable" "yes")]
2789 (define_insn "*thumb1_xorsi3"
2790 [(set (match_operand:SI 0 "register_operand" "=l")
2791 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2792 (match_operand:SI 2 "register_operand" "l")))]
2795 [(set_attr "length" "2")]
2798 (define_insn "*xorsi3_compare0"
2799 [(set (reg:CC_NOOV CC_REGNUM)
2800 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2801 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2803 (set (match_operand:SI 0 "s_register_operand" "=r")
2804 (xor:SI (match_dup 1) (match_dup 2)))]
2806 "eor%.\\t%0, %1, %2"
2807 [(set_attr "conds" "set")]
2810 (define_insn "*xorsi3_compare0_scratch"
2811 [(set (reg:CC_NOOV CC_REGNUM)
2812 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2813 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2817 [(set_attr "conds" "set")]
2820 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2821 ; (NOT D) we can sometimes merge the final NOT into one of the following
2825 [(set (match_operand:SI 0 "s_register_operand" "")
2826 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2827 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2828 (match_operand:SI 3 "arm_rhs_operand" "")))
2829 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2831 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2832 (not:SI (match_dup 3))))
2833 (set (match_dup 0) (not:SI (match_dup 4)))]
2837 (define_insn "*andsi_iorsi3_notsi"
2838 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2839 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2840 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2841 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2843 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2844 [(set_attr "length" "8")
2845 (set_attr "ce_count" "2")
2846 (set_attr "predicable" "yes")]
2849 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2850 ; insns are available?
2852 [(set (match_operand:SI 0 "s_register_operand" "")
2853 (match_operator:SI 1 "logical_binary_operator"
2854 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2855 (match_operand:SI 3 "const_int_operand" "")
2856 (match_operand:SI 4 "const_int_operand" ""))
2857 (match_operator:SI 9 "logical_binary_operator"
2858 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2859 (match_operand:SI 6 "const_int_operand" ""))
2860 (match_operand:SI 7 "s_register_operand" "")])]))
2861 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2863 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2864 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2867 [(ashift:SI (match_dup 2) (match_dup 4))
2871 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2874 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2878 [(set (match_operand:SI 0 "s_register_operand" "")
2879 (match_operator:SI 1 "logical_binary_operator"
2880 [(match_operator:SI 9 "logical_binary_operator"
2881 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2882 (match_operand:SI 6 "const_int_operand" ""))
2883 (match_operand:SI 7 "s_register_operand" "")])
2884 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2885 (match_operand:SI 3 "const_int_operand" "")
2886 (match_operand:SI 4 "const_int_operand" ""))]))
2887 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2889 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2890 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2893 [(ashift:SI (match_dup 2) (match_dup 4))
2897 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2900 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2904 [(set (match_operand:SI 0 "s_register_operand" "")
2905 (match_operator:SI 1 "logical_binary_operator"
2906 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2907 (match_operand:SI 3 "const_int_operand" "")
2908 (match_operand:SI 4 "const_int_operand" ""))
2909 (match_operator:SI 9 "logical_binary_operator"
2910 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2911 (match_operand:SI 6 "const_int_operand" ""))
2912 (match_operand:SI 7 "s_register_operand" "")])]))
2913 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2915 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2916 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2919 [(ashift:SI (match_dup 2) (match_dup 4))
2923 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2926 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2930 [(set (match_operand:SI 0 "s_register_operand" "")
2931 (match_operator:SI 1 "logical_binary_operator"
2932 [(match_operator:SI 9 "logical_binary_operator"
2933 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2934 (match_operand:SI 6 "const_int_operand" ""))
2935 (match_operand:SI 7 "s_register_operand" "")])
2936 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2937 (match_operand:SI 3 "const_int_operand" "")
2938 (match_operand:SI 4 "const_int_operand" ""))]))
2939 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2941 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2942 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2945 [(ashift:SI (match_dup 2) (match_dup 4))
2949 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2952 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2956 ;; Minimum and maximum insns
2958 (define_expand "smaxsi3"
2960 (set (match_operand:SI 0 "s_register_operand" "")
2961 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2962 (match_operand:SI 2 "arm_rhs_operand" "")))
2963 (clobber (reg:CC CC_REGNUM))])]
2966 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2968 /* No need for a clobber of the condition code register here. */
2969 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2970 gen_rtx_SMAX (SImode, operands[1],
2976 (define_insn "*smax_0"
2977 [(set (match_operand:SI 0 "s_register_operand" "=r")
2978 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2981 "bic%?\\t%0, %1, %1, asr #31"
2982 [(set_attr "predicable" "yes")]
2985 (define_insn "*smax_m1"
2986 [(set (match_operand:SI 0 "s_register_operand" "=r")
2987 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2990 "orr%?\\t%0, %1, %1, asr #31"
2991 [(set_attr "predicable" "yes")]
2994 (define_insn "*arm_smax_insn"
2995 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2996 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2997 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2998 (clobber (reg:CC CC_REGNUM))]
3001 cmp\\t%1, %2\;movlt\\t%0, %2
3002 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3003 [(set_attr "conds" "clob")
3004 (set_attr "length" "8,12")]
3007 (define_expand "sminsi3"
3009 (set (match_operand:SI 0 "s_register_operand" "")
3010 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3011 (match_operand:SI 2 "arm_rhs_operand" "")))
3012 (clobber (reg:CC CC_REGNUM))])]
3015 if (operands[2] == const0_rtx)
3017 /* No need for a clobber of the condition code register here. */
3018 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3019 gen_rtx_SMIN (SImode, operands[1],
3025 (define_insn "*smin_0"
3026 [(set (match_operand:SI 0 "s_register_operand" "=r")
3027 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3030 "and%?\\t%0, %1, %1, asr #31"
3031 [(set_attr "predicable" "yes")]
3034 (define_insn "*arm_smin_insn"
3035 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3036 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3037 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3038 (clobber (reg:CC CC_REGNUM))]
3041 cmp\\t%1, %2\;movge\\t%0, %2
3042 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3043 [(set_attr "conds" "clob")
3044 (set_attr "length" "8,12")]
3047 (define_expand "umaxsi3"
3049 (set (match_operand:SI 0 "s_register_operand" "")
3050 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3051 (match_operand:SI 2 "arm_rhs_operand" "")))
3052 (clobber (reg:CC CC_REGNUM))])]
3057 (define_insn "*arm_umaxsi3"
3058 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3059 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3060 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3061 (clobber (reg:CC CC_REGNUM))]
3064 cmp\\t%1, %2\;movcc\\t%0, %2
3065 cmp\\t%1, %2\;movcs\\t%0, %1
3066 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3067 [(set_attr "conds" "clob")
3068 (set_attr "length" "8,8,12")]
3071 (define_expand "uminsi3"
3073 (set (match_operand:SI 0 "s_register_operand" "")
3074 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3075 (match_operand:SI 2 "arm_rhs_operand" "")))
3076 (clobber (reg:CC CC_REGNUM))])]
3081 (define_insn "*arm_uminsi3"
3082 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3083 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3084 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3085 (clobber (reg:CC CC_REGNUM))]
3088 cmp\\t%1, %2\;movcs\\t%0, %2
3089 cmp\\t%1, %2\;movcc\\t%0, %1
3090 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3091 [(set_attr "conds" "clob")
3092 (set_attr "length" "8,8,12")]
3095 (define_insn "*store_minmaxsi"
3096 [(set (match_operand:SI 0 "memory_operand" "=m")
3097 (match_operator:SI 3 "minmax_operator"
3098 [(match_operand:SI 1 "s_register_operand" "r")
3099 (match_operand:SI 2 "s_register_operand" "r")]))
3100 (clobber (reg:CC CC_REGNUM))]
3103 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3104 operands[1], operands[2]);
3105 output_asm_insn (\"cmp\\t%1, %2\", operands);
3107 output_asm_insn (\"ite\t%d3\", operands);
3108 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3109 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3112 [(set_attr "conds" "clob")
3113 (set (attr "length")
3114 (if_then_else (eq_attr "is_thumb" "yes")
3117 (set_attr "type" "store1")]
3120 ; Reject the frame pointer in operand[1], since reloading this after
3121 ; it has been eliminated can cause carnage.
3122 (define_insn "*minmax_arithsi"
3123 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3124 (match_operator:SI 4 "shiftable_operator"
3125 [(match_operator:SI 5 "minmax_operator"
3126 [(match_operand:SI 2 "s_register_operand" "r,r")
3127 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3128 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3129 (clobber (reg:CC CC_REGNUM))]
3130 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3133 enum rtx_code code = GET_CODE (operands[4]);
3136 if (which_alternative != 0 || operands[3] != const0_rtx
3137 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3142 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3143 operands[2], operands[3]);
3144 output_asm_insn (\"cmp\\t%2, %3\", operands);
3148 output_asm_insn (\"ite\\t%d5\", operands);
3150 output_asm_insn (\"it\\t%d5\", operands);
3152 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3154 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3157 [(set_attr "conds" "clob")
3158 (set (attr "length")
3159 (if_then_else (eq_attr "is_thumb" "yes")
3165 ;; Shift and rotation insns
3167 (define_expand "ashldi3"
3168 [(set (match_operand:DI 0 "s_register_operand" "")
3169 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3170 (match_operand:SI 2 "reg_or_int_operand" "")))]
3173 if (GET_CODE (operands[2]) == CONST_INT)
3175 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3177 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3180 /* Ideally we shouldn't fail here if we could know that operands[1]
3181 ends up already living in an iwmmxt register. Otherwise it's
3182 cheaper to have the alternate code being generated than moving
3183 values to iwmmxt regs and back. */
3186 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3191 (define_insn "arm_ashldi3_1bit"
3192 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3193 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3195 (clobber (reg:CC CC_REGNUM))]
3197 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3198 [(set_attr "conds" "clob")
3199 (set_attr "length" "8")]
3202 (define_expand "ashlsi3"
3203 [(set (match_operand:SI 0 "s_register_operand" "")
3204 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3205 (match_operand:SI 2 "arm_rhs_operand" "")))]
3208 if (GET_CODE (operands[2]) == CONST_INT
3209 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3211 emit_insn (gen_movsi (operands[0], const0_rtx));
3217 (define_insn "*thumb1_ashlsi3"
3218 [(set (match_operand:SI 0 "register_operand" "=l,l")
3219 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3220 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3223 [(set_attr "length" "2")]
3226 (define_expand "ashrdi3"
3227 [(set (match_operand:DI 0 "s_register_operand" "")
3228 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3229 (match_operand:SI 2 "reg_or_int_operand" "")))]
3232 if (GET_CODE (operands[2]) == CONST_INT)
3234 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3236 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3239 /* Ideally we shouldn't fail here if we could know that operands[1]
3240 ends up already living in an iwmmxt register. Otherwise it's
3241 cheaper to have the alternate code being generated than moving
3242 values to iwmmxt regs and back. */
3245 else if (!TARGET_REALLY_IWMMXT)
3250 (define_insn "arm_ashrdi3_1bit"
3251 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3252 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3254 (clobber (reg:CC CC_REGNUM))]
3256 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3257 [(set_attr "conds" "clob")
3258 (set_attr "length" "8")]
3261 (define_expand "ashrsi3"
3262 [(set (match_operand:SI 0 "s_register_operand" "")
3263 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3264 (match_operand:SI 2 "arm_rhs_operand" "")))]
3267 if (GET_CODE (operands[2]) == CONST_INT
3268 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3269 operands[2] = GEN_INT (31);
3273 (define_insn "*thumb1_ashrsi3"
3274 [(set (match_operand:SI 0 "register_operand" "=l,l")
3275 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3276 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3279 [(set_attr "length" "2")]
3282 (define_expand "lshrdi3"
3283 [(set (match_operand:DI 0 "s_register_operand" "")
3284 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3285 (match_operand:SI 2 "reg_or_int_operand" "")))]
3288 if (GET_CODE (operands[2]) == CONST_INT)
3290 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3292 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3295 /* Ideally we shouldn't fail here if we could know that operands[1]
3296 ends up already living in an iwmmxt register. Otherwise it's
3297 cheaper to have the alternate code being generated than moving
3298 values to iwmmxt regs and back. */
3301 else if (!TARGET_REALLY_IWMMXT)
3306 (define_insn "arm_lshrdi3_1bit"
3307 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3308 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3310 (clobber (reg:CC CC_REGNUM))]
3312 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3313 [(set_attr "conds" "clob")
3314 (set_attr "length" "8")]
3317 (define_expand "lshrsi3"
3318 [(set (match_operand:SI 0 "s_register_operand" "")
3319 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3320 (match_operand:SI 2 "arm_rhs_operand" "")))]
3323 if (GET_CODE (operands[2]) == CONST_INT
3324 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3326 emit_insn (gen_movsi (operands[0], const0_rtx));
3332 (define_insn "*thumb1_lshrsi3"
3333 [(set (match_operand:SI 0 "register_operand" "=l,l")
3334 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3335 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3338 [(set_attr "length" "2")]
3341 (define_expand "rotlsi3"
3342 [(set (match_operand:SI 0 "s_register_operand" "")
3343 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3344 (match_operand:SI 2 "reg_or_int_operand" "")))]
3347 if (GET_CODE (operands[2]) == CONST_INT)
3348 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3351 rtx reg = gen_reg_rtx (SImode);
3352 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3358 (define_expand "rotrsi3"
3359 [(set (match_operand:SI 0 "s_register_operand" "")
3360 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3361 (match_operand:SI 2 "arm_rhs_operand" "")))]
3366 if (GET_CODE (operands[2]) == CONST_INT
3367 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3368 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3370 else /* TARGET_THUMB1 */
3372 if (GET_CODE (operands [2]) == CONST_INT)
3373 operands [2] = force_reg (SImode, operands[2]);
3378 (define_insn "*thumb1_rotrsi3"
3379 [(set (match_operand:SI 0 "register_operand" "=l")
3380 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3381 (match_operand:SI 2 "register_operand" "l")))]
3384 [(set_attr "length" "2")]
3387 (define_insn "*arm_shiftsi3"
3388 [(set (match_operand:SI 0 "s_register_operand" "=r")
3389 (match_operator:SI 3 "shift_operator"
3390 [(match_operand:SI 1 "s_register_operand" "r")
3391 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3393 "* return arm_output_shift(operands, 0);"
3394 [(set_attr "predicable" "yes")
3395 (set_attr "shift" "1")
3396 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3397 (const_string "alu_shift")
3398 (const_string "alu_shift_reg")))]
3401 (define_insn "*shiftsi3_compare0"
3402 [(set (reg:CC_NOOV CC_REGNUM)
3403 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3404 [(match_operand:SI 1 "s_register_operand" "r")
3405 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3407 (set (match_operand:SI 0 "s_register_operand" "=r")
3408 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3410 "* return arm_output_shift(operands, 1);"
3411 [(set_attr "conds" "set")
3412 (set_attr "shift" "1")
3413 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3414 (const_string "alu_shift")
3415 (const_string "alu_shift_reg")))]
3418 (define_insn "*shiftsi3_compare0_scratch"
3419 [(set (reg:CC_NOOV CC_REGNUM)
3420 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3421 [(match_operand:SI 1 "s_register_operand" "r")
3422 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3424 (clobber (match_scratch:SI 0 "=r"))]
3426 "* return arm_output_shift(operands, 1);"
3427 [(set_attr "conds" "set")
3428 (set_attr "shift" "1")]
3431 (define_insn "*arm_notsi_shiftsi"
3432 [(set (match_operand:SI 0 "s_register_operand" "=r")
3433 (not:SI (match_operator:SI 3 "shift_operator"
3434 [(match_operand:SI 1 "s_register_operand" "r")
3435 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3438 [(set_attr "predicable" "yes")
3439 (set_attr "shift" "1")
3440 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3441 (const_string "alu_shift")
3442 (const_string "alu_shift_reg")))]
3445 (define_insn "*arm_notsi_shiftsi_compare0"
3446 [(set (reg:CC_NOOV CC_REGNUM)
3447 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3448 [(match_operand:SI 1 "s_register_operand" "r")
3449 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3451 (set (match_operand:SI 0 "s_register_operand" "=r")
3452 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3455 [(set_attr "conds" "set")
3456 (set_attr "shift" "1")
3457 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3458 (const_string "alu_shift")
3459 (const_string "alu_shift_reg")))]
3462 (define_insn "*arm_not_shiftsi_compare0_scratch"
3463 [(set (reg:CC_NOOV CC_REGNUM)
3464 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3465 [(match_operand:SI 1 "s_register_operand" "r")
3466 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3468 (clobber (match_scratch:SI 0 "=r"))]
3471 [(set_attr "conds" "set")
3472 (set_attr "shift" "1")
3473 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3474 (const_string "alu_shift")
3475 (const_string "alu_shift_reg")))]
3478 ;; We don't really have extzv, but defining this using shifts helps
3479 ;; to reduce register pressure later on.
3481 (define_expand "extzv"
3483 (ashift:SI (match_operand:SI 1 "register_operand" "")
3484 (match_operand:SI 2 "const_int_operand" "")))
3485 (set (match_operand:SI 0 "register_operand" "")
3486 (lshiftrt:SI (match_dup 4)
3487 (match_operand:SI 3 "const_int_operand" "")))]
3488 "TARGET_THUMB1 || arm_arch_thumb2"
3491 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3492 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3494 if (arm_arch_thumb2)
3496 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3501 operands[3] = GEN_INT (rshift);
3505 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3509 operands[2] = GEN_INT (lshift);
3510 operands[4] = gen_reg_rtx (SImode);
3515 [(set (match_operand:SI 0 "s_register_operand" "=r")
3516 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3517 (match_operand:SI 2 "const_int_operand" "M")
3518 (match_operand:SI 3 "const_int_operand" "M")))]
3520 "sbfx%?\t%0, %1, %3, %2"
3521 [(set_attr "length" "4")
3522 (set_attr "predicable" "yes")]
3525 (define_insn "extzv_t2"
3526 [(set (match_operand:SI 0 "s_register_operand" "=r")
3527 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3528 (match_operand:SI 2 "const_int_operand" "M")
3529 (match_operand:SI 3 "const_int_operand" "M")))]
3531 "ubfx%?\t%0, %1, %3, %2"
3532 [(set_attr "length" "4")
3533 (set_attr "predicable" "yes")]
3537 ;; Unary arithmetic insns
3539 (define_expand "negdi2"
3541 [(set (match_operand:DI 0 "s_register_operand" "")
3542 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3543 (clobber (reg:CC CC_REGNUM))])]
3548 if (GET_CODE (operands[1]) != REG)
3549 operands[1] = force_reg (DImode, operands[1]);
3554 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3555 ;; The first alternative allows the common case of a *full* overlap.
3556 (define_insn "*arm_negdi2"
3557 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3558 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3559 (clobber (reg:CC CC_REGNUM))]
3561 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3562 [(set_attr "conds" "clob")
3563 (set_attr "length" "8")]
3566 (define_insn "*thumb1_negdi2"
3567 [(set (match_operand:DI 0 "register_operand" "=&l")
3568 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3569 (clobber (reg:CC CC_REGNUM))]
3571 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3572 [(set_attr "length" "6")]
3575 (define_expand "negsi2"
3576 [(set (match_operand:SI 0 "s_register_operand" "")
3577 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3582 (define_insn "*arm_negsi2"
3583 [(set (match_operand:SI 0 "s_register_operand" "=r")
3584 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3586 "rsb%?\\t%0, %1, #0"
3587 [(set_attr "predicable" "yes")]
3590 (define_insn "*thumb1_negsi2"
3591 [(set (match_operand:SI 0 "register_operand" "=l")
3592 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3595 [(set_attr "length" "2")]
3598 (define_expand "negsf2"
3599 [(set (match_operand:SF 0 "s_register_operand" "")
3600 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3601 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3605 (define_expand "negdf2"
3606 [(set (match_operand:DF 0 "s_register_operand" "")
3607 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3608 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3611 ;; abssi2 doesn't really clobber the condition codes if a different register
3612 ;; is being set. To keep things simple, assume during rtl manipulations that
3613 ;; it does, but tell the final scan operator the truth. Similarly for
3616 (define_expand "abssi2"
3618 [(set (match_operand:SI 0 "s_register_operand" "")
3619 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3620 (clobber (match_dup 2))])]
3624 operands[2] = gen_rtx_SCRATCH (SImode);
3626 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3629 (define_insn "*arm_abssi2"
3630 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3631 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3632 (clobber (reg:CC CC_REGNUM))]
3635 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3636 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3637 [(set_attr "conds" "clob,*")
3638 (set_attr "shift" "1")
3639 ;; predicable can't be set based on the variant, so left as no
3640 (set_attr "length" "8")]
3643 (define_insn_and_split "*thumb1_abssi2"
3644 [(set (match_operand:SI 0 "s_register_operand" "=l")
3645 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3646 (clobber (match_scratch:SI 2 "=&l"))]
3649 "TARGET_THUMB1 && reload_completed"
3650 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3651 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3652 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3654 [(set_attr "length" "6")]
3657 (define_insn "*arm_neg_abssi2"
3658 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3659 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3660 (clobber (reg:CC CC_REGNUM))]
3663 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3664 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3665 [(set_attr "conds" "clob,*")
3666 (set_attr "shift" "1")
3667 ;; predicable can't be set based on the variant, so left as no
3668 (set_attr "length" "8")]
3671 (define_insn_and_split "*thumb1_neg_abssi2"
3672 [(set (match_operand:SI 0 "s_register_operand" "=l")
3673 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3674 (clobber (match_scratch:SI 2 "=&l"))]
3677 "TARGET_THUMB1 && reload_completed"
3678 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3679 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3680 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3682 [(set_attr "length" "6")]
3685 (define_expand "abssf2"
3686 [(set (match_operand:SF 0 "s_register_operand" "")
3687 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3688 "TARGET_32BIT && TARGET_HARD_FLOAT"
3691 (define_expand "absdf2"
3692 [(set (match_operand:DF 0 "s_register_operand" "")
3693 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3694 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3697 (define_expand "sqrtsf2"
3698 [(set (match_operand:SF 0 "s_register_operand" "")
3699 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3700 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3703 (define_expand "sqrtdf2"
3704 [(set (match_operand:DF 0 "s_register_operand" "")
3705 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3706 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3709 (define_insn_and_split "one_cmpldi2"
3710 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3711 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3714 "TARGET_32BIT && reload_completed"
3715 [(set (match_dup 0) (not:SI (match_dup 1)))
3716 (set (match_dup 2) (not:SI (match_dup 3)))]
3719 operands[2] = gen_highpart (SImode, operands[0]);
3720 operands[0] = gen_lowpart (SImode, operands[0]);
3721 operands[3] = gen_highpart (SImode, operands[1]);
3722 operands[1] = gen_lowpart (SImode, operands[1]);
3724 [(set_attr "length" "8")
3725 (set_attr "predicable" "yes")]
3728 (define_expand "one_cmplsi2"
3729 [(set (match_operand:SI 0 "s_register_operand" "")
3730 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3735 (define_insn "*arm_one_cmplsi2"
3736 [(set (match_operand:SI 0 "s_register_operand" "=r")
3737 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3740 [(set_attr "predicable" "yes")]
3743 (define_insn "*thumb1_one_cmplsi2"
3744 [(set (match_operand:SI 0 "register_operand" "=l")
3745 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3748 [(set_attr "length" "2")]
3751 (define_insn "*notsi_compare0"
3752 [(set (reg:CC_NOOV CC_REGNUM)
3753 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3755 (set (match_operand:SI 0 "s_register_operand" "=r")
3756 (not:SI (match_dup 1)))]
3759 [(set_attr "conds" "set")]
3762 (define_insn "*notsi_compare0_scratch"
3763 [(set (reg:CC_NOOV CC_REGNUM)
3764 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3766 (clobber (match_scratch:SI 0 "=r"))]
3769 [(set_attr "conds" "set")]
3772 ;; Fixed <--> Floating conversion insns
3774 (define_expand "floatsihf2"
3775 [(set (match_operand:HF 0 "general_operand" "")
3776 (float:HF (match_operand:SI 1 "general_operand" "")))]
3780 rtx op1 = gen_reg_rtx (SFmode);
3781 expand_float (op1, operands[1], 0);
3782 op1 = convert_to_mode (HFmode, op1, 0);
3783 emit_move_insn (operands[0], op1);
3788 (define_expand "floatdihf2"
3789 [(set (match_operand:HF 0 "general_operand" "")
3790 (float:HF (match_operand:DI 1 "general_operand" "")))]
3794 rtx op1 = gen_reg_rtx (SFmode);
3795 expand_float (op1, operands[1], 0);
3796 op1 = convert_to_mode (HFmode, op1, 0);
3797 emit_move_insn (operands[0], op1);
3802 (define_expand "floatsisf2"
3803 [(set (match_operand:SF 0 "s_register_operand" "")
3804 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3805 "TARGET_32BIT && TARGET_HARD_FLOAT"
3807 if (TARGET_MAVERICK)
3809 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3814 (define_expand "floatsidf2"
3815 [(set (match_operand:DF 0 "s_register_operand" "")
3816 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3817 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3819 if (TARGET_MAVERICK)
3821 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3826 (define_expand "fix_trunchfsi2"
3827 [(set (match_operand:SI 0 "general_operand" "")
3828 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3832 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3833 expand_fix (operands[0], op1, 0);
3838 (define_expand "fix_trunchfdi2"
3839 [(set (match_operand:DI 0 "general_operand" "")
3840 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3844 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3845 expand_fix (operands[0], op1, 0);
3850 (define_expand "fix_truncsfsi2"
3851 [(set (match_operand:SI 0 "s_register_operand" "")
3852 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3853 "TARGET_32BIT && TARGET_HARD_FLOAT"
3855 if (TARGET_MAVERICK)
3857 if (!cirrus_fp_register (operands[0], SImode))
3858 operands[0] = force_reg (SImode, operands[0]);
3859 if (!cirrus_fp_register (operands[1], SFmode))
3860 operands[1] = force_reg (SFmode, operands[0]);
3861 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3866 (define_expand "fix_truncdfsi2"
3867 [(set (match_operand:SI 0 "s_register_operand" "")
3868 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3869 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3871 if (TARGET_MAVERICK)
3873 if (!cirrus_fp_register (operands[1], DFmode))
3874 operands[1] = force_reg (DFmode, operands[0]);
3875 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3882 (define_expand "truncdfsf2"
3883 [(set (match_operand:SF 0 "s_register_operand" "")
3885 (match_operand:DF 1 "s_register_operand" "")))]
3886 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3890 /* DFmode -> HFmode conversions have to go through SFmode. */
3891 (define_expand "truncdfhf2"
3892 [(set (match_operand:HF 0 "general_operand" "")
3894 (match_operand:DF 1 "general_operand" "")))]
3899 op1 = convert_to_mode (SFmode, operands[1], 0);
3900 op1 = convert_to_mode (HFmode, op1, 0);
3901 emit_move_insn (operands[0], op1);
3906 ;; Zero and sign extension instructions.
3908 (define_expand "zero_extendsidi2"
3909 [(set (match_operand:DI 0 "s_register_operand" "")
3910 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3915 (define_insn "*arm_zero_extendsidi2"
3916 [(set (match_operand:DI 0 "s_register_operand" "=r")
3917 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3920 if (REGNO (operands[1])
3921 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3922 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3923 return \"mov%?\\t%R0, #0\";
3925 [(set_attr "length" "8")
3926 (set_attr "predicable" "yes")]
3929 (define_expand "zero_extendqidi2"
3930 [(set (match_operand:DI 0 "s_register_operand" "")
3931 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3936 (define_insn "*arm_zero_extendqidi2"
3937 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3938 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3941 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3942 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3943 [(set_attr "length" "8")
3944 (set_attr "predicable" "yes")
3945 (set_attr "type" "*,load_byte")
3946 (set_attr "pool_range" "*,4092")
3947 (set_attr "neg_pool_range" "*,4084")]
3950 (define_expand "extendsidi2"
3951 [(set (match_operand:DI 0 "s_register_operand" "")
3952 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3957 (define_insn "*arm_extendsidi2"
3958 [(set (match_operand:DI 0 "s_register_operand" "=r")
3959 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3962 if (REGNO (operands[1])
3963 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3964 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3965 return \"mov%?\\t%R0, %Q0, asr #31\";
3967 [(set_attr "length" "8")
3968 (set_attr "shift" "1")
3969 (set_attr "predicable" "yes")]
3972 (define_expand "zero_extendhisi2"
3974 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3976 (set (match_operand:SI 0 "s_register_operand" "")
3977 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3981 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3983 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3984 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3988 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3990 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3994 if (!s_register_operand (operands[1], HImode))
3995 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3999 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4000 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4004 operands[1] = gen_lowpart (SImode, operands[1]);
4005 operands[2] = gen_reg_rtx (SImode);
4009 (define_insn "*thumb1_zero_extendhisi2"
4010 [(set (match_operand:SI 0 "register_operand" "=l")
4011 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4012 "TARGET_THUMB1 && !arm_arch6"
4014 rtx mem = XEXP (operands[1], 0);
4016 if (GET_CODE (mem) == CONST)
4017 mem = XEXP (mem, 0);
4019 if (GET_CODE (mem) == LABEL_REF)
4020 return \"ldr\\t%0, %1\";
4022 if (GET_CODE (mem) == PLUS)
4024 rtx a = XEXP (mem, 0);
4025 rtx b = XEXP (mem, 1);
4027 /* This can happen due to bugs in reload. */
4028 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4031 ops[0] = operands[0];
4034 output_asm_insn (\"mov %0, %1\", ops);
4036 XEXP (mem, 0) = operands[0];
4039 else if ( GET_CODE (a) == LABEL_REF
4040 && GET_CODE (b) == CONST_INT)
4041 return \"ldr\\t%0, %1\";
4044 return \"ldrh\\t%0, %1\";
4046 [(set_attr "length" "4")
4047 (set_attr "type" "load_byte")
4048 (set_attr "pool_range" "60")]
4051 (define_insn "*thumb1_zero_extendhisi2_v6"
4052 [(set (match_operand:SI 0 "register_operand" "=l,l")
4053 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4054 "TARGET_THUMB1 && arm_arch6"
4058 if (which_alternative == 0)
4059 return \"uxth\\t%0, %1\";
4061 mem = XEXP (operands[1], 0);
4063 if (GET_CODE (mem) == CONST)
4064 mem = XEXP (mem, 0);
4066 if (GET_CODE (mem) == LABEL_REF)
4067 return \"ldr\\t%0, %1\";
4069 if (GET_CODE (mem) == PLUS)
4071 rtx a = XEXP (mem, 0);
4072 rtx b = XEXP (mem, 1);
4074 /* This can happen due to bugs in reload. */
4075 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4078 ops[0] = operands[0];
4081 output_asm_insn (\"mov %0, %1\", ops);
4083 XEXP (mem, 0) = operands[0];
4086 else if ( GET_CODE (a) == LABEL_REF
4087 && GET_CODE (b) == CONST_INT)
4088 return \"ldr\\t%0, %1\";
4091 return \"ldrh\\t%0, %1\";
4093 [(set_attr "length" "2,4")
4094 (set_attr "type" "alu_shift,load_byte")
4095 (set_attr "pool_range" "*,60")]
4098 (define_insn "*arm_zero_extendhisi2"
4099 [(set (match_operand:SI 0 "s_register_operand" "=r")
4100 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4101 "TARGET_ARM && arm_arch4 && !arm_arch6"
4103 [(set_attr "type" "load_byte")
4104 (set_attr "predicable" "yes")
4105 (set_attr "pool_range" "256")
4106 (set_attr "neg_pool_range" "244")]
4109 (define_insn "*arm_zero_extendhisi2_v6"
4110 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4111 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4112 "TARGET_ARM && arm_arch6"
4116 [(set_attr "type" "alu_shift,load_byte")
4117 (set_attr "predicable" "yes")
4118 (set_attr "pool_range" "*,256")
4119 (set_attr "neg_pool_range" "*,244")]
4122 (define_insn "*arm_zero_extendhisi2addsi"
4123 [(set (match_operand:SI 0 "s_register_operand" "=r")
4124 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4125 (match_operand:SI 2 "s_register_operand" "r")))]
4127 "uxtah%?\\t%0, %2, %1"
4128 [(set_attr "type" "alu_shift")
4129 (set_attr "predicable" "yes")]
4132 (define_expand "zero_extendqisi2"
4133 [(set (match_operand:SI 0 "s_register_operand" "")
4134 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4137 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4141 emit_insn (gen_andsi3 (operands[0],
4142 gen_lowpart (SImode, operands[1]),
4145 else /* TARGET_THUMB */
4147 rtx temp = gen_reg_rtx (SImode);
4150 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4151 operands[1] = gen_lowpart (SImode, operands[1]);
4154 ops[1] = operands[1];
4155 ops[2] = GEN_INT (24);
4157 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4158 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4160 ops[0] = operands[0];
4162 ops[2] = GEN_INT (24);
4164 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4165 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4172 (define_insn "*thumb1_zero_extendqisi2"
4173 [(set (match_operand:SI 0 "register_operand" "=l")
4174 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4175 "TARGET_THUMB1 && !arm_arch6"
4177 [(set_attr "length" "2")
4178 (set_attr "type" "load_byte")
4179 (set_attr "pool_range" "32")]
4182 (define_insn "*thumb1_zero_extendqisi2_v6"
4183 [(set (match_operand:SI 0 "register_operand" "=l,l")
4184 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4185 "TARGET_THUMB1 && arm_arch6"
4189 [(set_attr "length" "2,2")
4190 (set_attr "type" "alu_shift,load_byte")
4191 (set_attr "pool_range" "*,32")]
4194 (define_insn "*arm_zero_extendqisi2"
4195 [(set (match_operand:SI 0 "s_register_operand" "=r")
4196 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4197 "TARGET_ARM && !arm_arch6"
4198 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4199 [(set_attr "type" "load_byte")
4200 (set_attr "predicable" "yes")
4201 (set_attr "pool_range" "4096")
4202 (set_attr "neg_pool_range" "4084")]
4205 (define_insn "*arm_zero_extendqisi2_v6"
4206 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4207 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4208 "TARGET_ARM && arm_arch6"
4211 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4212 [(set_attr "type" "alu_shift,load_byte")
4213 (set_attr "predicable" "yes")
4214 (set_attr "pool_range" "*,4096")
4215 (set_attr "neg_pool_range" "*,4084")]
4218 (define_insn "*arm_zero_extendqisi2addsi"
4219 [(set (match_operand:SI 0 "s_register_operand" "=r")
4220 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4221 (match_operand:SI 2 "s_register_operand" "r")))]
4223 "uxtab%?\\t%0, %2, %1"
4224 [(set_attr "predicable" "yes")
4225 (set_attr "insn" "xtab")
4226 (set_attr "type" "alu_shift")]
4230 [(set (match_operand:SI 0 "s_register_operand" "")
4231 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4232 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4233 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4234 [(set (match_dup 2) (match_dup 1))
4235 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4240 [(set (match_operand:SI 0 "s_register_operand" "")
4241 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4242 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4243 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4244 [(set (match_dup 2) (match_dup 1))
4245 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4249 (define_code_iterator ior_xor [ior xor])
4252 [(set (match_operand:SI 0 "s_register_operand" "")
4253 (ior_xor:SI (and:SI (ashift:SI
4254 (match_operand:SI 1 "s_register_operand" "")
4255 (match_operand:SI 2 "const_int_operand" ""))
4256 (match_operand:SI 3 "const_int_operand" ""))
4258 (match_operator 5 "subreg_lowpart_operator"
4259 [(match_operand:SI 4 "s_register_operand" "")]))))]
4261 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4262 == (GET_MODE_MASK (GET_MODE (operands[5]))
4263 & (GET_MODE_MASK (GET_MODE (operands[5]))
4264 << (INTVAL (operands[2])))))"
4265 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4267 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4268 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4271 (define_insn "*compareqi_eq0"
4272 [(set (reg:CC_Z CC_REGNUM)
4273 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4277 [(set_attr "conds" "set")]
4280 (define_expand "extendhisi2"
4282 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4284 (set (match_operand:SI 0 "s_register_operand" "")
4285 (ashiftrt:SI (match_dup 2)
4290 if (GET_CODE (operands[1]) == MEM)
4294 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4299 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4300 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4305 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4307 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4311 if (!s_register_operand (operands[1], HImode))
4312 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4317 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4319 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4320 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4325 operands[1] = gen_lowpart (SImode, operands[1]);
4326 operands[2] = gen_reg_rtx (SImode);
4330 (define_insn "thumb1_extendhisi2"
4331 [(set (match_operand:SI 0 "register_operand" "=l")
4332 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4333 (clobber (match_scratch:SI 2 "=&l"))]
4334 "TARGET_THUMB1 && !arm_arch6"
4338 rtx mem = XEXP (operands[1], 0);
4340 /* This code used to try to use 'V', and fix the address only if it was
4341 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4342 range of QImode offsets, and offsettable_address_p does a QImode
4345 if (GET_CODE (mem) == CONST)
4346 mem = XEXP (mem, 0);
4348 if (GET_CODE (mem) == LABEL_REF)
4349 return \"ldr\\t%0, %1\";
4351 if (GET_CODE (mem) == PLUS)
4353 rtx a = XEXP (mem, 0);
4354 rtx b = XEXP (mem, 1);
4356 if (GET_CODE (a) == LABEL_REF
4357 && GET_CODE (b) == CONST_INT)
4358 return \"ldr\\t%0, %1\";
4360 if (GET_CODE (b) == REG)
4361 return \"ldrsh\\t%0, %1\";
4369 ops[2] = const0_rtx;
4372 gcc_assert (GET_CODE (ops[1]) == REG);
4374 ops[0] = operands[0];
4375 ops[3] = operands[2];
4376 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4379 [(set_attr "length" "4")
4380 (set_attr "type" "load_byte")
4381 (set_attr "pool_range" "1020")]
4384 ;; We used to have an early-clobber on the scratch register here.
4385 ;; However, there's a bug somewhere in reload which means that this
4386 ;; can be partially ignored during spill allocation if the memory
4387 ;; address also needs reloading; this causes us to die later on when
4388 ;; we try to verify the operands. Fortunately, we don't really need
4389 ;; the early-clobber: we can always use operand 0 if operand 2
4390 ;; overlaps the address.
4391 (define_insn "*thumb1_extendhisi2_insn_v6"
4392 [(set (match_operand:SI 0 "register_operand" "=l,l")
4393 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4394 (clobber (match_scratch:SI 2 "=X,l"))]
4395 "TARGET_THUMB1 && arm_arch6"
4401 if (which_alternative == 0)
4402 return \"sxth\\t%0, %1\";
4404 mem = XEXP (operands[1], 0);
4406 /* This code used to try to use 'V', and fix the address only if it was
4407 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4408 range of QImode offsets, and offsettable_address_p does a QImode
4411 if (GET_CODE (mem) == CONST)
4412 mem = XEXP (mem, 0);
4414 if (GET_CODE (mem) == LABEL_REF)
4415 return \"ldr\\t%0, %1\";
4417 if (GET_CODE (mem) == PLUS)
4419 rtx a = XEXP (mem, 0);
4420 rtx b = XEXP (mem, 1);
4422 if (GET_CODE (a) == LABEL_REF
4423 && GET_CODE (b) == CONST_INT)
4424 return \"ldr\\t%0, %1\";
4426 if (GET_CODE (b) == REG)
4427 return \"ldrsh\\t%0, %1\";
4435 ops[2] = const0_rtx;
4438 gcc_assert (GET_CODE (ops[1]) == REG);
4440 ops[0] = operands[0];
4441 if (reg_mentioned_p (operands[2], ops[1]))
4444 ops[3] = operands[2];
4445 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4448 [(set_attr "length" "2,4")
4449 (set_attr "type" "alu_shift,load_byte")
4450 (set_attr "pool_range" "*,1020")]
4453 ;; This pattern will only be used when ldsh is not available
4454 (define_expand "extendhisi2_mem"
4455 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4457 (zero_extend:SI (match_dup 7)))
4458 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4459 (set (match_operand:SI 0 "" "")
4460 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4465 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4467 mem1 = change_address (operands[1], QImode, addr);
4468 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4469 operands[0] = gen_lowpart (SImode, operands[0]);
4471 operands[2] = gen_reg_rtx (SImode);
4472 operands[3] = gen_reg_rtx (SImode);
4473 operands[6] = gen_reg_rtx (SImode);
4476 if (BYTES_BIG_ENDIAN)
4478 operands[4] = operands[2];
4479 operands[5] = operands[3];
4483 operands[4] = operands[3];
4484 operands[5] = operands[2];
4489 (define_insn "*arm_extendhisi2"
4490 [(set (match_operand:SI 0 "s_register_operand" "=r")
4491 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4492 "TARGET_ARM && arm_arch4 && !arm_arch6"
4493 "ldr%(sh%)\\t%0, %1"
4494 [(set_attr "type" "load_byte")
4495 (set_attr "predicable" "yes")
4496 (set_attr "pool_range" "256")
4497 (set_attr "neg_pool_range" "244")]
4500 ;; ??? Check Thumb-2 pool range
4501 (define_insn "*arm_extendhisi2_v6"
4502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4503 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4504 "TARGET_32BIT && arm_arch6"
4508 [(set_attr "type" "alu_shift,load_byte")
4509 (set_attr "predicable" "yes")
4510 (set_attr "pool_range" "*,256")
4511 (set_attr "neg_pool_range" "*,244")]
4514 (define_insn "*arm_extendhisi2addsi"
4515 [(set (match_operand:SI 0 "s_register_operand" "=r")
4516 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4517 (match_operand:SI 2 "s_register_operand" "r")))]
4519 "sxtah%?\\t%0, %2, %1"
4522 (define_expand "extendqihi2"
4524 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4526 (set (match_operand:HI 0 "s_register_operand" "")
4527 (ashiftrt:SI (match_dup 2)
4532 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4534 emit_insn (gen_rtx_SET (VOIDmode,
4536 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4539 if (!s_register_operand (operands[1], QImode))
4540 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4541 operands[0] = gen_lowpart (SImode, operands[0]);
4542 operands[1] = gen_lowpart (SImode, operands[1]);
4543 operands[2] = gen_reg_rtx (SImode);
4547 (define_insn "*arm_extendqihi_insn"
4548 [(set (match_operand:HI 0 "s_register_operand" "=r")
4549 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4550 "TARGET_ARM && arm_arch4"
4551 "ldr%(sb%)\\t%0, %1"
4552 [(set_attr "type" "load_byte")
4553 (set_attr "predicable" "yes")
4554 (set_attr "pool_range" "256")
4555 (set_attr "neg_pool_range" "244")]
4558 (define_expand "extendqisi2"
4560 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4562 (set (match_operand:SI 0 "s_register_operand" "")
4563 (ashiftrt:SI (match_dup 2)
4568 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4570 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4571 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4575 if (!s_register_operand (operands[1], QImode))
4576 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4580 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4581 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4585 operands[1] = gen_lowpart (SImode, operands[1]);
4586 operands[2] = gen_reg_rtx (SImode);
4590 (define_insn "*arm_extendqisi"
4591 [(set (match_operand:SI 0 "s_register_operand" "=r")
4592 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4593 "TARGET_ARM && arm_arch4 && !arm_arch6"
4594 "ldr%(sb%)\\t%0, %1"
4595 [(set_attr "type" "load_byte")
4596 (set_attr "predicable" "yes")
4597 (set_attr "pool_range" "256")
4598 (set_attr "neg_pool_range" "244")]
4601 (define_insn "*arm_extendqisi_v6"
4602 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4604 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4605 "TARGET_ARM && arm_arch6"
4609 [(set_attr "type" "alu_shift,load_byte")
4610 (set_attr "predicable" "yes")
4611 (set_attr "pool_range" "*,256")
4612 (set_attr "neg_pool_range" "*,244")]
4615 (define_insn "*arm_extendqisi2addsi"
4616 [(set (match_operand:SI 0 "s_register_operand" "=r")
4617 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4618 (match_operand:SI 2 "s_register_operand" "r")))]
4620 "sxtab%?\\t%0, %2, %1"
4621 [(set_attr "type" "alu_shift")
4622 (set_attr "insn" "xtab")
4623 (set_attr "predicable" "yes")]
4626 (define_insn "*thumb1_extendqisi2"
4627 [(set (match_operand:SI 0 "register_operand" "=l,l")
4628 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4629 "TARGET_THUMB1 && !arm_arch6"
4633 rtx mem = XEXP (operands[1], 0);
4635 if (GET_CODE (mem) == CONST)
4636 mem = XEXP (mem, 0);
4638 if (GET_CODE (mem) == LABEL_REF)
4639 return \"ldr\\t%0, %1\";
4641 if (GET_CODE (mem) == PLUS
4642 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4643 return \"ldr\\t%0, %1\";
4645 if (which_alternative == 0)
4646 return \"ldrsb\\t%0, %1\";
4648 ops[0] = operands[0];
4650 if (GET_CODE (mem) == PLUS)
4652 rtx a = XEXP (mem, 0);
4653 rtx b = XEXP (mem, 1);
4658 if (GET_CODE (a) == REG)
4660 if (GET_CODE (b) == REG)
4661 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4662 else if (REGNO (a) == REGNO (ops[0]))
4664 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4665 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4666 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4669 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4673 gcc_assert (GET_CODE (b) == REG);
4674 if (REGNO (b) == REGNO (ops[0]))
4676 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4677 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4678 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4681 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4684 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4686 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4687 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4688 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4693 ops[2] = const0_rtx;
4695 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4699 [(set_attr "length" "2,6")
4700 (set_attr "type" "load_byte,load_byte")
4701 (set_attr "pool_range" "32,32")]
4704 (define_insn "*thumb1_extendqisi2_v6"
4705 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4706 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4707 "TARGET_THUMB1 && arm_arch6"
4713 if (which_alternative == 0)
4714 return \"sxtb\\t%0, %1\";
4716 mem = XEXP (operands[1], 0);
4718 if (GET_CODE (mem) == CONST)
4719 mem = XEXP (mem, 0);
4721 if (GET_CODE (mem) == LABEL_REF)
4722 return \"ldr\\t%0, %1\";
4724 if (GET_CODE (mem) == PLUS
4725 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4726 return \"ldr\\t%0, %1\";
4728 if (which_alternative == 0)
4729 return \"ldrsb\\t%0, %1\";
4731 ops[0] = operands[0];
4733 if (GET_CODE (mem) == PLUS)
4735 rtx a = XEXP (mem, 0);
4736 rtx b = XEXP (mem, 1);
4741 if (GET_CODE (a) == REG)
4743 if (GET_CODE (b) == REG)
4744 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4745 else if (REGNO (a) == REGNO (ops[0]))
4747 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4748 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4751 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4755 gcc_assert (GET_CODE (b) == REG);
4756 if (REGNO (b) == REGNO (ops[0]))
4758 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4759 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4762 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4765 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4767 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4768 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4773 ops[2] = const0_rtx;
4775 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4779 [(set_attr "length" "2,2,4")
4780 (set_attr "type" "alu_shift,load_byte,load_byte")
4781 (set_attr "pool_range" "*,32,32")]
4784 (define_expand "extendsfdf2"
4785 [(set (match_operand:DF 0 "s_register_operand" "")
4786 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4787 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4791 /* HFmode -> DFmode conversions have to go through SFmode. */
4792 (define_expand "extendhfdf2"
4793 [(set (match_operand:DF 0 "general_operand" "")
4794 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4799 op1 = convert_to_mode (SFmode, operands[1], 0);
4800 op1 = convert_to_mode (DFmode, op1, 0);
4801 emit_insn (gen_movdf (operands[0], op1));
4806 ;; Move insns (including loads and stores)
4808 ;; XXX Just some ideas about movti.
4809 ;; I don't think these are a good idea on the arm, there just aren't enough
4811 ;;(define_expand "loadti"
4812 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4813 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4816 ;;(define_expand "storeti"
4817 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4818 ;; (match_operand:TI 1 "s_register_operand" ""))]
4821 ;;(define_expand "movti"
4822 ;; [(set (match_operand:TI 0 "general_operand" "")
4823 ;; (match_operand:TI 1 "general_operand" ""))]
4829 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4830 ;; operands[1] = copy_to_reg (operands[1]);
4831 ;; if (GET_CODE (operands[0]) == MEM)
4832 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4833 ;; else if (GET_CODE (operands[1]) == MEM)
4834 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4838 ;; emit_insn (insn);
4842 ;; Recognize garbage generated above.
4845 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4846 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4850 ;; register mem = (which_alternative < 3);
4851 ;; register const char *template;
4853 ;; operands[mem] = XEXP (operands[mem], 0);
4854 ;; switch (which_alternative)
4856 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4857 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4858 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4859 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4860 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4861 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4863 ;; output_asm_insn (template, operands);
4867 (define_expand "movdi"
4868 [(set (match_operand:DI 0 "general_operand" "")
4869 (match_operand:DI 1 "general_operand" ""))]
4872 if (can_create_pseudo_p ())
4874 if (GET_CODE (operands[0]) != REG)
4875 operands[1] = force_reg (DImode, operands[1]);
4880 (define_insn "*arm_movdi"
4881 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4882 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4884 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4886 && ( register_operand (operands[0], DImode)
4887 || register_operand (operands[1], DImode))"
4889 switch (which_alternative)
4896 return output_move_double (operands);
4899 [(set_attr "length" "8,12,16,8,8")
4900 (set_attr "type" "*,*,*,load2,store2")
4901 (set_attr "pool_range" "*,*,*,1020,*")
4902 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4906 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4907 (match_operand:ANY64 1 "const_double_operand" ""))]
4910 && (arm_const_double_inline_cost (operands[1])
4911 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4914 arm_split_constant (SET, SImode, curr_insn,
4915 INTVAL (gen_lowpart (SImode, operands[1])),
4916 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4917 arm_split_constant (SET, SImode, curr_insn,
4918 INTVAL (gen_highpart_mode (SImode,
4919 GET_MODE (operands[0]),
4921 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4926 ; If optimizing for size, or if we have load delay slots, then
4927 ; we want to split the constant into two separate operations.
4928 ; In both cases this may split a trivial part into a single data op
4929 ; leaving a single complex constant to load. We can also get longer
4930 ; offsets in a LDR which means we get better chances of sharing the pool
4931 ; entries. Finally, we can normally do a better job of scheduling
4932 ; LDR instructions than we can with LDM.
4933 ; This pattern will only match if the one above did not.
4935 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4936 (match_operand:ANY64 1 "const_double_operand" ""))]
4937 "TARGET_ARM && reload_completed
4938 && arm_const_double_by_parts (operands[1])"
4939 [(set (match_dup 0) (match_dup 1))
4940 (set (match_dup 2) (match_dup 3))]
4942 operands[2] = gen_highpart (SImode, operands[0]);
4943 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4945 operands[0] = gen_lowpart (SImode, operands[0]);
4946 operands[1] = gen_lowpart (SImode, operands[1]);
4951 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4952 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4953 "TARGET_EITHER && reload_completed"
4954 [(set (match_dup 0) (match_dup 1))
4955 (set (match_dup 2) (match_dup 3))]
4957 operands[2] = gen_highpart (SImode, operands[0]);
4958 operands[3] = gen_highpart (SImode, operands[1]);
4959 operands[0] = gen_lowpart (SImode, operands[0]);
4960 operands[1] = gen_lowpart (SImode, operands[1]);
4962 /* Handle a partial overlap. */
4963 if (rtx_equal_p (operands[0], operands[3]))
4965 rtx tmp0 = operands[0];
4966 rtx tmp1 = operands[1];
4968 operands[0] = operands[2];
4969 operands[1] = operands[3];
4976 ;; We can't actually do base+index doubleword loads if the index and
4977 ;; destination overlap. Split here so that we at least have chance to
4980 [(set (match_operand:DI 0 "s_register_operand" "")
4981 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4982 (match_operand:SI 2 "s_register_operand" ""))))]
4984 && reg_overlap_mentioned_p (operands[0], operands[1])
4985 && reg_overlap_mentioned_p (operands[0], operands[2])"
4987 (plus:SI (match_dup 1)
4990 (mem:DI (match_dup 4)))]
4992 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4996 ;;; ??? This should have alternatives for constants.
4997 ;;; ??? This was originally identical to the movdf_insn pattern.
4998 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4999 ;;; thumb_reorg with a memory reference.
5000 (define_insn "*thumb1_movdi_insn"
5001 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5002 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5004 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5005 && ( register_operand (operands[0], DImode)
5006 || register_operand (operands[1], DImode))"
5009 switch (which_alternative)
5013 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5014 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5015 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5017 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5019 operands[1] = GEN_INT (- INTVAL (operands[1]));
5020 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5022 return \"ldmia\\t%1, {%0, %H0}\";
5024 return \"stmia\\t%0, {%1, %H1}\";
5026 return thumb_load_double_from_address (operands);
5028 operands[2] = gen_rtx_MEM (SImode,
5029 plus_constant (XEXP (operands[0], 0), 4));
5030 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5033 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5034 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5035 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5038 [(set_attr "length" "4,4,6,2,2,6,4,4")
5039 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5040 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5043 (define_expand "movsi"
5044 [(set (match_operand:SI 0 "general_operand" "")
5045 (match_operand:SI 1 "general_operand" ""))]
5049 rtx base, offset, tmp;
5053 /* Everything except mem = const or mem = mem can be done easily. */
5054 if (GET_CODE (operands[0]) == MEM)
5055 operands[1] = force_reg (SImode, operands[1]);
5056 if (arm_general_register_operand (operands[0], SImode)
5057 && GET_CODE (operands[1]) == CONST_INT
5058 && !(const_ok_for_arm (INTVAL (operands[1]))
5059 || const_ok_for_arm (~INTVAL (operands[1]))))
5061 arm_split_constant (SET, SImode, NULL_RTX,
5062 INTVAL (operands[1]), operands[0], NULL_RTX,
5063 optimize && can_create_pseudo_p ());
5067 if (TARGET_USE_MOVT && !target_word_relocations
5068 && GET_CODE (operands[1]) == SYMBOL_REF
5069 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5071 arm_emit_movpair (operands[0], operands[1]);
5075 else /* TARGET_THUMB1... */
5077 if (can_create_pseudo_p ())
5079 if (GET_CODE (operands[0]) != REG)
5080 operands[1] = force_reg (SImode, operands[1]);
5084 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5086 split_const (operands[1], &base, &offset);
5087 if (GET_CODE (base) == SYMBOL_REF
5088 && !offset_within_block_p (base, INTVAL (offset)))
5090 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5091 emit_move_insn (tmp, base);
5092 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5097 /* Recognize the case where operand[1] is a reference to thread-local
5098 data and load its address to a register. */
5099 if (arm_tls_referenced_p (operands[1]))
5101 rtx tmp = operands[1];
5104 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5106 addend = XEXP (XEXP (tmp, 0), 1);
5107 tmp = XEXP (XEXP (tmp, 0), 0);
5110 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5111 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5113 tmp = legitimize_tls_address (tmp,
5114 !can_create_pseudo_p () ? operands[0] : 0);
5117 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5118 tmp = force_operand (tmp, operands[0]);
5123 && (CONSTANT_P (operands[1])
5124 || symbol_mentioned_p (operands[1])
5125 || label_mentioned_p (operands[1])))
5126 operands[1] = legitimize_pic_address (operands[1], SImode,
5127 (!can_create_pseudo_p ()
5134 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5135 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5136 ;; so this does not matter.
5137 (define_insn "*arm_movt"
5138 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5139 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5140 (match_operand:SI 2 "general_operand" "i")))]
5142 "movt%?\t%0, #:upper16:%c2"
5143 [(set_attr "predicable" "yes")
5144 (set_attr "length" "4")]
5147 (define_insn "*arm_movsi_insn"
5148 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5149 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5150 "TARGET_ARM && ! TARGET_IWMMXT
5151 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5152 && ( register_operand (operands[0], SImode)
5153 || register_operand (operands[1], SImode))"
5161 [(set_attr "type" "*,*,*,*,load1,store1")
5162 (set_attr "predicable" "yes")
5163 (set_attr "pool_range" "*,*,*,*,4096,*")
5164 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5168 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5169 (match_operand:SI 1 "const_int_operand" ""))]
5171 && (!(const_ok_for_arm (INTVAL (operands[1]))
5172 || const_ok_for_arm (~INTVAL (operands[1]))))"
5173 [(clobber (const_int 0))]
5175 arm_split_constant (SET, SImode, NULL_RTX,
5176 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5181 (define_insn "*thumb1_movsi_insn"
5182 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5183 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5185 && ( register_operand (operands[0], SImode)
5186 || register_operand (operands[1], SImode))"
5197 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5198 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5199 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5203 [(set (match_operand:SI 0 "register_operand" "")
5204 (match_operand:SI 1 "const_int_operand" ""))]
5205 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5206 [(set (match_dup 0) (match_dup 1))
5207 (set (match_dup 0) (neg:SI (match_dup 0)))]
5208 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5212 [(set (match_operand:SI 0 "register_operand" "")
5213 (match_operand:SI 1 "const_int_operand" ""))]
5214 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5215 [(set (match_dup 0) (match_dup 1))
5216 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5219 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5220 unsigned HOST_WIDE_INT mask = 0xff;
5223 for (i = 0; i < 25; i++)
5224 if ((val & (mask << i)) == val)
5227 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5231 operands[1] = GEN_INT (val >> i);
5232 operands[2] = GEN_INT (i);
5236 ;; When generating pic, we need to load the symbol offset into a register.
5237 ;; So that the optimizer does not confuse this with a normal symbol load
5238 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5239 ;; since that is the only type of relocation we can use.
5241 ;; The rather odd constraints on the following are to force reload to leave
5242 ;; the insn alone, and to force the minipool generation pass to then move
5243 ;; the GOT symbol to memory.
5245 (define_insn "pic_load_addr_32bit"
5246 [(set (match_operand:SI 0 "s_register_operand" "=r")
5247 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5248 "TARGET_32BIT && flag_pic"
5250 [(set_attr "type" "load1")
5251 (set_attr "pool_range" "4096")
5252 (set (attr "neg_pool_range")
5253 (if_then_else (eq_attr "is_thumb" "no")
5258 (define_insn "pic_load_addr_thumb1"
5259 [(set (match_operand:SI 0 "s_register_operand" "=l")
5260 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5261 "TARGET_THUMB1 && flag_pic"
5263 [(set_attr "type" "load1")
5264 (set (attr "pool_range") (const_int 1024))]
5267 (define_insn "pic_add_dot_plus_four"
5268 [(set (match_operand:SI 0 "register_operand" "=r")
5269 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5271 (match_operand 2 "" "")]
5275 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5276 INTVAL (operands[2]));
5277 return \"add\\t%0, %|pc\";
5279 [(set_attr "length" "2")]
5282 (define_insn "pic_add_dot_plus_eight"
5283 [(set (match_operand:SI 0 "register_operand" "=r")
5284 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5286 (match_operand 2 "" "")]
5290 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5291 INTVAL (operands[2]));
5292 return \"add%?\\t%0, %|pc, %1\";
5294 [(set_attr "predicable" "yes")]
5297 (define_insn "tls_load_dot_plus_eight"
5298 [(set (match_operand:SI 0 "register_operand" "=r")
5299 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5301 (match_operand 2 "" "")]
5305 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5306 INTVAL (operands[2]));
5307 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5309 [(set_attr "predicable" "yes")]
5312 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5313 ;; followed by a load. These sequences can be crunched down to
5314 ;; tls_load_dot_plus_eight by a peephole.
5317 [(set (match_operand:SI 0 "register_operand" "")
5318 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5320 (match_operand 1 "" "")]
5322 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5323 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5325 (mem:SI (unspec:SI [(match_dup 3)
5332 (define_insn "pic_offset_arm"
5333 [(set (match_operand:SI 0 "register_operand" "=r")
5334 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5335 (unspec:SI [(match_operand:SI 2 "" "X")]
5336 UNSPEC_PIC_OFFSET))))]
5337 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5338 "ldr%?\\t%0, [%1,%2]"
5339 [(set_attr "type" "load1")]
5342 (define_expand "builtin_setjmp_receiver"
5343 [(label_ref (match_operand 0 "" ""))]
5347 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5349 if (arm_pic_register != INVALID_REGNUM)
5350 arm_load_pic_register (1UL << 3);
5354 ;; If copying one reg to another we can set the condition codes according to
5355 ;; its value. Such a move is common after a return from subroutine and the
5356 ;; result is being tested against zero.
5358 (define_insn "*movsi_compare0"
5359 [(set (reg:CC CC_REGNUM)
5360 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5362 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5368 [(set_attr "conds" "set")]
5371 ;; Subroutine to store a half word from a register into memory.
5372 ;; Operand 0 is the source register (HImode)
5373 ;; Operand 1 is the destination address in a register (SImode)
5375 ;; In both this routine and the next, we must be careful not to spill
5376 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5377 ;; can generate unrecognizable rtl.
5379 (define_expand "storehi"
5380 [;; store the low byte
5381 (set (match_operand 1 "" "") (match_dup 3))
5382 ;; extract the high byte
5384 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5385 ;; store the high byte
5386 (set (match_dup 4) (match_dup 5))]
5390 rtx op1 = operands[1];
5391 rtx addr = XEXP (op1, 0);
5392 enum rtx_code code = GET_CODE (addr);
5394 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5396 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5398 operands[4] = adjust_address (op1, QImode, 1);
5399 operands[1] = adjust_address (operands[1], QImode, 0);
5400 operands[3] = gen_lowpart (QImode, operands[0]);
5401 operands[0] = gen_lowpart (SImode, operands[0]);
5402 operands[2] = gen_reg_rtx (SImode);
5403 operands[5] = gen_lowpart (QImode, operands[2]);
5407 (define_expand "storehi_bigend"
5408 [(set (match_dup 4) (match_dup 3))
5410 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5411 (set (match_operand 1 "" "") (match_dup 5))]
5415 rtx op1 = operands[1];
5416 rtx addr = XEXP (op1, 0);
5417 enum rtx_code code = GET_CODE (addr);
5419 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5421 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5423 operands[4] = adjust_address (op1, QImode, 1);
5424 operands[1] = adjust_address (operands[1], QImode, 0);
5425 operands[3] = gen_lowpart (QImode, operands[0]);
5426 operands[0] = gen_lowpart (SImode, operands[0]);
5427 operands[2] = gen_reg_rtx (SImode);
5428 operands[5] = gen_lowpart (QImode, operands[2]);
5432 ;; Subroutine to store a half word integer constant into memory.
5433 (define_expand "storeinthi"
5434 [(set (match_operand 0 "" "")
5435 (match_operand 1 "" ""))
5436 (set (match_dup 3) (match_dup 2))]
5440 HOST_WIDE_INT value = INTVAL (operands[1]);
5441 rtx addr = XEXP (operands[0], 0);
5442 rtx op0 = operands[0];
5443 enum rtx_code code = GET_CODE (addr);
5445 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5447 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5449 operands[1] = gen_reg_rtx (SImode);
5450 if (BYTES_BIG_ENDIAN)
5452 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5453 if ((value & 255) == ((value >> 8) & 255))
5454 operands[2] = operands[1];
5457 operands[2] = gen_reg_rtx (SImode);
5458 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5463 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5464 if ((value & 255) == ((value >> 8) & 255))
5465 operands[2] = operands[1];
5468 operands[2] = gen_reg_rtx (SImode);
5469 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5473 operands[3] = adjust_address (op0, QImode, 1);
5474 operands[0] = adjust_address (operands[0], QImode, 0);
5475 operands[2] = gen_lowpart (QImode, operands[2]);
5476 operands[1] = gen_lowpart (QImode, operands[1]);
5480 (define_expand "storehi_single_op"
5481 [(set (match_operand:HI 0 "memory_operand" "")
5482 (match_operand:HI 1 "general_operand" ""))]
5483 "TARGET_32BIT && arm_arch4"
5485 if (!s_register_operand (operands[1], HImode))
5486 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5490 (define_expand "movhi"
5491 [(set (match_operand:HI 0 "general_operand" "")
5492 (match_operand:HI 1 "general_operand" ""))]
5497 if (can_create_pseudo_p ())
5499 if (GET_CODE (operands[0]) == MEM)
5503 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5506 if (GET_CODE (operands[1]) == CONST_INT)
5507 emit_insn (gen_storeinthi (operands[0], operands[1]));
5510 if (GET_CODE (operands[1]) == MEM)
5511 operands[1] = force_reg (HImode, operands[1]);
5512 if (BYTES_BIG_ENDIAN)
5513 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5515 emit_insn (gen_storehi (operands[1], operands[0]));
5519 /* Sign extend a constant, and keep it in an SImode reg. */
5520 else if (GET_CODE (operands[1]) == CONST_INT)
5522 rtx reg = gen_reg_rtx (SImode);
5523 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5525 /* If the constant is already valid, leave it alone. */
5526 if (!const_ok_for_arm (val))
5528 /* If setting all the top bits will make the constant
5529 loadable in a single instruction, then set them.
5530 Otherwise, sign extend the number. */
5532 if (const_ok_for_arm (~(val | ~0xffff)))
5534 else if (val & 0x8000)
5538 emit_insn (gen_movsi (reg, GEN_INT (val)));
5539 operands[1] = gen_lowpart (HImode, reg);
5541 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5542 && GET_CODE (operands[1]) == MEM)
5544 rtx reg = gen_reg_rtx (SImode);
5546 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5547 operands[1] = gen_lowpart (HImode, reg);
5549 else if (!arm_arch4)
5551 if (GET_CODE (operands[1]) == MEM)
5554 rtx offset = const0_rtx;
5555 rtx reg = gen_reg_rtx (SImode);
5557 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5558 || (GET_CODE (base) == PLUS
5559 && (GET_CODE (offset = XEXP (base, 1))
5561 && ((INTVAL(offset) & 1) != 1)
5562 && GET_CODE (base = XEXP (base, 0)) == REG))
5563 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5567 new_rtx = widen_memory_access (operands[1], SImode,
5568 ((INTVAL (offset) & ~3)
5569 - INTVAL (offset)));
5570 emit_insn (gen_movsi (reg, new_rtx));
5571 if (((INTVAL (offset) & 2) != 0)
5572 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5574 rtx reg2 = gen_reg_rtx (SImode);
5576 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5581 emit_insn (gen_movhi_bytes (reg, operands[1]));
5583 operands[1] = gen_lowpart (HImode, reg);
5587 /* Handle loading a large integer during reload. */
5588 else if (GET_CODE (operands[1]) == CONST_INT
5589 && !const_ok_for_arm (INTVAL (operands[1]))
5590 && !const_ok_for_arm (~INTVAL (operands[1])))
5592 /* Writing a constant to memory needs a scratch, which should
5593 be handled with SECONDARY_RELOADs. */
5594 gcc_assert (GET_CODE (operands[0]) == REG);
5596 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5597 emit_insn (gen_movsi (operands[0], operands[1]));
5601 else if (TARGET_THUMB2)
5603 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5604 if (can_create_pseudo_p ())
5606 if (GET_CODE (operands[0]) != REG)
5607 operands[1] = force_reg (HImode, operands[1]);
5608 /* Zero extend a constant, and keep it in an SImode reg. */
5609 else if (GET_CODE (operands[1]) == CONST_INT)
5611 rtx reg = gen_reg_rtx (SImode);
5612 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5614 emit_insn (gen_movsi (reg, GEN_INT (val)));
5615 operands[1] = gen_lowpart (HImode, reg);
5619 else /* TARGET_THUMB1 */
5621 if (can_create_pseudo_p ())
5623 if (GET_CODE (operands[1]) == CONST_INT)
5625 rtx reg = gen_reg_rtx (SImode);
5627 emit_insn (gen_movsi (reg, operands[1]));
5628 operands[1] = gen_lowpart (HImode, reg);
5631 /* ??? We shouldn't really get invalid addresses here, but this can
5632 happen if we are passed a SP (never OK for HImode/QImode) or
5633 virtual register (also rejected as illegitimate for HImode/QImode)
5634 relative address. */
5635 /* ??? This should perhaps be fixed elsewhere, for instance, in
5636 fixup_stack_1, by checking for other kinds of invalid addresses,
5637 e.g. a bare reference to a virtual register. This may confuse the
5638 alpha though, which must handle this case differently. */
5639 if (GET_CODE (operands[0]) == MEM
5640 && !memory_address_p (GET_MODE (operands[0]),
5641 XEXP (operands[0], 0)))
5643 = replace_equiv_address (operands[0],
5644 copy_to_reg (XEXP (operands[0], 0)));
5646 if (GET_CODE (operands[1]) == MEM
5647 && !memory_address_p (GET_MODE (operands[1]),
5648 XEXP (operands[1], 0)))
5650 = replace_equiv_address (operands[1],
5651 copy_to_reg (XEXP (operands[1], 0)));
5653 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5655 rtx reg = gen_reg_rtx (SImode);
5657 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5658 operands[1] = gen_lowpart (HImode, reg);
5661 if (GET_CODE (operands[0]) == MEM)
5662 operands[1] = force_reg (HImode, operands[1]);
5664 else if (GET_CODE (operands[1]) == CONST_INT
5665 && !satisfies_constraint_I (operands[1]))
5667 /* Handle loading a large integer during reload. */
5669 /* Writing a constant to memory needs a scratch, which should
5670 be handled with SECONDARY_RELOADs. */
5671 gcc_assert (GET_CODE (operands[0]) == REG);
5673 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5674 emit_insn (gen_movsi (operands[0], operands[1]));
5681 (define_insn "*thumb1_movhi_insn"
5682 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5683 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5685 && ( register_operand (operands[0], HImode)
5686 || register_operand (operands[1], HImode))"
5688 switch (which_alternative)
5690 case 0: return \"add %0, %1, #0\";
5691 case 2: return \"strh %1, %0\";
5692 case 3: return \"mov %0, %1\";
5693 case 4: return \"mov %0, %1\";
5694 case 5: return \"mov %0, %1\";
5695 default: gcc_unreachable ();
5697 /* The stack pointer can end up being taken as an index register.
5698 Catch this case here and deal with it. */
5699 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5700 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5701 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5704 ops[0] = operands[0];
5705 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5707 output_asm_insn (\"mov %0, %1\", ops);
5709 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5712 return \"ldrh %0, %1\";
5714 [(set_attr "length" "2,4,2,2,2,2")
5715 (set_attr "type" "*,load1,store1,*,*,*")]
5719 (define_expand "movhi_bytes"
5720 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5722 (zero_extend:SI (match_dup 6)))
5723 (set (match_operand:SI 0 "" "")
5724 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5729 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5731 mem1 = change_address (operands[1], QImode, addr);
5732 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5733 operands[0] = gen_lowpart (SImode, operands[0]);
5735 operands[2] = gen_reg_rtx (SImode);
5736 operands[3] = gen_reg_rtx (SImode);
5739 if (BYTES_BIG_ENDIAN)
5741 operands[4] = operands[2];
5742 operands[5] = operands[3];
5746 operands[4] = operands[3];
5747 operands[5] = operands[2];
5752 (define_expand "movhi_bigend"
5754 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5757 (ashiftrt:SI (match_dup 2) (const_int 16)))
5758 (set (match_operand:HI 0 "s_register_operand" "")
5762 operands[2] = gen_reg_rtx (SImode);
5763 operands[3] = gen_reg_rtx (SImode);
5764 operands[4] = gen_lowpart (HImode, operands[3]);
5768 ;; Pattern to recognize insn generated default case above
5769 (define_insn "*movhi_insn_arch4"
5770 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5771 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5774 && (GET_CODE (operands[1]) != CONST_INT
5775 || const_ok_for_arm (INTVAL (operands[1]))
5776 || const_ok_for_arm (~INTVAL (operands[1])))"
5778 mov%?\\t%0, %1\\t%@ movhi
5779 mvn%?\\t%0, #%B1\\t%@ movhi
5780 str%(h%)\\t%1, %0\\t%@ movhi
5781 ldr%(h%)\\t%0, %1\\t%@ movhi"
5782 [(set_attr "type" "*,*,store1,load1")
5783 (set_attr "predicable" "yes")
5784 (set_attr "pool_range" "*,*,*,256")
5785 (set_attr "neg_pool_range" "*,*,*,244")]
5788 (define_insn "*movhi_bytes"
5789 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5790 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5793 mov%?\\t%0, %1\\t%@ movhi
5794 mvn%?\\t%0, #%B1\\t%@ movhi"
5795 [(set_attr "predicable" "yes")]
5798 (define_expand "thumb_movhi_clobber"
5799 [(set (match_operand:HI 0 "memory_operand" "")
5800 (match_operand:HI 1 "register_operand" ""))
5801 (clobber (match_operand:DI 2 "register_operand" ""))]
5804 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5805 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5807 emit_insn (gen_movhi (operands[0], operands[1]));
5810 /* XXX Fixme, need to handle other cases here as well. */
5815 ;; We use a DImode scratch because we may occasionally need an additional
5816 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5817 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5818 (define_expand "reload_outhi"
5819 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5820 (match_operand:HI 1 "s_register_operand" "r")
5821 (match_operand:DI 2 "s_register_operand" "=&l")])]
5824 arm_reload_out_hi (operands);
5826 thumb_reload_out_hi (operands);
5831 (define_expand "reload_inhi"
5832 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5833 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5834 (match_operand:DI 2 "s_register_operand" "=&r")])]
5838 arm_reload_in_hi (operands);
5840 thumb_reload_out_hi (operands);
5844 (define_expand "movqi"
5845 [(set (match_operand:QI 0 "general_operand" "")
5846 (match_operand:QI 1 "general_operand" ""))]
5849 /* Everything except mem = const or mem = mem can be done easily */
5851 if (can_create_pseudo_p ())
5853 if (GET_CODE (operands[1]) == CONST_INT)
5855 rtx reg = gen_reg_rtx (SImode);
5857 /* For thumb we want an unsigned immediate, then we are more likely
5858 to be able to use a movs insn. */
5860 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5862 emit_insn (gen_movsi (reg, operands[1]));
5863 operands[1] = gen_lowpart (QImode, reg);
5868 /* ??? We shouldn't really get invalid addresses here, but this can
5869 happen if we are passed a SP (never OK for HImode/QImode) or
5870 virtual register (also rejected as illegitimate for HImode/QImode)
5871 relative address. */
5872 /* ??? This should perhaps be fixed elsewhere, for instance, in
5873 fixup_stack_1, by checking for other kinds of invalid addresses,
5874 e.g. a bare reference to a virtual register. This may confuse the
5875 alpha though, which must handle this case differently. */
5876 if (GET_CODE (operands[0]) == MEM
5877 && !memory_address_p (GET_MODE (operands[0]),
5878 XEXP (operands[0], 0)))
5880 = replace_equiv_address (operands[0],
5881 copy_to_reg (XEXP (operands[0], 0)));
5882 if (GET_CODE (operands[1]) == MEM
5883 && !memory_address_p (GET_MODE (operands[1]),
5884 XEXP (operands[1], 0)))
5886 = replace_equiv_address (operands[1],
5887 copy_to_reg (XEXP (operands[1], 0)));
5890 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5892 rtx reg = gen_reg_rtx (SImode);
5894 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5895 operands[1] = gen_lowpart (QImode, reg);
5898 if (GET_CODE (operands[0]) == MEM)
5899 operands[1] = force_reg (QImode, operands[1]);
5901 else if (TARGET_THUMB
5902 && GET_CODE (operands[1]) == CONST_INT
5903 && !satisfies_constraint_I (operands[1]))
5905 /* Handle loading a large integer during reload. */
5907 /* Writing a constant to memory needs a scratch, which should
5908 be handled with SECONDARY_RELOADs. */
5909 gcc_assert (GET_CODE (operands[0]) == REG);
5911 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5912 emit_insn (gen_movsi (operands[0], operands[1]));
5919 (define_insn "*arm_movqi_insn"
5920 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5921 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5923 && ( register_operand (operands[0], QImode)
5924 || register_operand (operands[1], QImode))"
5930 [(set_attr "type" "*,*,load1,store1")
5931 (set_attr "predicable" "yes")]
5934 (define_insn "*thumb1_movqi_insn"
5935 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5936 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5938 && ( register_operand (operands[0], QImode)
5939 || register_operand (operands[1], QImode))"
5947 [(set_attr "length" "2")
5948 (set_attr "type" "*,load1,store1,*,*,*")
5949 (set_attr "pool_range" "*,32,*,*,*,*")]
5953 (define_expand "movhf"
5954 [(set (match_operand:HF 0 "general_operand" "")
5955 (match_operand:HF 1 "general_operand" ""))]
5960 if (GET_CODE (operands[0]) == MEM)
5961 operands[1] = force_reg (HFmode, operands[1]);
5963 else /* TARGET_THUMB1 */
5965 if (can_create_pseudo_p ())
5967 if (GET_CODE (operands[0]) != REG)
5968 operands[1] = force_reg (HFmode, operands[1]);
5974 (define_insn "*arm32_movhf"
5975 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5976 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5977 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5978 && ( s_register_operand (operands[0], HFmode)
5979 || s_register_operand (operands[1], HFmode))"
5981 switch (which_alternative)
5983 case 0: /* ARM register from memory */
5984 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5985 case 1: /* memory from ARM register */
5986 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5987 case 2: /* ARM register from ARM register */
5988 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5989 case 3: /* ARM register from constant */
5995 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5996 bits = real_to_target (NULL, &r, HFmode);
5997 ops[0] = operands[0];
5998 ops[1] = GEN_INT (bits);
5999 ops[2] = GEN_INT (bits & 0xff00);
6000 ops[3] = GEN_INT (bits & 0x00ff);
6002 if (arm_arch_thumb2)
6003 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6005 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6012 [(set_attr "conds" "unconditional")
6013 (set_attr "type" "load1,store1,*,*")
6014 (set_attr "length" "4,4,4,8")
6015 (set_attr "predicable" "yes")
6019 (define_insn "*thumb1_movhf"
6020 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6021 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6023 && ( s_register_operand (operands[0], HFmode)
6024 || s_register_operand (operands[1], HFmode))"
6026 switch (which_alternative)
6031 gcc_assert (GET_CODE(operands[1]) == MEM);
6032 addr = XEXP (operands[1], 0);
6033 if (GET_CODE (addr) == LABEL_REF
6034 || (GET_CODE (addr) == CONST
6035 && GET_CODE (XEXP (addr, 0)) == PLUS
6036 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6037 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6039 /* Constant pool entry. */
6040 return \"ldr\\t%0, %1\";
6042 return \"ldrh\\t%0, %1\";
6044 case 2: return \"strh\\t%1, %0\";
6045 default: return \"mov\\t%0, %1\";
6048 [(set_attr "length" "2")
6049 (set_attr "type" "*,load1,store1,*,*")
6050 (set_attr "pool_range" "*,1020,*,*,*")]
6053 (define_expand "movsf"
6054 [(set (match_operand:SF 0 "general_operand" "")
6055 (match_operand:SF 1 "general_operand" ""))]
6060 if (GET_CODE (operands[0]) == MEM)
6061 operands[1] = force_reg (SFmode, operands[1]);
6063 else /* TARGET_THUMB1 */
6065 if (can_create_pseudo_p ())
6067 if (GET_CODE (operands[0]) != REG)
6068 operands[1] = force_reg (SFmode, operands[1]);
6074 ;; Transform a floating-point move of a constant into a core register into
6075 ;; an SImode operation.
6077 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6078 (match_operand:SF 1 "immediate_operand" ""))]
6081 && GET_CODE (operands[1]) == CONST_DOUBLE"
6082 [(set (match_dup 2) (match_dup 3))]
6084 operands[2] = gen_lowpart (SImode, operands[0]);
6085 operands[3] = gen_lowpart (SImode, operands[1]);
6086 if (operands[2] == 0 || operands[3] == 0)
6091 (define_insn "*arm_movsf_soft_insn"
6092 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6093 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6095 && TARGET_SOFT_FLOAT
6096 && (GET_CODE (operands[0]) != MEM
6097 || register_operand (operands[1], SFmode))"
6100 ldr%?\\t%0, %1\\t%@ float
6101 str%?\\t%1, %0\\t%@ float"
6102 [(set_attr "length" "4,4,4")
6103 (set_attr "predicable" "yes")
6104 (set_attr "type" "*,load1,store1")
6105 (set_attr "pool_range" "*,4096,*")
6106 (set_attr "neg_pool_range" "*,4084,*")]
6109 ;;; ??? This should have alternatives for constants.
6110 (define_insn "*thumb1_movsf_insn"
6111 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6112 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6114 && ( register_operand (operands[0], SFmode)
6115 || register_operand (operands[1], SFmode))"
6124 [(set_attr "length" "2")
6125 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6126 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6129 (define_expand "movdf"
6130 [(set (match_operand:DF 0 "general_operand" "")
6131 (match_operand:DF 1 "general_operand" ""))]
6136 if (GET_CODE (operands[0]) == MEM)
6137 operands[1] = force_reg (DFmode, operands[1]);
6139 else /* TARGET_THUMB */
6141 if (can_create_pseudo_p ())
6143 if (GET_CODE (operands[0]) != REG)
6144 operands[1] = force_reg (DFmode, operands[1]);
6150 ;; Reloading a df mode value stored in integer regs to memory can require a
6152 (define_expand "reload_outdf"
6153 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6154 (match_operand:DF 1 "s_register_operand" "r")
6155 (match_operand:SI 2 "s_register_operand" "=&r")]
6159 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6162 operands[2] = XEXP (operands[0], 0);
6163 else if (code == POST_INC || code == PRE_DEC)
6165 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6166 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6167 emit_insn (gen_movdi (operands[0], operands[1]));
6170 else if (code == PRE_INC)
6172 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6174 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6177 else if (code == POST_DEC)
6178 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6180 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6181 XEXP (XEXP (operands[0], 0), 1)));
6183 emit_insn (gen_rtx_SET (VOIDmode,
6184 replace_equiv_address (operands[0], operands[2]),
6187 if (code == POST_DEC)
6188 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6194 (define_insn "*movdf_soft_insn"
6195 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6196 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6197 "TARGET_ARM && TARGET_SOFT_FLOAT
6198 && ( register_operand (operands[0], DFmode)
6199 || register_operand (operands[1], DFmode))"
6201 switch (which_alternative)
6208 return output_move_double (operands);
6211 [(set_attr "length" "8,12,16,8,8")
6212 (set_attr "type" "*,*,*,load2,store2")
6213 (set_attr "pool_range" "1020")
6214 (set_attr "neg_pool_range" "1008")]
6217 ;;; ??? This should have alternatives for constants.
6218 ;;; ??? This was originally identical to the movdi_insn pattern.
6219 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6220 ;;; thumb_reorg with a memory reference.
6221 (define_insn "*thumb_movdf_insn"
6222 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6223 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6225 && ( register_operand (operands[0], DFmode)
6226 || register_operand (operands[1], DFmode))"
6228 switch (which_alternative)
6232 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6233 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6234 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6236 return \"ldmia\\t%1, {%0, %H0}\";
6238 return \"stmia\\t%0, {%1, %H1}\";
6240 return thumb_load_double_from_address (operands);
6242 operands[2] = gen_rtx_MEM (SImode,
6243 plus_constant (XEXP (operands[0], 0), 4));
6244 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6247 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6248 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6249 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6252 [(set_attr "length" "4,2,2,6,4,4")
6253 (set_attr "type" "*,load2,store2,load2,store2,*")
6254 (set_attr "pool_range" "*,*,*,1020,*,*")]
6257 (define_expand "movxf"
6258 [(set (match_operand:XF 0 "general_operand" "")
6259 (match_operand:XF 1 "general_operand" ""))]
6260 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6262 if (GET_CODE (operands[0]) == MEM)
6263 operands[1] = force_reg (XFmode, operands[1]);
6269 ;; load- and store-multiple insns
6270 ;; The arm can load/store any set of registers, provided that they are in
6271 ;; ascending order; but that is beyond GCC so stick with what it knows.
6273 (define_expand "load_multiple"
6274 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6275 (match_operand:SI 1 "" ""))
6276 (use (match_operand:SI 2 "" ""))])]
6279 HOST_WIDE_INT offset = 0;
6281 /* Support only fixed point registers. */
6282 if (GET_CODE (operands[2]) != CONST_INT
6283 || INTVAL (operands[2]) > 14
6284 || INTVAL (operands[2]) < 2
6285 || GET_CODE (operands[1]) != MEM
6286 || GET_CODE (operands[0]) != REG
6287 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6288 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6292 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6293 force_reg (SImode, XEXP (operands[1], 0)),
6294 TRUE, FALSE, operands[1], &offset);
6297 ;; Load multiple with write-back
6299 (define_insn "*ldmsi_postinc4"
6300 [(match_parallel 0 "load_multiple_operation"
6301 [(set (match_operand:SI 1 "s_register_operand" "=r")
6302 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6304 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6305 (mem:SI (match_dup 2)))
6306 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6307 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6308 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6309 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6310 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6311 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6312 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6313 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6314 [(set_attr "type" "load4")
6315 (set_attr "predicable" "yes")]
6318 (define_insn "*ldmsi_postinc4_thumb1"
6319 [(match_parallel 0 "load_multiple_operation"
6320 [(set (match_operand:SI 1 "s_register_operand" "=l")
6321 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6323 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6324 (mem:SI (match_dup 2)))
6325 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6326 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6327 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6328 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6329 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6330 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6331 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6332 "ldmia\\t%1!, {%3, %4, %5, %6}"
6333 [(set_attr "type" "load4")]
6336 (define_insn "*ldmsi_postinc3"
6337 [(match_parallel 0 "load_multiple_operation"
6338 [(set (match_operand:SI 1 "s_register_operand" "=r")
6339 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6341 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6342 (mem:SI (match_dup 2)))
6343 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6344 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6345 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6346 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6347 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6348 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6349 [(set_attr "type" "load3")
6350 (set_attr "predicable" "yes")]
6353 (define_insn "*ldmsi_postinc2"
6354 [(match_parallel 0 "load_multiple_operation"
6355 [(set (match_operand:SI 1 "s_register_operand" "=r")
6356 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6358 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6359 (mem:SI (match_dup 2)))
6360 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6361 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6362 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6363 "ldm%(ia%)\\t%1!, {%3, %4}"
6364 [(set_attr "type" "load2")
6365 (set_attr "predicable" "yes")]
6368 ;; Ordinary load multiple
6370 (define_insn "*ldmsi4"
6371 [(match_parallel 0 "load_multiple_operation"
6372 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6373 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6374 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6375 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6376 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6377 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6378 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6379 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6380 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6381 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6382 [(set_attr "type" "load4")
6383 (set_attr "predicable" "yes")]
6386 (define_insn "*ldmsi3"
6387 [(match_parallel 0 "load_multiple_operation"
6388 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6389 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6390 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6391 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6392 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6393 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6394 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6395 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6396 [(set_attr "type" "load3")
6397 (set_attr "predicable" "yes")]
6400 (define_insn "*ldmsi2"
6401 [(match_parallel 0 "load_multiple_operation"
6402 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6403 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6404 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6405 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6406 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6407 "ldm%(ia%)\\t%1, {%2, %3}"
6408 [(set_attr "type" "load2")
6409 (set_attr "predicable" "yes")]
6412 (define_expand "store_multiple"
6413 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6414 (match_operand:SI 1 "" ""))
6415 (use (match_operand:SI 2 "" ""))])]
6418 HOST_WIDE_INT offset = 0;
6420 /* Support only fixed point registers. */
6421 if (GET_CODE (operands[2]) != CONST_INT
6422 || INTVAL (operands[2]) > 14
6423 || INTVAL (operands[2]) < 2
6424 || GET_CODE (operands[1]) != REG
6425 || GET_CODE (operands[0]) != MEM
6426 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6427 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6431 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6432 force_reg (SImode, XEXP (operands[0], 0)),
6433 TRUE, FALSE, operands[0], &offset);
6436 ;; Store multiple with write-back
6438 (define_insn "*stmsi_postinc4"
6439 [(match_parallel 0 "store_multiple_operation"
6440 [(set (match_operand:SI 1 "s_register_operand" "=r")
6441 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6443 (set (mem:SI (match_dup 2))
6444 (match_operand:SI 3 "arm_hard_register_operand" ""))
6445 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6446 (match_operand:SI 4 "arm_hard_register_operand" ""))
6447 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6448 (match_operand:SI 5 "arm_hard_register_operand" ""))
6449 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6450 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6451 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6452 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6453 [(set_attr "predicable" "yes")
6454 (set_attr "type" "store4")]
6457 (define_insn "*stmsi_postinc4_thumb1"
6458 [(match_parallel 0 "store_multiple_operation"
6459 [(set (match_operand:SI 1 "s_register_operand" "=l")
6460 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6462 (set (mem:SI (match_dup 2))
6463 (match_operand:SI 3 "arm_hard_register_operand" ""))
6464 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6465 (match_operand:SI 4 "arm_hard_register_operand" ""))
6466 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6467 (match_operand:SI 5 "arm_hard_register_operand" ""))
6468 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6469 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6470 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6471 "stmia\\t%1!, {%3, %4, %5, %6}"
6472 [(set_attr "type" "store4")]
6475 (define_insn "*stmsi_postinc3"
6476 [(match_parallel 0 "store_multiple_operation"
6477 [(set (match_operand:SI 1 "s_register_operand" "=r")
6478 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6480 (set (mem:SI (match_dup 2))
6481 (match_operand:SI 3 "arm_hard_register_operand" ""))
6482 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6483 (match_operand:SI 4 "arm_hard_register_operand" ""))
6484 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6485 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6486 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6487 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6488 [(set_attr "predicable" "yes")
6489 (set_attr "type" "store3")]
6492 (define_insn "*stmsi_postinc2"
6493 [(match_parallel 0 "store_multiple_operation"
6494 [(set (match_operand:SI 1 "s_register_operand" "=r")
6495 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6497 (set (mem:SI (match_dup 2))
6498 (match_operand:SI 3 "arm_hard_register_operand" ""))
6499 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6500 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6501 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6502 "stm%(ia%)\\t%1!, {%3, %4}"
6503 [(set_attr "predicable" "yes")
6504 (set_attr "type" "store2")]
6507 ;; Ordinary store multiple
6509 (define_insn "*stmsi4"
6510 [(match_parallel 0 "store_multiple_operation"
6511 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6512 (match_operand:SI 2 "arm_hard_register_operand" ""))
6513 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6514 (match_operand:SI 3 "arm_hard_register_operand" ""))
6515 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6516 (match_operand:SI 4 "arm_hard_register_operand" ""))
6517 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6518 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6519 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6520 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6521 [(set_attr "predicable" "yes")
6522 (set_attr "type" "store4")]
6525 (define_insn "*stmsi3"
6526 [(match_parallel 0 "store_multiple_operation"
6527 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6528 (match_operand:SI 2 "arm_hard_register_operand" ""))
6529 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6530 (match_operand:SI 3 "arm_hard_register_operand" ""))
6531 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6532 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6533 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6534 "stm%(ia%)\\t%1, {%2, %3, %4}"
6535 [(set_attr "predicable" "yes")
6536 (set_attr "type" "store3")]
6539 (define_insn "*stmsi2"
6540 [(match_parallel 0 "store_multiple_operation"
6541 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6542 (match_operand:SI 2 "arm_hard_register_operand" ""))
6543 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6544 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6545 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6546 "stm%(ia%)\\t%1, {%2, %3}"
6547 [(set_attr "predicable" "yes")
6548 (set_attr "type" "store2")]
6551 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6552 ;; We could let this apply for blocks of less than this, but it clobbers so
6553 ;; many registers that there is then probably a better way.
6555 (define_expand "movmemqi"
6556 [(match_operand:BLK 0 "general_operand" "")
6557 (match_operand:BLK 1 "general_operand" "")
6558 (match_operand:SI 2 "const_int_operand" "")
6559 (match_operand:SI 3 "const_int_operand" "")]
6564 if (arm_gen_movmemqi (operands))
6568 else /* TARGET_THUMB1 */
6570 if ( INTVAL (operands[3]) != 4
6571 || INTVAL (operands[2]) > 48)
6574 thumb_expand_movmemqi (operands);
6580 ;; Thumb block-move insns
6582 (define_insn "movmem12b"
6583 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6584 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6585 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6586 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6587 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6588 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6589 (set (match_operand:SI 0 "register_operand" "=l")
6590 (plus:SI (match_dup 2) (const_int 12)))
6591 (set (match_operand:SI 1 "register_operand" "=l")
6592 (plus:SI (match_dup 3) (const_int 12)))
6593 (clobber (match_scratch:SI 4 "=&l"))
6594 (clobber (match_scratch:SI 5 "=&l"))
6595 (clobber (match_scratch:SI 6 "=&l"))]
6597 "* return thumb_output_move_mem_multiple (3, operands);"
6598 [(set_attr "length" "4")
6599 ; This isn't entirely accurate... It loads as well, but in terms of
6600 ; scheduling the following insn it is better to consider it as a store
6601 (set_attr "type" "store3")]
6604 (define_insn "movmem8b"
6605 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6606 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6607 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6608 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6609 (set (match_operand:SI 0 "register_operand" "=l")
6610 (plus:SI (match_dup 2) (const_int 8)))
6611 (set (match_operand:SI 1 "register_operand" "=l")
6612 (plus:SI (match_dup 3) (const_int 8)))
6613 (clobber (match_scratch:SI 4 "=&l"))
6614 (clobber (match_scratch:SI 5 "=&l"))]
6616 "* return thumb_output_move_mem_multiple (2, operands);"
6617 [(set_attr "length" "4")
6618 ; This isn't entirely accurate... It loads as well, but in terms of
6619 ; scheduling the following insn it is better to consider it as a store
6620 (set_attr "type" "store2")]
6625 ;; Compare & branch insns
6626 ;; The range calculations are based as follows:
6627 ;; For forward branches, the address calculation returns the address of
6628 ;; the next instruction. This is 2 beyond the branch instruction.
6629 ;; For backward branches, the address calculation returns the address of
6630 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6631 ;; instruction for the shortest sequence, and 4 before the branch instruction
6632 ;; if we have to jump around an unconditional branch.
6633 ;; To the basic branch range the PC offset must be added (this is +4).
6634 ;; So for forward branches we have
6635 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6636 ;; And for backward branches we have
6637 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6639 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6640 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6642 (define_expand "cbranchsi4"
6643 [(set (pc) (if_then_else
6644 (match_operator 0 "arm_comparison_operator"
6645 [(match_operand:SI 1 "s_register_operand" "")
6646 (match_operand:SI 2 "nonmemory_operand" "")])
6647 (label_ref (match_operand 3 "" ""))
6649 "TARGET_THUMB1 || TARGET_32BIT"
6653 if (!arm_add_operand (operands[2], SImode))
6654 operands[2] = force_reg (SImode, operands[2]);
6655 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6659 if (thumb1_cmpneg_operand (operands[2], SImode))
6661 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6662 operands[3], operands[0]));
6665 if (!thumb1_cmp_operand (operands[2], SImode))
6666 operands[2] = force_reg (SImode, operands[2]);
6669 (define_expand "cbranchsf4"
6670 [(set (pc) (if_then_else
6671 (match_operator 0 "arm_comparison_operator"
6672 [(match_operand:SF 1 "s_register_operand" "")
6673 (match_operand:SF 2 "arm_float_compare_operand" "")])
6674 (label_ref (match_operand 3 "" ""))
6676 "TARGET_32BIT && TARGET_HARD_FLOAT"
6677 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6678 operands[3])); DONE;"
6681 (define_expand "cbranchdf4"
6682 [(set (pc) (if_then_else
6683 (match_operator 0 "arm_comparison_operator"
6684 [(match_operand:DF 1 "s_register_operand" "")
6685 (match_operand:DF 2 "arm_float_compare_operand" "")])
6686 (label_ref (match_operand 3 "" ""))
6688 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6689 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6690 operands[3])); DONE;"
6693 ;; this uses the Cirrus DI compare instruction
6694 (define_expand "cbranchdi4"
6695 [(set (pc) (if_then_else
6696 (match_operator 0 "arm_comparison_operator"
6697 [(match_operand:DI 1 "cirrus_fp_register" "")
6698 (match_operand:DI 2 "cirrus_fp_register" "")])
6699 (label_ref (match_operand 3 "" ""))
6701 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6702 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6703 operands[3])); DONE;"
6706 (define_insn "*cbranchsi4_insn"
6707 [(set (pc) (if_then_else
6708 (match_operator 0 "arm_comparison_operator"
6709 [(match_operand:SI 1 "s_register_operand" "l,*h")
6710 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6711 (label_ref (match_operand 3 "" ""))
6715 output_asm_insn (\"cmp\\t%1, %2\", operands);
6717 switch (get_attr_length (insn))
6719 case 4: return \"b%d0\\t%l3\";
6720 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6721 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6724 [(set (attr "far_jump")
6726 (eq_attr "length" "8")
6727 (const_string "yes")
6728 (const_string "no")))
6729 (set (attr "length")
6731 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6732 (le (minus (match_dup 3) (pc)) (const_int 256)))
6735 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6736 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6741 (define_insn "cbranchsi4_scratch"
6742 [(set (pc) (if_then_else
6743 (match_operator 4 "arm_comparison_operator"
6744 [(match_operand:SI 1 "s_register_operand" "l,0")
6745 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6746 (label_ref (match_operand 3 "" ""))
6748 (clobber (match_scratch:SI 0 "=l,l"))]
6751 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6753 switch (get_attr_length (insn))
6755 case 4: return \"b%d4\\t%l3\";
6756 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6757 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6760 [(set (attr "far_jump")
6762 (eq_attr "length" "8")
6763 (const_string "yes")
6764 (const_string "no")))
6765 (set (attr "length")
6767 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6768 (le (minus (match_dup 3) (pc)) (const_int 256)))
6771 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6772 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6777 (define_insn "*movsi_cbranchsi4"
6780 (match_operator 3 "arm_comparison_operator"
6781 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6783 (label_ref (match_operand 2 "" ""))
6785 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6789 if (which_alternative == 0)
6790 output_asm_insn (\"cmp\t%0, #0\", operands);
6791 else if (which_alternative == 1)
6792 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6795 output_asm_insn (\"cmp\t%1, #0\", operands);
6796 if (which_alternative == 2)
6797 output_asm_insn (\"mov\t%0, %1\", operands);
6799 output_asm_insn (\"str\t%1, %0\", operands);
6801 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6803 case 4: return \"b%d3\\t%l2\";
6804 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6805 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6808 [(set (attr "far_jump")
6810 (ior (and (gt (symbol_ref ("which_alternative"))
6812 (eq_attr "length" "8"))
6813 (eq_attr "length" "10"))
6814 (const_string "yes")
6815 (const_string "no")))
6816 (set (attr "length")
6818 (le (symbol_ref ("which_alternative"))
6821 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6822 (le (minus (match_dup 2) (pc)) (const_int 256)))
6825 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6826 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6830 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6831 (le (minus (match_dup 2) (pc)) (const_int 256)))
6834 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6835 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6841 [(set (match_operand:SI 0 "low_register_operand" "")
6842 (match_operand:SI 1 "low_register_operand" ""))
6844 (if_then_else (match_operator 2 "arm_comparison_operator"
6845 [(match_dup 1) (const_int 0)])
6846 (label_ref (match_operand 3 "" ""))
6851 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6852 (label_ref (match_dup 3))
6854 (set (match_dup 0) (match_dup 1))])]
6858 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6859 ;; merge cases like this because the op1 is a hard register in
6860 ;; CLASS_LIKELY_SPILLED_P.
6862 [(set (match_operand:SI 0 "low_register_operand" "")
6863 (match_operand:SI 1 "low_register_operand" ""))
6865 (if_then_else (match_operator 2 "arm_comparison_operator"
6866 [(match_dup 0) (const_int 0)])
6867 (label_ref (match_operand 3 "" ""))
6872 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6873 (label_ref (match_dup 3))
6875 (set (match_dup 0) (match_dup 1))])]
6879 (define_insn "*negated_cbranchsi4"
6882 (match_operator 0 "equality_operator"
6883 [(match_operand:SI 1 "s_register_operand" "l")
6884 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6885 (label_ref (match_operand 3 "" ""))
6889 output_asm_insn (\"cmn\\t%1, %2\", operands);
6890 switch (get_attr_length (insn))
6892 case 4: return \"b%d0\\t%l3\";
6893 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6894 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6897 [(set (attr "far_jump")
6899 (eq_attr "length" "8")
6900 (const_string "yes")
6901 (const_string "no")))
6902 (set (attr "length")
6904 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6905 (le (minus (match_dup 3) (pc)) (const_int 256)))
6908 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6909 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6914 (define_insn "*tbit_cbranch"
6917 (match_operator 0 "equality_operator"
6918 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6920 (match_operand:SI 2 "const_int_operand" "i"))
6922 (label_ref (match_operand 3 "" ""))
6924 (clobber (match_scratch:SI 4 "=l"))]
6929 op[0] = operands[4];
6930 op[1] = operands[1];
6931 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6933 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6934 switch (get_attr_length (insn))
6936 case 4: return \"b%d0\\t%l3\";
6937 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6938 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6941 [(set (attr "far_jump")
6943 (eq_attr "length" "8")
6944 (const_string "yes")
6945 (const_string "no")))
6946 (set (attr "length")
6948 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6949 (le (minus (match_dup 3) (pc)) (const_int 256)))
6952 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6953 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6958 (define_insn "*tlobits_cbranch"
6961 (match_operator 0 "equality_operator"
6962 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6963 (match_operand:SI 2 "const_int_operand" "i")
6966 (label_ref (match_operand 3 "" ""))
6968 (clobber (match_scratch:SI 4 "=l"))]
6973 op[0] = operands[4];
6974 op[1] = operands[1];
6975 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6977 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6978 switch (get_attr_length (insn))
6980 case 4: return \"b%d0\\t%l3\";
6981 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6982 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6985 [(set (attr "far_jump")
6987 (eq_attr "length" "8")
6988 (const_string "yes")
6989 (const_string "no")))
6990 (set (attr "length")
6992 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6993 (le (minus (match_dup 3) (pc)) (const_int 256)))
6996 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6997 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7002 (define_insn "*tstsi3_cbranch"
7005 (match_operator 3 "equality_operator"
7006 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7007 (match_operand:SI 1 "s_register_operand" "l"))
7009 (label_ref (match_operand 2 "" ""))
7014 output_asm_insn (\"tst\\t%0, %1\", operands);
7015 switch (get_attr_length (insn))
7017 case 4: return \"b%d3\\t%l2\";
7018 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7019 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7022 [(set (attr "far_jump")
7024 (eq_attr "length" "8")
7025 (const_string "yes")
7026 (const_string "no")))
7027 (set (attr "length")
7029 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7030 (le (minus (match_dup 2) (pc)) (const_int 256)))
7033 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7034 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7039 (define_insn "*andsi3_cbranch"
7042 (match_operator 5 "equality_operator"
7043 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7044 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7046 (label_ref (match_operand 4 "" ""))
7048 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7049 (and:SI (match_dup 2) (match_dup 3)))
7050 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7054 if (which_alternative == 0)
7055 output_asm_insn (\"and\\t%0, %3\", operands);
7056 else if (which_alternative == 1)
7058 output_asm_insn (\"and\\t%1, %3\", operands);
7059 output_asm_insn (\"mov\\t%0, %1\", operands);
7063 output_asm_insn (\"and\\t%1, %3\", operands);
7064 output_asm_insn (\"str\\t%1, %0\", operands);
7067 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7069 case 4: return \"b%d5\\t%l4\";
7070 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7071 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7074 [(set (attr "far_jump")
7076 (ior (and (eq (symbol_ref ("which_alternative"))
7078 (eq_attr "length" "8"))
7079 (eq_attr "length" "10"))
7080 (const_string "yes")
7081 (const_string "no")))
7082 (set (attr "length")
7084 (eq (symbol_ref ("which_alternative"))
7087 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7088 (le (minus (match_dup 4) (pc)) (const_int 256)))
7091 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7092 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7096 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7097 (le (minus (match_dup 4) (pc)) (const_int 256)))
7100 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7101 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7106 (define_insn "*orrsi3_cbranch_scratch"
7109 (match_operator 4 "equality_operator"
7110 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7111 (match_operand:SI 2 "s_register_operand" "l"))
7113 (label_ref (match_operand 3 "" ""))
7115 (clobber (match_scratch:SI 0 "=l"))]
7119 output_asm_insn (\"orr\\t%0, %2\", operands);
7120 switch (get_attr_length (insn))
7122 case 4: return \"b%d4\\t%l3\";
7123 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7124 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7127 [(set (attr "far_jump")
7129 (eq_attr "length" "8")
7130 (const_string "yes")
7131 (const_string "no")))
7132 (set (attr "length")
7134 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7135 (le (minus (match_dup 3) (pc)) (const_int 256)))
7138 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7139 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7144 (define_insn "*orrsi3_cbranch"
7147 (match_operator 5 "equality_operator"
7148 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7149 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7151 (label_ref (match_operand 4 "" ""))
7153 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7154 (ior:SI (match_dup 2) (match_dup 3)))
7155 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7159 if (which_alternative == 0)
7160 output_asm_insn (\"orr\\t%0, %3\", operands);
7161 else if (which_alternative == 1)
7163 output_asm_insn (\"orr\\t%1, %3\", operands);
7164 output_asm_insn (\"mov\\t%0, %1\", operands);
7168 output_asm_insn (\"orr\\t%1, %3\", operands);
7169 output_asm_insn (\"str\\t%1, %0\", operands);
7172 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7174 case 4: return \"b%d5\\t%l4\";
7175 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7176 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7179 [(set (attr "far_jump")
7181 (ior (and (eq (symbol_ref ("which_alternative"))
7183 (eq_attr "length" "8"))
7184 (eq_attr "length" "10"))
7185 (const_string "yes")
7186 (const_string "no")))
7187 (set (attr "length")
7189 (eq (symbol_ref ("which_alternative"))
7192 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7193 (le (minus (match_dup 4) (pc)) (const_int 256)))
7196 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7197 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7201 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7202 (le (minus (match_dup 4) (pc)) (const_int 256)))
7205 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7206 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7211 (define_insn "*xorsi3_cbranch_scratch"
7214 (match_operator 4 "equality_operator"
7215 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7216 (match_operand:SI 2 "s_register_operand" "l"))
7218 (label_ref (match_operand 3 "" ""))
7220 (clobber (match_scratch:SI 0 "=l"))]
7224 output_asm_insn (\"eor\\t%0, %2\", operands);
7225 switch (get_attr_length (insn))
7227 case 4: return \"b%d4\\t%l3\";
7228 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7229 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7232 [(set (attr "far_jump")
7234 (eq_attr "length" "8")
7235 (const_string "yes")
7236 (const_string "no")))
7237 (set (attr "length")
7239 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7240 (le (minus (match_dup 3) (pc)) (const_int 256)))
7243 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7244 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7249 (define_insn "*xorsi3_cbranch"
7252 (match_operator 5 "equality_operator"
7253 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7254 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7256 (label_ref (match_operand 4 "" ""))
7258 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7259 (xor:SI (match_dup 2) (match_dup 3)))
7260 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7264 if (which_alternative == 0)
7265 output_asm_insn (\"eor\\t%0, %3\", operands);
7266 else if (which_alternative == 1)
7268 output_asm_insn (\"eor\\t%1, %3\", operands);
7269 output_asm_insn (\"mov\\t%0, %1\", operands);
7273 output_asm_insn (\"eor\\t%1, %3\", operands);
7274 output_asm_insn (\"str\\t%1, %0\", operands);
7277 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7279 case 4: return \"b%d5\\t%l4\";
7280 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7281 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7284 [(set (attr "far_jump")
7286 (ior (and (eq (symbol_ref ("which_alternative"))
7288 (eq_attr "length" "8"))
7289 (eq_attr "length" "10"))
7290 (const_string "yes")
7291 (const_string "no")))
7292 (set (attr "length")
7294 (eq (symbol_ref ("which_alternative"))
7297 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7298 (le (minus (match_dup 4) (pc)) (const_int 256)))
7301 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7302 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7306 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7307 (le (minus (match_dup 4) (pc)) (const_int 256)))
7310 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7311 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7316 (define_insn "*bicsi3_cbranch_scratch"
7319 (match_operator 4 "equality_operator"
7320 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7321 (match_operand:SI 1 "s_register_operand" "0"))
7323 (label_ref (match_operand 3 "" ""))
7325 (clobber (match_scratch:SI 0 "=l"))]
7329 output_asm_insn (\"bic\\t%0, %2\", operands);
7330 switch (get_attr_length (insn))
7332 case 4: return \"b%d4\\t%l3\";
7333 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7334 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7337 [(set (attr "far_jump")
7339 (eq_attr "length" "8")
7340 (const_string "yes")
7341 (const_string "no")))
7342 (set (attr "length")
7344 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7345 (le (minus (match_dup 3) (pc)) (const_int 256)))
7348 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7349 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7354 (define_insn "*bicsi3_cbranch"
7357 (match_operator 5 "equality_operator"
7358 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7359 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7361 (label_ref (match_operand 4 "" ""))
7363 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7364 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7365 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7369 if (which_alternative == 0)
7370 output_asm_insn (\"bic\\t%0, %3\", operands);
7371 else if (which_alternative <= 2)
7373 output_asm_insn (\"bic\\t%1, %3\", operands);
7374 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7375 conditions again, since we're only testing for equality. */
7376 output_asm_insn (\"mov\\t%0, %1\", operands);
7380 output_asm_insn (\"bic\\t%1, %3\", operands);
7381 output_asm_insn (\"str\\t%1, %0\", operands);
7384 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7386 case 4: return \"b%d5\\t%l4\";
7387 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7388 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7391 [(set (attr "far_jump")
7393 (ior (and (eq (symbol_ref ("which_alternative"))
7395 (eq_attr "length" "8"))
7396 (eq_attr "length" "10"))
7397 (const_string "yes")
7398 (const_string "no")))
7399 (set (attr "length")
7401 (eq (symbol_ref ("which_alternative"))
7404 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7405 (le (minus (match_dup 4) (pc)) (const_int 256)))
7408 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7409 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7413 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7414 (le (minus (match_dup 4) (pc)) (const_int 256)))
7417 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7418 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7423 (define_insn "*cbranchne_decr1"
7425 (if_then_else (match_operator 3 "equality_operator"
7426 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7428 (label_ref (match_operand 4 "" ""))
7430 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7431 (plus:SI (match_dup 2) (const_int -1)))
7432 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7437 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7439 VOIDmode, operands[2], const1_rtx);
7440 cond[1] = operands[4];
7442 if (which_alternative == 0)
7443 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7444 else if (which_alternative == 1)
7446 /* We must provide an alternative for a hi reg because reload
7447 cannot handle output reloads on a jump instruction, but we
7448 can't subtract into that. Fortunately a mov from lo to hi
7449 does not clobber the condition codes. */
7450 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7451 output_asm_insn (\"mov\\t%0, %1\", operands);
7455 /* Similarly, but the target is memory. */
7456 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7457 output_asm_insn (\"str\\t%1, %0\", operands);
7460 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7463 output_asm_insn (\"b%d0\\t%l1\", cond);
7466 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7467 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7469 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7470 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7474 [(set (attr "far_jump")
7476 (ior (and (eq (symbol_ref ("which_alternative"))
7478 (eq_attr "length" "8"))
7479 (eq_attr "length" "10"))
7480 (const_string "yes")
7481 (const_string "no")))
7482 (set_attr_alternative "length"
7486 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7487 (le (minus (match_dup 4) (pc)) (const_int 256)))
7490 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7491 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7496 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7497 (le (minus (match_dup 4) (pc)) (const_int 256)))
7500 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7501 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7506 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7507 (le (minus (match_dup 4) (pc)) (const_int 256)))
7510 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7511 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7516 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7517 (le (minus (match_dup 4) (pc)) (const_int 256)))
7520 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7521 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7526 (define_insn "*addsi3_cbranch"
7529 (match_operator 4 "arm_comparison_operator"
7531 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7532 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7534 (label_ref (match_operand 5 "" ""))
7537 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7538 (plus:SI (match_dup 2) (match_dup 3)))
7539 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7541 && (GET_CODE (operands[4]) == EQ
7542 || GET_CODE (operands[4]) == NE
7543 || GET_CODE (operands[4]) == GE
7544 || GET_CODE (operands[4]) == LT)"
7550 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7551 cond[1] = operands[2];
7552 cond[2] = operands[3];
7554 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7555 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7557 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7559 if (which_alternative >= 3
7560 && which_alternative < 4)
7561 output_asm_insn (\"mov\\t%0, %1\", operands);
7562 else if (which_alternative >= 4)
7563 output_asm_insn (\"str\\t%1, %0\", operands);
7565 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7568 return \"b%d4\\t%l5\";
7570 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7572 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7576 [(set (attr "far_jump")
7578 (ior (and (lt (symbol_ref ("which_alternative"))
7580 (eq_attr "length" "8"))
7581 (eq_attr "length" "10"))
7582 (const_string "yes")
7583 (const_string "no")))
7584 (set (attr "length")
7586 (lt (symbol_ref ("which_alternative"))
7589 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7590 (le (minus (match_dup 5) (pc)) (const_int 256)))
7593 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7594 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7598 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7599 (le (minus (match_dup 5) (pc)) (const_int 256)))
7602 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7603 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7608 (define_insn "*addsi3_cbranch_scratch"
7611 (match_operator 3 "arm_comparison_operator"
7613 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7614 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7616 (label_ref (match_operand 4 "" ""))
7618 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7620 && (GET_CODE (operands[3]) == EQ
7621 || GET_CODE (operands[3]) == NE
7622 || GET_CODE (operands[3]) == GE
7623 || GET_CODE (operands[3]) == LT)"
7626 switch (which_alternative)
7629 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7632 output_asm_insn (\"cmn\t%1, %2\", operands);
7635 if (INTVAL (operands[2]) < 0)
7636 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7638 output_asm_insn (\"add\t%0, %1, %2\", operands);
7641 if (INTVAL (operands[2]) < 0)
7642 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7644 output_asm_insn (\"add\t%0, %0, %2\", operands);
7648 switch (get_attr_length (insn))
7651 return \"b%d3\\t%l4\";
7653 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7655 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7659 [(set (attr "far_jump")
7661 (eq_attr "length" "8")
7662 (const_string "yes")
7663 (const_string "no")))
7664 (set (attr "length")
7666 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7667 (le (minus (match_dup 4) (pc)) (const_int 256)))
7670 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7671 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7676 (define_insn "*subsi3_cbranch"
7679 (match_operator 4 "arm_comparison_operator"
7681 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7682 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7684 (label_ref (match_operand 5 "" ""))
7686 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7687 (minus:SI (match_dup 2) (match_dup 3)))
7688 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7690 && (GET_CODE (operands[4]) == EQ
7691 || GET_CODE (operands[4]) == NE
7692 || GET_CODE (operands[4]) == GE
7693 || GET_CODE (operands[4]) == LT)"
7696 if (which_alternative == 0)
7697 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7698 else if (which_alternative == 1)
7700 /* We must provide an alternative for a hi reg because reload
7701 cannot handle output reloads on a jump instruction, but we
7702 can't subtract into that. Fortunately a mov from lo to hi
7703 does not clobber the condition codes. */
7704 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7705 output_asm_insn (\"mov\\t%0, %1\", operands);
7709 /* Similarly, but the target is memory. */
7710 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7711 output_asm_insn (\"str\\t%1, %0\", operands);
7714 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7717 return \"b%d4\\t%l5\";
7719 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7721 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7725 [(set (attr "far_jump")
7727 (ior (and (eq (symbol_ref ("which_alternative"))
7729 (eq_attr "length" "8"))
7730 (eq_attr "length" "10"))
7731 (const_string "yes")
7732 (const_string "no")))
7733 (set (attr "length")
7735 (eq (symbol_ref ("which_alternative"))
7738 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7739 (le (minus (match_dup 5) (pc)) (const_int 256)))
7742 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7743 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7747 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7748 (le (minus (match_dup 5) (pc)) (const_int 256)))
7751 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7752 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7757 (define_insn "*subsi3_cbranch_scratch"
7760 (match_operator 0 "arm_comparison_operator"
7761 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7762 (match_operand:SI 2 "nonmemory_operand" "l"))
7764 (label_ref (match_operand 3 "" ""))
7767 && (GET_CODE (operands[0]) == EQ
7768 || GET_CODE (operands[0]) == NE
7769 || GET_CODE (operands[0]) == GE
7770 || GET_CODE (operands[0]) == LT)"
7772 output_asm_insn (\"cmp\\t%1, %2\", operands);
7773 switch (get_attr_length (insn))
7775 case 4: return \"b%d0\\t%l3\";
7776 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7777 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7780 [(set (attr "far_jump")
7782 (eq_attr "length" "8")
7783 (const_string "yes")
7784 (const_string "no")))
7785 (set (attr "length")
7787 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7788 (le (minus (match_dup 3) (pc)) (const_int 256)))
7791 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7792 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7797 ;; Comparison and test insns
7799 (define_insn "*arm_cmpsi_insn"
7800 [(set (reg:CC CC_REGNUM)
7801 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7802 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7807 [(set_attr "conds" "set")]
7810 (define_insn "*arm_cmpsi_shiftsi"
7811 [(set (reg:CC CC_REGNUM)
7812 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7813 (match_operator:SI 3 "shift_operator"
7814 [(match_operand:SI 1 "s_register_operand" "r")
7815 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7818 [(set_attr "conds" "set")
7819 (set_attr "shift" "1")
7820 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7821 (const_string "alu_shift")
7822 (const_string "alu_shift_reg")))]
7825 (define_insn "*arm_cmpsi_shiftsi_swp"
7826 [(set (reg:CC_SWP CC_REGNUM)
7827 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7828 [(match_operand:SI 1 "s_register_operand" "r")
7829 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7830 (match_operand:SI 0 "s_register_operand" "r")))]
7833 [(set_attr "conds" "set")
7834 (set_attr "shift" "1")
7835 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7836 (const_string "alu_shift")
7837 (const_string "alu_shift_reg")))]
7840 (define_insn "*arm_cmpsi_negshiftsi_si"
7841 [(set (reg:CC_Z CC_REGNUM)
7843 (neg:SI (match_operator:SI 1 "shift_operator"
7844 [(match_operand:SI 2 "s_register_operand" "r")
7845 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7846 (match_operand:SI 0 "s_register_operand" "r")))]
7849 [(set_attr "conds" "set")
7850 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7851 (const_string "alu_shift")
7852 (const_string "alu_shift_reg")))]
7855 ;; Cirrus SF compare instruction
7856 (define_insn "*cirrus_cmpsf"
7857 [(set (reg:CCFP CC_REGNUM)
7858 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7859 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7860 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7861 "cfcmps%?\\tr15, %V0, %V1"
7862 [(set_attr "type" "mav_farith")
7863 (set_attr "cirrus" "compare")]
7866 ;; Cirrus DF compare instruction
7867 (define_insn "*cirrus_cmpdf"
7868 [(set (reg:CCFP CC_REGNUM)
7869 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7870 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7871 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7872 "cfcmpd%?\\tr15, %V0, %V1"
7873 [(set_attr "type" "mav_farith")
7874 (set_attr "cirrus" "compare")]
7877 (define_insn "*cirrus_cmpdi"
7878 [(set (reg:CC CC_REGNUM)
7879 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7880 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7881 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7882 "cfcmp64%?\\tr15, %V0, %V1"
7883 [(set_attr "type" "mav_farith")
7884 (set_attr "cirrus" "compare")]
7887 ; This insn allows redundant compares to be removed by cse, nothing should
7888 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7889 ; is deleted later on. The match_dup will match the mode here, so that
7890 ; mode changes of the condition codes aren't lost by this even though we don't
7891 ; specify what they are.
7893 (define_insn "*deleted_compare"
7894 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7896 "\\t%@ deleted compare"
7897 [(set_attr "conds" "set")
7898 (set_attr "length" "0")]
7902 ;; Conditional branch insns
7904 (define_expand "cbranch_cc"
7906 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7907 (match_operand 2 "" "")])
7908 (label_ref (match_operand 3 "" ""))
7911 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7912 operands[1], operands[2]);
7913 operands[2] = const0_rtx;"
7917 ;; Patterns to match conditional branch insns.
7920 (define_insn "*arm_cond_branch"
7922 (if_then_else (match_operator 1 "arm_comparison_operator"
7923 [(match_operand 2 "cc_register" "") (const_int 0)])
7924 (label_ref (match_operand 0 "" ""))
7928 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7930 arm_ccfsm_state += 2;
7933 return \"b%d1\\t%l0\";
7935 [(set_attr "conds" "use")
7936 (set_attr "type" "branch")]
7939 (define_insn "*arm_cond_branch_reversed"
7941 (if_then_else (match_operator 1 "arm_comparison_operator"
7942 [(match_operand 2 "cc_register" "") (const_int 0)])
7944 (label_ref (match_operand 0 "" ""))))]
7947 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7949 arm_ccfsm_state += 2;
7952 return \"b%D1\\t%l0\";
7954 [(set_attr "conds" "use")
7955 (set_attr "type" "branch")]
7962 (define_expand "cstore_cc"
7963 [(set (match_operand:SI 0 "s_register_operand" "")
7964 (match_operator:SI 1 "" [(match_operand 2 "" "")
7965 (match_operand 3 "" "")]))]
7967 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7968 operands[2], operands[3]);
7969 operands[3] = const0_rtx;"
7972 (define_insn "*mov_scc"
7973 [(set (match_operand:SI 0 "s_register_operand" "=r")
7974 (match_operator:SI 1 "arm_comparison_operator"
7975 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7977 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7978 [(set_attr "conds" "use")
7979 (set_attr "length" "8")]
7982 (define_insn "*mov_negscc"
7983 [(set (match_operand:SI 0 "s_register_operand" "=r")
7984 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7985 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7987 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7988 [(set_attr "conds" "use")
7989 (set_attr "length" "8")]
7992 (define_insn "*mov_notscc"
7993 [(set (match_operand:SI 0 "s_register_operand" "=r")
7994 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7995 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7997 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7998 [(set_attr "conds" "use")
7999 (set_attr "length" "8")]
8002 (define_expand "cstoresi4"
8003 [(set (match_operand:SI 0 "s_register_operand" "")
8004 (match_operator:SI 1 "arm_comparison_operator"
8005 [(match_operand:SI 2 "s_register_operand" "")
8006 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8007 "TARGET_32BIT || TARGET_THUMB1"
8009 rtx op3, scratch, scratch2;
8013 if (!arm_add_operand (operands[3], SImode))
8014 operands[3] = force_reg (SImode, operands[3]);
8015 emit_insn (gen_cstore_cc (operands[0], operands[1],
8016 operands[2], operands[3]));
8020 if (operands[3] == const0_rtx)
8022 switch (GET_CODE (operands[1]))
8025 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8029 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8033 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8034 NULL_RTX, 0, OPTAB_WIDEN);
8035 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8036 NULL_RTX, 0, OPTAB_WIDEN);
8037 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8038 operands[0], 1, OPTAB_WIDEN);
8042 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8044 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8045 NULL_RTX, 1, OPTAB_WIDEN);
8049 scratch = expand_binop (SImode, ashr_optab, operands[2],
8050 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8051 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8052 NULL_RTX, 0, OPTAB_WIDEN);
8053 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8057 /* LT is handled by generic code. No need for unsigned with 0. */
8064 switch (GET_CODE (operands[1]))
8067 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8068 NULL_RTX, 0, OPTAB_WIDEN);
8069 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8073 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8074 NULL_RTX, 0, OPTAB_WIDEN);
8075 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8079 op3 = force_reg (SImode, operands[3]);
8081 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8082 NULL_RTX, 1, OPTAB_WIDEN);
8083 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8084 NULL_RTX, 0, OPTAB_WIDEN);
8085 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8091 if (!thumb1_cmp_operand (op3, SImode))
8092 op3 = force_reg (SImode, op3);
8093 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8094 NULL_RTX, 0, OPTAB_WIDEN);
8095 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8096 NULL_RTX, 1, OPTAB_WIDEN);
8097 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8102 op3 = force_reg (SImode, operands[3]);
8103 scratch = force_reg (SImode, const0_rtx);
8104 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8110 if (!thumb1_cmp_operand (op3, SImode))
8111 op3 = force_reg (SImode, op3);
8112 scratch = force_reg (SImode, const0_rtx);
8113 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8119 if (!thumb1_cmp_operand (op3, SImode))
8120 op3 = force_reg (SImode, op3);
8121 scratch = gen_reg_rtx (SImode);
8122 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8126 op3 = force_reg (SImode, operands[3]);
8127 scratch = gen_reg_rtx (SImode);
8128 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8131 /* No good sequences for GT, LT. */
8138 (define_expand "cstoresf4"
8139 [(set (match_operand:SI 0 "s_register_operand" "")
8140 (match_operator:SI 1 "arm_comparison_operator"
8141 [(match_operand:SF 2 "s_register_operand" "")
8142 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8143 "TARGET_32BIT && TARGET_HARD_FLOAT"
8144 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8145 operands[2], operands[3])); DONE;"
8148 (define_expand "cstoredf4"
8149 [(set (match_operand:SI 0 "s_register_operand" "")
8150 (match_operator:SI 1 "arm_comparison_operator"
8151 [(match_operand:DF 2 "s_register_operand" "")
8152 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8153 "TARGET_32BIT && TARGET_HARD_FLOAT"
8154 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8155 operands[2], operands[3])); DONE;"
8158 ;; this uses the Cirrus DI compare instruction
8159 (define_expand "cstoredi4"
8160 [(set (match_operand:SI 0 "s_register_operand" "")
8161 (match_operator:SI 1 "arm_comparison_operator"
8162 [(match_operand:DI 2 "cirrus_fp_register" "")
8163 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8164 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8165 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8166 operands[2], operands[3])); DONE;"
8170 (define_expand "cstoresi_eq0_thumb1"
8172 [(set (match_operand:SI 0 "s_register_operand" "")
8173 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8175 (clobber (match_dup:SI 2))])]
8177 "operands[2] = gen_reg_rtx (SImode);"
8180 (define_expand "cstoresi_ne0_thumb1"
8182 [(set (match_operand:SI 0 "s_register_operand" "")
8183 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8185 (clobber (match_dup:SI 2))])]
8187 "operands[2] = gen_reg_rtx (SImode);"
8190 (define_insn "*cstoresi_eq0_thumb1_insn"
8191 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8192 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8194 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8197 neg\\t%0, %1\;adc\\t%0, %0, %1
8198 neg\\t%2, %1\;adc\\t%0, %1, %2"
8199 [(set_attr "length" "4")]
8202 (define_insn "*cstoresi_ne0_thumb1_insn"
8203 [(set (match_operand:SI 0 "s_register_operand" "=l")
8204 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8206 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8208 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8209 [(set_attr "length" "4")]
8212 ;; Used as part of the expansion of thumb ltu and gtu sequences
8213 (define_insn "cstoresi_nltu_thumb1"
8214 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8215 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8216 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8218 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8219 [(set_attr "length" "4")]
8222 (define_insn_and_split "cstoresi_ltu_thumb1"
8223 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8224 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8225 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8230 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8231 (set (match_dup 0) (neg:SI (match_dup 3)))]
8232 "operands[3] = gen_reg_rtx (SImode);"
8233 [(set_attr "length" "4")]
8236 ;; Used as part of the expansion of thumb les sequence.
8237 (define_insn "thumb1_addsi3_addgeu"
8238 [(set (match_operand:SI 0 "s_register_operand" "=l")
8239 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8240 (match_operand:SI 2 "s_register_operand" "l"))
8241 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8242 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8244 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8245 [(set_attr "length" "4")]
8249 ;; Conditional move insns
8251 (define_expand "movsicc"
8252 [(set (match_operand:SI 0 "s_register_operand" "")
8253 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8254 (match_operand:SI 2 "arm_not_operand" "")
8255 (match_operand:SI 3 "arm_not_operand" "")))]
8259 enum rtx_code code = GET_CODE (operands[1]);
8262 if (code == UNEQ || code == LTGT)
8265 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8266 XEXP (operands[1], 1));
8267 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8271 (define_expand "movsfcc"
8272 [(set (match_operand:SF 0 "s_register_operand" "")
8273 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8274 (match_operand:SF 2 "s_register_operand" "")
8275 (match_operand:SF 3 "nonmemory_operand" "")))]
8276 "TARGET_32BIT && TARGET_HARD_FLOAT"
8279 enum rtx_code code = GET_CODE (operands[1]);
8282 if (code == UNEQ || code == LTGT)
8285 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8286 Otherwise, ensure it is a valid FP add operand */
8287 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8288 || (!arm_float_add_operand (operands[3], SFmode)))
8289 operands[3] = force_reg (SFmode, operands[3]);
8291 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8292 XEXP (operands[1], 1));
8293 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8297 (define_expand "movdfcc"
8298 [(set (match_operand:DF 0 "s_register_operand" "")
8299 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8300 (match_operand:DF 2 "s_register_operand" "")
8301 (match_operand:DF 3 "arm_float_add_operand" "")))]
8302 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8305 enum rtx_code code = GET_CODE (operands[1]);
8308 if (code == UNEQ || code == LTGT)
8311 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8312 XEXP (operands[1], 1));
8313 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8317 (define_insn "*movsicc_insn"
8318 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8320 (match_operator 3 "arm_comparison_operator"
8321 [(match_operand 4 "cc_register" "") (const_int 0)])
8322 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8323 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8330 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8331 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8332 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8333 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8334 [(set_attr "length" "4,4,4,4,8,8,8,8")
8335 (set_attr "conds" "use")]
8338 (define_insn "*movsfcc_soft_insn"
8339 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8340 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8341 [(match_operand 4 "cc_register" "") (const_int 0)])
8342 (match_operand:SF 1 "s_register_operand" "0,r")
8343 (match_operand:SF 2 "s_register_operand" "r,0")))]
8344 "TARGET_ARM && TARGET_SOFT_FLOAT"
8348 [(set_attr "conds" "use")]
8352 ;; Jump and linkage insns
8354 (define_expand "jump"
8356 (label_ref (match_operand 0 "" "")))]
8361 (define_insn "*arm_jump"
8363 (label_ref (match_operand 0 "" "")))]
8367 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8369 arm_ccfsm_state += 2;
8372 return \"b%?\\t%l0\";
8375 [(set_attr "predicable" "yes")]
8378 (define_insn "*thumb_jump"
8380 (label_ref (match_operand 0 "" "")))]
8383 if (get_attr_length (insn) == 2)
8385 return \"bl\\t%l0\\t%@ far jump\";
8387 [(set (attr "far_jump")
8389 (eq_attr "length" "4")
8390 (const_string "yes")
8391 (const_string "no")))
8392 (set (attr "length")
8394 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8395 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8400 (define_expand "call"
8401 [(parallel [(call (match_operand 0 "memory_operand" "")
8402 (match_operand 1 "general_operand" ""))
8403 (use (match_operand 2 "" ""))
8404 (clobber (reg:SI LR_REGNUM))])]
8410 /* In an untyped call, we can get NULL for operand 2. */
8411 if (operands[2] == NULL_RTX)
8412 operands[2] = const0_rtx;
8414 /* Decide if we should generate indirect calls by loading the
8415 32-bit address of the callee into a register before performing the
8417 callee = XEXP (operands[0], 0);
8418 if (GET_CODE (callee) == SYMBOL_REF
8419 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8421 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8423 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8424 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8429 (define_expand "call_internal"
8430 [(parallel [(call (match_operand 0 "memory_operand" "")
8431 (match_operand 1 "general_operand" ""))
8432 (use (match_operand 2 "" ""))
8433 (clobber (reg:SI LR_REGNUM))])])
8435 (define_insn "*call_reg_armv5"
8436 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8437 (match_operand 1 "" ""))
8438 (use (match_operand 2 "" ""))
8439 (clobber (reg:SI LR_REGNUM))]
8440 "TARGET_ARM && arm_arch5"
8442 [(set_attr "type" "call")]
8445 (define_insn "*call_reg_arm"
8446 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8447 (match_operand 1 "" ""))
8448 (use (match_operand 2 "" ""))
8449 (clobber (reg:SI LR_REGNUM))]
8450 "TARGET_ARM && !arm_arch5"
8452 return output_call (operands);
8454 ;; length is worst case, normally it is only two
8455 [(set_attr "length" "12")
8456 (set_attr "type" "call")]
8460 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8461 ;; considered a function call by the branch predictor of some cores (PR40887).
8462 ;; Falls back to blx rN (*call_reg_armv5).
8464 (define_insn "*call_mem"
8465 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8466 (match_operand 1 "" ""))
8467 (use (match_operand 2 "" ""))
8468 (clobber (reg:SI LR_REGNUM))]
8469 "TARGET_ARM && !arm_arch5"
8471 return output_call_mem (operands);
8473 [(set_attr "length" "12")
8474 (set_attr "type" "call")]
8477 (define_insn "*call_reg_thumb1_v5"
8478 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8479 (match_operand 1 "" ""))
8480 (use (match_operand 2 "" ""))
8481 (clobber (reg:SI LR_REGNUM))]
8482 "TARGET_THUMB1 && arm_arch5"
8484 [(set_attr "length" "2")
8485 (set_attr "type" "call")]
8488 (define_insn "*call_reg_thumb1"
8489 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8490 (match_operand 1 "" ""))
8491 (use (match_operand 2 "" ""))
8492 (clobber (reg:SI LR_REGNUM))]
8493 "TARGET_THUMB1 && !arm_arch5"
8496 if (!TARGET_CALLER_INTERWORKING)
8497 return thumb_call_via_reg (operands[0]);
8498 else if (operands[1] == const0_rtx)
8499 return \"bl\\t%__interwork_call_via_%0\";
8500 else if (frame_pointer_needed)
8501 return \"bl\\t%__interwork_r7_call_via_%0\";
8503 return \"bl\\t%__interwork_r11_call_via_%0\";
8505 [(set_attr "type" "call")]
8508 (define_expand "call_value"
8509 [(parallel [(set (match_operand 0 "" "")
8510 (call (match_operand 1 "memory_operand" "")
8511 (match_operand 2 "general_operand" "")))
8512 (use (match_operand 3 "" ""))
8513 (clobber (reg:SI LR_REGNUM))])]
8519 /* In an untyped call, we can get NULL for operand 2. */
8520 if (operands[3] == 0)
8521 operands[3] = const0_rtx;
8523 /* Decide if we should generate indirect calls by loading the
8524 32-bit address of the callee into a register before performing the
8526 callee = XEXP (operands[1], 0);
8527 if (GET_CODE (callee) == SYMBOL_REF
8528 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8530 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8532 pat = gen_call_value_internal (operands[0], operands[1],
8533 operands[2], operands[3]);
8534 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8539 (define_expand "call_value_internal"
8540 [(parallel [(set (match_operand 0 "" "")
8541 (call (match_operand 1 "memory_operand" "")
8542 (match_operand 2 "general_operand" "")))
8543 (use (match_operand 3 "" ""))
8544 (clobber (reg:SI LR_REGNUM))])])
8546 (define_insn "*call_value_reg_armv5"
8547 [(set (match_operand 0 "" "")
8548 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8549 (match_operand 2 "" "")))
8550 (use (match_operand 3 "" ""))
8551 (clobber (reg:SI LR_REGNUM))]
8552 "TARGET_ARM && arm_arch5"
8554 [(set_attr "type" "call")]
8557 (define_insn "*call_value_reg_arm"
8558 [(set (match_operand 0 "" "")
8559 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8560 (match_operand 2 "" "")))
8561 (use (match_operand 3 "" ""))
8562 (clobber (reg:SI LR_REGNUM))]
8563 "TARGET_ARM && !arm_arch5"
8565 return output_call (&operands[1]);
8567 [(set_attr "length" "12")
8568 (set_attr "type" "call")]
8571 ;; Note: see *call_mem
8573 (define_insn "*call_value_mem"
8574 [(set (match_operand 0 "" "")
8575 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8576 (match_operand 2 "" "")))
8577 (use (match_operand 3 "" ""))
8578 (clobber (reg:SI LR_REGNUM))]
8579 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8581 return output_call_mem (&operands[1]);
8583 [(set_attr "length" "12")
8584 (set_attr "type" "call")]
8587 (define_insn "*call_value_reg_thumb1_v5"
8588 [(set (match_operand 0 "" "")
8589 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8590 (match_operand 2 "" "")))
8591 (use (match_operand 3 "" ""))
8592 (clobber (reg:SI LR_REGNUM))]
8593 "TARGET_THUMB1 && arm_arch5"
8595 [(set_attr "length" "2")
8596 (set_attr "type" "call")]
8599 (define_insn "*call_value_reg_thumb1"
8600 [(set (match_operand 0 "" "")
8601 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8602 (match_operand 2 "" "")))
8603 (use (match_operand 3 "" ""))
8604 (clobber (reg:SI LR_REGNUM))]
8605 "TARGET_THUMB1 && !arm_arch5"
8608 if (!TARGET_CALLER_INTERWORKING)
8609 return thumb_call_via_reg (operands[1]);
8610 else if (operands[2] == const0_rtx)
8611 return \"bl\\t%__interwork_call_via_%1\";
8612 else if (frame_pointer_needed)
8613 return \"bl\\t%__interwork_r7_call_via_%1\";
8615 return \"bl\\t%__interwork_r11_call_via_%1\";
8617 [(set_attr "type" "call")]
8620 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8621 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8623 (define_insn "*call_symbol"
8624 [(call (mem:SI (match_operand:SI 0 "" ""))
8625 (match_operand 1 "" ""))
8626 (use (match_operand 2 "" ""))
8627 (clobber (reg:SI LR_REGNUM))]
8629 && (GET_CODE (operands[0]) == SYMBOL_REF)
8630 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8633 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8635 [(set_attr "type" "call")]
8638 (define_insn "*call_value_symbol"
8639 [(set (match_operand 0 "" "")
8640 (call (mem:SI (match_operand:SI 1 "" ""))
8641 (match_operand:SI 2 "" "")))
8642 (use (match_operand 3 "" ""))
8643 (clobber (reg:SI LR_REGNUM))]
8645 && (GET_CODE (operands[1]) == SYMBOL_REF)
8646 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8649 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8651 [(set_attr "type" "call")]
8654 (define_insn "*call_insn"
8655 [(call (mem:SI (match_operand:SI 0 "" ""))
8656 (match_operand:SI 1 "" ""))
8657 (use (match_operand 2 "" ""))
8658 (clobber (reg:SI LR_REGNUM))]
8660 && GET_CODE (operands[0]) == SYMBOL_REF
8661 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8663 [(set_attr "length" "4")
8664 (set_attr "type" "call")]
8667 (define_insn "*call_value_insn"
8668 [(set (match_operand 0 "" "")
8669 (call (mem:SI (match_operand 1 "" ""))
8670 (match_operand 2 "" "")))
8671 (use (match_operand 3 "" ""))
8672 (clobber (reg:SI LR_REGNUM))]
8674 && GET_CODE (operands[1]) == SYMBOL_REF
8675 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8677 [(set_attr "length" "4")
8678 (set_attr "type" "call")]
8681 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8682 (define_expand "sibcall"
8683 [(parallel [(call (match_operand 0 "memory_operand" "")
8684 (match_operand 1 "general_operand" ""))
8686 (use (match_operand 2 "" ""))])]
8690 if (operands[2] == NULL_RTX)
8691 operands[2] = const0_rtx;
8695 (define_expand "sibcall_value"
8696 [(parallel [(set (match_operand 0 "" "")
8697 (call (match_operand 1 "memory_operand" "")
8698 (match_operand 2 "general_operand" "")))
8700 (use (match_operand 3 "" ""))])]
8704 if (operands[3] == NULL_RTX)
8705 operands[3] = const0_rtx;
8709 (define_insn "*sibcall_insn"
8710 [(call (mem:SI (match_operand:SI 0 "" "X"))
8711 (match_operand 1 "" ""))
8713 (use (match_operand 2 "" ""))]
8714 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8716 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8718 [(set_attr "type" "call")]
8721 (define_insn "*sibcall_value_insn"
8722 [(set (match_operand 0 "" "")
8723 (call (mem:SI (match_operand:SI 1 "" "X"))
8724 (match_operand 2 "" "")))
8726 (use (match_operand 3 "" ""))]
8727 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8729 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8731 [(set_attr "type" "call")]
8734 ;; Often the return insn will be the same as loading from memory, so set attr
8735 (define_insn "return"
8737 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8740 if (arm_ccfsm_state == 2)
8742 arm_ccfsm_state += 2;
8745 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8747 [(set_attr "type" "load1")
8748 (set_attr "length" "12")
8749 (set_attr "predicable" "yes")]
8752 (define_insn "*cond_return"
8754 (if_then_else (match_operator 0 "arm_comparison_operator"
8755 [(match_operand 1 "cc_register" "") (const_int 0)])
8758 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8761 if (arm_ccfsm_state == 2)
8763 arm_ccfsm_state += 2;
8766 return output_return_instruction (operands[0], TRUE, FALSE);
8768 [(set_attr "conds" "use")
8769 (set_attr "length" "12")
8770 (set_attr "type" "load1")]
8773 (define_insn "*cond_return_inverted"
8775 (if_then_else (match_operator 0 "arm_comparison_operator"
8776 [(match_operand 1 "cc_register" "") (const_int 0)])
8779 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8782 if (arm_ccfsm_state == 2)
8784 arm_ccfsm_state += 2;
8787 return output_return_instruction (operands[0], TRUE, TRUE);
8789 [(set_attr "conds" "use")
8790 (set_attr "length" "12")
8791 (set_attr "type" "load1")]
8794 ;; Generate a sequence of instructions to determine if the processor is
8795 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8798 (define_expand "return_addr_mask"
8800 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8802 (set (match_operand:SI 0 "s_register_operand" "")
8803 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8805 (const_int 67108860)))] ; 0x03fffffc
8808 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8811 (define_insn "*check_arch2"
8812 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8813 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8816 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8817 [(set_attr "length" "8")
8818 (set_attr "conds" "set")]
8821 ;; Call subroutine returning any type.
8823 (define_expand "untyped_call"
8824 [(parallel [(call (match_operand 0 "" "")
8826 (match_operand 1 "" "")
8827 (match_operand 2 "" "")])]
8832 rtx par = gen_rtx_PARALLEL (VOIDmode,
8833 rtvec_alloc (XVECLEN (operands[2], 0)));
8834 rtx addr = gen_reg_rtx (Pmode);
8838 emit_move_insn (addr, XEXP (operands[1], 0));
8839 mem = change_address (operands[1], BLKmode, addr);
8841 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8843 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8845 /* Default code only uses r0 as a return value, but we could
8846 be using anything up to 4 registers. */
8847 if (REGNO (src) == R0_REGNUM)
8848 src = gen_rtx_REG (TImode, R0_REGNUM);
8850 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8852 size += GET_MODE_SIZE (GET_MODE (src));
8855 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8860 for (i = 0; i < XVECLEN (par, 0); i++)
8862 HOST_WIDE_INT offset = 0;
8863 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8866 emit_move_insn (addr, plus_constant (addr, size));
8868 mem = change_address (mem, GET_MODE (reg), NULL);
8869 if (REGNO (reg) == R0_REGNUM)
8871 /* On thumb we have to use a write-back instruction. */
8872 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8873 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8874 size = TARGET_ARM ? 16 : 0;
8878 emit_move_insn (mem, reg);
8879 size = GET_MODE_SIZE (GET_MODE (reg));
8883 /* The optimizer does not know that the call sets the function value
8884 registers we stored in the result block. We avoid problems by
8885 claiming that all hard registers are used and clobbered at this
8887 emit_insn (gen_blockage ());
8893 (define_expand "untyped_return"
8894 [(match_operand:BLK 0 "memory_operand" "")
8895 (match_operand 1 "" "")]
8900 rtx addr = gen_reg_rtx (Pmode);
8904 emit_move_insn (addr, XEXP (operands[0], 0));
8905 mem = change_address (operands[0], BLKmode, addr);
8907 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8909 HOST_WIDE_INT offset = 0;
8910 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8913 emit_move_insn (addr, plus_constant (addr, size));
8915 mem = change_address (mem, GET_MODE (reg), NULL);
8916 if (REGNO (reg) == R0_REGNUM)
8918 /* On thumb we have to use a write-back instruction. */
8919 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8920 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8921 size = TARGET_ARM ? 16 : 0;
8925 emit_move_insn (reg, mem);
8926 size = GET_MODE_SIZE (GET_MODE (reg));
8930 /* Emit USE insns before the return. */
8931 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8932 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8934 /* Construct the return. */
8935 expand_naked_return ();
8941 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8942 ;; all of memory. This blocks insns from being moved across this point.
8944 (define_insn "blockage"
8945 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8948 [(set_attr "length" "0")
8949 (set_attr "type" "block")]
8952 (define_expand "casesi"
8953 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8954 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8955 (match_operand:SI 2 "const_int_operand" "") ; total range
8956 (match_operand:SI 3 "" "") ; table label
8957 (match_operand:SI 4 "" "")] ; Out of range label
8958 "TARGET_32BIT || optimize_size || flag_pic"
8961 enum insn_code code;
8962 if (operands[1] != const0_rtx)
8964 rtx reg = gen_reg_rtx (SImode);
8966 emit_insn (gen_addsi3 (reg, operands[0],
8967 GEN_INT (-INTVAL (operands[1]))));
8972 code = CODE_FOR_arm_casesi_internal;
8973 else if (TARGET_THUMB1)
8974 code = CODE_FOR_thumb1_casesi_internal_pic;
8976 code = CODE_FOR_thumb2_casesi_internal_pic;
8978 code = CODE_FOR_thumb2_casesi_internal;
8980 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8981 operands[2] = force_reg (SImode, operands[2]);
8983 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8984 operands[3], operands[4]));
8989 ;; The USE in this pattern is needed to tell flow analysis that this is
8990 ;; a CASESI insn. It has no other purpose.
8991 (define_insn "arm_casesi_internal"
8992 [(parallel [(set (pc)
8994 (leu (match_operand:SI 0 "s_register_operand" "r")
8995 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8996 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8997 (label_ref (match_operand 2 "" ""))))
8998 (label_ref (match_operand 3 "" ""))))
8999 (clobber (reg:CC CC_REGNUM))
9000 (use (label_ref (match_dup 2)))])]
9004 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9005 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9007 [(set_attr "conds" "clob")
9008 (set_attr "length" "12")]
9011 (define_expand "thumb1_casesi_internal_pic"
9012 [(match_operand:SI 0 "s_register_operand" "")
9013 (match_operand:SI 1 "thumb1_cmp_operand" "")
9014 (match_operand 2 "" "")
9015 (match_operand 3 "" "")]
9019 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9020 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9022 reg0 = gen_rtx_REG (SImode, 0);
9023 emit_move_insn (reg0, operands[0]);
9024 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9029 (define_insn "thumb1_casesi_dispatch"
9030 [(parallel [(set (pc) (unspec [(reg:SI 0)
9031 (label_ref (match_operand 0 "" ""))
9032 ;; (label_ref (match_operand 1 "" ""))
9034 UNSPEC_THUMB1_CASESI))
9035 (clobber (reg:SI IP_REGNUM))
9036 (clobber (reg:SI LR_REGNUM))])]
9038 "* return thumb1_output_casesi(operands);"
9039 [(set_attr "length" "4")]
9042 (define_expand "indirect_jump"
9044 (match_operand:SI 0 "s_register_operand" ""))]
9047 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9048 address and use bx. */
9052 tmp = gen_reg_rtx (SImode);
9053 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9059 ;; NB Never uses BX.
9060 (define_insn "*arm_indirect_jump"
9062 (match_operand:SI 0 "s_register_operand" "r"))]
9064 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9065 [(set_attr "predicable" "yes")]
9068 (define_insn "*load_indirect_jump"
9070 (match_operand:SI 0 "memory_operand" "m"))]
9072 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9073 [(set_attr "type" "load1")
9074 (set_attr "pool_range" "4096")
9075 (set_attr "neg_pool_range" "4084")
9076 (set_attr "predicable" "yes")]
9079 ;; NB Never uses BX.
9080 (define_insn "*thumb1_indirect_jump"
9082 (match_operand:SI 0 "register_operand" "l*r"))]
9085 [(set_attr "conds" "clob")
9086 (set_attr "length" "2")]
9096 if (TARGET_UNIFIED_ASM)
9099 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9100 return \"mov\\tr8, r8\";
9102 [(set (attr "length")
9103 (if_then_else (eq_attr "is_thumb" "yes")
9109 ;; Patterns to allow combination of arithmetic, cond code and shifts
9111 (define_insn "*arith_shiftsi"
9112 [(set (match_operand:SI 0 "s_register_operand" "=r")
9113 (match_operator:SI 1 "shiftable_operator"
9114 [(match_operator:SI 3 "shift_operator"
9115 [(match_operand:SI 4 "s_register_operand" "r")
9116 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9117 (match_operand:SI 2 "s_register_operand" "r")]))]
9119 "%i1%?\\t%0, %2, %4%S3"
9120 [(set_attr "predicable" "yes")
9121 (set_attr "shift" "4")
9122 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9123 (const_string "alu_shift")
9124 (const_string "alu_shift_reg")))]
9128 [(set (match_operand:SI 0 "s_register_operand" "")
9129 (match_operator:SI 1 "shiftable_operator"
9130 [(match_operator:SI 2 "shiftable_operator"
9131 [(match_operator:SI 3 "shift_operator"
9132 [(match_operand:SI 4 "s_register_operand" "")
9133 (match_operand:SI 5 "reg_or_int_operand" "")])
9134 (match_operand:SI 6 "s_register_operand" "")])
9135 (match_operand:SI 7 "arm_rhs_operand" "")]))
9136 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9139 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9142 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9145 (define_insn "*arith_shiftsi_compare0"
9146 [(set (reg:CC_NOOV CC_REGNUM)
9147 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9148 [(match_operator:SI 3 "shift_operator"
9149 [(match_operand:SI 4 "s_register_operand" "r")
9150 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9151 (match_operand:SI 2 "s_register_operand" "r")])
9153 (set (match_operand:SI 0 "s_register_operand" "=r")
9154 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9157 "%i1%.\\t%0, %2, %4%S3"
9158 [(set_attr "conds" "set")
9159 (set_attr "shift" "4")
9160 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9161 (const_string "alu_shift")
9162 (const_string "alu_shift_reg")))]
9165 (define_insn "*arith_shiftsi_compare0_scratch"
9166 [(set (reg:CC_NOOV CC_REGNUM)
9167 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9168 [(match_operator:SI 3 "shift_operator"
9169 [(match_operand:SI 4 "s_register_operand" "r")
9170 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9171 (match_operand:SI 2 "s_register_operand" "r")])
9173 (clobber (match_scratch:SI 0 "=r"))]
9175 "%i1%.\\t%0, %2, %4%S3"
9176 [(set_attr "conds" "set")
9177 (set_attr "shift" "4")
9178 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9179 (const_string "alu_shift")
9180 (const_string "alu_shift_reg")))]
9183 (define_insn "*sub_shiftsi"
9184 [(set (match_operand:SI 0 "s_register_operand" "=r")
9185 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9186 (match_operator:SI 2 "shift_operator"
9187 [(match_operand:SI 3 "s_register_operand" "r")
9188 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9190 "sub%?\\t%0, %1, %3%S2"
9191 [(set_attr "predicable" "yes")
9192 (set_attr "shift" "3")
9193 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9194 (const_string "alu_shift")
9195 (const_string "alu_shift_reg")))]
9198 (define_insn "*sub_shiftsi_compare0"
9199 [(set (reg:CC_NOOV CC_REGNUM)
9201 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9202 (match_operator:SI 2 "shift_operator"
9203 [(match_operand:SI 3 "s_register_operand" "r")
9204 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9206 (set (match_operand:SI 0 "s_register_operand" "=r")
9207 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9210 "sub%.\\t%0, %1, %3%S2"
9211 [(set_attr "conds" "set")
9212 (set_attr "shift" "3")
9213 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9214 (const_string "alu_shift")
9215 (const_string "alu_shift_reg")))]
9218 (define_insn "*sub_shiftsi_compare0_scratch"
9219 [(set (reg:CC_NOOV CC_REGNUM)
9221 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9222 (match_operator:SI 2 "shift_operator"
9223 [(match_operand:SI 3 "s_register_operand" "r")
9224 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9226 (clobber (match_scratch:SI 0 "=r"))]
9228 "sub%.\\t%0, %1, %3%S2"
9229 [(set_attr "conds" "set")
9230 (set_attr "shift" "3")
9231 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9232 (const_string "alu_shift")
9233 (const_string "alu_shift_reg")))]
9238 (define_insn "*and_scc"
9239 [(set (match_operand:SI 0 "s_register_operand" "=r")
9240 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9241 [(match_operand 3 "cc_register" "") (const_int 0)])
9242 (match_operand:SI 2 "s_register_operand" "r")))]
9244 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9245 [(set_attr "conds" "use")
9246 (set_attr "length" "8")]
9249 (define_insn "*ior_scc"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9251 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9252 [(match_operand 3 "cc_register" "") (const_int 0)])
9253 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9257 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9258 [(set_attr "conds" "use")
9259 (set_attr "length" "4,8")]
9262 (define_insn "*compare_scc"
9263 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9264 (match_operator:SI 1 "arm_comparison_operator"
9265 [(match_operand:SI 2 "s_register_operand" "r,r")
9266 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9267 (clobber (reg:CC CC_REGNUM))]
9270 if (operands[3] == const0_rtx)
9272 if (GET_CODE (operands[1]) == LT)
9273 return \"mov\\t%0, %2, lsr #31\";
9275 if (GET_CODE (operands[1]) == GE)
9276 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9278 if (GET_CODE (operands[1]) == EQ)
9279 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9282 if (GET_CODE (operands[1]) == NE)
9284 if (which_alternative == 1)
9285 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9286 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9288 if (which_alternative == 1)
9289 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9291 output_asm_insn (\"cmp\\t%2, %3\", operands);
9292 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9294 [(set_attr "conds" "clob")
9295 (set_attr "length" "12")]
9298 (define_insn "*cond_move"
9299 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9300 (if_then_else:SI (match_operator 3 "equality_operator"
9301 [(match_operator 4 "arm_comparison_operator"
9302 [(match_operand 5 "cc_register" "") (const_int 0)])
9304 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9305 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9308 if (GET_CODE (operands[3]) == NE)
9310 if (which_alternative != 1)
9311 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9312 if (which_alternative != 0)
9313 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9316 if (which_alternative != 0)
9317 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9318 if (which_alternative != 1)
9319 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9322 [(set_attr "conds" "use")
9323 (set_attr "length" "4,4,8")]
9326 (define_insn "*cond_arith"
9327 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9328 (match_operator:SI 5 "shiftable_operator"
9329 [(match_operator:SI 4 "arm_comparison_operator"
9330 [(match_operand:SI 2 "s_register_operand" "r,r")
9331 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9332 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9333 (clobber (reg:CC CC_REGNUM))]
9336 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9337 return \"%i5\\t%0, %1, %2, lsr #31\";
9339 output_asm_insn (\"cmp\\t%2, %3\", operands);
9340 if (GET_CODE (operands[5]) == AND)
9341 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9342 else if (GET_CODE (operands[5]) == MINUS)
9343 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9344 else if (which_alternative != 0)
9345 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9346 return \"%i5%d4\\t%0, %1, #1\";
9348 [(set_attr "conds" "clob")
9349 (set_attr "length" "12")]
9352 (define_insn "*cond_sub"
9353 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9354 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9355 (match_operator:SI 4 "arm_comparison_operator"
9356 [(match_operand:SI 2 "s_register_operand" "r,r")
9357 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9358 (clobber (reg:CC CC_REGNUM))]
9361 output_asm_insn (\"cmp\\t%2, %3\", operands);
9362 if (which_alternative != 0)
9363 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9364 return \"sub%d4\\t%0, %1, #1\";
9366 [(set_attr "conds" "clob")
9367 (set_attr "length" "8,12")]
9370 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9371 (define_insn "*cmp_ite0"
9372 [(set (match_operand 6 "dominant_cc_register" "")
9375 (match_operator 4 "arm_comparison_operator"
9376 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9377 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9378 (match_operator:SI 5 "arm_comparison_operator"
9379 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9380 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9386 static const char * const opcodes[4][2] =
9388 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9389 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9390 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9391 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9392 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9393 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9394 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9395 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9398 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9400 return opcodes[which_alternative][swap];
9402 [(set_attr "conds" "set")
9403 (set_attr "length" "8")]
9406 (define_insn "*cmp_ite1"
9407 [(set (match_operand 6 "dominant_cc_register" "")
9410 (match_operator 4 "arm_comparison_operator"
9411 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9412 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9413 (match_operator:SI 5 "arm_comparison_operator"
9414 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9415 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9421 static const char * const opcodes[4][2] =
9423 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9424 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9425 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9426 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9427 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9428 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9429 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9430 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9433 comparison_dominates_p (GET_CODE (operands[5]),
9434 reverse_condition (GET_CODE (operands[4])));
9436 return opcodes[which_alternative][swap];
9438 [(set_attr "conds" "set")
9439 (set_attr "length" "8")]
9442 (define_insn "*cmp_and"
9443 [(set (match_operand 6 "dominant_cc_register" "")
9446 (match_operator 4 "arm_comparison_operator"
9447 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9448 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9449 (match_operator:SI 5 "arm_comparison_operator"
9450 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9451 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9456 static const char *const opcodes[4][2] =
9458 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9459 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9460 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9461 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9462 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9463 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9464 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9465 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9468 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9470 return opcodes[which_alternative][swap];
9472 [(set_attr "conds" "set")
9473 (set_attr "predicable" "no")
9474 (set_attr "length" "8")]
9477 (define_insn "*cmp_ior"
9478 [(set (match_operand 6 "dominant_cc_register" "")
9481 (match_operator 4 "arm_comparison_operator"
9482 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9483 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9484 (match_operator:SI 5 "arm_comparison_operator"
9485 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9486 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9491 static const char *const opcodes[4][2] =
9493 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9494 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9495 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9496 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9497 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9498 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9499 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9500 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9503 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9505 return opcodes[which_alternative][swap];
9508 [(set_attr "conds" "set")
9509 (set_attr "length" "8")]
9512 (define_insn_and_split "*ior_scc_scc"
9513 [(set (match_operand:SI 0 "s_register_operand" "=r")
9514 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9515 [(match_operand:SI 1 "s_register_operand" "r")
9516 (match_operand:SI 2 "arm_add_operand" "rIL")])
9517 (match_operator:SI 6 "arm_comparison_operator"
9518 [(match_operand:SI 4 "s_register_operand" "r")
9519 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9520 (clobber (reg:CC CC_REGNUM))]
9522 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9525 "TARGET_ARM && reload_completed"
9529 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9530 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9532 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9534 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9537 [(set_attr "conds" "clob")
9538 (set_attr "length" "16")])
9540 ; If the above pattern is followed by a CMP insn, then the compare is
9541 ; redundant, since we can rework the conditional instruction that follows.
9542 (define_insn_and_split "*ior_scc_scc_cmp"
9543 [(set (match_operand 0 "dominant_cc_register" "")
9544 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9545 [(match_operand:SI 1 "s_register_operand" "r")
9546 (match_operand:SI 2 "arm_add_operand" "rIL")])
9547 (match_operator:SI 6 "arm_comparison_operator"
9548 [(match_operand:SI 4 "s_register_operand" "r")
9549 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9551 (set (match_operand:SI 7 "s_register_operand" "=r")
9552 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9553 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9556 "TARGET_ARM && reload_completed"
9560 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9561 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9563 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9565 [(set_attr "conds" "set")
9566 (set_attr "length" "16")])
9568 (define_insn_and_split "*and_scc_scc"
9569 [(set (match_operand:SI 0 "s_register_operand" "=r")
9570 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9571 [(match_operand:SI 1 "s_register_operand" "r")
9572 (match_operand:SI 2 "arm_add_operand" "rIL")])
9573 (match_operator:SI 6 "arm_comparison_operator"
9574 [(match_operand:SI 4 "s_register_operand" "r")
9575 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9576 (clobber (reg:CC CC_REGNUM))]
9578 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9581 "TARGET_ARM && reload_completed
9582 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9587 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9588 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9590 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9592 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9595 [(set_attr "conds" "clob")
9596 (set_attr "length" "16")])
9598 ; If the above pattern is followed by a CMP insn, then the compare is
9599 ; redundant, since we can rework the conditional instruction that follows.
9600 (define_insn_and_split "*and_scc_scc_cmp"
9601 [(set (match_operand 0 "dominant_cc_register" "")
9602 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9603 [(match_operand:SI 1 "s_register_operand" "r")
9604 (match_operand:SI 2 "arm_add_operand" "rIL")])
9605 (match_operator:SI 6 "arm_comparison_operator"
9606 [(match_operand:SI 4 "s_register_operand" "r")
9607 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9609 (set (match_operand:SI 7 "s_register_operand" "=r")
9610 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9611 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9614 "TARGET_ARM && reload_completed"
9618 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9619 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9621 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9623 [(set_attr "conds" "set")
9624 (set_attr "length" "16")])
9626 ;; If there is no dominance in the comparison, then we can still save an
9627 ;; instruction in the AND case, since we can know that the second compare
9628 ;; need only zero the value if false (if true, then the value is already
9630 (define_insn_and_split "*and_scc_scc_nodom"
9631 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9632 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9633 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9634 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9635 (match_operator:SI 6 "arm_comparison_operator"
9636 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9637 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9638 (clobber (reg:CC CC_REGNUM))]
9640 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9643 "TARGET_ARM && reload_completed"
9644 [(parallel [(set (match_dup 0)
9645 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9646 (clobber (reg:CC CC_REGNUM))])
9647 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9649 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9652 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9653 operands[4], operands[5]),
9655 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9657 [(set_attr "conds" "clob")
9658 (set_attr "length" "20")])
9661 [(set (reg:CC_NOOV CC_REGNUM)
9662 (compare:CC_NOOV (ior:SI
9663 (and:SI (match_operand:SI 0 "s_register_operand" "")
9665 (match_operator:SI 1 "arm_comparison_operator"
9666 [(match_operand:SI 2 "s_register_operand" "")
9667 (match_operand:SI 3 "arm_add_operand" "")]))
9669 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9672 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9674 (set (reg:CC_NOOV CC_REGNUM)
9675 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9680 [(set (reg:CC_NOOV CC_REGNUM)
9681 (compare:CC_NOOV (ior:SI
9682 (match_operator:SI 1 "arm_comparison_operator"
9683 [(match_operand:SI 2 "s_register_operand" "")
9684 (match_operand:SI 3 "arm_add_operand" "")])
9685 (and:SI (match_operand:SI 0 "s_register_operand" "")
9688 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9691 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9693 (set (reg:CC_NOOV CC_REGNUM)
9694 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9697 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9699 (define_insn "*negscc"
9700 [(set (match_operand:SI 0 "s_register_operand" "=r")
9701 (neg:SI (match_operator 3 "arm_comparison_operator"
9702 [(match_operand:SI 1 "s_register_operand" "r")
9703 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9704 (clobber (reg:CC CC_REGNUM))]
9707 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9708 return \"mov\\t%0, %1, asr #31\";
9710 if (GET_CODE (operands[3]) == NE)
9711 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9713 output_asm_insn (\"cmp\\t%1, %2\", operands);
9714 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9715 return \"mvn%d3\\t%0, #0\";
9717 [(set_attr "conds" "clob")
9718 (set_attr "length" "12")]
9721 (define_insn "movcond"
9722 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9724 (match_operator 5 "arm_comparison_operator"
9725 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9726 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9727 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9728 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9729 (clobber (reg:CC CC_REGNUM))]
9732 if (GET_CODE (operands[5]) == LT
9733 && (operands[4] == const0_rtx))
9735 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9737 if (operands[2] == const0_rtx)
9738 return \"and\\t%0, %1, %3, asr #31\";
9739 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9741 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9743 if (operands[1] == const0_rtx)
9744 return \"bic\\t%0, %2, %3, asr #31\";
9745 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9747 /* The only case that falls through to here is when both ops 1 & 2
9751 if (GET_CODE (operands[5]) == GE
9752 && (operands[4] == const0_rtx))
9754 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9756 if (operands[2] == const0_rtx)
9757 return \"bic\\t%0, %1, %3, asr #31\";
9758 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9760 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9762 if (operands[1] == const0_rtx)
9763 return \"and\\t%0, %2, %3, asr #31\";
9764 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9766 /* The only case that falls through to here is when both ops 1 & 2
9769 if (GET_CODE (operands[4]) == CONST_INT
9770 && !const_ok_for_arm (INTVAL (operands[4])))
9771 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9773 output_asm_insn (\"cmp\\t%3, %4\", operands);
9774 if (which_alternative != 0)
9775 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9776 if (which_alternative != 1)
9777 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9780 [(set_attr "conds" "clob")
9781 (set_attr "length" "8,8,12")]
9784 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9786 (define_insn "*ifcompare_plus_move"
9787 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9788 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9789 [(match_operand:SI 4 "s_register_operand" "r,r")
9790 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9792 (match_operand:SI 2 "s_register_operand" "r,r")
9793 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9794 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9795 (clobber (reg:CC CC_REGNUM))]
9798 [(set_attr "conds" "clob")
9799 (set_attr "length" "8,12")]
9802 (define_insn "*if_plus_move"
9803 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9805 (match_operator 4 "arm_comparison_operator"
9806 [(match_operand 5 "cc_register" "") (const_int 0)])
9808 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9809 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9810 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9814 sub%d4\\t%0, %2, #%n3
9815 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9816 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9817 [(set_attr "conds" "use")
9818 (set_attr "length" "4,4,8,8")
9819 (set_attr "type" "*,*,*,*")]
9822 (define_insn "*ifcompare_move_plus"
9823 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9824 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9825 [(match_operand:SI 4 "s_register_operand" "r,r")
9826 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9827 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9829 (match_operand:SI 2 "s_register_operand" "r,r")
9830 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9831 (clobber (reg:CC CC_REGNUM))]
9834 [(set_attr "conds" "clob")
9835 (set_attr "length" "8,12")]
9838 (define_insn "*if_move_plus"
9839 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9841 (match_operator 4 "arm_comparison_operator"
9842 [(match_operand 5 "cc_register" "") (const_int 0)])
9843 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9845 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9846 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9850 sub%D4\\t%0, %2, #%n3
9851 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9852 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9853 [(set_attr "conds" "use")
9854 (set_attr "length" "4,4,8,8")
9855 (set_attr "type" "*,*,*,*")]
9858 (define_insn "*ifcompare_arith_arith"
9859 [(set (match_operand:SI 0 "s_register_operand" "=r")
9860 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9861 [(match_operand:SI 5 "s_register_operand" "r")
9862 (match_operand:SI 6 "arm_add_operand" "rIL")])
9863 (match_operator:SI 8 "shiftable_operator"
9864 [(match_operand:SI 1 "s_register_operand" "r")
9865 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9866 (match_operator:SI 7 "shiftable_operator"
9867 [(match_operand:SI 3 "s_register_operand" "r")
9868 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9869 (clobber (reg:CC CC_REGNUM))]
9872 [(set_attr "conds" "clob")
9873 (set_attr "length" "12")]
9876 (define_insn "*if_arith_arith"
9877 [(set (match_operand:SI 0 "s_register_operand" "=r")
9878 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9879 [(match_operand 8 "cc_register" "") (const_int 0)])
9880 (match_operator:SI 6 "shiftable_operator"
9881 [(match_operand:SI 1 "s_register_operand" "r")
9882 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9883 (match_operator:SI 7 "shiftable_operator"
9884 [(match_operand:SI 3 "s_register_operand" "r")
9885 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9887 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9888 [(set_attr "conds" "use")
9889 (set_attr "length" "8")]
9892 (define_insn "*ifcompare_arith_move"
9893 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9894 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9895 [(match_operand:SI 2 "s_register_operand" "r,r")
9896 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9897 (match_operator:SI 7 "shiftable_operator"
9898 [(match_operand:SI 4 "s_register_operand" "r,r")
9899 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9900 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9901 (clobber (reg:CC CC_REGNUM))]
9904 /* If we have an operation where (op x 0) is the identity operation and
9905 the conditional operator is LT or GE and we are comparing against zero and
9906 everything is in registers then we can do this in two instructions. */
9907 if (operands[3] == const0_rtx
9908 && GET_CODE (operands[7]) != AND
9909 && GET_CODE (operands[5]) == REG
9910 && GET_CODE (operands[1]) == REG
9911 && REGNO (operands[1]) == REGNO (operands[4])
9912 && REGNO (operands[4]) != REGNO (operands[0]))
9914 if (GET_CODE (operands[6]) == LT)
9915 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9916 else if (GET_CODE (operands[6]) == GE)
9917 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9919 if (GET_CODE (operands[3]) == CONST_INT
9920 && !const_ok_for_arm (INTVAL (operands[3])))
9921 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9923 output_asm_insn (\"cmp\\t%2, %3\", operands);
9924 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9925 if (which_alternative != 0)
9926 return \"mov%D6\\t%0, %1\";
9929 [(set_attr "conds" "clob")
9930 (set_attr "length" "8,12")]
9933 (define_insn "*if_arith_move"
9934 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9935 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9936 [(match_operand 6 "cc_register" "") (const_int 0)])
9937 (match_operator:SI 5 "shiftable_operator"
9938 [(match_operand:SI 2 "s_register_operand" "r,r")
9939 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9940 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9944 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9945 [(set_attr "conds" "use")
9946 (set_attr "length" "4,8")
9947 (set_attr "type" "*,*")]
9950 (define_insn "*ifcompare_move_arith"
9951 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9952 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9953 [(match_operand:SI 4 "s_register_operand" "r,r")
9954 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9955 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9956 (match_operator:SI 7 "shiftable_operator"
9957 [(match_operand:SI 2 "s_register_operand" "r,r")
9958 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9959 (clobber (reg:CC CC_REGNUM))]
9962 /* If we have an operation where (op x 0) is the identity operation and
9963 the conditional operator is LT or GE and we are comparing against zero and
9964 everything is in registers then we can do this in two instructions */
9965 if (operands[5] == const0_rtx
9966 && GET_CODE (operands[7]) != AND
9967 && GET_CODE (operands[3]) == REG
9968 && GET_CODE (operands[1]) == REG
9969 && REGNO (operands[1]) == REGNO (operands[2])
9970 && REGNO (operands[2]) != REGNO (operands[0]))
9972 if (GET_CODE (operands[6]) == GE)
9973 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9974 else if (GET_CODE (operands[6]) == LT)
9975 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9978 if (GET_CODE (operands[5]) == CONST_INT
9979 && !const_ok_for_arm (INTVAL (operands[5])))
9980 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9982 output_asm_insn (\"cmp\\t%4, %5\", operands);
9984 if (which_alternative != 0)
9985 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9986 return \"%I7%D6\\t%0, %2, %3\";
9988 [(set_attr "conds" "clob")
9989 (set_attr "length" "8,12")]
9992 (define_insn "*if_move_arith"
9993 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9995 (match_operator 4 "arm_comparison_operator"
9996 [(match_operand 6 "cc_register" "") (const_int 0)])
9997 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9998 (match_operator:SI 5 "shiftable_operator"
9999 [(match_operand:SI 2 "s_register_operand" "r,r")
10000 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10003 %I5%D4\\t%0, %2, %3
10004 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10005 [(set_attr "conds" "use")
10006 (set_attr "length" "4,8")
10007 (set_attr "type" "*,*")]
10010 (define_insn "*ifcompare_move_not"
10011 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10013 (match_operator 5 "arm_comparison_operator"
10014 [(match_operand:SI 3 "s_register_operand" "r,r")
10015 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10016 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10018 (match_operand:SI 2 "s_register_operand" "r,r"))))
10019 (clobber (reg:CC CC_REGNUM))]
10022 [(set_attr "conds" "clob")
10023 (set_attr "length" "8,12")]
10026 (define_insn "*if_move_not"
10027 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10029 (match_operator 4 "arm_comparison_operator"
10030 [(match_operand 3 "cc_register" "") (const_int 0)])
10031 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10032 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10036 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10037 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10038 [(set_attr "conds" "use")
10039 (set_attr "length" "4,8,8")]
10042 (define_insn "*ifcompare_not_move"
10043 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10045 (match_operator 5 "arm_comparison_operator"
10046 [(match_operand:SI 3 "s_register_operand" "r,r")
10047 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10049 (match_operand:SI 2 "s_register_operand" "r,r"))
10050 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10051 (clobber (reg:CC CC_REGNUM))]
10054 [(set_attr "conds" "clob")
10055 (set_attr "length" "8,12")]
10058 (define_insn "*if_not_move"
10059 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10061 (match_operator 4 "arm_comparison_operator"
10062 [(match_operand 3 "cc_register" "") (const_int 0)])
10063 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10064 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10068 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10069 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10070 [(set_attr "conds" "use")
10071 (set_attr "length" "4,8,8")]
10074 (define_insn "*ifcompare_shift_move"
10075 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10077 (match_operator 6 "arm_comparison_operator"
10078 [(match_operand:SI 4 "s_register_operand" "r,r")
10079 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10080 (match_operator:SI 7 "shift_operator"
10081 [(match_operand:SI 2 "s_register_operand" "r,r")
10082 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10083 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10084 (clobber (reg:CC CC_REGNUM))]
10087 [(set_attr "conds" "clob")
10088 (set_attr "length" "8,12")]
10091 (define_insn "*if_shift_move"
10092 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10094 (match_operator 5 "arm_comparison_operator"
10095 [(match_operand 6 "cc_register" "") (const_int 0)])
10096 (match_operator:SI 4 "shift_operator"
10097 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10098 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10099 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10103 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10104 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10105 [(set_attr "conds" "use")
10106 (set_attr "shift" "2")
10107 (set_attr "length" "4,8,8")
10108 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10109 (const_string "alu_shift")
10110 (const_string "alu_shift_reg")))]
10113 (define_insn "*ifcompare_move_shift"
10114 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10116 (match_operator 6 "arm_comparison_operator"
10117 [(match_operand:SI 4 "s_register_operand" "r,r")
10118 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10119 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10120 (match_operator:SI 7 "shift_operator"
10121 [(match_operand:SI 2 "s_register_operand" "r,r")
10122 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10123 (clobber (reg:CC CC_REGNUM))]
10126 [(set_attr "conds" "clob")
10127 (set_attr "length" "8,12")]
10130 (define_insn "*if_move_shift"
10131 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10133 (match_operator 5 "arm_comparison_operator"
10134 [(match_operand 6 "cc_register" "") (const_int 0)])
10135 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10136 (match_operator:SI 4 "shift_operator"
10137 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10138 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10142 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10143 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10144 [(set_attr "conds" "use")
10145 (set_attr "shift" "2")
10146 (set_attr "length" "4,8,8")
10147 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10148 (const_string "alu_shift")
10149 (const_string "alu_shift_reg")))]
10152 (define_insn "*ifcompare_shift_shift"
10153 [(set (match_operand:SI 0 "s_register_operand" "=r")
10155 (match_operator 7 "arm_comparison_operator"
10156 [(match_operand:SI 5 "s_register_operand" "r")
10157 (match_operand:SI 6 "arm_add_operand" "rIL")])
10158 (match_operator:SI 8 "shift_operator"
10159 [(match_operand:SI 1 "s_register_operand" "r")
10160 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10161 (match_operator:SI 9 "shift_operator"
10162 [(match_operand:SI 3 "s_register_operand" "r")
10163 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10164 (clobber (reg:CC CC_REGNUM))]
10167 [(set_attr "conds" "clob")
10168 (set_attr "length" "12")]
10171 (define_insn "*if_shift_shift"
10172 [(set (match_operand:SI 0 "s_register_operand" "=r")
10174 (match_operator 5 "arm_comparison_operator"
10175 [(match_operand 8 "cc_register" "") (const_int 0)])
10176 (match_operator:SI 6 "shift_operator"
10177 [(match_operand:SI 1 "s_register_operand" "r")
10178 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10179 (match_operator:SI 7 "shift_operator"
10180 [(match_operand:SI 3 "s_register_operand" "r")
10181 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10183 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10184 [(set_attr "conds" "use")
10185 (set_attr "shift" "1")
10186 (set_attr "length" "8")
10187 (set (attr "type") (if_then_else
10188 (and (match_operand 2 "const_int_operand" "")
10189 (match_operand 4 "const_int_operand" ""))
10190 (const_string "alu_shift")
10191 (const_string "alu_shift_reg")))]
10194 (define_insn "*ifcompare_not_arith"
10195 [(set (match_operand:SI 0 "s_register_operand" "=r")
10197 (match_operator 6 "arm_comparison_operator"
10198 [(match_operand:SI 4 "s_register_operand" "r")
10199 (match_operand:SI 5 "arm_add_operand" "rIL")])
10200 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10201 (match_operator:SI 7 "shiftable_operator"
10202 [(match_operand:SI 2 "s_register_operand" "r")
10203 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10204 (clobber (reg:CC CC_REGNUM))]
10207 [(set_attr "conds" "clob")
10208 (set_attr "length" "12")]
10211 (define_insn "*if_not_arith"
10212 [(set (match_operand:SI 0 "s_register_operand" "=r")
10214 (match_operator 5 "arm_comparison_operator"
10215 [(match_operand 4 "cc_register" "") (const_int 0)])
10216 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10217 (match_operator:SI 6 "shiftable_operator"
10218 [(match_operand:SI 2 "s_register_operand" "r")
10219 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10221 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10222 [(set_attr "conds" "use")
10223 (set_attr "length" "8")]
10226 (define_insn "*ifcompare_arith_not"
10227 [(set (match_operand:SI 0 "s_register_operand" "=r")
10229 (match_operator 6 "arm_comparison_operator"
10230 [(match_operand:SI 4 "s_register_operand" "r")
10231 (match_operand:SI 5 "arm_add_operand" "rIL")])
10232 (match_operator:SI 7 "shiftable_operator"
10233 [(match_operand:SI 2 "s_register_operand" "r")
10234 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10235 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10236 (clobber (reg:CC CC_REGNUM))]
10239 [(set_attr "conds" "clob")
10240 (set_attr "length" "12")]
10243 (define_insn "*if_arith_not"
10244 [(set (match_operand:SI 0 "s_register_operand" "=r")
10246 (match_operator 5 "arm_comparison_operator"
10247 [(match_operand 4 "cc_register" "") (const_int 0)])
10248 (match_operator:SI 6 "shiftable_operator"
10249 [(match_operand:SI 2 "s_register_operand" "r")
10250 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10251 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10253 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10254 [(set_attr "conds" "use")
10255 (set_attr "length" "8")]
10258 (define_insn "*ifcompare_neg_move"
10259 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10261 (match_operator 5 "arm_comparison_operator"
10262 [(match_operand:SI 3 "s_register_operand" "r,r")
10263 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10264 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10265 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10266 (clobber (reg:CC CC_REGNUM))]
10269 [(set_attr "conds" "clob")
10270 (set_attr "length" "8,12")]
10273 (define_insn "*if_neg_move"
10274 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10276 (match_operator 4 "arm_comparison_operator"
10277 [(match_operand 3 "cc_register" "") (const_int 0)])
10278 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10279 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10282 rsb%d4\\t%0, %2, #0
10283 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10284 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10285 [(set_attr "conds" "use")
10286 (set_attr "length" "4,8,8")]
10289 (define_insn "*ifcompare_move_neg"
10290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10292 (match_operator 5 "arm_comparison_operator"
10293 [(match_operand:SI 3 "s_register_operand" "r,r")
10294 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10295 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10296 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10297 (clobber (reg:CC CC_REGNUM))]
10300 [(set_attr "conds" "clob")
10301 (set_attr "length" "8,12")]
10304 (define_insn "*if_move_neg"
10305 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10307 (match_operator 4 "arm_comparison_operator"
10308 [(match_operand 3 "cc_register" "") (const_int 0)])
10309 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10310 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10313 rsb%D4\\t%0, %2, #0
10314 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10315 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10316 [(set_attr "conds" "use")
10317 (set_attr "length" "4,8,8")]
10320 (define_insn "*arith_adjacentmem"
10321 [(set (match_operand:SI 0 "s_register_operand" "=r")
10322 (match_operator:SI 1 "shiftable_operator"
10323 [(match_operand:SI 2 "memory_operand" "m")
10324 (match_operand:SI 3 "memory_operand" "m")]))
10325 (clobber (match_scratch:SI 4 "=r"))]
10326 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10332 HOST_WIDE_INT val1 = 0, val2 = 0;
10334 if (REGNO (operands[0]) > REGNO (operands[4]))
10336 ldm[1] = operands[4];
10337 ldm[2] = operands[0];
10341 ldm[1] = operands[0];
10342 ldm[2] = operands[4];
10345 base_reg = XEXP (operands[2], 0);
10347 if (!REG_P (base_reg))
10349 val1 = INTVAL (XEXP (base_reg, 1));
10350 base_reg = XEXP (base_reg, 0);
10353 if (!REG_P (XEXP (operands[3], 0)))
10354 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10356 arith[0] = operands[0];
10357 arith[3] = operands[1];
10371 if (val1 !=0 && val2 != 0)
10375 if (val1 == 4 || val2 == 4)
10376 /* Other val must be 8, since we know they are adjacent and neither
10378 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10379 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10381 ldm[0] = ops[0] = operands[4];
10383 ops[2] = GEN_INT (val1);
10384 output_add_immediate (ops);
10386 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10388 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10392 /* Offset is out of range for a single add, so use two ldr. */
10395 ops[2] = GEN_INT (val1);
10396 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10398 ops[2] = GEN_INT (val2);
10399 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10402 else if (val1 != 0)
10405 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10407 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10412 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10414 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10416 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10419 [(set_attr "length" "12")
10420 (set_attr "predicable" "yes")
10421 (set_attr "type" "load1")]
10424 ; This pattern is never tried by combine, so do it as a peephole
10427 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10428 (match_operand:SI 1 "arm_general_register_operand" ""))
10429 (set (reg:CC CC_REGNUM)
10430 (compare:CC (match_dup 1) (const_int 0)))]
10432 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10433 (set (match_dup 0) (match_dup 1))])]
10437 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10438 ; reversed, check that the memory references aren't volatile.
10441 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10442 (match_operand:SI 4 "memory_operand" "m"))
10443 (set (match_operand:SI 1 "s_register_operand" "=rk")
10444 (match_operand:SI 5 "memory_operand" "m"))
10445 (set (match_operand:SI 2 "s_register_operand" "=rk")
10446 (match_operand:SI 6 "memory_operand" "m"))
10447 (set (match_operand:SI 3 "s_register_operand" "=rk")
10448 (match_operand:SI 7 "memory_operand" "m"))]
10449 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10451 return emit_ldm_seq (operands, 4);
10456 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10457 (match_operand:SI 3 "memory_operand" "m"))
10458 (set (match_operand:SI 1 "s_register_operand" "=rk")
10459 (match_operand:SI 4 "memory_operand" "m"))
10460 (set (match_operand:SI 2 "s_register_operand" "=rk")
10461 (match_operand:SI 5 "memory_operand" "m"))]
10462 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10464 return emit_ldm_seq (operands, 3);
10469 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10470 (match_operand:SI 2 "memory_operand" "m"))
10471 (set (match_operand:SI 1 "s_register_operand" "=rk")
10472 (match_operand:SI 3 "memory_operand" "m"))]
10473 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10475 return emit_ldm_seq (operands, 2);
10480 [(set (match_operand:SI 4 "memory_operand" "=m")
10481 (match_operand:SI 0 "s_register_operand" "rk"))
10482 (set (match_operand:SI 5 "memory_operand" "=m")
10483 (match_operand:SI 1 "s_register_operand" "rk"))
10484 (set (match_operand:SI 6 "memory_operand" "=m")
10485 (match_operand:SI 2 "s_register_operand" "rk"))
10486 (set (match_operand:SI 7 "memory_operand" "=m")
10487 (match_operand:SI 3 "s_register_operand" "rk"))]
10488 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10490 return emit_stm_seq (operands, 4);
10495 [(set (match_operand:SI 3 "memory_operand" "=m")
10496 (match_operand:SI 0 "s_register_operand" "rk"))
10497 (set (match_operand:SI 4 "memory_operand" "=m")
10498 (match_operand:SI 1 "s_register_operand" "rk"))
10499 (set (match_operand:SI 5 "memory_operand" "=m")
10500 (match_operand:SI 2 "s_register_operand" "rk"))]
10501 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10503 return emit_stm_seq (operands, 3);
10508 [(set (match_operand:SI 2 "memory_operand" "=m")
10509 (match_operand:SI 0 "s_register_operand" "rk"))
10510 (set (match_operand:SI 3 "memory_operand" "=m")
10511 (match_operand:SI 1 "s_register_operand" "rk"))]
10512 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10514 return emit_stm_seq (operands, 2);
10519 [(set (match_operand:SI 0 "s_register_operand" "")
10520 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10522 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10523 [(match_operand:SI 3 "s_register_operand" "")
10524 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10525 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10527 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10528 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10533 ;; This split can be used because CC_Z mode implies that the following
10534 ;; branch will be an equality, or an unsigned inequality, so the sign
10535 ;; extension is not needed.
10538 [(set (reg:CC_Z CC_REGNUM)
10540 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10542 (match_operand 1 "const_int_operand" "")))
10543 (clobber (match_scratch:SI 2 ""))]
10545 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10546 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10547 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10548 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10550 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10553 ;; ??? Check the patterns above for Thumb-2 usefulness
10555 (define_expand "prologue"
10556 [(clobber (const_int 0))]
10559 arm_expand_prologue ();
10561 thumb1_expand_prologue ();
10566 (define_expand "epilogue"
10567 [(clobber (const_int 0))]
10570 if (crtl->calls_eh_return)
10571 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10573 thumb1_expand_epilogue ();
10574 else if (USE_RETURN_INSN (FALSE))
10576 emit_jump_insn (gen_return ());
10579 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10581 gen_rtx_RETURN (VOIDmode)),
10582 VUNSPEC_EPILOGUE));
10587 ;; Note - although unspec_volatile's USE all hard registers,
10588 ;; USEs are ignored after relaod has completed. Thus we need
10589 ;; to add an unspec of the link register to ensure that flow
10590 ;; does not think that it is unused by the sibcall branch that
10591 ;; will replace the standard function epilogue.
10592 (define_insn "sibcall_epilogue"
10593 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10594 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10597 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10598 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10599 return arm_output_epilogue (next_nonnote_insn (insn));
10601 ;; Length is absolute worst case
10602 [(set_attr "length" "44")
10603 (set_attr "type" "block")
10604 ;; We don't clobber the conditions, but the potential length of this
10605 ;; operation is sufficient to make conditionalizing the sequence
10606 ;; unlikely to be profitable.
10607 (set_attr "conds" "clob")]
10610 (define_insn "*epilogue_insns"
10611 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10615 return arm_output_epilogue (NULL);
10616 else /* TARGET_THUMB1 */
10617 return thumb_unexpanded_epilogue ();
10619 ; Length is absolute worst case
10620 [(set_attr "length" "44")
10621 (set_attr "type" "block")
10622 ;; We don't clobber the conditions, but the potential length of this
10623 ;; operation is sufficient to make conditionalizing the sequence
10624 ;; unlikely to be profitable.
10625 (set_attr "conds" "clob")]
10628 (define_expand "eh_epilogue"
10629 [(use (match_operand:SI 0 "register_operand" ""))
10630 (use (match_operand:SI 1 "register_operand" ""))
10631 (use (match_operand:SI 2 "register_operand" ""))]
10635 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10636 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10638 rtx ra = gen_rtx_REG (Pmode, 2);
10640 emit_move_insn (ra, operands[2]);
10643 /* This is a hack -- we may have crystalized the function type too
10645 cfun->machine->func_type = 0;
10649 ;; This split is only used during output to reduce the number of patterns
10650 ;; that need assembler instructions adding to them. We allowed the setting
10651 ;; of the conditions to be implicit during rtl generation so that
10652 ;; the conditional compare patterns would work. However this conflicts to
10653 ;; some extent with the conditional data operations, so we have to split them
10656 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10657 ;; conditional execution sufficient?
10660 [(set (match_operand:SI 0 "s_register_operand" "")
10661 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10662 [(match_operand 2 "" "") (match_operand 3 "" "")])
10664 (match_operand 4 "" "")))
10665 (clobber (reg:CC CC_REGNUM))]
10666 "TARGET_ARM && reload_completed"
10667 [(set (match_dup 5) (match_dup 6))
10668 (cond_exec (match_dup 7)
10669 (set (match_dup 0) (match_dup 4)))]
10672 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10673 operands[2], operands[3]);
10674 enum rtx_code rc = GET_CODE (operands[1]);
10676 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10677 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10678 if (mode == CCFPmode || mode == CCFPEmode)
10679 rc = reverse_condition_maybe_unordered (rc);
10681 rc = reverse_condition (rc);
10683 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10688 [(set (match_operand:SI 0 "s_register_operand" "")
10689 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10690 [(match_operand 2 "" "") (match_operand 3 "" "")])
10691 (match_operand 4 "" "")
10693 (clobber (reg:CC CC_REGNUM))]
10694 "TARGET_ARM && reload_completed"
10695 [(set (match_dup 5) (match_dup 6))
10696 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10697 (set (match_dup 0) (match_dup 4)))]
10700 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10701 operands[2], operands[3]);
10703 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10704 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10709 [(set (match_operand:SI 0 "s_register_operand" "")
10710 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10711 [(match_operand 2 "" "") (match_operand 3 "" "")])
10712 (match_operand 4 "" "")
10713 (match_operand 5 "" "")))
10714 (clobber (reg:CC CC_REGNUM))]
10715 "TARGET_ARM && reload_completed"
10716 [(set (match_dup 6) (match_dup 7))
10717 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10718 (set (match_dup 0) (match_dup 4)))
10719 (cond_exec (match_dup 8)
10720 (set (match_dup 0) (match_dup 5)))]
10723 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10724 operands[2], operands[3]);
10725 enum rtx_code rc = GET_CODE (operands[1]);
10727 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10728 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10729 if (mode == CCFPmode || mode == CCFPEmode)
10730 rc = reverse_condition_maybe_unordered (rc);
10732 rc = reverse_condition (rc);
10734 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10739 [(set (match_operand:SI 0 "s_register_operand" "")
10740 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10741 [(match_operand:SI 2 "s_register_operand" "")
10742 (match_operand:SI 3 "arm_add_operand" "")])
10743 (match_operand:SI 4 "arm_rhs_operand" "")
10745 (match_operand:SI 5 "s_register_operand" ""))))
10746 (clobber (reg:CC CC_REGNUM))]
10747 "TARGET_ARM && reload_completed"
10748 [(set (match_dup 6) (match_dup 7))
10749 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10750 (set (match_dup 0) (match_dup 4)))
10751 (cond_exec (match_dup 8)
10752 (set (match_dup 0) (not:SI (match_dup 5))))]
10755 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10756 operands[2], operands[3]);
10757 enum rtx_code rc = GET_CODE (operands[1]);
10759 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10760 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10761 if (mode == CCFPmode || mode == CCFPEmode)
10762 rc = reverse_condition_maybe_unordered (rc);
10764 rc = reverse_condition (rc);
10766 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10770 (define_insn "*cond_move_not"
10771 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10772 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10773 [(match_operand 3 "cc_register" "") (const_int 0)])
10774 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10776 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10780 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10781 [(set_attr "conds" "use")
10782 (set_attr "length" "4,8")]
10785 ;; The next two patterns occur when an AND operation is followed by a
10786 ;; scc insn sequence
10788 (define_insn "*sign_extract_onebit"
10789 [(set (match_operand:SI 0 "s_register_operand" "=r")
10790 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10792 (match_operand:SI 2 "const_int_operand" "n")))
10793 (clobber (reg:CC CC_REGNUM))]
10796 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10797 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10798 return \"mvnne\\t%0, #0\";
10800 [(set_attr "conds" "clob")
10801 (set_attr "length" "8")]
10804 (define_insn "*not_signextract_onebit"
10805 [(set (match_operand:SI 0 "s_register_operand" "=r")
10807 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10809 (match_operand:SI 2 "const_int_operand" "n"))))
10810 (clobber (reg:CC CC_REGNUM))]
10813 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10814 output_asm_insn (\"tst\\t%1, %2\", operands);
10815 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10816 return \"movne\\t%0, #0\";
10818 [(set_attr "conds" "clob")
10819 (set_attr "length" "12")]
10821 ;; ??? The above patterns need auditing for Thumb-2
10823 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10824 ;; expressions. For simplicity, the first register is also in the unspec
10826 (define_insn "*push_multi"
10827 [(match_parallel 2 "multi_register_push"
10828 [(set (match_operand:BLK 0 "memory_operand" "=m")
10829 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10830 UNSPEC_PUSH_MULT))])]
10834 int num_saves = XVECLEN (operands[2], 0);
10836 /* For the StrongARM at least it is faster to
10837 use STR to store only a single register.
10838 In Thumb mode always use push, and the assembler will pick
10839 something appropriate. */
10840 if (num_saves == 1 && TARGET_ARM)
10841 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10848 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10850 strcpy (pattern, \"push\\t{%1\");
10852 for (i = 1; i < num_saves; i++)
10854 strcat (pattern, \", %|\");
10856 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10859 strcat (pattern, \"}\");
10860 output_asm_insn (pattern, operands);
10865 [(set_attr "type" "store4")]
10868 (define_insn "stack_tie"
10869 [(set (mem:BLK (scratch))
10870 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10871 (match_operand:SI 1 "s_register_operand" "rk")]
10875 [(set_attr "length" "0")]
10878 ;; Similarly for the floating point registers
10879 (define_insn "*push_fp_multi"
10880 [(match_parallel 2 "multi_register_push"
10881 [(set (match_operand:BLK 0 "memory_operand" "=m")
10882 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10883 UNSPEC_PUSH_MULT))])]
10884 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10889 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10890 output_asm_insn (pattern, operands);
10893 [(set_attr "type" "f_store")]
10896 ;; Special patterns for dealing with the constant pool
10898 (define_insn "align_4"
10899 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10902 assemble_align (32);
10907 (define_insn "align_8"
10908 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10911 assemble_align (64);
10916 (define_insn "consttable_end"
10917 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10920 making_const_table = FALSE;
10925 (define_insn "consttable_1"
10926 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10929 making_const_table = TRUE;
10930 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10931 assemble_zeros (3);
10934 [(set_attr "length" "4")]
10937 (define_insn "consttable_2"
10938 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10941 making_const_table = TRUE;
10942 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10943 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10944 assemble_zeros (2);
10947 [(set_attr "length" "4")]
10950 (define_insn "consttable_4"
10951 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10955 rtx x = operands[0];
10956 making_const_table = TRUE;
10957 switch (GET_MODE_CLASS (GET_MODE (x)))
10960 if (GET_MODE (x) == HFmode)
10961 arm_emit_fp16_const (x);
10965 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10966 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10970 /* XXX: Sometimes gcc does something really dumb and ends up with
10971 a HIGH in a constant pool entry, usually because it's trying to
10972 load into a VFP register. We know this will always be used in
10973 combination with a LO_SUM which ignores the high bits, so just
10974 strip off the HIGH. */
10975 if (GET_CODE (x) == HIGH)
10977 assemble_integer (x, 4, BITS_PER_WORD, 1);
10978 mark_symbol_refs_as_used (x);
10983 [(set_attr "length" "4")]
10986 (define_insn "consttable_8"
10987 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10991 making_const_table = TRUE;
10992 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10997 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10998 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11002 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11007 [(set_attr "length" "8")]
11010 (define_insn "consttable_16"
11011 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11015 making_const_table = TRUE;
11016 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11021 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11022 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11026 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11031 [(set_attr "length" "16")]
11034 ;; Miscellaneous Thumb patterns
11036 (define_expand "tablejump"
11037 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11038 (use (label_ref (match_operand 1 "" "")))])]
11043 /* Hopefully, CSE will eliminate this copy. */
11044 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11045 rtx reg2 = gen_reg_rtx (SImode);
11047 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11048 operands[0] = reg2;
11053 ;; NB never uses BX.
11054 (define_insn "*thumb1_tablejump"
11055 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11056 (use (label_ref (match_operand 1 "" "")))]
11059 [(set_attr "length" "2")]
11062 ;; V5 Instructions,
11064 (define_insn "clzsi2"
11065 [(set (match_operand:SI 0 "s_register_operand" "=r")
11066 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11067 "TARGET_32BIT && arm_arch5"
11069 [(set_attr "predicable" "yes")
11070 (set_attr "insn" "clz")])
11072 (define_insn "rbitsi2"
11073 [(set (match_operand:SI 0 "s_register_operand" "=r")
11074 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11075 "TARGET_32BIT && arm_arch_thumb2"
11077 [(set_attr "predicable" "yes")
11078 (set_attr "insn" "clz")])
11080 (define_expand "ctzsi2"
11081 [(set (match_operand:SI 0 "s_register_operand" "")
11082 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11083 "TARGET_32BIT && arm_arch_thumb2"
11086 rtx tmp = gen_reg_rtx (SImode);
11087 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11088 emit_insn (gen_clzsi2 (operands[0], tmp));
11094 ;; V5E instructions.
11096 (define_insn "prefetch"
11097 [(prefetch (match_operand:SI 0 "address_operand" "p")
11098 (match_operand:SI 1 "" "")
11099 (match_operand:SI 2 "" ""))]
11100 "TARGET_32BIT && arm_arch5e"
11103 ;; General predication pattern
11106 [(match_operator 0 "arm_comparison_operator"
11107 [(match_operand 1 "cc_register" "")
11113 (define_insn "prologue_use"
11114 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11116 "%@ %0 needed for prologue"
11117 [(set_attr "length" "0")]
11121 ;; Patterns for exception handling
11123 (define_expand "eh_return"
11124 [(use (match_operand 0 "general_operand" ""))]
11129 emit_insn (gen_arm_eh_return (operands[0]));
11131 emit_insn (gen_thumb_eh_return (operands[0]));
11136 ;; We can't expand this before we know where the link register is stored.
11137 (define_insn_and_split "arm_eh_return"
11138 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11140 (clobber (match_scratch:SI 1 "=&r"))]
11143 "&& reload_completed"
11147 arm_set_return_address (operands[0], operands[1]);
11152 (define_insn_and_split "thumb_eh_return"
11153 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11155 (clobber (match_scratch:SI 1 "=&l"))]
11158 "&& reload_completed"
11162 thumb_set_return_address (operands[0], operands[1]);
11170 (define_insn "load_tp_hard"
11171 [(set (match_operand:SI 0 "register_operand" "=r")
11172 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11174 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11175 [(set_attr "predicable" "yes")]
11178 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11179 (define_insn "load_tp_soft"
11180 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11181 (clobber (reg:SI LR_REGNUM))
11182 (clobber (reg:SI IP_REGNUM))
11183 (clobber (reg:CC CC_REGNUM))]
11185 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11186 [(set_attr "conds" "clob")]
11189 (define_insn "*arm_movtas_ze"
11190 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11193 (match_operand:SI 1 "const_int_operand" ""))]
11196 [(set_attr "predicable" "yes")
11197 (set_attr "length" "4")]
11200 (define_insn "arm_rev"
11201 [(set (match_operand:SI 0 "s_register_operand" "=r")
11202 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11203 "TARGET_EITHER && arm_arch6"
11205 [(set (attr "length")
11206 (if_then_else (eq_attr "is_thumb" "yes")
11211 (define_expand "arm_legacy_rev"
11212 [(set (match_operand:SI 2 "s_register_operand" "")
11213 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11217 (lshiftrt:SI (match_dup 2)
11219 (set (match_operand:SI 3 "s_register_operand" "")
11220 (rotatert:SI (match_dup 1)
11223 (and:SI (match_dup 2)
11224 (const_int -65281)))
11225 (set (match_operand:SI 0 "s_register_operand" "")
11226 (xor:SI (match_dup 3)
11232 ;; Reuse temporaries to keep register pressure down.
11233 (define_expand "thumb_legacy_rev"
11234 [(set (match_operand:SI 2 "s_register_operand" "")
11235 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11237 (set (match_operand:SI 3 "s_register_operand" "")
11238 (lshiftrt:SI (match_dup 1)
11241 (ior:SI (match_dup 3)
11243 (set (match_operand:SI 4 "s_register_operand" "")
11245 (set (match_operand:SI 5 "s_register_operand" "")
11246 (rotatert:SI (match_dup 1)
11249 (ashift:SI (match_dup 5)
11252 (lshiftrt:SI (match_dup 5)
11255 (ior:SI (match_dup 5)
11258 (rotatert:SI (match_dup 5)
11260 (set (match_operand:SI 0 "s_register_operand" "")
11261 (ior:SI (match_dup 5)
11267 (define_expand "bswapsi2"
11268 [(set (match_operand:SI 0 "s_register_operand" "=r")
11269 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11274 if (!optimize_size)
11276 rtx op2 = gen_reg_rtx (SImode);
11277 rtx op3 = gen_reg_rtx (SImode);
11281 rtx op4 = gen_reg_rtx (SImode);
11282 rtx op5 = gen_reg_rtx (SImode);
11284 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11285 op2, op3, op4, op5));
11289 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11301 ;; Load the FPA co-processor patterns
11303 ;; Load the Maverick co-processor patterns
11304 (include "cirrus.md")
11305 ;; Vector bits common to IWMMXT and Neon
11306 (include "vec-common.md")
11307 ;; Load the Intel Wireless Multimedia Extension patterns
11308 (include "iwmmxt.md")
11309 ;; Load the VFP co-processor patterns
11311 ;; Thumb-2 patterns
11312 (include "thumb2.md")
11314 (include "neon.md")