1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
109 ;; UNSPEC_VOLATILE Usage:
112 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
114 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
115 ; instruction epilogue sequence that isn't expanded
116 ; into normal RTL. Used for both normal and sibcall
118 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
119 ; for inlined constants.
120 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
122 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
124 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
126 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
128 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
130 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
132 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
133 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
134 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
135 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
136 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
137 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
138 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
143 ;;---------------------------------------------------------------------------
146 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
147 ; generating ARM code. This is used to control the length of some insn
148 ; patterns that share the same RTL in both ARM and Thumb code.
149 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
151 ;; Operand number of an input operand that is shifted. Zero if the
152 ;; given instruction does not shift one of its input operands.
153 (define_attr "shift" "" (const_int 0))
155 ; Floating Point Unit. If we only have floating point emulation, then there
156 ; is no point in scheduling the floating point insns. (Well, for best
157 ; performance we should try and group them together).
158 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
159 (const (symbol_ref "arm_fpu_attr")))
161 ; LENGTH of an instruction (in bytes)
162 (define_attr "length" "" (const_int 4))
164 ; POOL_RANGE is how far away from a constant pool entry that this insn
165 ; can be placed. If the distance is zero, then this insn will never
166 ; reference the pool.
167 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
168 ; before its address.
169 (define_attr "pool_range" "" (const_int 0))
170 (define_attr "neg_pool_range" "" (const_int 0))
172 ; An assembler sequence may clobber the condition codes without us knowing.
173 ; If such an insn references the pool, then we have no way of knowing how,
174 ; so use the most conservative value for pool_range.
175 (define_asm_attributes
176 [(set_attr "conds" "clob")
177 (set_attr "length" "4")
178 (set_attr "pool_range" "250")])
180 ;; The instruction used to implement a particular pattern. This
181 ;; information is used by pipeline descriptions to provide accurate
182 ;; scheduling information.
185 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
186 (const_string "other"))
188 ; TYPE attribute is used to detect floating point instructions which, if
189 ; running on a co-processor can run in parallel with other, basic instructions
190 ; If write-buffer scheduling is enabled then it can also be used in the
191 ; scheduling of writes.
193 ; Classification of each insn
194 ; Note: vfp.md has different meanings for some of these, and some further
195 ; types as well. See that file for details.
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
230 ; store store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
250 ;; Classification of NEON instructions for scheduling purposes.
251 ;; Do not set this attribute and the "type" attribute together in
252 ;; any one instruction pattern.
253 (define_attr "neon_type"
264 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
265 neon_mul_qqq_8_16_32_ddd_32,\
266 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
267 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
269 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
270 neon_mla_qqq_32_qqd_32_scalar,\
271 neon_mul_ddd_16_scalar_32_16_long_scalar,\
272 neon_mul_qqd_32_scalar,\
273 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
278 neon_vqshl_vrshl_vqrshl_qqq,\
280 neon_fp_vadd_ddd_vabs_dd,\
281 neon_fp_vadd_qqq_vabs_qq,\
287 neon_fp_vmla_ddd_scalar,\
288 neon_fp_vmla_qqq_scalar,\
289 neon_fp_vrecps_vrsqrts_ddd,\
290 neon_fp_vrecps_vrsqrts_qqq,\
298 neon_vld2_2_regs_vld1_vld2_all_lanes,\
301 neon_vst1_1_2_regs_vst2_2_regs,\
303 neon_vst2_4_regs_vst3_vst4,\
305 neon_vld1_vld2_lane,\
306 neon_vld3_vld4_lane,\
307 neon_vst1_vst2_lane,\
308 neon_vst3_vst4_lane,\
309 neon_vld3_vld4_all_lanes,\
317 (const_string "none"))
319 ; condition codes: this one is used by final_prescan_insn to speed up
320 ; conditionalizing instructions. It saves having to scan the rtl to see if
321 ; it uses or alters the condition codes.
323 ; USE means that the condition codes are used by the insn in the process of
324 ; outputting code, this means (at present) that we can't use the insn in
327 ; SET means that the purpose of the insn is to set the condition codes in a
328 ; well defined manner.
330 ; CLOB means that the condition codes are altered in an undefined manner, if
331 ; they are altered at all
333 ; UNCONDITIONAL means the instions can not be conditionally executed.
335 ; NOCOND means that the condition codes are neither altered nor affect the
336 ; output of this insn
338 (define_attr "conds" "use,set,clob,unconditional,nocond"
339 (if_then_else (eq_attr "type" "call")
340 (const_string "clob")
341 (if_then_else (eq_attr "neon_type" "none")
342 (const_string "nocond")
343 (const_string "unconditional"))))
345 ; Predicable means that the insn can be conditionally executed based on
346 ; an automatically added predicate (additional patterns are generated by
347 ; gen...). We default to 'no' because no Thumb patterns match this rule
348 ; and not all ARM patterns do.
349 (define_attr "predicable" "no,yes" (const_string "no"))
351 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
352 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
353 ; suffer blockages enough to warrant modelling this (and it can adversely
354 ; affect the schedule).
355 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
357 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
358 ; to stall the processor. Used with model_wbuf above.
359 (define_attr "write_conflict" "no,yes"
360 (if_then_else (eq_attr "type"
361 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
363 (const_string "no")))
365 ; Classify the insns into those that take one cycle and those that take more
366 ; than one on the main cpu execution unit.
367 (define_attr "core_cycles" "single,multi"
368 (if_then_else (eq_attr "type"
369 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
370 (const_string "single")
371 (const_string "multi")))
373 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
374 ;; distant label. Only applicable to Thumb code.
375 (define_attr "far_jump" "yes,no" (const_string "no"))
378 ;; The number of machine instructions this pattern expands to.
379 ;; Used for Thumb-2 conditional execution.
380 (define_attr "ce_count" "" (const_int 1))
382 ;;---------------------------------------------------------------------------
385 ; A list of modes that are exactly 64 bits in size. We use this to expand
386 ; some splits that are the same for all modes when operating on ARM
388 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
390 ;; The integer modes up to word size
391 (define_mode_iterator QHSI [QI HI SI])
393 ;;---------------------------------------------------------------------------
396 (include "predicates.md")
397 (include "constraints.md")
399 ;;---------------------------------------------------------------------------
400 ;; Pipeline descriptions
402 ;; Processor type. This is created automatically from arm-cores.def.
403 (include "arm-tune.md")
405 (define_attr "tune_cortexr4" "yes,no"
407 (eq_attr "tune" "cortexr4,cortexr4f")
409 (const_string "no"))))
411 ;; True if the generic scheduling description should be used.
413 (define_attr "generic_sched" "yes,no"
415 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
416 (eq_attr "tune_cortexr4" "yes"))
418 (const_string "yes"))))
420 (define_attr "generic_vfp" "yes,no"
422 (and (eq_attr "fpu" "vfp")
423 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
424 (eq_attr "tune_cortexr4" "no"))
426 (const_string "no"))))
428 (include "arm-generic.md")
429 (include "arm926ejs.md")
430 (include "arm1020e.md")
431 (include "arm1026ejs.md")
432 (include "arm1136jfs.md")
433 (include "cortex-a8.md")
434 (include "cortex-a9.md")
435 (include "cortex-r4.md")
436 (include "cortex-r4f.md")
440 ;;---------------------------------------------------------------------------
445 ;; Note: For DImode insns, there is normally no reason why operands should
446 ;; not be in the same register, what we don't want is for something being
447 ;; written to partially overlap something that is an input.
448 ;; Cirrus 64bit additions should not be split because we have a native
449 ;; 64bit addition instructions.
451 (define_expand "adddi3"
453 [(set (match_operand:DI 0 "s_register_operand" "")
454 (plus:DI (match_operand:DI 1 "s_register_operand" "")
455 (match_operand:DI 2 "s_register_operand" "")))
456 (clobber (reg:CC CC_REGNUM))])]
459 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
461 if (!cirrus_fp_register (operands[0], DImode))
462 operands[0] = force_reg (DImode, operands[0]);
463 if (!cirrus_fp_register (operands[1], DImode))
464 operands[1] = force_reg (DImode, operands[1]);
465 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
471 if (GET_CODE (operands[1]) != REG)
472 operands[1] = force_reg (DImode, operands[1]);
473 if (GET_CODE (operands[2]) != REG)
474 operands[2] = force_reg (DImode, operands[2]);
479 (define_insn "*thumb1_adddi3"
480 [(set (match_operand:DI 0 "register_operand" "=l")
481 (plus:DI (match_operand:DI 1 "register_operand" "%0")
482 (match_operand:DI 2 "register_operand" "l")))
483 (clobber (reg:CC CC_REGNUM))
486 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
487 [(set_attr "length" "4")]
490 (define_insn_and_split "*arm_adddi3"
491 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
492 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
493 (match_operand:DI 2 "s_register_operand" "r, 0")))
494 (clobber (reg:CC CC_REGNUM))]
495 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
497 "TARGET_32BIT && reload_completed"
498 [(parallel [(set (reg:CC_C CC_REGNUM)
499 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
501 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
502 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
503 (plus:SI (match_dup 4) (match_dup 5))))]
506 operands[3] = gen_highpart (SImode, operands[0]);
507 operands[0] = gen_lowpart (SImode, operands[0]);
508 operands[4] = gen_highpart (SImode, operands[1]);
509 operands[1] = gen_lowpart (SImode, operands[1]);
510 operands[5] = gen_highpart (SImode, operands[2]);
511 operands[2] = gen_lowpart (SImode, operands[2]);
513 [(set_attr "conds" "clob")
514 (set_attr "length" "8")]
517 (define_insn_and_split "*adddi_sesidi_di"
518 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
519 (plus:DI (sign_extend:DI
520 (match_operand:SI 2 "s_register_operand" "r,r"))
521 (match_operand:DI 1 "s_register_operand" "0,r")))
522 (clobber (reg:CC CC_REGNUM))]
523 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
525 "TARGET_32BIT && reload_completed"
526 [(parallel [(set (reg:CC_C CC_REGNUM)
527 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
529 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
530 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
531 (plus:SI (ashiftrt:SI (match_dup 2)
536 operands[3] = gen_highpart (SImode, operands[0]);
537 operands[0] = gen_lowpart (SImode, operands[0]);
538 operands[4] = gen_highpart (SImode, operands[1]);
539 operands[1] = gen_lowpart (SImode, operands[1]);
540 operands[2] = gen_lowpart (SImode, operands[2]);
542 [(set_attr "conds" "clob")
543 (set_attr "length" "8")]
546 (define_insn_and_split "*adddi_zesidi_di"
547 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
548 (plus:DI (zero_extend:DI
549 (match_operand:SI 2 "s_register_operand" "r,r"))
550 (match_operand:DI 1 "s_register_operand" "0,r")))
551 (clobber (reg:CC CC_REGNUM))]
552 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
554 "TARGET_32BIT && reload_completed"
555 [(parallel [(set (reg:CC_C CC_REGNUM)
556 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
558 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
559 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
560 (plus:SI (match_dup 4) (const_int 0))))]
563 operands[3] = gen_highpart (SImode, operands[0]);
564 operands[0] = gen_lowpart (SImode, operands[0]);
565 operands[4] = gen_highpart (SImode, operands[1]);
566 operands[1] = gen_lowpart (SImode, operands[1]);
567 operands[2] = gen_lowpart (SImode, operands[2]);
569 [(set_attr "conds" "clob")
570 (set_attr "length" "8")]
573 (define_expand "addsi3"
574 [(set (match_operand:SI 0 "s_register_operand" "")
575 (plus:SI (match_operand:SI 1 "s_register_operand" "")
576 (match_operand:SI 2 "reg_or_int_operand" "")))]
579 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
581 arm_split_constant (PLUS, SImode, NULL_RTX,
582 INTVAL (operands[2]), operands[0], operands[1],
583 optimize && can_create_pseudo_p ());
589 ; If there is a scratch available, this will be faster than synthesizing the
592 [(match_scratch:SI 3 "r")
593 (set (match_operand:SI 0 "arm_general_register_operand" "")
594 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
595 (match_operand:SI 2 "const_int_operand" "")))]
597 !(const_ok_for_arm (INTVAL (operands[2]))
598 || const_ok_for_arm (-INTVAL (operands[2])))
599 && const_ok_for_arm (~INTVAL (operands[2]))"
600 [(set (match_dup 3) (match_dup 2))
601 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
605 ;; The r/r/k alternative is required when reloading the address
606 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
607 ;; put the duplicated register first, and not try the commutative version.
608 (define_insn_and_split "*arm_addsi3"
609 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
610 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
611 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
621 && GET_CODE (operands[2]) == CONST_INT
622 && !(const_ok_for_arm (INTVAL (operands[2]))
623 || const_ok_for_arm (-INTVAL (operands[2])))
624 && (reload_completed || !arm_eliminable_register (operands[1]))"
625 [(clobber (const_int 0))]
627 arm_split_constant (PLUS, SImode, curr_insn,
628 INTVAL (operands[2]), operands[0],
632 [(set_attr "length" "4,4,4,4,4,16")
633 (set_attr "predicable" "yes")]
636 ;; Register group 'k' is a single register group containing only the stack
637 ;; register. Trying to reload it will always fail catastrophically,
638 ;; so never allow those alternatives to match if reloading is needed.
640 (define_insn_and_split "*thumb1_addsi3"
641 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
642 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
643 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
646 static const char * const asms[] =
648 \"add\\t%0, %0, %2\",
649 \"sub\\t%0, %0, #%n2\",
650 \"add\\t%0, %1, %2\",
651 \"add\\t%0, %0, %2\",
652 \"add\\t%0, %0, %2\",
653 \"add\\t%0, %1, %2\",
654 \"add\\t%0, %1, %2\",
658 if ((which_alternative == 2 || which_alternative == 6)
659 && GET_CODE (operands[2]) == CONST_INT
660 && INTVAL (operands[2]) < 0)
661 return \"sub\\t%0, %1, #%n2\";
662 return asms[which_alternative];
664 "&& reload_completed && CONST_INT_P (operands[2])
665 && operands[1] != stack_pointer_rtx
666 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
667 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
668 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
670 HOST_WIDE_INT offset = INTVAL (operands[2]);
673 else if (offset < -255)
676 operands[3] = GEN_INT (offset);
677 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
679 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
682 ;; Reloading and elimination of the frame pointer can
683 ;; sometimes cause this optimization to be missed.
685 [(set (match_operand:SI 0 "arm_general_register_operand" "")
686 (match_operand:SI 1 "const_int_operand" ""))
688 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
690 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
691 && (INTVAL (operands[1]) & 3) == 0"
692 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
696 (define_insn "*addsi3_compare0"
697 [(set (reg:CC_NOOV CC_REGNUM)
699 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
700 (match_operand:SI 2 "arm_add_operand" "rI,L"))
702 (set (match_operand:SI 0 "s_register_operand" "=r,r")
703 (plus:SI (match_dup 1) (match_dup 2)))]
707 sub%.\\t%0, %1, #%n2"
708 [(set_attr "conds" "set")]
711 (define_insn "*addsi3_compare0_scratch"
712 [(set (reg:CC_NOOV CC_REGNUM)
714 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
715 (match_operand:SI 1 "arm_add_operand" "rI,L"))
721 [(set_attr "conds" "set")]
724 (define_insn "*compare_negsi_si"
725 [(set (reg:CC_Z CC_REGNUM)
727 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
728 (match_operand:SI 1 "s_register_operand" "r")))]
731 [(set_attr "conds" "set")]
734 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
735 ;; addend is a constant.
736 (define_insn "*cmpsi2_addneg"
737 [(set (reg:CC CC_REGNUM)
739 (match_operand:SI 1 "s_register_operand" "r,r")
740 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
741 (set (match_operand:SI 0 "s_register_operand" "=r,r")
742 (plus:SI (match_dup 1)
743 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
744 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
747 add%.\\t%0, %1, #%n2"
748 [(set_attr "conds" "set")]
751 ;; Convert the sequence
753 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
757 ;; bcs dest ((unsigned)rn >= 1)
758 ;; similarly for the beq variant using bcc.
759 ;; This is a common looping idiom (while (n--))
761 [(set (match_operand:SI 0 "arm_general_register_operand" "")
762 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
764 (set (match_operand 2 "cc_register" "")
765 (compare (match_dup 0) (const_int -1)))
767 (if_then_else (match_operator 3 "equality_operator"
768 [(match_dup 2) (const_int 0)])
769 (match_operand 4 "" "")
770 (match_operand 5 "" "")))]
771 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
775 (match_dup 1) (const_int 1)))
776 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
778 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
781 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
782 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
785 operands[2], const0_rtx);"
788 ;; The next four insns work because they compare the result with one of
789 ;; the operands, and we know that the use of the condition code is
790 ;; either GEU or LTU, so we can use the carry flag from the addition
791 ;; instead of doing the compare a second time.
792 (define_insn "*addsi3_compare_op1"
793 [(set (reg:CC_C CC_REGNUM)
795 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
796 (match_operand:SI 2 "arm_add_operand" "rI,L"))
798 (set (match_operand:SI 0 "s_register_operand" "=r,r")
799 (plus:SI (match_dup 1) (match_dup 2)))]
803 sub%.\\t%0, %1, #%n2"
804 [(set_attr "conds" "set")]
807 (define_insn "*addsi3_compare_op2"
808 [(set (reg:CC_C CC_REGNUM)
810 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
811 (match_operand:SI 2 "arm_add_operand" "rI,L"))
813 (set (match_operand:SI 0 "s_register_operand" "=r,r")
814 (plus:SI (match_dup 1) (match_dup 2)))]
818 sub%.\\t%0, %1, #%n2"
819 [(set_attr "conds" "set")]
822 (define_insn "*compare_addsi2_op0"
823 [(set (reg:CC_C CC_REGNUM)
825 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
826 (match_operand:SI 1 "arm_add_operand" "rI,L"))
832 [(set_attr "conds" "set")]
835 (define_insn "*compare_addsi2_op1"
836 [(set (reg:CC_C CC_REGNUM)
838 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
839 (match_operand:SI 1 "arm_add_operand" "rI,L"))
845 [(set_attr "conds" "set")]
848 (define_insn "*addsi3_carryin"
849 [(set (match_operand:SI 0 "s_register_operand" "=r")
850 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
851 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
852 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
855 [(set_attr "conds" "use")]
858 (define_insn "*addsi3_carryin_shift"
859 [(set (match_operand:SI 0 "s_register_operand" "=r")
860 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
862 (match_operator:SI 2 "shift_operator"
863 [(match_operand:SI 3 "s_register_operand" "r")
864 (match_operand:SI 4 "reg_or_int_operand" "rM")])
865 (match_operand:SI 1 "s_register_operand" "r"))))]
867 "adc%?\\t%0, %1, %3%S2"
868 [(set_attr "conds" "use")
869 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
870 (const_string "alu_shift")
871 (const_string "alu_shift_reg")))]
874 (define_insn "*addsi3_carryin_alt1"
875 [(set (match_operand:SI 0 "s_register_operand" "=r")
876 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
877 (match_operand:SI 2 "arm_rhs_operand" "rI"))
878 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
881 [(set_attr "conds" "use")]
884 (define_insn "*addsi3_carryin_alt2"
885 [(set (match_operand:SI 0 "s_register_operand" "=r")
886 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
887 (match_operand:SI 1 "s_register_operand" "r"))
888 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
891 [(set_attr "conds" "use")]
894 (define_insn "*addsi3_carryin_alt3"
895 [(set (match_operand:SI 0 "s_register_operand" "=r")
896 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
897 (match_operand:SI 2 "arm_rhs_operand" "rI"))
898 (match_operand:SI 1 "s_register_operand" "r")))]
901 [(set_attr "conds" "use")]
904 (define_expand "incscc"
905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
906 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
907 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
908 (match_operand:SI 1 "s_register_operand" "0,?r")))]
913 (define_insn "*arm_incscc"
914 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
915 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
916 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
917 (match_operand:SI 1 "s_register_operand" "0,?r")))]
921 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
922 [(set_attr "conds" "use")
923 (set_attr "length" "4,8")]
926 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
928 [(set (match_operand:SI 0 "s_register_operand" "")
929 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
930 (match_operand:SI 2 "s_register_operand" ""))
932 (clobber (match_operand:SI 3 "s_register_operand" ""))]
934 [(set (match_dup 3) (match_dup 1))
935 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
937 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
940 (define_expand "addsf3"
941 [(set (match_operand:SF 0 "s_register_operand" "")
942 (plus:SF (match_operand:SF 1 "s_register_operand" "")
943 (match_operand:SF 2 "arm_float_add_operand" "")))]
944 "TARGET_32BIT && TARGET_HARD_FLOAT"
947 && !cirrus_fp_register (operands[2], SFmode))
948 operands[2] = force_reg (SFmode, operands[2]);
951 (define_expand "adddf3"
952 [(set (match_operand:DF 0 "s_register_operand" "")
953 (plus:DF (match_operand:DF 1 "s_register_operand" "")
954 (match_operand:DF 2 "arm_float_add_operand" "")))]
955 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
958 && !cirrus_fp_register (operands[2], DFmode))
959 operands[2] = force_reg (DFmode, operands[2]);
962 (define_expand "subdi3"
964 [(set (match_operand:DI 0 "s_register_operand" "")
965 (minus:DI (match_operand:DI 1 "s_register_operand" "")
966 (match_operand:DI 2 "s_register_operand" "")))
967 (clobber (reg:CC CC_REGNUM))])]
970 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
972 && cirrus_fp_register (operands[0], DImode)
973 && cirrus_fp_register (operands[1], DImode))
975 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
981 if (GET_CODE (operands[1]) != REG)
982 operands[1] = force_reg (DImode, operands[1]);
983 if (GET_CODE (operands[2]) != REG)
984 operands[2] = force_reg (DImode, operands[2]);
989 (define_insn "*arm_subdi3"
990 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
991 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
992 (match_operand:DI 2 "s_register_operand" "r,0,0")))
993 (clobber (reg:CC CC_REGNUM))]
995 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
996 [(set_attr "conds" "clob")
997 (set_attr "length" "8")]
1000 (define_insn "*thumb_subdi3"
1001 [(set (match_operand:DI 0 "register_operand" "=l")
1002 (minus:DI (match_operand:DI 1 "register_operand" "0")
1003 (match_operand:DI 2 "register_operand" "l")))
1004 (clobber (reg:CC CC_REGNUM))]
1006 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1007 [(set_attr "length" "4")]
1010 (define_insn "*subdi_di_zesidi"
1011 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1012 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1014 (match_operand:SI 2 "s_register_operand" "r,r"))))
1015 (clobber (reg:CC CC_REGNUM))]
1017 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1018 [(set_attr "conds" "clob")
1019 (set_attr "length" "8")]
1022 (define_insn "*subdi_di_sesidi"
1023 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1024 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1026 (match_operand:SI 2 "s_register_operand" "r,r"))))
1027 (clobber (reg:CC CC_REGNUM))]
1029 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1030 [(set_attr "conds" "clob")
1031 (set_attr "length" "8")]
1034 (define_insn "*subdi_zesidi_di"
1035 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1036 (minus:DI (zero_extend:DI
1037 (match_operand:SI 2 "s_register_operand" "r,r"))
1038 (match_operand:DI 1 "s_register_operand" "0,r")))
1039 (clobber (reg:CC CC_REGNUM))]
1041 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1042 [(set_attr "conds" "clob")
1043 (set_attr "length" "8")]
1046 (define_insn "*subdi_sesidi_di"
1047 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1048 (minus:DI (sign_extend:DI
1049 (match_operand:SI 2 "s_register_operand" "r,r"))
1050 (match_operand:DI 1 "s_register_operand" "0,r")))
1051 (clobber (reg:CC CC_REGNUM))]
1053 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1054 [(set_attr "conds" "clob")
1055 (set_attr "length" "8")]
1058 (define_insn "*subdi_zesidi_zesidi"
1059 [(set (match_operand:DI 0 "s_register_operand" "=r")
1060 (minus:DI (zero_extend:DI
1061 (match_operand:SI 1 "s_register_operand" "r"))
1063 (match_operand:SI 2 "s_register_operand" "r"))))
1064 (clobber (reg:CC CC_REGNUM))]
1066 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1067 [(set_attr "conds" "clob")
1068 (set_attr "length" "8")]
1071 (define_expand "subsi3"
1072 [(set (match_operand:SI 0 "s_register_operand" "")
1073 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1074 (match_operand:SI 2 "s_register_operand" "")))]
1077 if (GET_CODE (operands[1]) == CONST_INT)
1081 arm_split_constant (MINUS, SImode, NULL_RTX,
1082 INTVAL (operands[1]), operands[0],
1083 operands[2], optimize && can_create_pseudo_p ());
1086 else /* TARGET_THUMB1 */
1087 operands[1] = force_reg (SImode, operands[1]);
1092 (define_insn "*thumb1_subsi3_insn"
1093 [(set (match_operand:SI 0 "register_operand" "=l")
1094 (minus:SI (match_operand:SI 1 "register_operand" "l")
1095 (match_operand:SI 2 "register_operand" "l")))]
1098 [(set_attr "length" "2")]
1101 ; ??? Check Thumb-2 split length
1102 (define_insn_and_split "*arm_subsi3_insn"
1103 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1104 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1105 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1112 && GET_CODE (operands[1]) == CONST_INT
1113 && !const_ok_for_arm (INTVAL (operands[1]))"
1114 [(clobber (const_int 0))]
1116 arm_split_constant (MINUS, SImode, curr_insn,
1117 INTVAL (operands[1]), operands[0], operands[2], 0);
1120 [(set_attr "length" "4,4,16")
1121 (set_attr "predicable" "yes")]
1125 [(match_scratch:SI 3 "r")
1126 (set (match_operand:SI 0 "arm_general_register_operand" "")
1127 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1128 (match_operand:SI 2 "arm_general_register_operand" "")))]
1130 && !const_ok_for_arm (INTVAL (operands[1]))
1131 && const_ok_for_arm (~INTVAL (operands[1]))"
1132 [(set (match_dup 3) (match_dup 1))
1133 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1137 (define_insn "*subsi3_compare0"
1138 [(set (reg:CC_NOOV CC_REGNUM)
1140 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1141 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1144 (minus:SI (match_dup 1) (match_dup 2)))]
1149 [(set_attr "conds" "set")]
1152 (define_expand "decscc"
1153 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1154 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1155 (match_operator:SI 2 "arm_comparison_operator"
1156 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1161 (define_insn "*arm_decscc"
1162 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1163 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1164 (match_operator:SI 2 "arm_comparison_operator"
1165 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1169 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1170 [(set_attr "conds" "use")
1171 (set_attr "length" "*,8")]
1174 (define_expand "subsf3"
1175 [(set (match_operand:SF 0 "s_register_operand" "")
1176 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1177 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1178 "TARGET_32BIT && TARGET_HARD_FLOAT"
1180 if (TARGET_MAVERICK)
1182 if (!cirrus_fp_register (operands[1], SFmode))
1183 operands[1] = force_reg (SFmode, operands[1]);
1184 if (!cirrus_fp_register (operands[2], SFmode))
1185 operands[2] = force_reg (SFmode, operands[2]);
1189 (define_expand "subdf3"
1190 [(set (match_operand:DF 0 "s_register_operand" "")
1191 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1192 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1193 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1195 if (TARGET_MAVERICK)
1197 if (!cirrus_fp_register (operands[1], DFmode))
1198 operands[1] = force_reg (DFmode, operands[1]);
1199 if (!cirrus_fp_register (operands[2], DFmode))
1200 operands[2] = force_reg (DFmode, operands[2]);
1205 ;; Multiplication insns
1207 (define_expand "mulsi3"
1208 [(set (match_operand:SI 0 "s_register_operand" "")
1209 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1210 (match_operand:SI 1 "s_register_operand" "")))]
1215 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1216 (define_insn "*arm_mulsi3"
1217 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1218 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1219 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1220 "TARGET_32BIT && !arm_arch6"
1221 "mul%?\\t%0, %2, %1"
1222 [(set_attr "insn" "mul")
1223 (set_attr "predicable" "yes")]
1226 (define_insn "*arm_mulsi3_v6"
1227 [(set (match_operand:SI 0 "s_register_operand" "=r")
1228 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1229 (match_operand:SI 2 "s_register_operand" "r")))]
1230 "TARGET_32BIT && arm_arch6"
1231 "mul%?\\t%0, %1, %2"
1232 [(set_attr "insn" "mul")
1233 (set_attr "predicable" "yes")]
1236 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1237 ; 1 and 2; are the same, because reload will make operand 0 match
1238 ; operand 1 without realizing that this conflicts with operand 2. We fix
1239 ; this by adding another alternative to match this case, and then `reload'
1240 ; it ourselves. This alternative must come first.
1241 (define_insn "*thumb_mulsi3"
1242 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1243 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1244 (match_operand:SI 2 "register_operand" "l,l,l")))]
1245 "TARGET_THUMB1 && !arm_arch6"
1247 if (which_alternative < 2)
1248 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1250 return \"mul\\t%0, %2\";
1252 [(set_attr "length" "4,4,2")
1253 (set_attr "insn" "mul")]
1256 (define_insn "*thumb_mulsi3_v6"
1257 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1258 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1259 (match_operand:SI 2 "register_operand" "l,0,0")))]
1260 "TARGET_THUMB1 && arm_arch6"
1265 [(set_attr "length" "2")
1266 (set_attr "insn" "mul")]
1269 (define_insn "*mulsi3_compare0"
1270 [(set (reg:CC_NOOV CC_REGNUM)
1271 (compare:CC_NOOV (mult:SI
1272 (match_operand:SI 2 "s_register_operand" "r,r")
1273 (match_operand:SI 1 "s_register_operand" "%0,r"))
1275 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1276 (mult:SI (match_dup 2) (match_dup 1)))]
1277 "TARGET_ARM && !arm_arch6"
1278 "mul%.\\t%0, %2, %1"
1279 [(set_attr "conds" "set")
1280 (set_attr "insn" "muls")]
1283 (define_insn "*mulsi3_compare0_v6"
1284 [(set (reg:CC_NOOV CC_REGNUM)
1285 (compare:CC_NOOV (mult:SI
1286 (match_operand:SI 2 "s_register_operand" "r")
1287 (match_operand:SI 1 "s_register_operand" "r"))
1289 (set (match_operand:SI 0 "s_register_operand" "=r")
1290 (mult:SI (match_dup 2) (match_dup 1)))]
1291 "TARGET_ARM && arm_arch6 && optimize_size"
1292 "mul%.\\t%0, %2, %1"
1293 [(set_attr "conds" "set")
1294 (set_attr "insn" "muls")]
1297 (define_insn "*mulsi_compare0_scratch"
1298 [(set (reg:CC_NOOV CC_REGNUM)
1299 (compare:CC_NOOV (mult:SI
1300 (match_operand:SI 2 "s_register_operand" "r,r")
1301 (match_operand:SI 1 "s_register_operand" "%0,r"))
1303 (clobber (match_scratch:SI 0 "=&r,&r"))]
1304 "TARGET_ARM && !arm_arch6"
1305 "mul%.\\t%0, %2, %1"
1306 [(set_attr "conds" "set")
1307 (set_attr "insn" "muls")]
1310 (define_insn "*mulsi_compare0_scratch_v6"
1311 [(set (reg:CC_NOOV CC_REGNUM)
1312 (compare:CC_NOOV (mult:SI
1313 (match_operand:SI 2 "s_register_operand" "r")
1314 (match_operand:SI 1 "s_register_operand" "r"))
1316 (clobber (match_scratch:SI 0 "=r"))]
1317 "TARGET_ARM && arm_arch6 && optimize_size"
1318 "mul%.\\t%0, %2, %1"
1319 [(set_attr "conds" "set")
1320 (set_attr "insn" "muls")]
1323 ;; Unnamed templates to match MLA instruction.
1325 (define_insn "*mulsi3addsi"
1326 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1328 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1329 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1330 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1331 "TARGET_32BIT && !arm_arch6"
1332 "mla%?\\t%0, %2, %1, %3"
1333 [(set_attr "insn" "mla")
1334 (set_attr "predicable" "yes")]
1337 (define_insn "*mulsi3addsi_v6"
1338 [(set (match_operand:SI 0 "s_register_operand" "=r")
1340 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1341 (match_operand:SI 1 "s_register_operand" "r"))
1342 (match_operand:SI 3 "s_register_operand" "r")))]
1343 "TARGET_32BIT && arm_arch6"
1344 "mla%?\\t%0, %2, %1, %3"
1345 [(set_attr "insn" "mla")
1346 (set_attr "predicable" "yes")]
1349 (define_insn "*mulsi3addsi_compare0"
1350 [(set (reg:CC_NOOV CC_REGNUM)
1353 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1354 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1355 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1357 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1358 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1360 "TARGET_ARM && arm_arch6"
1361 "mla%.\\t%0, %2, %1, %3"
1362 [(set_attr "conds" "set")
1363 (set_attr "insn" "mlas")]
1366 (define_insn "*mulsi3addsi_compare0_v6"
1367 [(set (reg:CC_NOOV CC_REGNUM)
1370 (match_operand:SI 2 "s_register_operand" "r")
1371 (match_operand:SI 1 "s_register_operand" "r"))
1372 (match_operand:SI 3 "s_register_operand" "r"))
1374 (set (match_operand:SI 0 "s_register_operand" "=r")
1375 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1377 "TARGET_ARM && arm_arch6 && optimize_size"
1378 "mla%.\\t%0, %2, %1, %3"
1379 [(set_attr "conds" "set")
1380 (set_attr "insn" "mlas")]
1383 (define_insn "*mulsi3addsi_compare0_scratch"
1384 [(set (reg:CC_NOOV CC_REGNUM)
1387 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1388 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1389 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1391 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1392 "TARGET_ARM && !arm_arch6"
1393 "mla%.\\t%0, %2, %1, %3"
1394 [(set_attr "conds" "set")
1395 (set_attr "insn" "mlas")]
1398 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1399 [(set (reg:CC_NOOV CC_REGNUM)
1402 (match_operand:SI 2 "s_register_operand" "r")
1403 (match_operand:SI 1 "s_register_operand" "r"))
1404 (match_operand:SI 3 "s_register_operand" "r"))
1406 (clobber (match_scratch:SI 0 "=r"))]
1407 "TARGET_ARM && arm_arch6 && optimize_size"
1408 "mla%.\\t%0, %2, %1, %3"
1409 [(set_attr "conds" "set")
1410 (set_attr "insn" "mlas")]
1413 (define_insn "*mulsi3subsi"
1414 [(set (match_operand:SI 0 "s_register_operand" "=r")
1416 (match_operand:SI 3 "s_register_operand" "r")
1417 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1418 (match_operand:SI 1 "s_register_operand" "r"))))]
1419 "TARGET_32BIT && arm_arch_thumb2"
1420 "mls%?\\t%0, %2, %1, %3"
1421 [(set_attr "insn" "mla")
1422 (set_attr "predicable" "yes")]
1425 ;; Unnamed template to match long long multiply-accumulate (smlal)
1427 (define_insn "*mulsidi3adddi"
1428 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1431 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1432 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1433 (match_operand:DI 1 "s_register_operand" "0")))]
1434 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1435 "smlal%?\\t%Q0, %R0, %3, %2"
1436 [(set_attr "insn" "smlal")
1437 (set_attr "predicable" "yes")]
1440 (define_insn "*mulsidi3adddi_v6"
1441 [(set (match_operand:DI 0 "s_register_operand" "=r")
1444 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1445 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1446 (match_operand:DI 1 "s_register_operand" "0")))]
1447 "TARGET_32BIT && arm_arch6"
1448 "smlal%?\\t%Q0, %R0, %3, %2"
1449 [(set_attr "insn" "smlal")
1450 (set_attr "predicable" "yes")]
1453 ;; 32x32->64 widening multiply.
1454 ;; As with mulsi3, the only difference between the v3-5 and v6+
1455 ;; versions of these patterns is the requirement that the output not
1456 ;; overlap the inputs, but that still means we have to have a named
1457 ;; expander and two different starred insns.
1459 (define_expand "mulsidi3"
1460 [(set (match_operand:DI 0 "s_register_operand" "")
1462 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1463 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1464 "TARGET_32BIT && arm_arch3m"
1468 (define_insn "*mulsidi3_nov6"
1469 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1471 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1472 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1473 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1474 "smull%?\\t%Q0, %R0, %1, %2"
1475 [(set_attr "insn" "smull")
1476 (set_attr "predicable" "yes")]
1479 (define_insn "*mulsidi3_v6"
1480 [(set (match_operand:DI 0 "s_register_operand" "=r")
1482 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1483 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1484 "TARGET_32BIT && arm_arch6"
1485 "smull%?\\t%Q0, %R0, %1, %2"
1486 [(set_attr "insn" "smull")
1487 (set_attr "predicable" "yes")]
1490 (define_expand "umulsidi3"
1491 [(set (match_operand:DI 0 "s_register_operand" "")
1493 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1494 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1495 "TARGET_32BIT && arm_arch3m"
1499 (define_insn "*umulsidi3_nov6"
1500 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1502 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1503 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1504 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1505 "umull%?\\t%Q0, %R0, %1, %2"
1506 [(set_attr "insn" "umull")
1507 (set_attr "predicable" "yes")]
1510 (define_insn "*umulsidi3_v6"
1511 [(set (match_operand:DI 0 "s_register_operand" "=r")
1513 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1514 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1515 "TARGET_32BIT && arm_arch6"
1516 "umull%?\\t%Q0, %R0, %1, %2"
1517 [(set_attr "insn" "umull")
1518 (set_attr "predicable" "yes")]
1521 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1523 (define_insn "*umulsidi3adddi"
1524 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1527 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1528 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1529 (match_operand:DI 1 "s_register_operand" "0")))]
1530 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1531 "umlal%?\\t%Q0, %R0, %3, %2"
1532 [(set_attr "insn" "umlal")
1533 (set_attr "predicable" "yes")]
1536 (define_insn "*umulsidi3adddi_v6"
1537 [(set (match_operand:DI 0 "s_register_operand" "=r")
1540 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1541 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1542 (match_operand:DI 1 "s_register_operand" "0")))]
1543 "TARGET_32BIT && arm_arch6"
1544 "umlal%?\\t%Q0, %R0, %3, %2"
1545 [(set_attr "insn" "umlal")
1546 (set_attr "predicable" "yes")]
1549 (define_expand "smulsi3_highpart"
1551 [(set (match_operand:SI 0 "s_register_operand" "")
1555 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1556 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1558 (clobber (match_scratch:SI 3 ""))])]
1559 "TARGET_32BIT && arm_arch3m"
1563 (define_insn "*smulsi3_highpart_nov6"
1564 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1568 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1569 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1571 (clobber (match_scratch:SI 3 "=&r,&r"))]
1572 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1573 "smull%?\\t%3, %0, %2, %1"
1574 [(set_attr "insn" "smull")
1575 (set_attr "predicable" "yes")]
1578 (define_insn "*smulsi3_highpart_v6"
1579 [(set (match_operand:SI 0 "s_register_operand" "=r")
1583 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1584 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1586 (clobber (match_scratch:SI 3 "=r"))]
1587 "TARGET_32BIT && arm_arch6"
1588 "smull%?\\t%3, %0, %2, %1"
1589 [(set_attr "insn" "smull")
1590 (set_attr "predicable" "yes")]
1593 (define_expand "umulsi3_highpart"
1595 [(set (match_operand:SI 0 "s_register_operand" "")
1599 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1600 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1602 (clobber (match_scratch:SI 3 ""))])]
1603 "TARGET_32BIT && arm_arch3m"
1607 (define_insn "*umulsi3_highpart_nov6"
1608 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1612 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1613 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1615 (clobber (match_scratch:SI 3 "=&r,&r"))]
1616 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1617 "umull%?\\t%3, %0, %2, %1"
1618 [(set_attr "insn" "umull")
1619 (set_attr "predicable" "yes")]
1622 (define_insn "*umulsi3_highpart_v6"
1623 [(set (match_operand:SI 0 "s_register_operand" "=r")
1627 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1628 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1630 (clobber (match_scratch:SI 3 "=r"))]
1631 "TARGET_32BIT && arm_arch6"
1632 "umull%?\\t%3, %0, %2, %1"
1633 [(set_attr "insn" "umull")
1634 (set_attr "predicable" "yes")]
1637 (define_insn "mulhisi3"
1638 [(set (match_operand:SI 0 "s_register_operand" "=r")
1639 (mult:SI (sign_extend:SI
1640 (match_operand:HI 1 "s_register_operand" "%r"))
1642 (match_operand:HI 2 "s_register_operand" "r"))))]
1643 "TARGET_DSP_MULTIPLY"
1644 "smulbb%?\\t%0, %1, %2"
1645 [(set_attr "insn" "smulxy")
1646 (set_attr "predicable" "yes")]
1649 (define_insn "*mulhisi3tb"
1650 [(set (match_operand:SI 0 "s_register_operand" "=r")
1651 (mult:SI (ashiftrt:SI
1652 (match_operand:SI 1 "s_register_operand" "r")
1655 (match_operand:HI 2 "s_register_operand" "r"))))]
1656 "TARGET_DSP_MULTIPLY"
1657 "smultb%?\\t%0, %1, %2"
1658 [(set_attr "insn" "smulxy")
1659 (set_attr "predicable" "yes")]
1662 (define_insn "*mulhisi3bt"
1663 [(set (match_operand:SI 0 "s_register_operand" "=r")
1664 (mult:SI (sign_extend:SI
1665 (match_operand:HI 1 "s_register_operand" "r"))
1667 (match_operand:SI 2 "s_register_operand" "r")
1669 "TARGET_DSP_MULTIPLY"
1670 "smulbt%?\\t%0, %1, %2"
1671 [(set_attr "insn" "smulxy")
1672 (set_attr "predicable" "yes")]
1675 (define_insn "*mulhisi3tt"
1676 [(set (match_operand:SI 0 "s_register_operand" "=r")
1677 (mult:SI (ashiftrt:SI
1678 (match_operand:SI 1 "s_register_operand" "r")
1681 (match_operand:SI 2 "s_register_operand" "r")
1683 "TARGET_DSP_MULTIPLY"
1684 "smultt%?\\t%0, %1, %2"
1685 [(set_attr "insn" "smulxy")
1686 (set_attr "predicable" "yes")]
1689 (define_insn "*mulhisi3addsi"
1690 [(set (match_operand:SI 0 "s_register_operand" "=r")
1691 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1692 (mult:SI (sign_extend:SI
1693 (match_operand:HI 2 "s_register_operand" "%r"))
1695 (match_operand:HI 3 "s_register_operand" "r")))))]
1696 "TARGET_DSP_MULTIPLY"
1697 "smlabb%?\\t%0, %2, %3, %1"
1698 [(set_attr "insn" "smlaxy")
1699 (set_attr "predicable" "yes")]
1702 (define_insn "*mulhidi3adddi"
1703 [(set (match_operand:DI 0 "s_register_operand" "=r")
1705 (match_operand:DI 1 "s_register_operand" "0")
1706 (mult:DI (sign_extend:DI
1707 (match_operand:HI 2 "s_register_operand" "%r"))
1709 (match_operand:HI 3 "s_register_operand" "r")))))]
1710 "TARGET_DSP_MULTIPLY"
1711 "smlalbb%?\\t%Q0, %R0, %2, %3"
1712 [(set_attr "insn" "smlalxy")
1713 (set_attr "predicable" "yes")])
1715 (define_expand "mulsf3"
1716 [(set (match_operand:SF 0 "s_register_operand" "")
1717 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1718 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1719 "TARGET_32BIT && TARGET_HARD_FLOAT"
1722 && !cirrus_fp_register (operands[2], SFmode))
1723 operands[2] = force_reg (SFmode, operands[2]);
1726 (define_expand "muldf3"
1727 [(set (match_operand:DF 0 "s_register_operand" "")
1728 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1729 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1730 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1733 && !cirrus_fp_register (operands[2], DFmode))
1734 operands[2] = force_reg (DFmode, operands[2]);
1739 (define_expand "divsf3"
1740 [(set (match_operand:SF 0 "s_register_operand" "")
1741 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1742 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1743 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1746 (define_expand "divdf3"
1747 [(set (match_operand:DF 0 "s_register_operand" "")
1748 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1749 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1750 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1755 (define_expand "modsf3"
1756 [(set (match_operand:SF 0 "s_register_operand" "")
1757 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1758 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1759 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1762 (define_expand "moddf3"
1763 [(set (match_operand:DF 0 "s_register_operand" "")
1764 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1765 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1766 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1769 ;; Boolean and,ior,xor insns
1771 ;; Split up double word logical operations
1773 ;; Split up simple DImode logical operations. Simply perform the logical
1774 ;; operation on the upper and lower halves of the registers.
1776 [(set (match_operand:DI 0 "s_register_operand" "")
1777 (match_operator:DI 6 "logical_binary_operator"
1778 [(match_operand:DI 1 "s_register_operand" "")
1779 (match_operand:DI 2 "s_register_operand" "")]))]
1780 "TARGET_32BIT && reload_completed
1781 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1782 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1783 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1786 operands[3] = gen_highpart (SImode, operands[0]);
1787 operands[0] = gen_lowpart (SImode, operands[0]);
1788 operands[4] = gen_highpart (SImode, operands[1]);
1789 operands[1] = gen_lowpart (SImode, operands[1]);
1790 operands[5] = gen_highpart (SImode, operands[2]);
1791 operands[2] = gen_lowpart (SImode, operands[2]);
1796 [(set (match_operand:DI 0 "s_register_operand" "")
1797 (match_operator:DI 6 "logical_binary_operator"
1798 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1799 (match_operand:DI 1 "s_register_operand" "")]))]
1800 "TARGET_32BIT && reload_completed"
1801 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1802 (set (match_dup 3) (match_op_dup:SI 6
1803 [(ashiftrt:SI (match_dup 2) (const_int 31))
1807 operands[3] = gen_highpart (SImode, operands[0]);
1808 operands[0] = gen_lowpart (SImode, operands[0]);
1809 operands[4] = gen_highpart (SImode, operands[1]);
1810 operands[1] = gen_lowpart (SImode, operands[1]);
1811 operands[5] = gen_highpart (SImode, operands[2]);
1812 operands[2] = gen_lowpart (SImode, operands[2]);
1816 ;; The zero extend of operand 2 means we can just copy the high part of
1817 ;; operand1 into operand0.
1819 [(set (match_operand:DI 0 "s_register_operand" "")
1821 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1822 (match_operand:DI 1 "s_register_operand" "")))]
1823 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1824 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1825 (set (match_dup 3) (match_dup 4))]
1828 operands[4] = gen_highpart (SImode, operands[1]);
1829 operands[3] = gen_highpart (SImode, operands[0]);
1830 operands[0] = gen_lowpart (SImode, operands[0]);
1831 operands[1] = gen_lowpart (SImode, operands[1]);
1835 ;; The zero extend of operand 2 means we can just copy the high part of
1836 ;; operand1 into operand0.
1838 [(set (match_operand:DI 0 "s_register_operand" "")
1840 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1841 (match_operand:DI 1 "s_register_operand" "")))]
1842 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1843 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1844 (set (match_dup 3) (match_dup 4))]
1847 operands[4] = gen_highpart (SImode, operands[1]);
1848 operands[3] = gen_highpart (SImode, operands[0]);
1849 operands[0] = gen_lowpart (SImode, operands[0]);
1850 operands[1] = gen_lowpart (SImode, operands[1]);
1854 (define_insn "anddi3"
1855 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1856 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1857 (match_operand:DI 2 "s_register_operand" "r,r")))]
1858 "TARGET_32BIT && ! TARGET_IWMMXT"
1860 [(set_attr "length" "8")]
1863 (define_insn_and_split "*anddi_zesidi_di"
1864 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1865 (and:DI (zero_extend:DI
1866 (match_operand:SI 2 "s_register_operand" "r,r"))
1867 (match_operand:DI 1 "s_register_operand" "0,r")))]
1870 "TARGET_32BIT && reload_completed"
1871 ; The zero extend of operand 2 clears the high word of the output
1873 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1874 (set (match_dup 3) (const_int 0))]
1877 operands[3] = gen_highpart (SImode, operands[0]);
1878 operands[0] = gen_lowpart (SImode, operands[0]);
1879 operands[1] = gen_lowpart (SImode, operands[1]);
1881 [(set_attr "length" "8")]
1884 (define_insn "*anddi_sesdi_di"
1885 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1886 (and:DI (sign_extend:DI
1887 (match_operand:SI 2 "s_register_operand" "r,r"))
1888 (match_operand:DI 1 "s_register_operand" "0,r")))]
1891 [(set_attr "length" "8")]
1894 (define_expand "andsi3"
1895 [(set (match_operand:SI 0 "s_register_operand" "")
1896 (and:SI (match_operand:SI 1 "s_register_operand" "")
1897 (match_operand:SI 2 "reg_or_int_operand" "")))]
1902 if (GET_CODE (operands[2]) == CONST_INT)
1904 arm_split_constant (AND, SImode, NULL_RTX,
1905 INTVAL (operands[2]), operands[0],
1906 operands[1], optimize && can_create_pseudo_p ());
1911 else /* TARGET_THUMB1 */
1913 if (GET_CODE (operands[2]) != CONST_INT)
1915 rtx tmp = force_reg (SImode, operands[2]);
1916 if (rtx_equal_p (operands[0], operands[1]))
1920 operands[2] = operands[1];
1928 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1930 operands[2] = force_reg (SImode,
1931 GEN_INT (~INTVAL (operands[2])));
1933 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1938 for (i = 9; i <= 31; i++)
1940 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1942 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1946 else if ((((HOST_WIDE_INT) 1) << i) - 1
1947 == ~INTVAL (operands[2]))
1949 rtx shift = GEN_INT (i);
1950 rtx reg = gen_reg_rtx (SImode);
1952 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1953 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1959 operands[2] = force_reg (SImode, operands[2]);
1965 ; ??? Check split length for Thumb-2
1966 (define_insn_and_split "*arm_andsi3_insn"
1967 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1968 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1969 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1973 bic%?\\t%0, %1, #%B2
1976 && GET_CODE (operands[2]) == CONST_INT
1977 && !(const_ok_for_arm (INTVAL (operands[2]))
1978 || const_ok_for_arm (~INTVAL (operands[2])))"
1979 [(clobber (const_int 0))]
1981 arm_split_constant (AND, SImode, curr_insn,
1982 INTVAL (operands[2]), operands[0], operands[1], 0);
1985 [(set_attr "length" "4,4,16")
1986 (set_attr "predicable" "yes")]
1989 (define_insn "*thumb1_andsi3_insn"
1990 [(set (match_operand:SI 0 "register_operand" "=l")
1991 (and:SI (match_operand:SI 1 "register_operand" "%0")
1992 (match_operand:SI 2 "register_operand" "l")))]
1995 [(set_attr "length" "2")]
1998 (define_insn "*andsi3_compare0"
1999 [(set (reg:CC_NOOV CC_REGNUM)
2001 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2002 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2004 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2005 (and:SI (match_dup 1) (match_dup 2)))]
2009 bic%.\\t%0, %1, #%B2"
2010 [(set_attr "conds" "set")]
2013 (define_insn "*andsi3_compare0_scratch"
2014 [(set (reg:CC_NOOV CC_REGNUM)
2016 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2017 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2019 (clobber (match_scratch:SI 2 "=X,r"))]
2023 bic%.\\t%2, %0, #%B1"
2024 [(set_attr "conds" "set")]
2027 (define_insn "*zeroextractsi_compare0_scratch"
2028 [(set (reg:CC_NOOV CC_REGNUM)
2029 (compare:CC_NOOV (zero_extract:SI
2030 (match_operand:SI 0 "s_register_operand" "r")
2031 (match_operand 1 "const_int_operand" "n")
2032 (match_operand 2 "const_int_operand" "n"))
2035 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2036 && INTVAL (operands[1]) > 0
2037 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2038 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2040 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2041 << INTVAL (operands[2]));
2042 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2045 [(set_attr "conds" "set")]
2048 (define_insn_and_split "*ne_zeroextractsi"
2049 [(set (match_operand:SI 0 "s_register_operand" "=r")
2050 (ne:SI (zero_extract:SI
2051 (match_operand:SI 1 "s_register_operand" "r")
2052 (match_operand:SI 2 "const_int_operand" "n")
2053 (match_operand:SI 3 "const_int_operand" "n"))
2055 (clobber (reg:CC CC_REGNUM))]
2057 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2058 && INTVAL (operands[2]) > 0
2059 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2060 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2063 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2064 && INTVAL (operands[2]) > 0
2065 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2066 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2067 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2068 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2070 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2072 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2073 (match_dup 0) (const_int 1)))]
2075 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2076 << INTVAL (operands[3]));
2078 [(set_attr "conds" "clob")
2079 (set (attr "length")
2080 (if_then_else (eq_attr "is_thumb" "yes")
2085 (define_insn_and_split "*ne_zeroextractsi_shifted"
2086 [(set (match_operand:SI 0 "s_register_operand" "=r")
2087 (ne:SI (zero_extract:SI
2088 (match_operand:SI 1 "s_register_operand" "r")
2089 (match_operand:SI 2 "const_int_operand" "n")
2092 (clobber (reg:CC CC_REGNUM))]
2096 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2097 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2099 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2101 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2102 (match_dup 0) (const_int 1)))]
2104 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2106 [(set_attr "conds" "clob")
2107 (set_attr "length" "8")]
2110 (define_insn_and_split "*ite_ne_zeroextractsi"
2111 [(set (match_operand:SI 0 "s_register_operand" "=r")
2112 (if_then_else:SI (ne (zero_extract:SI
2113 (match_operand:SI 1 "s_register_operand" "r")
2114 (match_operand:SI 2 "const_int_operand" "n")
2115 (match_operand:SI 3 "const_int_operand" "n"))
2117 (match_operand:SI 4 "arm_not_operand" "rIK")
2119 (clobber (reg:CC CC_REGNUM))]
2121 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2122 && INTVAL (operands[2]) > 0
2123 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2124 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2125 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2128 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2129 && INTVAL (operands[2]) > 0
2130 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2131 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2132 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2133 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2134 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2136 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2138 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2139 (match_dup 0) (match_dup 4)))]
2141 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2142 << INTVAL (operands[3]));
2144 [(set_attr "conds" "clob")
2145 (set_attr "length" "8")]
2148 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2149 [(set (match_operand:SI 0 "s_register_operand" "=r")
2150 (if_then_else:SI (ne (zero_extract:SI
2151 (match_operand:SI 1 "s_register_operand" "r")
2152 (match_operand:SI 2 "const_int_operand" "n")
2155 (match_operand:SI 3 "arm_not_operand" "rIK")
2157 (clobber (reg:CC CC_REGNUM))]
2158 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2160 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2161 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2162 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2164 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2166 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2167 (match_dup 0) (match_dup 3)))]
2169 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2171 [(set_attr "conds" "clob")
2172 (set_attr "length" "8")]
2176 [(set (match_operand:SI 0 "s_register_operand" "")
2177 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2178 (match_operand:SI 2 "const_int_operand" "")
2179 (match_operand:SI 3 "const_int_operand" "")))
2180 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2182 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2183 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2185 HOST_WIDE_INT temp = INTVAL (operands[2]);
2187 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2188 operands[3] = GEN_INT (32 - temp);
2192 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2194 [(set (match_operand:SI 0 "s_register_operand" "")
2195 (match_operator:SI 1 "shiftable_operator"
2196 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2197 (match_operand:SI 3 "const_int_operand" "")
2198 (match_operand:SI 4 "const_int_operand" ""))
2199 (match_operand:SI 5 "s_register_operand" "")]))
2200 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2202 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2205 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2208 HOST_WIDE_INT temp = INTVAL (operands[3]);
2210 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2211 operands[4] = GEN_INT (32 - temp);
2216 [(set (match_operand:SI 0 "s_register_operand" "")
2217 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2218 (match_operand:SI 2 "const_int_operand" "")
2219 (match_operand:SI 3 "const_int_operand" "")))]
2221 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2222 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2224 HOST_WIDE_INT temp = INTVAL (operands[2]);
2226 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2227 operands[3] = GEN_INT (32 - temp);
2232 [(set (match_operand:SI 0 "s_register_operand" "")
2233 (match_operator:SI 1 "shiftable_operator"
2234 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2235 (match_operand:SI 3 "const_int_operand" "")
2236 (match_operand:SI 4 "const_int_operand" ""))
2237 (match_operand:SI 5 "s_register_operand" "")]))
2238 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2240 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2243 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2246 HOST_WIDE_INT temp = INTVAL (operands[3]);
2248 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2249 operands[4] = GEN_INT (32 - temp);
2253 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2254 ;;; represented by the bitfield, then this will produce incorrect results.
2255 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2256 ;;; which have a real bit-field insert instruction, the truncation happens
2257 ;;; in the bit-field insert instruction itself. Since arm does not have a
2258 ;;; bit-field insert instruction, we would have to emit code here to truncate
2259 ;;; the value before we insert. This loses some of the advantage of having
2260 ;;; this insv pattern, so this pattern needs to be reevalutated.
2262 (define_expand "insv"
2263 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2264 (match_operand:SI 1 "general_operand" "")
2265 (match_operand:SI 2 "general_operand" ""))
2266 (match_operand:SI 3 "reg_or_int_operand" ""))]
2267 "TARGET_ARM || arm_arch_thumb2"
2270 int start_bit = INTVAL (operands[2]);
2271 int width = INTVAL (operands[1]);
2272 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2273 rtx target, subtarget;
2275 if (arm_arch_thumb2)
2277 bool use_bfi = TRUE;
2279 if (GET_CODE (operands[3]) == CONST_INT)
2281 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2285 emit_insn (gen_insv_zero (operands[0], operands[1],
2290 /* See if the set can be done with a single orr instruction. */
2291 if (val == mask && const_ok_for_arm (val << start_bit))
2297 if (GET_CODE (operands[3]) != REG)
2298 operands[3] = force_reg (SImode, operands[3]);
2300 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2306 target = copy_rtx (operands[0]);
2307 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2308 subreg as the final target. */
2309 if (GET_CODE (target) == SUBREG)
2311 subtarget = gen_reg_rtx (SImode);
2312 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2313 < GET_MODE_SIZE (SImode))
2314 target = SUBREG_REG (target);
2319 if (GET_CODE (operands[3]) == CONST_INT)
2321 /* Since we are inserting a known constant, we may be able to
2322 reduce the number of bits that we have to clear so that
2323 the mask becomes simple. */
2324 /* ??? This code does not check to see if the new mask is actually
2325 simpler. It may not be. */
2326 rtx op1 = gen_reg_rtx (SImode);
2327 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2328 start of this pattern. */
2329 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2330 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2332 emit_insn (gen_andsi3 (op1, operands[0],
2333 gen_int_mode (~mask2, SImode)));
2334 emit_insn (gen_iorsi3 (subtarget, op1,
2335 gen_int_mode (op3_value << start_bit, SImode)));
2337 else if (start_bit == 0
2338 && !(const_ok_for_arm (mask)
2339 || const_ok_for_arm (~mask)))
2341 /* A Trick, since we are setting the bottom bits in the word,
2342 we can shift operand[3] up, operand[0] down, OR them together
2343 and rotate the result back again. This takes 3 insns, and
2344 the third might be mergeable into another op. */
2345 /* The shift up copes with the possibility that operand[3] is
2346 wider than the bitfield. */
2347 rtx op0 = gen_reg_rtx (SImode);
2348 rtx op1 = gen_reg_rtx (SImode);
2350 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2351 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2352 emit_insn (gen_iorsi3 (op1, op1, op0));
2353 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2355 else if ((width + start_bit == 32)
2356 && !(const_ok_for_arm (mask)
2357 || const_ok_for_arm (~mask)))
2359 /* Similar trick, but slightly less efficient. */
2361 rtx op0 = gen_reg_rtx (SImode);
2362 rtx op1 = gen_reg_rtx (SImode);
2364 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2365 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2366 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2367 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2371 rtx op0 = gen_int_mode (mask, SImode);
2372 rtx op1 = gen_reg_rtx (SImode);
2373 rtx op2 = gen_reg_rtx (SImode);
2375 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2377 rtx tmp = gen_reg_rtx (SImode);
2379 emit_insn (gen_movsi (tmp, op0));
2383 /* Mask out any bits in operand[3] that are not needed. */
2384 emit_insn (gen_andsi3 (op1, operands[3], op0));
2386 if (GET_CODE (op0) == CONST_INT
2387 && (const_ok_for_arm (mask << start_bit)
2388 || const_ok_for_arm (~(mask << start_bit))))
2390 op0 = gen_int_mode (~(mask << start_bit), SImode);
2391 emit_insn (gen_andsi3 (op2, operands[0], op0));
2395 if (GET_CODE (op0) == CONST_INT)
2397 rtx tmp = gen_reg_rtx (SImode);
2399 emit_insn (gen_movsi (tmp, op0));
2404 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2406 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2410 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2412 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2415 if (subtarget != target)
2417 /* If TARGET is still a SUBREG, then it must be wider than a word,
2418 so we must be careful only to set the subword we were asked to. */
2419 if (GET_CODE (target) == SUBREG)
2420 emit_move_insn (target, subtarget);
2422 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2429 (define_insn "insv_zero"
2430 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2431 (match_operand:SI 1 "const_int_operand" "M")
2432 (match_operand:SI 2 "const_int_operand" "M"))
2436 [(set_attr "length" "4")
2437 (set_attr "predicable" "yes")]
2440 (define_insn "insv_t2"
2441 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2442 (match_operand:SI 1 "const_int_operand" "M")
2443 (match_operand:SI 2 "const_int_operand" "M"))
2444 (match_operand:SI 3 "s_register_operand" "r"))]
2446 "bfi%?\t%0, %3, %2, %1"
2447 [(set_attr "length" "4")
2448 (set_attr "predicable" "yes")]
2451 ; constants for op 2 will never be given to these patterns.
2452 (define_insn_and_split "*anddi_notdi_di"
2453 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2454 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2455 (match_operand:DI 2 "s_register_operand" "r,0")))]
2458 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2459 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2460 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2463 operands[3] = gen_highpart (SImode, operands[0]);
2464 operands[0] = gen_lowpart (SImode, operands[0]);
2465 operands[4] = gen_highpart (SImode, operands[1]);
2466 operands[1] = gen_lowpart (SImode, operands[1]);
2467 operands[5] = gen_highpart (SImode, operands[2]);
2468 operands[2] = gen_lowpart (SImode, operands[2]);
2470 [(set_attr "length" "8")
2471 (set_attr "predicable" "yes")]
2474 (define_insn_and_split "*anddi_notzesidi_di"
2475 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2476 (and:DI (not:DI (zero_extend:DI
2477 (match_operand:SI 2 "s_register_operand" "r,r")))
2478 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2481 bic%?\\t%Q0, %Q1, %2
2483 ; (not (zero_extend ...)) allows us to just copy the high word from
2484 ; operand1 to operand0.
2487 && operands[0] != operands[1]"
2488 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2489 (set (match_dup 3) (match_dup 4))]
2492 operands[3] = gen_highpart (SImode, operands[0]);
2493 operands[0] = gen_lowpart (SImode, operands[0]);
2494 operands[4] = gen_highpart (SImode, operands[1]);
2495 operands[1] = gen_lowpart (SImode, operands[1]);
2497 [(set_attr "length" "4,8")
2498 (set_attr "predicable" "yes")]
2501 (define_insn_and_split "*anddi_notsesidi_di"
2502 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2503 (and:DI (not:DI (sign_extend:DI
2504 (match_operand:SI 2 "s_register_operand" "r,r")))
2505 (match_operand:DI 1 "s_register_operand" "0,r")))]
2508 "TARGET_32BIT && reload_completed"
2509 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2510 (set (match_dup 3) (and:SI (not:SI
2511 (ashiftrt:SI (match_dup 2) (const_int 31)))
2515 operands[3] = gen_highpart (SImode, operands[0]);
2516 operands[0] = gen_lowpart (SImode, operands[0]);
2517 operands[4] = gen_highpart (SImode, operands[1]);
2518 operands[1] = gen_lowpart (SImode, operands[1]);
2520 [(set_attr "length" "8")
2521 (set_attr "predicable" "yes")]
2524 (define_insn "andsi_notsi_si"
2525 [(set (match_operand:SI 0 "s_register_operand" "=r")
2526 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2527 (match_operand:SI 1 "s_register_operand" "r")))]
2529 "bic%?\\t%0, %1, %2"
2530 [(set_attr "predicable" "yes")]
2533 (define_insn "bicsi3"
2534 [(set (match_operand:SI 0 "register_operand" "=l")
2535 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2536 (match_operand:SI 2 "register_operand" "0")))]
2539 [(set_attr "length" "2")]
2542 (define_insn "andsi_not_shiftsi_si"
2543 [(set (match_operand:SI 0 "s_register_operand" "=r")
2544 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2545 [(match_operand:SI 2 "s_register_operand" "r")
2546 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2547 (match_operand:SI 1 "s_register_operand" "r")))]
2549 "bic%?\\t%0, %1, %2%S4"
2550 [(set_attr "predicable" "yes")
2551 (set_attr "shift" "2")
2552 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2553 (const_string "alu_shift")
2554 (const_string "alu_shift_reg")))]
2557 (define_insn "*andsi_notsi_si_compare0"
2558 [(set (reg:CC_NOOV CC_REGNUM)
2560 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2561 (match_operand:SI 1 "s_register_operand" "r"))
2563 (set (match_operand:SI 0 "s_register_operand" "=r")
2564 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2566 "bic%.\\t%0, %1, %2"
2567 [(set_attr "conds" "set")]
2570 (define_insn "*andsi_notsi_si_compare0_scratch"
2571 [(set (reg:CC_NOOV CC_REGNUM)
2573 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2574 (match_operand:SI 1 "s_register_operand" "r"))
2576 (clobber (match_scratch:SI 0 "=r"))]
2578 "bic%.\\t%0, %1, %2"
2579 [(set_attr "conds" "set")]
2582 (define_insn "iordi3"
2583 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2584 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2585 (match_operand:DI 2 "s_register_operand" "r,r")))]
2586 "TARGET_32BIT && ! TARGET_IWMMXT"
2588 [(set_attr "length" "8")
2589 (set_attr "predicable" "yes")]
2592 (define_insn "*iordi_zesidi_di"
2593 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2594 (ior:DI (zero_extend:DI
2595 (match_operand:SI 2 "s_register_operand" "r,r"))
2596 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2599 orr%?\\t%Q0, %Q1, %2
2601 [(set_attr "length" "4,8")
2602 (set_attr "predicable" "yes")]
2605 (define_insn "*iordi_sesidi_di"
2606 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2607 (ior:DI (sign_extend:DI
2608 (match_operand:SI 2 "s_register_operand" "r,r"))
2609 (match_operand:DI 1 "s_register_operand" "0,r")))]
2612 [(set_attr "length" "8")
2613 (set_attr "predicable" "yes")]
2616 (define_expand "iorsi3"
2617 [(set (match_operand:SI 0 "s_register_operand" "")
2618 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2619 (match_operand:SI 2 "reg_or_int_operand" "")))]
2622 if (GET_CODE (operands[2]) == CONST_INT)
2626 arm_split_constant (IOR, SImode, NULL_RTX,
2627 INTVAL (operands[2]), operands[0], operands[1],
2628 optimize && can_create_pseudo_p ());
2631 else /* TARGET_THUMB1 */
2633 rtx tmp = force_reg (SImode, operands[2]);
2634 if (rtx_equal_p (operands[0], operands[1]))
2638 operands[2] = operands[1];
2646 (define_insn_and_split "*arm_iorsi3"
2647 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2648 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2649 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2655 && GET_CODE (operands[2]) == CONST_INT
2656 && !const_ok_for_arm (INTVAL (operands[2]))"
2657 [(clobber (const_int 0))]
2659 arm_split_constant (IOR, SImode, curr_insn,
2660 INTVAL (operands[2]), operands[0], operands[1], 0);
2663 [(set_attr "length" "4,16")
2664 (set_attr "predicable" "yes")]
2667 (define_insn "*thumb1_iorsi3"
2668 [(set (match_operand:SI 0 "register_operand" "=l")
2669 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2670 (match_operand:SI 2 "register_operand" "l")))]
2673 [(set_attr "length" "2")]
2677 [(match_scratch:SI 3 "r")
2678 (set (match_operand:SI 0 "arm_general_register_operand" "")
2679 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2680 (match_operand:SI 2 "const_int_operand" "")))]
2682 && !const_ok_for_arm (INTVAL (operands[2]))
2683 && const_ok_for_arm (~INTVAL (operands[2]))"
2684 [(set (match_dup 3) (match_dup 2))
2685 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2689 (define_insn "*iorsi3_compare0"
2690 [(set (reg:CC_NOOV CC_REGNUM)
2691 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2692 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2694 (set (match_operand:SI 0 "s_register_operand" "=r")
2695 (ior:SI (match_dup 1) (match_dup 2)))]
2697 "orr%.\\t%0, %1, %2"
2698 [(set_attr "conds" "set")]
2701 (define_insn "*iorsi3_compare0_scratch"
2702 [(set (reg:CC_NOOV CC_REGNUM)
2703 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2704 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2706 (clobber (match_scratch:SI 0 "=r"))]
2708 "orr%.\\t%0, %1, %2"
2709 [(set_attr "conds" "set")]
2712 (define_insn "xordi3"
2713 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2714 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2715 (match_operand:DI 2 "s_register_operand" "r,r")))]
2716 "TARGET_32BIT && !TARGET_IWMMXT"
2718 [(set_attr "length" "8")
2719 (set_attr "predicable" "yes")]
2722 (define_insn "*xordi_zesidi_di"
2723 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2724 (xor:DI (zero_extend:DI
2725 (match_operand:SI 2 "s_register_operand" "r,r"))
2726 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2729 eor%?\\t%Q0, %Q1, %2
2731 [(set_attr "length" "4,8")
2732 (set_attr "predicable" "yes")]
2735 (define_insn "*xordi_sesidi_di"
2736 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2737 (xor:DI (sign_extend:DI
2738 (match_operand:SI 2 "s_register_operand" "r,r"))
2739 (match_operand:DI 1 "s_register_operand" "0,r")))]
2742 [(set_attr "length" "8")
2743 (set_attr "predicable" "yes")]
2746 (define_expand "xorsi3"
2747 [(set (match_operand:SI 0 "s_register_operand" "")
2748 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2749 (match_operand:SI 2 "reg_or_int_operand" "")))]
2751 "if (GET_CODE (operands[2]) == CONST_INT)
2755 arm_split_constant (XOR, SImode, NULL_RTX,
2756 INTVAL (operands[2]), operands[0], operands[1],
2757 optimize && can_create_pseudo_p ());
2760 else /* TARGET_THUMB1 */
2762 rtx tmp = force_reg (SImode, operands[2]);
2763 if (rtx_equal_p (operands[0], operands[1]))
2767 operands[2] = operands[1];
2774 (define_insn "*arm_xorsi3"
2775 [(set (match_operand:SI 0 "s_register_operand" "=r")
2776 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2777 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2779 "eor%?\\t%0, %1, %2"
2780 [(set_attr "predicable" "yes")]
2783 (define_insn "*thumb1_xorsi3"
2784 [(set (match_operand:SI 0 "register_operand" "=l")
2785 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2786 (match_operand:SI 2 "register_operand" "l")))]
2789 [(set_attr "length" "2")]
2792 (define_insn "*xorsi3_compare0"
2793 [(set (reg:CC_NOOV CC_REGNUM)
2794 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2795 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2797 (set (match_operand:SI 0 "s_register_operand" "=r")
2798 (xor:SI (match_dup 1) (match_dup 2)))]
2800 "eor%.\\t%0, %1, %2"
2801 [(set_attr "conds" "set")]
2804 (define_insn "*xorsi3_compare0_scratch"
2805 [(set (reg:CC_NOOV CC_REGNUM)
2806 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2807 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2811 [(set_attr "conds" "set")]
2814 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2815 ; (NOT D) we can sometimes merge the final NOT into one of the following
2819 [(set (match_operand:SI 0 "s_register_operand" "")
2820 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2821 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2822 (match_operand:SI 3 "arm_rhs_operand" "")))
2823 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2825 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2826 (not:SI (match_dup 3))))
2827 (set (match_dup 0) (not:SI (match_dup 4)))]
2831 (define_insn "*andsi_iorsi3_notsi"
2832 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2833 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2834 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2835 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2837 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2838 [(set_attr "length" "8")
2839 (set_attr "ce_count" "2")
2840 (set_attr "predicable" "yes")]
2843 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2844 ; insns are available?
2846 [(set (match_operand:SI 0 "s_register_operand" "")
2847 (match_operator:SI 1 "logical_binary_operator"
2848 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2849 (match_operand:SI 3 "const_int_operand" "")
2850 (match_operand:SI 4 "const_int_operand" ""))
2851 (match_operator:SI 9 "logical_binary_operator"
2852 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2853 (match_operand:SI 6 "const_int_operand" ""))
2854 (match_operand:SI 7 "s_register_operand" "")])]))
2855 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2857 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2858 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2861 [(ashift:SI (match_dup 2) (match_dup 4))
2865 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2868 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2872 [(set (match_operand:SI 0 "s_register_operand" "")
2873 (match_operator:SI 1 "logical_binary_operator"
2874 [(match_operator:SI 9 "logical_binary_operator"
2875 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2876 (match_operand:SI 6 "const_int_operand" ""))
2877 (match_operand:SI 7 "s_register_operand" "")])
2878 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2879 (match_operand:SI 3 "const_int_operand" "")
2880 (match_operand:SI 4 "const_int_operand" ""))]))
2881 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2883 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2884 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2887 [(ashift:SI (match_dup 2) (match_dup 4))
2891 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2894 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2898 [(set (match_operand:SI 0 "s_register_operand" "")
2899 (match_operator:SI 1 "logical_binary_operator"
2900 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2901 (match_operand:SI 3 "const_int_operand" "")
2902 (match_operand:SI 4 "const_int_operand" ""))
2903 (match_operator:SI 9 "logical_binary_operator"
2904 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2905 (match_operand:SI 6 "const_int_operand" ""))
2906 (match_operand:SI 7 "s_register_operand" "")])]))
2907 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2909 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2910 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2913 [(ashift:SI (match_dup 2) (match_dup 4))
2917 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2920 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2924 [(set (match_operand:SI 0 "s_register_operand" "")
2925 (match_operator:SI 1 "logical_binary_operator"
2926 [(match_operator:SI 9 "logical_binary_operator"
2927 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2928 (match_operand:SI 6 "const_int_operand" ""))
2929 (match_operand:SI 7 "s_register_operand" "")])
2930 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2931 (match_operand:SI 3 "const_int_operand" "")
2932 (match_operand:SI 4 "const_int_operand" ""))]))
2933 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2935 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2936 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2939 [(ashift:SI (match_dup 2) (match_dup 4))
2943 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2946 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2950 ;; Minimum and maximum insns
2952 (define_expand "smaxsi3"
2954 (set (match_operand:SI 0 "s_register_operand" "")
2955 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2956 (match_operand:SI 2 "arm_rhs_operand" "")))
2957 (clobber (reg:CC CC_REGNUM))])]
2960 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2962 /* No need for a clobber of the condition code register here. */
2963 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2964 gen_rtx_SMAX (SImode, operands[1],
2970 (define_insn "*smax_0"
2971 [(set (match_operand:SI 0 "s_register_operand" "=r")
2972 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2975 "bic%?\\t%0, %1, %1, asr #31"
2976 [(set_attr "predicable" "yes")]
2979 (define_insn "*smax_m1"
2980 [(set (match_operand:SI 0 "s_register_operand" "=r")
2981 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2984 "orr%?\\t%0, %1, %1, asr #31"
2985 [(set_attr "predicable" "yes")]
2988 (define_insn "*arm_smax_insn"
2989 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2990 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2991 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2992 (clobber (reg:CC CC_REGNUM))]
2995 cmp\\t%1, %2\;movlt\\t%0, %2
2996 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2997 [(set_attr "conds" "clob")
2998 (set_attr "length" "8,12")]
3001 (define_expand "sminsi3"
3003 (set (match_operand:SI 0 "s_register_operand" "")
3004 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3005 (match_operand:SI 2 "arm_rhs_operand" "")))
3006 (clobber (reg:CC CC_REGNUM))])]
3009 if (operands[2] == const0_rtx)
3011 /* No need for a clobber of the condition code register here. */
3012 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3013 gen_rtx_SMIN (SImode, operands[1],
3019 (define_insn "*smin_0"
3020 [(set (match_operand:SI 0 "s_register_operand" "=r")
3021 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3024 "and%?\\t%0, %1, %1, asr #31"
3025 [(set_attr "predicable" "yes")]
3028 (define_insn "*arm_smin_insn"
3029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3030 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3031 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3032 (clobber (reg:CC CC_REGNUM))]
3035 cmp\\t%1, %2\;movge\\t%0, %2
3036 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3037 [(set_attr "conds" "clob")
3038 (set_attr "length" "8,12")]
3041 (define_expand "umaxsi3"
3043 (set (match_operand:SI 0 "s_register_operand" "")
3044 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3045 (match_operand:SI 2 "arm_rhs_operand" "")))
3046 (clobber (reg:CC CC_REGNUM))])]
3051 (define_insn "*arm_umaxsi3"
3052 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3053 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3054 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3055 (clobber (reg:CC CC_REGNUM))]
3058 cmp\\t%1, %2\;movcc\\t%0, %2
3059 cmp\\t%1, %2\;movcs\\t%0, %1
3060 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3061 [(set_attr "conds" "clob")
3062 (set_attr "length" "8,8,12")]
3065 (define_expand "uminsi3"
3067 (set (match_operand:SI 0 "s_register_operand" "")
3068 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3069 (match_operand:SI 2 "arm_rhs_operand" "")))
3070 (clobber (reg:CC CC_REGNUM))])]
3075 (define_insn "*arm_uminsi3"
3076 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3077 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3078 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3079 (clobber (reg:CC CC_REGNUM))]
3082 cmp\\t%1, %2\;movcs\\t%0, %2
3083 cmp\\t%1, %2\;movcc\\t%0, %1
3084 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3085 [(set_attr "conds" "clob")
3086 (set_attr "length" "8,8,12")]
3089 (define_insn "*store_minmaxsi"
3090 [(set (match_operand:SI 0 "memory_operand" "=m")
3091 (match_operator:SI 3 "minmax_operator"
3092 [(match_operand:SI 1 "s_register_operand" "r")
3093 (match_operand:SI 2 "s_register_operand" "r")]))
3094 (clobber (reg:CC CC_REGNUM))]
3097 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3098 operands[1], operands[2]);
3099 output_asm_insn (\"cmp\\t%1, %2\", operands);
3101 output_asm_insn (\"ite\t%d3\", operands);
3102 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3103 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3106 [(set_attr "conds" "clob")
3107 (set (attr "length")
3108 (if_then_else (eq_attr "is_thumb" "yes")
3111 (set_attr "type" "store1")]
3114 ; Reject the frame pointer in operand[1], since reloading this after
3115 ; it has been eliminated can cause carnage.
3116 (define_insn "*minmax_arithsi"
3117 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3118 (match_operator:SI 4 "shiftable_operator"
3119 [(match_operator:SI 5 "minmax_operator"
3120 [(match_operand:SI 2 "s_register_operand" "r,r")
3121 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3122 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3123 (clobber (reg:CC CC_REGNUM))]
3124 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3127 enum rtx_code code = GET_CODE (operands[4]);
3130 if (which_alternative != 0 || operands[3] != const0_rtx
3131 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3136 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3137 operands[2], operands[3]);
3138 output_asm_insn (\"cmp\\t%2, %3\", operands);
3142 output_asm_insn (\"ite\\t%d5\", operands);
3144 output_asm_insn (\"it\\t%d5\", operands);
3146 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3148 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3151 [(set_attr "conds" "clob")
3152 (set (attr "length")
3153 (if_then_else (eq_attr "is_thumb" "yes")
3159 ;; Shift and rotation insns
3161 (define_expand "ashldi3"
3162 [(set (match_operand:DI 0 "s_register_operand" "")
3163 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3164 (match_operand:SI 2 "reg_or_int_operand" "")))]
3167 if (GET_CODE (operands[2]) == CONST_INT)
3169 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3171 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3174 /* Ideally we shouldn't fail here if we could know that operands[1]
3175 ends up already living in an iwmmxt register. Otherwise it's
3176 cheaper to have the alternate code being generated than moving
3177 values to iwmmxt regs and back. */
3180 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3185 (define_insn "arm_ashldi3_1bit"
3186 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3187 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3189 (clobber (reg:CC CC_REGNUM))]
3191 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3192 [(set_attr "conds" "clob")
3193 (set_attr "length" "8")]
3196 (define_expand "ashlsi3"
3197 [(set (match_operand:SI 0 "s_register_operand" "")
3198 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3199 (match_operand:SI 2 "arm_rhs_operand" "")))]
3202 if (GET_CODE (operands[2]) == CONST_INT
3203 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3205 emit_insn (gen_movsi (operands[0], const0_rtx));
3211 (define_insn "*thumb1_ashlsi3"
3212 [(set (match_operand:SI 0 "register_operand" "=l,l")
3213 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3214 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3217 [(set_attr "length" "2")]
3220 (define_expand "ashrdi3"
3221 [(set (match_operand:DI 0 "s_register_operand" "")
3222 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3223 (match_operand:SI 2 "reg_or_int_operand" "")))]
3226 if (GET_CODE (operands[2]) == CONST_INT)
3228 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3230 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3233 /* Ideally we shouldn't fail here if we could know that operands[1]
3234 ends up already living in an iwmmxt register. Otherwise it's
3235 cheaper to have the alternate code being generated than moving
3236 values to iwmmxt regs and back. */
3239 else if (!TARGET_REALLY_IWMMXT)
3244 (define_insn "arm_ashrdi3_1bit"
3245 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3246 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3248 (clobber (reg:CC CC_REGNUM))]
3250 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3251 [(set_attr "conds" "clob")
3252 (set_attr "length" "8")]
3255 (define_expand "ashrsi3"
3256 [(set (match_operand:SI 0 "s_register_operand" "")
3257 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3258 (match_operand:SI 2 "arm_rhs_operand" "")))]
3261 if (GET_CODE (operands[2]) == CONST_INT
3262 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3263 operands[2] = GEN_INT (31);
3267 (define_insn "*thumb1_ashrsi3"
3268 [(set (match_operand:SI 0 "register_operand" "=l,l")
3269 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3270 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3273 [(set_attr "length" "2")]
3276 (define_expand "lshrdi3"
3277 [(set (match_operand:DI 0 "s_register_operand" "")
3278 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3279 (match_operand:SI 2 "reg_or_int_operand" "")))]
3282 if (GET_CODE (operands[2]) == CONST_INT)
3284 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3286 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3289 /* Ideally we shouldn't fail here if we could know that operands[1]
3290 ends up already living in an iwmmxt register. Otherwise it's
3291 cheaper to have the alternate code being generated than moving
3292 values to iwmmxt regs and back. */
3295 else if (!TARGET_REALLY_IWMMXT)
3300 (define_insn "arm_lshrdi3_1bit"
3301 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3302 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3304 (clobber (reg:CC CC_REGNUM))]
3306 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3307 [(set_attr "conds" "clob")
3308 (set_attr "length" "8")]
3311 (define_expand "lshrsi3"
3312 [(set (match_operand:SI 0 "s_register_operand" "")
3313 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3314 (match_operand:SI 2 "arm_rhs_operand" "")))]
3317 if (GET_CODE (operands[2]) == CONST_INT
3318 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3320 emit_insn (gen_movsi (operands[0], const0_rtx));
3326 (define_insn "*thumb1_lshrsi3"
3327 [(set (match_operand:SI 0 "register_operand" "=l,l")
3328 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3329 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3332 [(set_attr "length" "2")]
3335 (define_expand "rotlsi3"
3336 [(set (match_operand:SI 0 "s_register_operand" "")
3337 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3338 (match_operand:SI 2 "reg_or_int_operand" "")))]
3341 if (GET_CODE (operands[2]) == CONST_INT)
3342 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3345 rtx reg = gen_reg_rtx (SImode);
3346 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3352 (define_expand "rotrsi3"
3353 [(set (match_operand:SI 0 "s_register_operand" "")
3354 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3355 (match_operand:SI 2 "arm_rhs_operand" "")))]
3360 if (GET_CODE (operands[2]) == CONST_INT
3361 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3362 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3364 else /* TARGET_THUMB1 */
3366 if (GET_CODE (operands [2]) == CONST_INT)
3367 operands [2] = force_reg (SImode, operands[2]);
3372 (define_insn "*thumb1_rotrsi3"
3373 [(set (match_operand:SI 0 "register_operand" "=l")
3374 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3375 (match_operand:SI 2 "register_operand" "l")))]
3378 [(set_attr "length" "2")]
3381 (define_insn "*arm_shiftsi3"
3382 [(set (match_operand:SI 0 "s_register_operand" "=r")
3383 (match_operator:SI 3 "shift_operator"
3384 [(match_operand:SI 1 "s_register_operand" "r")
3385 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3387 "* return arm_output_shift(operands, 0);"
3388 [(set_attr "predicable" "yes")
3389 (set_attr "shift" "1")
3390 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3391 (const_string "alu_shift")
3392 (const_string "alu_shift_reg")))]
3395 (define_insn "*shiftsi3_compare0"
3396 [(set (reg:CC_NOOV CC_REGNUM)
3397 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3398 [(match_operand:SI 1 "s_register_operand" "r")
3399 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3401 (set (match_operand:SI 0 "s_register_operand" "=r")
3402 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3404 "* return arm_output_shift(operands, 1);"
3405 [(set_attr "conds" "set")
3406 (set_attr "shift" "1")
3407 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3408 (const_string "alu_shift")
3409 (const_string "alu_shift_reg")))]
3412 (define_insn "*shiftsi3_compare0_scratch"
3413 [(set (reg:CC_NOOV CC_REGNUM)
3414 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3415 [(match_operand:SI 1 "s_register_operand" "r")
3416 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3418 (clobber (match_scratch:SI 0 "=r"))]
3420 "* return arm_output_shift(operands, 1);"
3421 [(set_attr "conds" "set")
3422 (set_attr "shift" "1")]
3425 (define_insn "*arm_notsi_shiftsi"
3426 [(set (match_operand:SI 0 "s_register_operand" "=r")
3427 (not:SI (match_operator:SI 3 "shift_operator"
3428 [(match_operand:SI 1 "s_register_operand" "r")
3429 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3432 [(set_attr "predicable" "yes")
3433 (set_attr "shift" "1")
3434 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3435 (const_string "alu_shift")
3436 (const_string "alu_shift_reg")))]
3439 (define_insn "*arm_notsi_shiftsi_compare0"
3440 [(set (reg:CC_NOOV CC_REGNUM)
3441 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3442 [(match_operand:SI 1 "s_register_operand" "r")
3443 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3445 (set (match_operand:SI 0 "s_register_operand" "=r")
3446 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3449 [(set_attr "conds" "set")
3450 (set_attr "shift" "1")
3451 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3452 (const_string "alu_shift")
3453 (const_string "alu_shift_reg")))]
3456 (define_insn "*arm_not_shiftsi_compare0_scratch"
3457 [(set (reg:CC_NOOV CC_REGNUM)
3458 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3459 [(match_operand:SI 1 "s_register_operand" "r")
3460 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3462 (clobber (match_scratch:SI 0 "=r"))]
3465 [(set_attr "conds" "set")
3466 (set_attr "shift" "1")
3467 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3468 (const_string "alu_shift")
3469 (const_string "alu_shift_reg")))]
3472 ;; We don't really have extzv, but defining this using shifts helps
3473 ;; to reduce register pressure later on.
3475 (define_expand "extzv"
3477 (ashift:SI (match_operand:SI 1 "register_operand" "")
3478 (match_operand:SI 2 "const_int_operand" "")))
3479 (set (match_operand:SI 0 "register_operand" "")
3480 (lshiftrt:SI (match_dup 4)
3481 (match_operand:SI 3 "const_int_operand" "")))]
3482 "TARGET_THUMB1 || arm_arch_thumb2"
3485 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3486 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3488 if (arm_arch_thumb2)
3490 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3495 operands[3] = GEN_INT (rshift);
3499 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3503 operands[2] = GEN_INT (lshift);
3504 operands[4] = gen_reg_rtx (SImode);
3509 [(set (match_operand:SI 0 "s_register_operand" "=r")
3510 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3511 (match_operand:SI 2 "const_int_operand" "M")
3512 (match_operand:SI 3 "const_int_operand" "M")))]
3514 "sbfx%?\t%0, %1, %3, %2"
3515 [(set_attr "length" "4")
3516 (set_attr "predicable" "yes")]
3519 (define_insn "extzv_t2"
3520 [(set (match_operand:SI 0 "s_register_operand" "=r")
3521 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3522 (match_operand:SI 2 "const_int_operand" "M")
3523 (match_operand:SI 3 "const_int_operand" "M")))]
3525 "ubfx%?\t%0, %1, %3, %2"
3526 [(set_attr "length" "4")
3527 (set_attr "predicable" "yes")]
3531 ;; Unary arithmetic insns
3533 (define_expand "negdi2"
3535 [(set (match_operand:DI 0 "s_register_operand" "")
3536 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3537 (clobber (reg:CC CC_REGNUM))])]
3542 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3543 ;; The first alternative allows the common case of a *full* overlap.
3544 (define_insn "*arm_negdi2"
3545 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3546 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3547 (clobber (reg:CC CC_REGNUM))]
3549 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3550 [(set_attr "conds" "clob")
3551 (set_attr "length" "8")]
3554 (define_insn "*thumb1_negdi2"
3555 [(set (match_operand:DI 0 "register_operand" "=&l")
3556 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3557 (clobber (reg:CC CC_REGNUM))]
3559 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3560 [(set_attr "length" "6")]
3563 (define_expand "negsi2"
3564 [(set (match_operand:SI 0 "s_register_operand" "")
3565 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3570 (define_insn "*arm_negsi2"
3571 [(set (match_operand:SI 0 "s_register_operand" "=r")
3572 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3574 "rsb%?\\t%0, %1, #0"
3575 [(set_attr "predicable" "yes")]
3578 (define_insn "*thumb1_negsi2"
3579 [(set (match_operand:SI 0 "register_operand" "=l")
3580 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3583 [(set_attr "length" "2")]
3586 (define_expand "negsf2"
3587 [(set (match_operand:SF 0 "s_register_operand" "")
3588 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3589 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3593 (define_expand "negdf2"
3594 [(set (match_operand:DF 0 "s_register_operand" "")
3595 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3596 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3599 ;; abssi2 doesn't really clobber the condition codes if a different register
3600 ;; is being set. To keep things simple, assume during rtl manipulations that
3601 ;; it does, but tell the final scan operator the truth. Similarly for
3604 (define_expand "abssi2"
3606 [(set (match_operand:SI 0 "s_register_operand" "")
3607 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3608 (clobber (match_dup 2))])]
3612 operands[2] = gen_rtx_SCRATCH (SImode);
3614 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3617 (define_insn "*arm_abssi2"
3618 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3619 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3620 (clobber (reg:CC CC_REGNUM))]
3623 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3624 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3625 [(set_attr "conds" "clob,*")
3626 (set_attr "shift" "1")
3627 ;; predicable can't be set based on the variant, so left as no
3628 (set_attr "length" "8")]
3631 (define_insn_and_split "*thumb1_abssi2"
3632 [(set (match_operand:SI 0 "s_register_operand" "=l")
3633 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3634 (clobber (match_scratch:SI 2 "=&l"))]
3637 "TARGET_THUMB1 && reload_completed"
3638 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3639 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3640 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3642 [(set_attr "length" "6")]
3645 (define_insn "*arm_neg_abssi2"
3646 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3647 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3648 (clobber (reg:CC CC_REGNUM))]
3651 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3652 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3653 [(set_attr "conds" "clob,*")
3654 (set_attr "shift" "1")
3655 ;; predicable can't be set based on the variant, so left as no
3656 (set_attr "length" "8")]
3659 (define_insn_and_split "*thumb1_neg_abssi2"
3660 [(set (match_operand:SI 0 "s_register_operand" "=l")
3661 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3662 (clobber (match_scratch:SI 2 "=&l"))]
3665 "TARGET_THUMB1 && reload_completed"
3666 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3667 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3668 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3670 [(set_attr "length" "6")]
3673 (define_expand "abssf2"
3674 [(set (match_operand:SF 0 "s_register_operand" "")
3675 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3676 "TARGET_32BIT && TARGET_HARD_FLOAT"
3679 (define_expand "absdf2"
3680 [(set (match_operand:DF 0 "s_register_operand" "")
3681 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3682 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3685 (define_expand "sqrtsf2"
3686 [(set (match_operand:SF 0 "s_register_operand" "")
3687 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3688 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3691 (define_expand "sqrtdf2"
3692 [(set (match_operand:DF 0 "s_register_operand" "")
3693 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3694 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3697 (define_insn_and_split "one_cmpldi2"
3698 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3699 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3702 "TARGET_32BIT && reload_completed"
3703 [(set (match_dup 0) (not:SI (match_dup 1)))
3704 (set (match_dup 2) (not:SI (match_dup 3)))]
3707 operands[2] = gen_highpart (SImode, operands[0]);
3708 operands[0] = gen_lowpart (SImode, operands[0]);
3709 operands[3] = gen_highpart (SImode, operands[1]);
3710 operands[1] = gen_lowpart (SImode, operands[1]);
3712 [(set_attr "length" "8")
3713 (set_attr "predicable" "yes")]
3716 (define_expand "one_cmplsi2"
3717 [(set (match_operand:SI 0 "s_register_operand" "")
3718 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3723 (define_insn "*arm_one_cmplsi2"
3724 [(set (match_operand:SI 0 "s_register_operand" "=r")
3725 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3728 [(set_attr "predicable" "yes")]
3731 (define_insn "*thumb1_one_cmplsi2"
3732 [(set (match_operand:SI 0 "register_operand" "=l")
3733 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3736 [(set_attr "length" "2")]
3739 (define_insn "*notsi_compare0"
3740 [(set (reg:CC_NOOV CC_REGNUM)
3741 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3743 (set (match_operand:SI 0 "s_register_operand" "=r")
3744 (not:SI (match_dup 1)))]
3747 [(set_attr "conds" "set")]
3750 (define_insn "*notsi_compare0_scratch"
3751 [(set (reg:CC_NOOV CC_REGNUM)
3752 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3754 (clobber (match_scratch:SI 0 "=r"))]
3757 [(set_attr "conds" "set")]
3760 ;; Fixed <--> Floating conversion insns
3762 (define_expand "floatsihf2"
3763 [(set (match_operand:HF 0 "general_operand" "")
3764 (float:HF (match_operand:SI 1 "general_operand" "")))]
3768 rtx op1 = gen_reg_rtx (SFmode);
3769 expand_float (op1, operands[1], 0);
3770 op1 = convert_to_mode (HFmode, op1, 0);
3771 emit_move_insn (operands[0], op1);
3776 (define_expand "floatdihf2"
3777 [(set (match_operand:HF 0 "general_operand" "")
3778 (float:HF (match_operand:DI 1 "general_operand" "")))]
3782 rtx op1 = gen_reg_rtx (SFmode);
3783 expand_float (op1, operands[1], 0);
3784 op1 = convert_to_mode (HFmode, op1, 0);
3785 emit_move_insn (operands[0], op1);
3790 (define_expand "floatsisf2"
3791 [(set (match_operand:SF 0 "s_register_operand" "")
3792 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3793 "TARGET_32BIT && TARGET_HARD_FLOAT"
3795 if (TARGET_MAVERICK)
3797 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3802 (define_expand "floatsidf2"
3803 [(set (match_operand:DF 0 "s_register_operand" "")
3804 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3805 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3807 if (TARGET_MAVERICK)
3809 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3814 (define_expand "fix_trunchfsi2"
3815 [(set (match_operand:SI 0 "general_operand" "")
3816 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3820 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3821 expand_fix (operands[0], op1, 0);
3826 (define_expand "fix_trunchfdi2"
3827 [(set (match_operand:DI 0 "general_operand" "")
3828 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3832 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3833 expand_fix (operands[0], op1, 0);
3838 (define_expand "fix_truncsfsi2"
3839 [(set (match_operand:SI 0 "s_register_operand" "")
3840 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3841 "TARGET_32BIT && TARGET_HARD_FLOAT"
3843 if (TARGET_MAVERICK)
3845 if (!cirrus_fp_register (operands[0], SImode))
3846 operands[0] = force_reg (SImode, operands[0]);
3847 if (!cirrus_fp_register (operands[1], SFmode))
3848 operands[1] = force_reg (SFmode, operands[0]);
3849 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3854 (define_expand "fix_truncdfsi2"
3855 [(set (match_operand:SI 0 "s_register_operand" "")
3856 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3857 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3859 if (TARGET_MAVERICK)
3861 if (!cirrus_fp_register (operands[1], DFmode))
3862 operands[1] = force_reg (DFmode, operands[0]);
3863 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3870 (define_expand "truncdfsf2"
3871 [(set (match_operand:SF 0 "s_register_operand" "")
3873 (match_operand:DF 1 "s_register_operand" "")))]
3874 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3878 /* DFmode -> HFmode conversions have to go through SFmode. */
3879 (define_expand "truncdfhf2"
3880 [(set (match_operand:HF 0 "general_operand" "")
3882 (match_operand:DF 1 "general_operand" "")))]
3887 op1 = convert_to_mode (SFmode, operands[1], 0);
3888 op1 = convert_to_mode (HFmode, op1, 0);
3889 emit_move_insn (operands[0], op1);
3894 ;; Zero and sign extension instructions.
3896 (define_expand "zero_extendsidi2"
3897 [(set (match_operand:DI 0 "s_register_operand" "")
3898 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3903 (define_insn "*arm_zero_extendsidi2"
3904 [(set (match_operand:DI 0 "s_register_operand" "=r")
3905 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3908 if (REGNO (operands[1])
3909 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3910 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3911 return \"mov%?\\t%R0, #0\";
3913 [(set_attr "length" "8")
3914 (set_attr "predicable" "yes")]
3917 (define_expand "zero_extendqidi2"
3918 [(set (match_operand:DI 0 "s_register_operand" "")
3919 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3924 (define_insn "*arm_zero_extendqidi2"
3925 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3926 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3929 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3930 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3931 [(set_attr "length" "8")
3932 (set_attr "predicable" "yes")
3933 (set_attr "type" "*,load_byte")
3934 (set_attr "pool_range" "*,4092")
3935 (set_attr "neg_pool_range" "*,4084")]
3938 (define_expand "extendsidi2"
3939 [(set (match_operand:DI 0 "s_register_operand" "")
3940 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3945 (define_insn "*arm_extendsidi2"
3946 [(set (match_operand:DI 0 "s_register_operand" "=r")
3947 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3950 if (REGNO (operands[1])
3951 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3952 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3953 return \"mov%?\\t%R0, %Q0, asr #31\";
3955 [(set_attr "length" "8")
3956 (set_attr "shift" "1")
3957 (set_attr "predicable" "yes")]
3960 (define_expand "zero_extendhisi2"
3962 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3964 (set (match_operand:SI 0 "s_register_operand" "")
3965 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3969 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3971 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3972 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3976 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3978 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3982 if (!s_register_operand (operands[1], HImode))
3983 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3987 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3988 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3992 operands[1] = gen_lowpart (SImode, operands[1]);
3993 operands[2] = gen_reg_rtx (SImode);
3997 (define_insn "*thumb1_zero_extendhisi2"
3998 [(set (match_operand:SI 0 "register_operand" "=l")
3999 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4000 "TARGET_THUMB1 && !arm_arch6"
4002 rtx mem = XEXP (operands[1], 0);
4004 if (GET_CODE (mem) == CONST)
4005 mem = XEXP (mem, 0);
4007 if (GET_CODE (mem) == LABEL_REF)
4008 return \"ldr\\t%0, %1\";
4010 if (GET_CODE (mem) == PLUS)
4012 rtx a = XEXP (mem, 0);
4013 rtx b = XEXP (mem, 1);
4015 /* This can happen due to bugs in reload. */
4016 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4019 ops[0] = operands[0];
4022 output_asm_insn (\"mov %0, %1\", ops);
4024 XEXP (mem, 0) = operands[0];
4027 else if ( GET_CODE (a) == LABEL_REF
4028 && GET_CODE (b) == CONST_INT)
4029 return \"ldr\\t%0, %1\";
4032 return \"ldrh\\t%0, %1\";
4034 [(set_attr "length" "4")
4035 (set_attr "type" "load_byte")
4036 (set_attr "pool_range" "60")]
4039 (define_insn "*thumb1_zero_extendhisi2_v6"
4040 [(set (match_operand:SI 0 "register_operand" "=l,l")
4041 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4042 "TARGET_THUMB1 && arm_arch6"
4046 if (which_alternative == 0)
4047 return \"uxth\\t%0, %1\";
4049 mem = XEXP (operands[1], 0);
4051 if (GET_CODE (mem) == CONST)
4052 mem = XEXP (mem, 0);
4054 if (GET_CODE (mem) == LABEL_REF)
4055 return \"ldr\\t%0, %1\";
4057 if (GET_CODE (mem) == PLUS)
4059 rtx a = XEXP (mem, 0);
4060 rtx b = XEXP (mem, 1);
4062 /* This can happen due to bugs in reload. */
4063 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4066 ops[0] = operands[0];
4069 output_asm_insn (\"mov %0, %1\", ops);
4071 XEXP (mem, 0) = operands[0];
4074 else if ( GET_CODE (a) == LABEL_REF
4075 && GET_CODE (b) == CONST_INT)
4076 return \"ldr\\t%0, %1\";
4079 return \"ldrh\\t%0, %1\";
4081 [(set_attr "length" "2,4")
4082 (set_attr "type" "alu_shift,load_byte")
4083 (set_attr "pool_range" "*,60")]
4086 (define_insn "*arm_zero_extendhisi2"
4087 [(set (match_operand:SI 0 "s_register_operand" "=r")
4088 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4089 "TARGET_ARM && arm_arch4 && !arm_arch6"
4091 [(set_attr "type" "load_byte")
4092 (set_attr "predicable" "yes")
4093 (set_attr "pool_range" "256")
4094 (set_attr "neg_pool_range" "244")]
4097 (define_insn "*arm_zero_extendhisi2_v6"
4098 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4099 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4100 "TARGET_ARM && arm_arch6"
4104 [(set_attr "type" "alu_shift,load_byte")
4105 (set_attr "predicable" "yes")
4106 (set_attr "pool_range" "*,256")
4107 (set_attr "neg_pool_range" "*,244")]
4110 (define_insn "*arm_zero_extendhisi2addsi"
4111 [(set (match_operand:SI 0 "s_register_operand" "=r")
4112 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4113 (match_operand:SI 2 "s_register_operand" "r")))]
4115 "uxtah%?\\t%0, %2, %1"
4116 [(set_attr "type" "alu_shift")
4117 (set_attr "predicable" "yes")]
4120 (define_expand "zero_extendqisi2"
4121 [(set (match_operand:SI 0 "s_register_operand" "")
4122 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4125 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4129 emit_insn (gen_andsi3 (operands[0],
4130 gen_lowpart (SImode, operands[1]),
4133 else /* TARGET_THUMB */
4135 rtx temp = gen_reg_rtx (SImode);
4138 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4139 operands[1] = gen_lowpart (SImode, operands[1]);
4142 ops[1] = operands[1];
4143 ops[2] = GEN_INT (24);
4145 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4146 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4148 ops[0] = operands[0];
4150 ops[2] = GEN_INT (24);
4152 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4153 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4160 (define_insn "*thumb1_zero_extendqisi2"
4161 [(set (match_operand:SI 0 "register_operand" "=l")
4162 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4163 "TARGET_THUMB1 && !arm_arch6"
4165 [(set_attr "length" "2")
4166 (set_attr "type" "load_byte")
4167 (set_attr "pool_range" "32")]
4170 (define_insn "*thumb1_zero_extendqisi2_v6"
4171 [(set (match_operand:SI 0 "register_operand" "=l,l")
4172 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4173 "TARGET_THUMB1 && arm_arch6"
4177 [(set_attr "length" "2,2")
4178 (set_attr "type" "alu_shift,load_byte")
4179 (set_attr "pool_range" "*,32")]
4182 (define_insn "*arm_zero_extendqisi2"
4183 [(set (match_operand:SI 0 "s_register_operand" "=r")
4184 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4185 "TARGET_ARM && !arm_arch6"
4186 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4187 [(set_attr "type" "load_byte")
4188 (set_attr "predicable" "yes")
4189 (set_attr "pool_range" "4096")
4190 (set_attr "neg_pool_range" "4084")]
4193 (define_insn "*arm_zero_extendqisi2_v6"
4194 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4195 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4196 "TARGET_ARM && arm_arch6"
4199 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4200 [(set_attr "type" "alu_shift,load_byte")
4201 (set_attr "predicable" "yes")
4202 (set_attr "pool_range" "*,4096")
4203 (set_attr "neg_pool_range" "*,4084")]
4206 (define_insn "*arm_zero_extendqisi2addsi"
4207 [(set (match_operand:SI 0 "s_register_operand" "=r")
4208 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4209 (match_operand:SI 2 "s_register_operand" "r")))]
4211 "uxtab%?\\t%0, %2, %1"
4212 [(set_attr "predicable" "yes")
4213 (set_attr "insn" "xtab")
4214 (set_attr "type" "alu_shift")]
4218 [(set (match_operand:SI 0 "s_register_operand" "")
4219 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4220 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4221 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4222 [(set (match_dup 2) (match_dup 1))
4223 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4228 [(set (match_operand:SI 0 "s_register_operand" "")
4229 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4230 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4231 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4232 [(set (match_dup 2) (match_dup 1))
4233 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4237 (define_code_iterator ior_xor [ior xor])
4240 [(set (match_operand:SI 0 "s_register_operand" "")
4241 (ior_xor:SI (and:SI (ashift:SI
4242 (match_operand:SI 1 "s_register_operand" "")
4243 (match_operand:SI 2 "const_int_operand" ""))
4244 (match_operand:SI 3 "const_int_operand" ""))
4246 (match_operator 5 "subreg_lowpart_operator"
4247 [(match_operand:SI 4 "s_register_operand" "")]))))]
4249 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4250 == (GET_MODE_MASK (GET_MODE (operands[5]))
4251 & (GET_MODE_MASK (GET_MODE (operands[5]))
4252 << (INTVAL (operands[2])))))"
4253 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4255 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4256 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4259 (define_insn "*compareqi_eq0"
4260 [(set (reg:CC_Z CC_REGNUM)
4261 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4265 [(set_attr "conds" "set")]
4268 (define_expand "extendhisi2"
4270 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4272 (set (match_operand:SI 0 "s_register_operand" "")
4273 (ashiftrt:SI (match_dup 2)
4278 if (GET_CODE (operands[1]) == MEM)
4282 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4287 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4288 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4293 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4295 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4299 if (!s_register_operand (operands[1], HImode))
4300 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4305 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4307 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4308 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4313 operands[1] = gen_lowpart (SImode, operands[1]);
4314 operands[2] = gen_reg_rtx (SImode);
4318 (define_insn "thumb1_extendhisi2"
4319 [(set (match_operand:SI 0 "register_operand" "=l")
4320 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4321 (clobber (match_scratch:SI 2 "=&l"))]
4322 "TARGET_THUMB1 && !arm_arch6"
4326 rtx mem = XEXP (operands[1], 0);
4328 /* This code used to try to use 'V', and fix the address only if it was
4329 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4330 range of QImode offsets, and offsettable_address_p does a QImode
4333 if (GET_CODE (mem) == CONST)
4334 mem = XEXP (mem, 0);
4336 if (GET_CODE (mem) == LABEL_REF)
4337 return \"ldr\\t%0, %1\";
4339 if (GET_CODE (mem) == PLUS)
4341 rtx a = XEXP (mem, 0);
4342 rtx b = XEXP (mem, 1);
4344 if (GET_CODE (a) == LABEL_REF
4345 && GET_CODE (b) == CONST_INT)
4346 return \"ldr\\t%0, %1\";
4348 if (GET_CODE (b) == REG)
4349 return \"ldrsh\\t%0, %1\";
4357 ops[2] = const0_rtx;
4360 gcc_assert (GET_CODE (ops[1]) == REG);
4362 ops[0] = operands[0];
4363 ops[3] = operands[2];
4364 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4367 [(set_attr "length" "4")
4368 (set_attr "type" "load_byte")
4369 (set_attr "pool_range" "1020")]
4372 ;; We used to have an early-clobber on the scratch register here.
4373 ;; However, there's a bug somewhere in reload which means that this
4374 ;; can be partially ignored during spill allocation if the memory
4375 ;; address also needs reloading; this causes us to die later on when
4376 ;; we try to verify the operands. Fortunately, we don't really need
4377 ;; the early-clobber: we can always use operand 0 if operand 2
4378 ;; overlaps the address.
4379 (define_insn "*thumb1_extendhisi2_insn_v6"
4380 [(set (match_operand:SI 0 "register_operand" "=l,l")
4381 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4382 (clobber (match_scratch:SI 2 "=X,l"))]
4383 "TARGET_THUMB1 && arm_arch6"
4389 if (which_alternative == 0)
4390 return \"sxth\\t%0, %1\";
4392 mem = XEXP (operands[1], 0);
4394 /* This code used to try to use 'V', and fix the address only if it was
4395 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4396 range of QImode offsets, and offsettable_address_p does a QImode
4399 if (GET_CODE (mem) == CONST)
4400 mem = XEXP (mem, 0);
4402 if (GET_CODE (mem) == LABEL_REF)
4403 return \"ldr\\t%0, %1\";
4405 if (GET_CODE (mem) == PLUS)
4407 rtx a = XEXP (mem, 0);
4408 rtx b = XEXP (mem, 1);
4410 if (GET_CODE (a) == LABEL_REF
4411 && GET_CODE (b) == CONST_INT)
4412 return \"ldr\\t%0, %1\";
4414 if (GET_CODE (b) == REG)
4415 return \"ldrsh\\t%0, %1\";
4423 ops[2] = const0_rtx;
4426 gcc_assert (GET_CODE (ops[1]) == REG);
4428 ops[0] = operands[0];
4429 if (reg_mentioned_p (operands[2], ops[1]))
4432 ops[3] = operands[2];
4433 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4436 [(set_attr "length" "2,4")
4437 (set_attr "type" "alu_shift,load_byte")
4438 (set_attr "pool_range" "*,1020")]
4441 ;; This pattern will only be used when ldsh is not available
4442 (define_expand "extendhisi2_mem"
4443 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4445 (zero_extend:SI (match_dup 7)))
4446 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4447 (set (match_operand:SI 0 "" "")
4448 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4453 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4455 mem1 = change_address (operands[1], QImode, addr);
4456 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4457 operands[0] = gen_lowpart (SImode, operands[0]);
4459 operands[2] = gen_reg_rtx (SImode);
4460 operands[3] = gen_reg_rtx (SImode);
4461 operands[6] = gen_reg_rtx (SImode);
4464 if (BYTES_BIG_ENDIAN)
4466 operands[4] = operands[2];
4467 operands[5] = operands[3];
4471 operands[4] = operands[3];
4472 operands[5] = operands[2];
4477 (define_insn "*arm_extendhisi2"
4478 [(set (match_operand:SI 0 "s_register_operand" "=r")
4479 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4480 "TARGET_ARM && arm_arch4 && !arm_arch6"
4481 "ldr%(sh%)\\t%0, %1"
4482 [(set_attr "type" "load_byte")
4483 (set_attr "predicable" "yes")
4484 (set_attr "pool_range" "256")
4485 (set_attr "neg_pool_range" "244")]
4488 ;; ??? Check Thumb-2 pool range
4489 (define_insn "*arm_extendhisi2_v6"
4490 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4491 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4492 "TARGET_32BIT && arm_arch6"
4496 [(set_attr "type" "alu_shift,load_byte")
4497 (set_attr "predicable" "yes")
4498 (set_attr "pool_range" "*,256")
4499 (set_attr "neg_pool_range" "*,244")]
4502 (define_insn "*arm_extendhisi2addsi"
4503 [(set (match_operand:SI 0 "s_register_operand" "=r")
4504 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4505 (match_operand:SI 2 "s_register_operand" "r")))]
4507 "sxtah%?\\t%0, %2, %1"
4510 (define_expand "extendqihi2"
4512 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4514 (set (match_operand:HI 0 "s_register_operand" "")
4515 (ashiftrt:SI (match_dup 2)
4520 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4522 emit_insn (gen_rtx_SET (VOIDmode,
4524 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4527 if (!s_register_operand (operands[1], QImode))
4528 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4529 operands[0] = gen_lowpart (SImode, operands[0]);
4530 operands[1] = gen_lowpart (SImode, operands[1]);
4531 operands[2] = gen_reg_rtx (SImode);
4535 (define_insn "*arm_extendqihi_insn"
4536 [(set (match_operand:HI 0 "s_register_operand" "=r")
4537 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4538 "TARGET_ARM && arm_arch4"
4539 "ldr%(sb%)\\t%0, %1"
4540 [(set_attr "type" "load_byte")
4541 (set_attr "predicable" "yes")
4542 (set_attr "pool_range" "256")
4543 (set_attr "neg_pool_range" "244")]
4546 (define_expand "extendqisi2"
4548 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4550 (set (match_operand:SI 0 "s_register_operand" "")
4551 (ashiftrt:SI (match_dup 2)
4556 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4558 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4559 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4563 if (!s_register_operand (operands[1], QImode))
4564 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4568 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4569 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4573 operands[1] = gen_lowpart (SImode, operands[1]);
4574 operands[2] = gen_reg_rtx (SImode);
4578 (define_insn "*arm_extendqisi"
4579 [(set (match_operand:SI 0 "s_register_operand" "=r")
4580 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4581 "TARGET_ARM && arm_arch4 && !arm_arch6"
4582 "ldr%(sb%)\\t%0, %1"
4583 [(set_attr "type" "load_byte")
4584 (set_attr "predicable" "yes")
4585 (set_attr "pool_range" "256")
4586 (set_attr "neg_pool_range" "244")]
4589 (define_insn "*arm_extendqisi_v6"
4590 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4592 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4593 "TARGET_ARM && arm_arch6"
4597 [(set_attr "type" "alu_shift,load_byte")
4598 (set_attr "predicable" "yes")
4599 (set_attr "pool_range" "*,256")
4600 (set_attr "neg_pool_range" "*,244")]
4603 (define_insn "*arm_extendqisi2addsi"
4604 [(set (match_operand:SI 0 "s_register_operand" "=r")
4605 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4606 (match_operand:SI 2 "s_register_operand" "r")))]
4608 "sxtab%?\\t%0, %2, %1"
4609 [(set_attr "type" "alu_shift")
4610 (set_attr "insn" "xtab")
4611 (set_attr "predicable" "yes")]
4614 (define_insn "*thumb1_extendqisi2"
4615 [(set (match_operand:SI 0 "register_operand" "=l,l")
4616 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4617 "TARGET_THUMB1 && !arm_arch6"
4621 rtx mem = XEXP (operands[1], 0);
4623 if (GET_CODE (mem) == CONST)
4624 mem = XEXP (mem, 0);
4626 if (GET_CODE (mem) == LABEL_REF)
4627 return \"ldr\\t%0, %1\";
4629 if (GET_CODE (mem) == PLUS
4630 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4631 return \"ldr\\t%0, %1\";
4633 if (which_alternative == 0)
4634 return \"ldrsb\\t%0, %1\";
4636 ops[0] = operands[0];
4638 if (GET_CODE (mem) == PLUS)
4640 rtx a = XEXP (mem, 0);
4641 rtx b = XEXP (mem, 1);
4646 if (GET_CODE (a) == REG)
4648 if (GET_CODE (b) == REG)
4649 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4650 else if (REGNO (a) == REGNO (ops[0]))
4652 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4653 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4654 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4657 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4661 gcc_assert (GET_CODE (b) == REG);
4662 if (REGNO (b) == REGNO (ops[0]))
4664 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4665 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4666 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4669 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4672 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4674 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4675 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4676 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4681 ops[2] = const0_rtx;
4683 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4687 [(set_attr "length" "2,6")
4688 (set_attr "type" "load_byte,load_byte")
4689 (set_attr "pool_range" "32,32")]
4692 (define_insn "*thumb1_extendqisi2_v6"
4693 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4694 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4695 "TARGET_THUMB1 && arm_arch6"
4701 if (which_alternative == 0)
4702 return \"sxtb\\t%0, %1\";
4704 mem = XEXP (operands[1], 0);
4706 if (GET_CODE (mem) == CONST)
4707 mem = XEXP (mem, 0);
4709 if (GET_CODE (mem) == LABEL_REF)
4710 return \"ldr\\t%0, %1\";
4712 if (GET_CODE (mem) == PLUS
4713 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4714 return \"ldr\\t%0, %1\";
4716 if (which_alternative == 0)
4717 return \"ldrsb\\t%0, %1\";
4719 ops[0] = operands[0];
4721 if (GET_CODE (mem) == PLUS)
4723 rtx a = XEXP (mem, 0);
4724 rtx b = XEXP (mem, 1);
4729 if (GET_CODE (a) == REG)
4731 if (GET_CODE (b) == REG)
4732 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4733 else if (REGNO (a) == REGNO (ops[0]))
4735 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4736 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4739 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4743 gcc_assert (GET_CODE (b) == REG);
4744 if (REGNO (b) == REGNO (ops[0]))
4746 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4747 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4750 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4753 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4755 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4756 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4761 ops[2] = const0_rtx;
4763 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4767 [(set_attr "length" "2,2,4")
4768 (set_attr "type" "alu_shift,load_byte,load_byte")
4769 (set_attr "pool_range" "*,32,32")]
4772 (define_expand "extendsfdf2"
4773 [(set (match_operand:DF 0 "s_register_operand" "")
4774 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4775 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4779 /* HFmode -> DFmode conversions have to go through SFmode. */
4780 (define_expand "extendhfdf2"
4781 [(set (match_operand:DF 0 "general_operand" "")
4782 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4787 op1 = convert_to_mode (SFmode, operands[1], 0);
4788 op1 = convert_to_mode (DFmode, op1, 0);
4789 emit_insn (gen_movdf (operands[0], op1));
4794 ;; Move insns (including loads and stores)
4796 ;; XXX Just some ideas about movti.
4797 ;; I don't think these are a good idea on the arm, there just aren't enough
4799 ;;(define_expand "loadti"
4800 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4801 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4804 ;;(define_expand "storeti"
4805 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4806 ;; (match_operand:TI 1 "s_register_operand" ""))]
4809 ;;(define_expand "movti"
4810 ;; [(set (match_operand:TI 0 "general_operand" "")
4811 ;; (match_operand:TI 1 "general_operand" ""))]
4817 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4818 ;; operands[1] = copy_to_reg (operands[1]);
4819 ;; if (GET_CODE (operands[0]) == MEM)
4820 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4821 ;; else if (GET_CODE (operands[1]) == MEM)
4822 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4826 ;; emit_insn (insn);
4830 ;; Recognize garbage generated above.
4833 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4834 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4838 ;; register mem = (which_alternative < 3);
4839 ;; register const char *template;
4841 ;; operands[mem] = XEXP (operands[mem], 0);
4842 ;; switch (which_alternative)
4844 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4845 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4846 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4847 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4848 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4849 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4851 ;; output_asm_insn (template, operands);
4855 (define_expand "movdi"
4856 [(set (match_operand:DI 0 "general_operand" "")
4857 (match_operand:DI 1 "general_operand" ""))]
4860 if (can_create_pseudo_p ())
4862 if (GET_CODE (operands[0]) != REG)
4863 operands[1] = force_reg (DImode, operands[1]);
4868 (define_insn "*arm_movdi"
4869 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4870 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4872 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4874 && ( register_operand (operands[0], DImode)
4875 || register_operand (operands[1], DImode))"
4877 switch (which_alternative)
4884 return output_move_double (operands);
4887 [(set_attr "length" "8,12,16,8,8")
4888 (set_attr "type" "*,*,*,load2,store2")
4889 (set_attr "pool_range" "*,*,*,1020,*")
4890 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4894 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4895 (match_operand:ANY64 1 "const_double_operand" ""))]
4898 && (arm_const_double_inline_cost (operands[1])
4899 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4902 arm_split_constant (SET, SImode, curr_insn,
4903 INTVAL (gen_lowpart (SImode, operands[1])),
4904 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4905 arm_split_constant (SET, SImode, curr_insn,
4906 INTVAL (gen_highpart_mode (SImode,
4907 GET_MODE (operands[0]),
4909 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4914 ; If optimizing for size, or if we have load delay slots, then
4915 ; we want to split the constant into two separate operations.
4916 ; In both cases this may split a trivial part into a single data op
4917 ; leaving a single complex constant to load. We can also get longer
4918 ; offsets in a LDR which means we get better chances of sharing the pool
4919 ; entries. Finally, we can normally do a better job of scheduling
4920 ; LDR instructions than we can with LDM.
4921 ; This pattern will only match if the one above did not.
4923 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4924 (match_operand:ANY64 1 "const_double_operand" ""))]
4925 "TARGET_ARM && reload_completed
4926 && arm_const_double_by_parts (operands[1])"
4927 [(set (match_dup 0) (match_dup 1))
4928 (set (match_dup 2) (match_dup 3))]
4930 operands[2] = gen_highpart (SImode, operands[0]);
4931 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4933 operands[0] = gen_lowpart (SImode, operands[0]);
4934 operands[1] = gen_lowpart (SImode, operands[1]);
4939 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4940 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4941 "TARGET_EITHER && reload_completed"
4942 [(set (match_dup 0) (match_dup 1))
4943 (set (match_dup 2) (match_dup 3))]
4945 operands[2] = gen_highpart (SImode, operands[0]);
4946 operands[3] = gen_highpart (SImode, operands[1]);
4947 operands[0] = gen_lowpart (SImode, operands[0]);
4948 operands[1] = gen_lowpart (SImode, operands[1]);
4950 /* Handle a partial overlap. */
4951 if (rtx_equal_p (operands[0], operands[3]))
4953 rtx tmp0 = operands[0];
4954 rtx tmp1 = operands[1];
4956 operands[0] = operands[2];
4957 operands[1] = operands[3];
4964 ;; We can't actually do base+index doubleword loads if the index and
4965 ;; destination overlap. Split here so that we at least have chance to
4968 [(set (match_operand:DI 0 "s_register_operand" "")
4969 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4970 (match_operand:SI 2 "s_register_operand" ""))))]
4972 && reg_overlap_mentioned_p (operands[0], operands[1])
4973 && reg_overlap_mentioned_p (operands[0], operands[2])"
4975 (plus:SI (match_dup 1)
4978 (mem:DI (match_dup 4)))]
4980 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4984 ;;; ??? This should have alternatives for constants.
4985 ;;; ??? This was originally identical to the movdf_insn pattern.
4986 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4987 ;;; thumb_reorg with a memory reference.
4988 (define_insn "*thumb1_movdi_insn"
4989 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4990 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4992 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4993 && ( register_operand (operands[0], DImode)
4994 || register_operand (operands[1], DImode))"
4997 switch (which_alternative)
5001 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5002 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5003 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5005 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5007 operands[1] = GEN_INT (- INTVAL (operands[1]));
5008 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5010 return \"ldmia\\t%1, {%0, %H0}\";
5012 return \"stmia\\t%0, {%1, %H1}\";
5014 return thumb_load_double_from_address (operands);
5016 operands[2] = gen_rtx_MEM (SImode,
5017 plus_constant (XEXP (operands[0], 0), 4));
5018 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5021 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5022 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5023 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5026 [(set_attr "length" "4,4,6,2,2,6,4,4")
5027 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5028 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5031 (define_expand "movsi"
5032 [(set (match_operand:SI 0 "general_operand" "")
5033 (match_operand:SI 1 "general_operand" ""))]
5037 rtx base, offset, tmp;
5041 /* Everything except mem = const or mem = mem can be done easily. */
5042 if (GET_CODE (operands[0]) == MEM)
5043 operands[1] = force_reg (SImode, operands[1]);
5044 if (arm_general_register_operand (operands[0], SImode)
5045 && GET_CODE (operands[1]) == CONST_INT
5046 && !(const_ok_for_arm (INTVAL (operands[1]))
5047 || const_ok_for_arm (~INTVAL (operands[1]))))
5049 arm_split_constant (SET, SImode, NULL_RTX,
5050 INTVAL (operands[1]), operands[0], NULL_RTX,
5051 optimize && can_create_pseudo_p ());
5055 if (TARGET_USE_MOVT && !target_word_relocations
5056 && GET_CODE (operands[1]) == SYMBOL_REF
5057 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5059 arm_emit_movpair (operands[0], operands[1]);
5063 else /* TARGET_THUMB1... */
5065 if (can_create_pseudo_p ())
5067 if (GET_CODE (operands[0]) != REG)
5068 operands[1] = force_reg (SImode, operands[1]);
5072 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5074 split_const (operands[1], &base, &offset);
5075 if (GET_CODE (base) == SYMBOL_REF
5076 && !offset_within_block_p (base, INTVAL (offset)))
5078 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5079 emit_move_insn (tmp, base);
5080 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5085 /* Recognize the case where operand[1] is a reference to thread-local
5086 data and load its address to a register. */
5087 if (arm_tls_referenced_p (operands[1]))
5089 rtx tmp = operands[1];
5092 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5094 addend = XEXP (XEXP (tmp, 0), 1);
5095 tmp = XEXP (XEXP (tmp, 0), 0);
5098 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5099 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5101 tmp = legitimize_tls_address (tmp,
5102 !can_create_pseudo_p () ? operands[0] : 0);
5105 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5106 tmp = force_operand (tmp, operands[0]);
5111 && (CONSTANT_P (operands[1])
5112 || symbol_mentioned_p (operands[1])
5113 || label_mentioned_p (operands[1])))
5114 operands[1] = legitimize_pic_address (operands[1], SImode,
5115 (!can_create_pseudo_p ()
5122 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5123 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5124 ;; so this does not matter.
5125 (define_insn "*arm_movt"
5126 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5127 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5128 (match_operand:SI 2 "general_operand" "i")))]
5130 "movt%?\t%0, #:upper16:%c2"
5131 [(set_attr "predicable" "yes")
5132 (set_attr "length" "4")]
5135 (define_insn "*arm_movsi_insn"
5136 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5137 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5138 "TARGET_ARM && ! TARGET_IWMMXT
5139 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5140 && ( register_operand (operands[0], SImode)
5141 || register_operand (operands[1], SImode))"
5149 [(set_attr "type" "*,*,*,*,load1,store1")
5150 (set_attr "predicable" "yes")
5151 (set_attr "pool_range" "*,*,*,*,4096,*")
5152 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5156 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5157 (match_operand:SI 1 "const_int_operand" ""))]
5159 && (!(const_ok_for_arm (INTVAL (operands[1]))
5160 || const_ok_for_arm (~INTVAL (operands[1]))))"
5161 [(clobber (const_int 0))]
5163 arm_split_constant (SET, SImode, NULL_RTX,
5164 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5169 (define_insn "*thumb1_movsi_insn"
5170 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5171 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5173 && ( register_operand (operands[0], SImode)
5174 || register_operand (operands[1], SImode))"
5185 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5186 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5187 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5191 [(set (match_operand:SI 0 "register_operand" "")
5192 (match_operand:SI 1 "const_int_operand" ""))]
5193 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5194 [(set (match_dup 0) (match_dup 1))
5195 (set (match_dup 0) (neg:SI (match_dup 0)))]
5196 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5200 [(set (match_operand:SI 0 "register_operand" "")
5201 (match_operand:SI 1 "const_int_operand" ""))]
5202 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5203 [(set (match_dup 0) (match_dup 1))
5204 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5207 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5208 unsigned HOST_WIDE_INT mask = 0xff;
5211 for (i = 0; i < 25; i++)
5212 if ((val & (mask << i)) == val)
5215 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5219 operands[1] = GEN_INT (val >> i);
5220 operands[2] = GEN_INT (i);
5224 ;; When generating pic, we need to load the symbol offset into a register.
5225 ;; So that the optimizer does not confuse this with a normal symbol load
5226 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5227 ;; since that is the only type of relocation we can use.
5229 ;; The rather odd constraints on the following are to force reload to leave
5230 ;; the insn alone, and to force the minipool generation pass to then move
5231 ;; the GOT symbol to memory.
5233 (define_insn "pic_load_addr_32bit"
5234 [(set (match_operand:SI 0 "s_register_operand" "=r")
5235 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5236 "TARGET_32BIT && flag_pic"
5238 [(set_attr "type" "load1")
5239 (set_attr "pool_range" "4096")
5240 (set (attr "neg_pool_range")
5241 (if_then_else (eq_attr "is_thumb" "no")
5246 (define_insn "pic_load_addr_thumb1"
5247 [(set (match_operand:SI 0 "s_register_operand" "=l")
5248 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5249 "TARGET_THUMB1 && flag_pic"
5251 [(set_attr "type" "load1")
5252 (set (attr "pool_range") (const_int 1024))]
5255 (define_insn "pic_add_dot_plus_four"
5256 [(set (match_operand:SI 0 "register_operand" "=r")
5257 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5259 (match_operand 2 "" "")]
5263 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5264 INTVAL (operands[2]));
5265 return \"add\\t%0, %|pc\";
5267 [(set_attr "length" "2")]
5270 (define_insn "pic_add_dot_plus_eight"
5271 [(set (match_operand:SI 0 "register_operand" "=r")
5272 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5274 (match_operand 2 "" "")]
5278 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5279 INTVAL (operands[2]));
5280 return \"add%?\\t%0, %|pc, %1\";
5282 [(set_attr "predicable" "yes")]
5285 (define_insn "tls_load_dot_plus_eight"
5286 [(set (match_operand:SI 0 "register_operand" "=r")
5287 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5289 (match_operand 2 "" "")]
5293 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5294 INTVAL (operands[2]));
5295 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5297 [(set_attr "predicable" "yes")]
5300 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5301 ;; followed by a load. These sequences can be crunched down to
5302 ;; tls_load_dot_plus_eight by a peephole.
5305 [(set (match_operand:SI 0 "register_operand" "")
5306 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5308 (match_operand 1 "" "")]
5310 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5311 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5313 (mem:SI (unspec:SI [(match_dup 3)
5320 (define_insn "pic_offset_arm"
5321 [(set (match_operand:SI 0 "register_operand" "=r")
5322 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5323 (unspec:SI [(match_operand:SI 2 "" "X")]
5324 UNSPEC_PIC_OFFSET))))]
5325 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5326 "ldr%?\\t%0, [%1,%2]"
5327 [(set_attr "type" "load1")]
5330 (define_expand "builtin_setjmp_receiver"
5331 [(label_ref (match_operand 0 "" ""))]
5335 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5337 if (arm_pic_register != INVALID_REGNUM)
5338 arm_load_pic_register (1UL << 3);
5342 ;; If copying one reg to another we can set the condition codes according to
5343 ;; its value. Such a move is common after a return from subroutine and the
5344 ;; result is being tested against zero.
5346 (define_insn "*movsi_compare0"
5347 [(set (reg:CC CC_REGNUM)
5348 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5350 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5356 [(set_attr "conds" "set")]
5359 ;; Subroutine to store a half word from a register into memory.
5360 ;; Operand 0 is the source register (HImode)
5361 ;; Operand 1 is the destination address in a register (SImode)
5363 ;; In both this routine and the next, we must be careful not to spill
5364 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5365 ;; can generate unrecognizable rtl.
5367 (define_expand "storehi"
5368 [;; store the low byte
5369 (set (match_operand 1 "" "") (match_dup 3))
5370 ;; extract the high byte
5372 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5373 ;; store the high byte
5374 (set (match_dup 4) (match_dup 5))]
5378 rtx op1 = operands[1];
5379 rtx addr = XEXP (op1, 0);
5380 enum rtx_code code = GET_CODE (addr);
5382 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5384 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5386 operands[4] = adjust_address (op1, QImode, 1);
5387 operands[1] = adjust_address (operands[1], QImode, 0);
5388 operands[3] = gen_lowpart (QImode, operands[0]);
5389 operands[0] = gen_lowpart (SImode, operands[0]);
5390 operands[2] = gen_reg_rtx (SImode);
5391 operands[5] = gen_lowpart (QImode, operands[2]);
5395 (define_expand "storehi_bigend"
5396 [(set (match_dup 4) (match_dup 3))
5398 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5399 (set (match_operand 1 "" "") (match_dup 5))]
5403 rtx op1 = operands[1];
5404 rtx addr = XEXP (op1, 0);
5405 enum rtx_code code = GET_CODE (addr);
5407 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5409 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5411 operands[4] = adjust_address (op1, QImode, 1);
5412 operands[1] = adjust_address (operands[1], QImode, 0);
5413 operands[3] = gen_lowpart (QImode, operands[0]);
5414 operands[0] = gen_lowpart (SImode, operands[0]);
5415 operands[2] = gen_reg_rtx (SImode);
5416 operands[5] = gen_lowpart (QImode, operands[2]);
5420 ;; Subroutine to store a half word integer constant into memory.
5421 (define_expand "storeinthi"
5422 [(set (match_operand 0 "" "")
5423 (match_operand 1 "" ""))
5424 (set (match_dup 3) (match_dup 2))]
5428 HOST_WIDE_INT value = INTVAL (operands[1]);
5429 rtx addr = XEXP (operands[0], 0);
5430 rtx op0 = operands[0];
5431 enum rtx_code code = GET_CODE (addr);
5433 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5435 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5437 operands[1] = gen_reg_rtx (SImode);
5438 if (BYTES_BIG_ENDIAN)
5440 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5441 if ((value & 255) == ((value >> 8) & 255))
5442 operands[2] = operands[1];
5445 operands[2] = gen_reg_rtx (SImode);
5446 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5451 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5452 if ((value & 255) == ((value >> 8) & 255))
5453 operands[2] = operands[1];
5456 operands[2] = gen_reg_rtx (SImode);
5457 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5461 operands[3] = adjust_address (op0, QImode, 1);
5462 operands[0] = adjust_address (operands[0], QImode, 0);
5463 operands[2] = gen_lowpart (QImode, operands[2]);
5464 operands[1] = gen_lowpart (QImode, operands[1]);
5468 (define_expand "storehi_single_op"
5469 [(set (match_operand:HI 0 "memory_operand" "")
5470 (match_operand:HI 1 "general_operand" ""))]
5471 "TARGET_32BIT && arm_arch4"
5473 if (!s_register_operand (operands[1], HImode))
5474 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5478 (define_expand "movhi"
5479 [(set (match_operand:HI 0 "general_operand" "")
5480 (match_operand:HI 1 "general_operand" ""))]
5485 if (can_create_pseudo_p ())
5487 if (GET_CODE (operands[0]) == MEM)
5491 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5494 if (GET_CODE (operands[1]) == CONST_INT)
5495 emit_insn (gen_storeinthi (operands[0], operands[1]));
5498 if (GET_CODE (operands[1]) == MEM)
5499 operands[1] = force_reg (HImode, operands[1]);
5500 if (BYTES_BIG_ENDIAN)
5501 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5503 emit_insn (gen_storehi (operands[1], operands[0]));
5507 /* Sign extend a constant, and keep it in an SImode reg. */
5508 else if (GET_CODE (operands[1]) == CONST_INT)
5510 rtx reg = gen_reg_rtx (SImode);
5511 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5513 /* If the constant is already valid, leave it alone. */
5514 if (!const_ok_for_arm (val))
5516 /* If setting all the top bits will make the constant
5517 loadable in a single instruction, then set them.
5518 Otherwise, sign extend the number. */
5520 if (const_ok_for_arm (~(val | ~0xffff)))
5522 else if (val & 0x8000)
5526 emit_insn (gen_movsi (reg, GEN_INT (val)));
5527 operands[1] = gen_lowpart (HImode, reg);
5529 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5530 && GET_CODE (operands[1]) == MEM)
5532 rtx reg = gen_reg_rtx (SImode);
5534 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5535 operands[1] = gen_lowpart (HImode, reg);
5537 else if (!arm_arch4)
5539 if (GET_CODE (operands[1]) == MEM)
5542 rtx offset = const0_rtx;
5543 rtx reg = gen_reg_rtx (SImode);
5545 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5546 || (GET_CODE (base) == PLUS
5547 && (GET_CODE (offset = XEXP (base, 1))
5549 && ((INTVAL(offset) & 1) != 1)
5550 && GET_CODE (base = XEXP (base, 0)) == REG))
5551 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5555 new_rtx = widen_memory_access (operands[1], SImode,
5556 ((INTVAL (offset) & ~3)
5557 - INTVAL (offset)));
5558 emit_insn (gen_movsi (reg, new_rtx));
5559 if (((INTVAL (offset) & 2) != 0)
5560 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5562 rtx reg2 = gen_reg_rtx (SImode);
5564 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5569 emit_insn (gen_movhi_bytes (reg, operands[1]));
5571 operands[1] = gen_lowpart (HImode, reg);
5575 /* Handle loading a large integer during reload. */
5576 else if (GET_CODE (operands[1]) == CONST_INT
5577 && !const_ok_for_arm (INTVAL (operands[1]))
5578 && !const_ok_for_arm (~INTVAL (operands[1])))
5580 /* Writing a constant to memory needs a scratch, which should
5581 be handled with SECONDARY_RELOADs. */
5582 gcc_assert (GET_CODE (operands[0]) == REG);
5584 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5585 emit_insn (gen_movsi (operands[0], operands[1]));
5589 else if (TARGET_THUMB2)
5591 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5592 if (can_create_pseudo_p ())
5594 if (GET_CODE (operands[0]) != REG)
5595 operands[1] = force_reg (HImode, operands[1]);
5596 /* Zero extend a constant, and keep it in an SImode reg. */
5597 else if (GET_CODE (operands[1]) == CONST_INT)
5599 rtx reg = gen_reg_rtx (SImode);
5600 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5602 emit_insn (gen_movsi (reg, GEN_INT (val)));
5603 operands[1] = gen_lowpart (HImode, reg);
5607 else /* TARGET_THUMB1 */
5609 if (can_create_pseudo_p ())
5611 if (GET_CODE (operands[1]) == CONST_INT)
5613 rtx reg = gen_reg_rtx (SImode);
5615 emit_insn (gen_movsi (reg, operands[1]));
5616 operands[1] = gen_lowpart (HImode, reg);
5619 /* ??? We shouldn't really get invalid addresses here, but this can
5620 happen if we are passed a SP (never OK for HImode/QImode) or
5621 virtual register (also rejected as illegitimate for HImode/QImode)
5622 relative address. */
5623 /* ??? This should perhaps be fixed elsewhere, for instance, in
5624 fixup_stack_1, by checking for other kinds of invalid addresses,
5625 e.g. a bare reference to a virtual register. This may confuse the
5626 alpha though, which must handle this case differently. */
5627 if (GET_CODE (operands[0]) == MEM
5628 && !memory_address_p (GET_MODE (operands[0]),
5629 XEXP (operands[0], 0)))
5631 = replace_equiv_address (operands[0],
5632 copy_to_reg (XEXP (operands[0], 0)));
5634 if (GET_CODE (operands[1]) == MEM
5635 && !memory_address_p (GET_MODE (operands[1]),
5636 XEXP (operands[1], 0)))
5638 = replace_equiv_address (operands[1],
5639 copy_to_reg (XEXP (operands[1], 0)));
5641 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5643 rtx reg = gen_reg_rtx (SImode);
5645 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5646 operands[1] = gen_lowpart (HImode, reg);
5649 if (GET_CODE (operands[0]) == MEM)
5650 operands[1] = force_reg (HImode, operands[1]);
5652 else if (GET_CODE (operands[1]) == CONST_INT
5653 && !satisfies_constraint_I (operands[1]))
5655 /* Handle loading a large integer during reload. */
5657 /* Writing a constant to memory needs a scratch, which should
5658 be handled with SECONDARY_RELOADs. */
5659 gcc_assert (GET_CODE (operands[0]) == REG);
5661 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5662 emit_insn (gen_movsi (operands[0], operands[1]));
5669 (define_insn "*thumb1_movhi_insn"
5670 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5671 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5673 && ( register_operand (operands[0], HImode)
5674 || register_operand (operands[1], HImode))"
5676 switch (which_alternative)
5678 case 0: return \"add %0, %1, #0\";
5679 case 2: return \"strh %1, %0\";
5680 case 3: return \"mov %0, %1\";
5681 case 4: return \"mov %0, %1\";
5682 case 5: return \"mov %0, %1\";
5683 default: gcc_unreachable ();
5685 /* The stack pointer can end up being taken as an index register.
5686 Catch this case here and deal with it. */
5687 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5688 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5689 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5692 ops[0] = operands[0];
5693 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5695 output_asm_insn (\"mov %0, %1\", ops);
5697 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5700 return \"ldrh %0, %1\";
5702 [(set_attr "length" "2,4,2,2,2,2")
5703 (set_attr "type" "*,load1,store1,*,*,*")]
5707 (define_expand "movhi_bytes"
5708 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5710 (zero_extend:SI (match_dup 6)))
5711 (set (match_operand:SI 0 "" "")
5712 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5717 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5719 mem1 = change_address (operands[1], QImode, addr);
5720 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5721 operands[0] = gen_lowpart (SImode, operands[0]);
5723 operands[2] = gen_reg_rtx (SImode);
5724 operands[3] = gen_reg_rtx (SImode);
5727 if (BYTES_BIG_ENDIAN)
5729 operands[4] = operands[2];
5730 operands[5] = operands[3];
5734 operands[4] = operands[3];
5735 operands[5] = operands[2];
5740 (define_expand "movhi_bigend"
5742 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5745 (ashiftrt:SI (match_dup 2) (const_int 16)))
5746 (set (match_operand:HI 0 "s_register_operand" "")
5750 operands[2] = gen_reg_rtx (SImode);
5751 operands[3] = gen_reg_rtx (SImode);
5752 operands[4] = gen_lowpart (HImode, operands[3]);
5756 ;; Pattern to recognize insn generated default case above
5757 (define_insn "*movhi_insn_arch4"
5758 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5759 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5762 && (GET_CODE (operands[1]) != CONST_INT
5763 || const_ok_for_arm (INTVAL (operands[1]))
5764 || const_ok_for_arm (~INTVAL (operands[1])))"
5766 mov%?\\t%0, %1\\t%@ movhi
5767 mvn%?\\t%0, #%B1\\t%@ movhi
5768 str%(h%)\\t%1, %0\\t%@ movhi
5769 ldr%(h%)\\t%0, %1\\t%@ movhi"
5770 [(set_attr "type" "*,*,store1,load1")
5771 (set_attr "predicable" "yes")
5772 (set_attr "pool_range" "*,*,*,256")
5773 (set_attr "neg_pool_range" "*,*,*,244")]
5776 (define_insn "*movhi_bytes"
5777 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5778 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5781 mov%?\\t%0, %1\\t%@ movhi
5782 mvn%?\\t%0, #%B1\\t%@ movhi"
5783 [(set_attr "predicable" "yes")]
5786 (define_expand "thumb_movhi_clobber"
5787 [(set (match_operand:HI 0 "memory_operand" "")
5788 (match_operand:HI 1 "register_operand" ""))
5789 (clobber (match_operand:DI 2 "register_operand" ""))]
5792 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5793 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5795 emit_insn (gen_movhi (operands[0], operands[1]));
5798 /* XXX Fixme, need to handle other cases here as well. */
5803 ;; We use a DImode scratch because we may occasionally need an additional
5804 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5805 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5806 (define_expand "reload_outhi"
5807 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5808 (match_operand:HI 1 "s_register_operand" "r")
5809 (match_operand:DI 2 "s_register_operand" "=&l")])]
5812 arm_reload_out_hi (operands);
5814 thumb_reload_out_hi (operands);
5819 (define_expand "reload_inhi"
5820 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5821 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5822 (match_operand:DI 2 "s_register_operand" "=&r")])]
5826 arm_reload_in_hi (operands);
5828 thumb_reload_out_hi (operands);
5832 (define_expand "movqi"
5833 [(set (match_operand:QI 0 "general_operand" "")
5834 (match_operand:QI 1 "general_operand" ""))]
5837 /* Everything except mem = const or mem = mem can be done easily */
5839 if (can_create_pseudo_p ())
5841 if (GET_CODE (operands[1]) == CONST_INT)
5843 rtx reg = gen_reg_rtx (SImode);
5845 /* For thumb we want an unsigned immediate, then we are more likely
5846 to be able to use a movs insn. */
5848 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5850 emit_insn (gen_movsi (reg, operands[1]));
5851 operands[1] = gen_lowpart (QImode, reg);
5856 /* ??? We shouldn't really get invalid addresses here, but this can
5857 happen if we are passed a SP (never OK for HImode/QImode) or
5858 virtual register (also rejected as illegitimate for HImode/QImode)
5859 relative address. */
5860 /* ??? This should perhaps be fixed elsewhere, for instance, in
5861 fixup_stack_1, by checking for other kinds of invalid addresses,
5862 e.g. a bare reference to a virtual register. This may confuse the
5863 alpha though, which must handle this case differently. */
5864 if (GET_CODE (operands[0]) == MEM
5865 && !memory_address_p (GET_MODE (operands[0]),
5866 XEXP (operands[0], 0)))
5868 = replace_equiv_address (operands[0],
5869 copy_to_reg (XEXP (operands[0], 0)));
5870 if (GET_CODE (operands[1]) == MEM
5871 && !memory_address_p (GET_MODE (operands[1]),
5872 XEXP (operands[1], 0)))
5874 = replace_equiv_address (operands[1],
5875 copy_to_reg (XEXP (operands[1], 0)));
5878 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5880 rtx reg = gen_reg_rtx (SImode);
5882 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5883 operands[1] = gen_lowpart (QImode, reg);
5886 if (GET_CODE (operands[0]) == MEM)
5887 operands[1] = force_reg (QImode, operands[1]);
5889 else if (TARGET_THUMB
5890 && GET_CODE (operands[1]) == CONST_INT
5891 && !satisfies_constraint_I (operands[1]))
5893 /* Handle loading a large integer during reload. */
5895 /* Writing a constant to memory needs a scratch, which should
5896 be handled with SECONDARY_RELOADs. */
5897 gcc_assert (GET_CODE (operands[0]) == REG);
5899 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5900 emit_insn (gen_movsi (operands[0], operands[1]));
5907 (define_insn "*arm_movqi_insn"
5908 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5909 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5911 && ( register_operand (operands[0], QImode)
5912 || register_operand (operands[1], QImode))"
5918 [(set_attr "type" "*,*,load1,store1")
5919 (set_attr "predicable" "yes")]
5922 (define_insn "*thumb1_movqi_insn"
5923 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5924 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5926 && ( register_operand (operands[0], QImode)
5927 || register_operand (operands[1], QImode))"
5935 [(set_attr "length" "2")
5936 (set_attr "type" "*,load1,store1,*,*,*")
5937 (set_attr "pool_range" "*,32,*,*,*,*")]
5941 (define_expand "movhf"
5942 [(set (match_operand:HF 0 "general_operand" "")
5943 (match_operand:HF 1 "general_operand" ""))]
5948 if (GET_CODE (operands[0]) == MEM)
5949 operands[1] = force_reg (HFmode, operands[1]);
5951 else /* TARGET_THUMB1 */
5953 if (can_create_pseudo_p ())
5955 if (GET_CODE (operands[0]) != REG)
5956 operands[1] = force_reg (HFmode, operands[1]);
5962 (define_insn "*arm32_movhf"
5963 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5964 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5965 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5966 && ( s_register_operand (operands[0], HFmode)
5967 || s_register_operand (operands[1], HFmode))"
5969 switch (which_alternative)
5971 case 0: /* ARM register from memory */
5972 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5973 case 1: /* memory from ARM register */
5974 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5975 case 2: /* ARM register from ARM register */
5976 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5977 case 3: /* ARM register from constant */
5983 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5984 bits = real_to_target (NULL, &r, HFmode);
5985 ops[0] = operands[0];
5986 ops[1] = GEN_INT (bits);
5987 ops[2] = GEN_INT (bits & 0xff00);
5988 ops[3] = GEN_INT (bits & 0x00ff);
5990 if (arm_arch_thumb2)
5991 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5993 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6000 [(set_attr "conds" "unconditional")
6001 (set_attr "type" "load1,store1,*,*")
6002 (set_attr "length" "4,4,4,8")
6003 (set_attr "predicable" "yes")
6007 (define_insn "*thumb1_movhf"
6008 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6009 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6011 && ( s_register_operand (operands[0], HFmode)
6012 || s_register_operand (operands[1], HFmode))"
6014 switch (which_alternative)
6019 gcc_assert (GET_CODE(operands[1]) == MEM);
6020 addr = XEXP (operands[1], 0);
6021 if (GET_CODE (addr) == LABEL_REF
6022 || (GET_CODE (addr) == CONST
6023 && GET_CODE (XEXP (addr, 0)) == PLUS
6024 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6025 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6027 /* Constant pool entry. */
6028 return \"ldr\\t%0, %1\";
6030 return \"ldrh\\t%0, %1\";
6032 case 2: return \"strh\\t%1, %0\";
6033 default: return \"mov\\t%0, %1\";
6036 [(set_attr "length" "2")
6037 (set_attr "type" "*,load1,store1,*,*")
6038 (set_attr "pool_range" "*,1020,*,*,*")]
6041 (define_expand "movsf"
6042 [(set (match_operand:SF 0 "general_operand" "")
6043 (match_operand:SF 1 "general_operand" ""))]
6048 if (GET_CODE (operands[0]) == MEM)
6049 operands[1] = force_reg (SFmode, operands[1]);
6051 else /* TARGET_THUMB1 */
6053 if (can_create_pseudo_p ())
6055 if (GET_CODE (operands[0]) != REG)
6056 operands[1] = force_reg (SFmode, operands[1]);
6062 ;; Transform a floating-point move of a constant into a core register into
6063 ;; an SImode operation.
6065 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6066 (match_operand:SF 1 "immediate_operand" ""))]
6069 && GET_CODE (operands[1]) == CONST_DOUBLE"
6070 [(set (match_dup 2) (match_dup 3))]
6072 operands[2] = gen_lowpart (SImode, operands[0]);
6073 operands[3] = gen_lowpart (SImode, operands[1]);
6074 if (operands[2] == 0 || operands[3] == 0)
6079 (define_insn "*arm_movsf_soft_insn"
6080 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6081 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6083 && TARGET_SOFT_FLOAT
6084 && (GET_CODE (operands[0]) != MEM
6085 || register_operand (operands[1], SFmode))"
6088 ldr%?\\t%0, %1\\t%@ float
6089 str%?\\t%1, %0\\t%@ float"
6090 [(set_attr "length" "4,4,4")
6091 (set_attr "predicable" "yes")
6092 (set_attr "type" "*,load1,store1")
6093 (set_attr "pool_range" "*,4096,*")
6094 (set_attr "neg_pool_range" "*,4084,*")]
6097 ;;; ??? This should have alternatives for constants.
6098 (define_insn "*thumb1_movsf_insn"
6099 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6100 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6102 && ( register_operand (operands[0], SFmode)
6103 || register_operand (operands[1], SFmode))"
6112 [(set_attr "length" "2")
6113 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6114 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6117 (define_expand "movdf"
6118 [(set (match_operand:DF 0 "general_operand" "")
6119 (match_operand:DF 1 "general_operand" ""))]
6124 if (GET_CODE (operands[0]) == MEM)
6125 operands[1] = force_reg (DFmode, operands[1]);
6127 else /* TARGET_THUMB */
6129 if (can_create_pseudo_p ())
6131 if (GET_CODE (operands[0]) != REG)
6132 operands[1] = force_reg (DFmode, operands[1]);
6138 ;; Reloading a df mode value stored in integer regs to memory can require a
6140 (define_expand "reload_outdf"
6141 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6142 (match_operand:DF 1 "s_register_operand" "r")
6143 (match_operand:SI 2 "s_register_operand" "=&r")]
6147 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6150 operands[2] = XEXP (operands[0], 0);
6151 else if (code == POST_INC || code == PRE_DEC)
6153 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6154 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6155 emit_insn (gen_movdi (operands[0], operands[1]));
6158 else if (code == PRE_INC)
6160 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6162 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6165 else if (code == POST_DEC)
6166 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6168 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6169 XEXP (XEXP (operands[0], 0), 1)));
6171 emit_insn (gen_rtx_SET (VOIDmode,
6172 replace_equiv_address (operands[0], operands[2]),
6175 if (code == POST_DEC)
6176 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6182 (define_insn "*movdf_soft_insn"
6183 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6184 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6185 "TARGET_ARM && TARGET_SOFT_FLOAT
6186 && ( register_operand (operands[0], DFmode)
6187 || register_operand (operands[1], DFmode))"
6189 switch (which_alternative)
6196 return output_move_double (operands);
6199 [(set_attr "length" "8,12,16,8,8")
6200 (set_attr "type" "*,*,*,load2,store2")
6201 (set_attr "pool_range" "1020")
6202 (set_attr "neg_pool_range" "1008")]
6205 ;;; ??? This should have alternatives for constants.
6206 ;;; ??? This was originally identical to the movdi_insn pattern.
6207 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6208 ;;; thumb_reorg with a memory reference.
6209 (define_insn "*thumb_movdf_insn"
6210 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6211 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6213 && ( register_operand (operands[0], DFmode)
6214 || register_operand (operands[1], DFmode))"
6216 switch (which_alternative)
6220 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6221 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6222 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6224 return \"ldmia\\t%1, {%0, %H0}\";
6226 return \"stmia\\t%0, {%1, %H1}\";
6228 return thumb_load_double_from_address (operands);
6230 operands[2] = gen_rtx_MEM (SImode,
6231 plus_constant (XEXP (operands[0], 0), 4));
6232 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6235 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6236 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6237 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6240 [(set_attr "length" "4,2,2,6,4,4")
6241 (set_attr "type" "*,load2,store2,load2,store2,*")
6242 (set_attr "pool_range" "*,*,*,1020,*,*")]
6245 (define_expand "movxf"
6246 [(set (match_operand:XF 0 "general_operand" "")
6247 (match_operand:XF 1 "general_operand" ""))]
6248 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6250 if (GET_CODE (operands[0]) == MEM)
6251 operands[1] = force_reg (XFmode, operands[1]);
6257 ;; load- and store-multiple insns
6258 ;; The arm can load/store any set of registers, provided that they are in
6259 ;; ascending order; but that is beyond GCC so stick with what it knows.
6261 (define_expand "load_multiple"
6262 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6263 (match_operand:SI 1 "" ""))
6264 (use (match_operand:SI 2 "" ""))])]
6267 HOST_WIDE_INT offset = 0;
6269 /* Support only fixed point registers. */
6270 if (GET_CODE (operands[2]) != CONST_INT
6271 || INTVAL (operands[2]) > 14
6272 || INTVAL (operands[2]) < 2
6273 || GET_CODE (operands[1]) != MEM
6274 || GET_CODE (operands[0]) != REG
6275 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6276 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6280 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6281 force_reg (SImode, XEXP (operands[1], 0)),
6282 TRUE, FALSE, operands[1], &offset);
6285 ;; Load multiple with write-back
6287 (define_insn "*ldmsi_postinc4"
6288 [(match_parallel 0 "load_multiple_operation"
6289 [(set (match_operand:SI 1 "s_register_operand" "=r")
6290 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6292 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6293 (mem:SI (match_dup 2)))
6294 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6295 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6296 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6297 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6298 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6299 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6300 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6301 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6302 [(set_attr "type" "load4")
6303 (set_attr "predicable" "yes")]
6306 (define_insn "*ldmsi_postinc4_thumb1"
6307 [(match_parallel 0 "load_multiple_operation"
6308 [(set (match_operand:SI 1 "s_register_operand" "=l")
6309 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6311 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6312 (mem:SI (match_dup 2)))
6313 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6314 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6315 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6316 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6317 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6318 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6319 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6320 "ldmia\\t%1!, {%3, %4, %5, %6}"
6321 [(set_attr "type" "load4")]
6324 (define_insn "*ldmsi_postinc3"
6325 [(match_parallel 0 "load_multiple_operation"
6326 [(set (match_operand:SI 1 "s_register_operand" "=r")
6327 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6329 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6330 (mem:SI (match_dup 2)))
6331 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6332 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6333 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6334 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6335 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6336 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6337 [(set_attr "type" "load3")
6338 (set_attr "predicable" "yes")]
6341 (define_insn "*ldmsi_postinc2"
6342 [(match_parallel 0 "load_multiple_operation"
6343 [(set (match_operand:SI 1 "s_register_operand" "=r")
6344 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6346 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6347 (mem:SI (match_dup 2)))
6348 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6349 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6350 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6351 "ldm%(ia%)\\t%1!, {%3, %4}"
6352 [(set_attr "type" "load2")
6353 (set_attr "predicable" "yes")]
6356 ;; Ordinary load multiple
6358 (define_insn "*ldmsi4"
6359 [(match_parallel 0 "load_multiple_operation"
6360 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6361 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6362 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6363 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6364 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6365 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6366 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6367 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6368 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6369 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6370 [(set_attr "type" "load4")
6371 (set_attr "predicable" "yes")]
6374 (define_insn "*ldmsi3"
6375 [(match_parallel 0 "load_multiple_operation"
6376 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6377 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6378 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6379 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6380 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6381 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6382 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6383 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6384 [(set_attr "type" "load3")
6385 (set_attr "predicable" "yes")]
6388 (define_insn "*ldmsi2"
6389 [(match_parallel 0 "load_multiple_operation"
6390 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6391 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6392 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6393 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6394 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6395 "ldm%(ia%)\\t%1, {%2, %3}"
6396 [(set_attr "type" "load2")
6397 (set_attr "predicable" "yes")]
6400 (define_expand "store_multiple"
6401 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6402 (match_operand:SI 1 "" ""))
6403 (use (match_operand:SI 2 "" ""))])]
6406 HOST_WIDE_INT offset = 0;
6408 /* Support only fixed point registers. */
6409 if (GET_CODE (operands[2]) != CONST_INT
6410 || INTVAL (operands[2]) > 14
6411 || INTVAL (operands[2]) < 2
6412 || GET_CODE (operands[1]) != REG
6413 || GET_CODE (operands[0]) != MEM
6414 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6415 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6419 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6420 force_reg (SImode, XEXP (operands[0], 0)),
6421 TRUE, FALSE, operands[0], &offset);
6424 ;; Store multiple with write-back
6426 (define_insn "*stmsi_postinc4"
6427 [(match_parallel 0 "store_multiple_operation"
6428 [(set (match_operand:SI 1 "s_register_operand" "=r")
6429 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6431 (set (mem:SI (match_dup 2))
6432 (match_operand:SI 3 "arm_hard_register_operand" ""))
6433 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6434 (match_operand:SI 4 "arm_hard_register_operand" ""))
6435 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6436 (match_operand:SI 5 "arm_hard_register_operand" ""))
6437 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6438 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6439 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6440 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6441 [(set_attr "predicable" "yes")
6442 (set_attr "type" "store4")]
6445 (define_insn "*stmsi_postinc4_thumb1"
6446 [(match_parallel 0 "store_multiple_operation"
6447 [(set (match_operand:SI 1 "s_register_operand" "=l")
6448 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6450 (set (mem:SI (match_dup 2))
6451 (match_operand:SI 3 "arm_hard_register_operand" ""))
6452 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6453 (match_operand:SI 4 "arm_hard_register_operand" ""))
6454 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6455 (match_operand:SI 5 "arm_hard_register_operand" ""))
6456 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6457 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6458 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6459 "stmia\\t%1!, {%3, %4, %5, %6}"
6460 [(set_attr "type" "store4")]
6463 (define_insn "*stmsi_postinc3"
6464 [(match_parallel 0 "store_multiple_operation"
6465 [(set (match_operand:SI 1 "s_register_operand" "=r")
6466 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6468 (set (mem:SI (match_dup 2))
6469 (match_operand:SI 3 "arm_hard_register_operand" ""))
6470 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6471 (match_operand:SI 4 "arm_hard_register_operand" ""))
6472 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6473 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6474 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6475 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6476 [(set_attr "predicable" "yes")
6477 (set_attr "type" "store3")]
6480 (define_insn "*stmsi_postinc2"
6481 [(match_parallel 0 "store_multiple_operation"
6482 [(set (match_operand:SI 1 "s_register_operand" "=r")
6483 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6485 (set (mem:SI (match_dup 2))
6486 (match_operand:SI 3 "arm_hard_register_operand" ""))
6487 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6488 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6489 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6490 "stm%(ia%)\\t%1!, {%3, %4}"
6491 [(set_attr "predicable" "yes")
6492 (set_attr "type" "store2")]
6495 ;; Ordinary store multiple
6497 (define_insn "*stmsi4"
6498 [(match_parallel 0 "store_multiple_operation"
6499 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6500 (match_operand:SI 2 "arm_hard_register_operand" ""))
6501 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6502 (match_operand:SI 3 "arm_hard_register_operand" ""))
6503 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6504 (match_operand:SI 4 "arm_hard_register_operand" ""))
6505 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6506 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6507 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6508 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6509 [(set_attr "predicable" "yes")
6510 (set_attr "type" "store4")]
6513 (define_insn "*stmsi3"
6514 [(match_parallel 0 "store_multiple_operation"
6515 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6516 (match_operand:SI 2 "arm_hard_register_operand" ""))
6517 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6518 (match_operand:SI 3 "arm_hard_register_operand" ""))
6519 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6520 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6521 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6522 "stm%(ia%)\\t%1, {%2, %3, %4}"
6523 [(set_attr "predicable" "yes")
6524 (set_attr "type" "store3")]
6527 (define_insn "*stmsi2"
6528 [(match_parallel 0 "store_multiple_operation"
6529 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6530 (match_operand:SI 2 "arm_hard_register_operand" ""))
6531 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6532 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6533 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6534 "stm%(ia%)\\t%1, {%2, %3}"
6535 [(set_attr "predicable" "yes")
6536 (set_attr "type" "store2")]
6539 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6540 ;; We could let this apply for blocks of less than this, but it clobbers so
6541 ;; many registers that there is then probably a better way.
6543 (define_expand "movmemqi"
6544 [(match_operand:BLK 0 "general_operand" "")
6545 (match_operand:BLK 1 "general_operand" "")
6546 (match_operand:SI 2 "const_int_operand" "")
6547 (match_operand:SI 3 "const_int_operand" "")]
6552 if (arm_gen_movmemqi (operands))
6556 else /* TARGET_THUMB1 */
6558 if ( INTVAL (operands[3]) != 4
6559 || INTVAL (operands[2]) > 48)
6562 thumb_expand_movmemqi (operands);
6568 ;; Thumb block-move insns
6570 (define_insn "movmem12b"
6571 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6572 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6573 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6574 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6575 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6576 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6577 (set (match_operand:SI 0 "register_operand" "=l")
6578 (plus:SI (match_dup 2) (const_int 12)))
6579 (set (match_operand:SI 1 "register_operand" "=l")
6580 (plus:SI (match_dup 3) (const_int 12)))
6581 (clobber (match_scratch:SI 4 "=&l"))
6582 (clobber (match_scratch:SI 5 "=&l"))
6583 (clobber (match_scratch:SI 6 "=&l"))]
6585 "* return thumb_output_move_mem_multiple (3, operands);"
6586 [(set_attr "length" "4")
6587 ; This isn't entirely accurate... It loads as well, but in terms of
6588 ; scheduling the following insn it is better to consider it as a store
6589 (set_attr "type" "store3")]
6592 (define_insn "movmem8b"
6593 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6594 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6595 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6596 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6597 (set (match_operand:SI 0 "register_operand" "=l")
6598 (plus:SI (match_dup 2) (const_int 8)))
6599 (set (match_operand:SI 1 "register_operand" "=l")
6600 (plus:SI (match_dup 3) (const_int 8)))
6601 (clobber (match_scratch:SI 4 "=&l"))
6602 (clobber (match_scratch:SI 5 "=&l"))]
6604 "* return thumb_output_move_mem_multiple (2, operands);"
6605 [(set_attr "length" "4")
6606 ; This isn't entirely accurate... It loads as well, but in terms of
6607 ; scheduling the following insn it is better to consider it as a store
6608 (set_attr "type" "store2")]
6613 ;; Compare & branch insns
6614 ;; The range calculations are based as follows:
6615 ;; For forward branches, the address calculation returns the address of
6616 ;; the next instruction. This is 2 beyond the branch instruction.
6617 ;; For backward branches, the address calculation returns the address of
6618 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6619 ;; instruction for the shortest sequence, and 4 before the branch instruction
6620 ;; if we have to jump around an unconditional branch.
6621 ;; To the basic branch range the PC offset must be added (this is +4).
6622 ;; So for forward branches we have
6623 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6624 ;; And for backward branches we have
6625 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6627 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6628 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6630 (define_expand "cbranchsi4"
6631 [(set (pc) (if_then_else
6632 (match_operator 0 "arm_comparison_operator"
6633 [(match_operand:SI 1 "s_register_operand" "")
6634 (match_operand:SI 2 "nonmemory_operand" "")])
6635 (label_ref (match_operand 3 "" ""))
6637 "TARGET_THUMB1 || TARGET_32BIT"
6641 if (!arm_add_operand (operands[2], SImode))
6642 operands[2] = force_reg (SImode, operands[2]);
6643 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6647 if (thumb1_cmpneg_operand (operands[2], SImode))
6649 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6650 operands[3], operands[0]));
6653 if (!thumb1_cmp_operand (operands[2], SImode))
6654 operands[2] = force_reg (SImode, operands[2]);
6657 ;; A pattern to recognize a special situation and optimize for it.
6658 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6659 ;; due to the available addressing modes. Hence, convert a signed comparison
6660 ;; with zero into an unsigned comparison with 127 if possible.
6661 (define_expand "cbranchqi4"
6662 [(set (pc) (if_then_else
6663 (match_operator 0 "lt_ge_comparison_operator"
6664 [(match_operand:QI 1 "memory_operand" "")
6665 (match_operand:QI 2 "const0_operand" "")])
6666 (label_ref (match_operand 3 "" ""))
6671 xops[1] = gen_reg_rtx (SImode);
6672 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6673 xops[2] = GEN_INT (127);
6674 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6675 VOIDmode, xops[1], xops[2]);
6676 xops[3] = operands[3];
6677 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6681 (define_expand "cbranchsf4"
6682 [(set (pc) (if_then_else
6683 (match_operator 0 "arm_comparison_operator"
6684 [(match_operand:SF 1 "s_register_operand" "")
6685 (match_operand:SF 2 "arm_float_compare_operand" "")])
6686 (label_ref (match_operand 3 "" ""))
6688 "TARGET_32BIT && TARGET_HARD_FLOAT"
6689 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6690 operands[3])); DONE;"
6693 (define_expand "cbranchdf4"
6694 [(set (pc) (if_then_else
6695 (match_operator 0 "arm_comparison_operator"
6696 [(match_operand:DF 1 "s_register_operand" "")
6697 (match_operand:DF 2 "arm_float_compare_operand" "")])
6698 (label_ref (match_operand 3 "" ""))
6700 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6701 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6702 operands[3])); DONE;"
6705 ;; this uses the Cirrus DI compare instruction
6706 (define_expand "cbranchdi4"
6707 [(set (pc) (if_then_else
6708 (match_operator 0 "arm_comparison_operator"
6709 [(match_operand:DI 1 "cirrus_fp_register" "")
6710 (match_operand:DI 2 "cirrus_fp_register" "")])
6711 (label_ref (match_operand 3 "" ""))
6713 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6714 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6715 operands[3])); DONE;"
6718 (define_insn "cbranchsi4_insn"
6719 [(set (pc) (if_then_else
6720 (match_operator 0 "arm_comparison_operator"
6721 [(match_operand:SI 1 "s_register_operand" "l,*h")
6722 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6723 (label_ref (match_operand 3 "" ""))
6727 rtx t = prev_nonnote_insn (insn);
6730 && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
6732 t = XEXP (SET_SRC (PATTERN (t)), 0);
6733 if (!rtx_equal_p (XEXP (t, 0), operands[1])
6734 || !rtx_equal_p (XEXP (t, 1), operands[2]))
6740 output_asm_insn (\"cmp\\t%1, %2\", operands);
6742 switch (get_attr_length (insn))
6744 case 4: return \"b%d0\\t%l3\";
6745 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6746 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6749 [(set (attr "far_jump")
6751 (eq_attr "length" "8")
6752 (const_string "yes")
6753 (const_string "no")))
6754 (set (attr "length")
6756 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6757 (le (minus (match_dup 3) (pc)) (const_int 256)))
6760 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6761 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6766 (define_insn "cbranchsi4_scratch"
6767 [(set (pc) (if_then_else
6768 (match_operator 4 "arm_comparison_operator"
6769 [(match_operand:SI 1 "s_register_operand" "l,0")
6770 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6771 (label_ref (match_operand 3 "" ""))
6773 (clobber (match_scratch:SI 0 "=l,l"))]
6776 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6778 switch (get_attr_length (insn))
6780 case 4: return \"b%d4\\t%l3\";
6781 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6782 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6785 [(set (attr "far_jump")
6787 (eq_attr "length" "8")
6788 (const_string "yes")
6789 (const_string "no")))
6790 (set (attr "length")
6792 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6793 (le (minus (match_dup 3) (pc)) (const_int 256)))
6796 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6797 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6802 (define_insn "*movsi_cbranchsi4"
6805 (match_operator 3 "arm_comparison_operator"
6806 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6808 (label_ref (match_operand 2 "" ""))
6810 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6814 if (which_alternative == 0)
6815 output_asm_insn (\"cmp\t%0, #0\", operands);
6816 else if (which_alternative == 1)
6817 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6820 output_asm_insn (\"cmp\t%1, #0\", operands);
6821 if (which_alternative == 2)
6822 output_asm_insn (\"mov\t%0, %1\", operands);
6824 output_asm_insn (\"str\t%1, %0\", operands);
6826 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6828 case 4: return \"b%d3\\t%l2\";
6829 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6830 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6833 [(set (attr "far_jump")
6835 (ior (and (gt (symbol_ref ("which_alternative"))
6837 (eq_attr "length" "8"))
6838 (eq_attr "length" "10"))
6839 (const_string "yes")
6840 (const_string "no")))
6841 (set (attr "length")
6843 (le (symbol_ref ("which_alternative"))
6846 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6847 (le (minus (match_dup 2) (pc)) (const_int 256)))
6850 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6851 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6855 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6856 (le (minus (match_dup 2) (pc)) (const_int 256)))
6859 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6860 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6866 [(set (match_operand:SI 0 "low_register_operand" "")
6867 (match_operand:SI 1 "low_register_operand" ""))
6869 (if_then_else (match_operator 2 "arm_comparison_operator"
6870 [(match_dup 1) (const_int 0)])
6871 (label_ref (match_operand 3 "" ""))
6876 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6877 (label_ref (match_dup 3))
6879 (set (match_dup 0) (match_dup 1))])]
6883 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6884 ;; merge cases like this because the op1 is a hard register in
6885 ;; CLASS_LIKELY_SPILLED_P.
6887 [(set (match_operand:SI 0 "low_register_operand" "")
6888 (match_operand:SI 1 "low_register_operand" ""))
6890 (if_then_else (match_operator 2 "arm_comparison_operator"
6891 [(match_dup 0) (const_int 0)])
6892 (label_ref (match_operand 3 "" ""))
6897 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6898 (label_ref (match_dup 3))
6900 (set (match_dup 0) (match_dup 1))])]
6904 (define_insn "*negated_cbranchsi4"
6907 (match_operator 0 "equality_operator"
6908 [(match_operand:SI 1 "s_register_operand" "l")
6909 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6910 (label_ref (match_operand 3 "" ""))
6914 output_asm_insn (\"cmn\\t%1, %2\", operands);
6915 switch (get_attr_length (insn))
6917 case 4: return \"b%d0\\t%l3\";
6918 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6919 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6922 [(set (attr "far_jump")
6924 (eq_attr "length" "8")
6925 (const_string "yes")
6926 (const_string "no")))
6927 (set (attr "length")
6929 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6930 (le (minus (match_dup 3) (pc)) (const_int 256)))
6933 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6934 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6939 (define_insn "*tbit_cbranch"
6942 (match_operator 0 "equality_operator"
6943 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6945 (match_operand:SI 2 "const_int_operand" "i"))
6947 (label_ref (match_operand 3 "" ""))
6949 (clobber (match_scratch:SI 4 "=l"))]
6954 op[0] = operands[4];
6955 op[1] = operands[1];
6956 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6958 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6959 switch (get_attr_length (insn))
6961 case 4: return \"b%d0\\t%l3\";
6962 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6963 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6966 [(set (attr "far_jump")
6968 (eq_attr "length" "8")
6969 (const_string "yes")
6970 (const_string "no")))
6971 (set (attr "length")
6973 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6974 (le (minus (match_dup 3) (pc)) (const_int 256)))
6977 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6978 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6983 (define_insn "*tlobits_cbranch"
6986 (match_operator 0 "equality_operator"
6987 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6988 (match_operand:SI 2 "const_int_operand" "i")
6991 (label_ref (match_operand 3 "" ""))
6993 (clobber (match_scratch:SI 4 "=l"))]
6998 op[0] = operands[4];
6999 op[1] = operands[1];
7000 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7002 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7003 switch (get_attr_length (insn))
7005 case 4: return \"b%d0\\t%l3\";
7006 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7007 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7010 [(set (attr "far_jump")
7012 (eq_attr "length" "8")
7013 (const_string "yes")
7014 (const_string "no")))
7015 (set (attr "length")
7017 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7018 (le (minus (match_dup 3) (pc)) (const_int 256)))
7021 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7022 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7027 (define_insn "*tstsi3_cbranch"
7030 (match_operator 3 "equality_operator"
7031 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7032 (match_operand:SI 1 "s_register_operand" "l"))
7034 (label_ref (match_operand 2 "" ""))
7039 output_asm_insn (\"tst\\t%0, %1\", operands);
7040 switch (get_attr_length (insn))
7042 case 4: return \"b%d3\\t%l2\";
7043 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7044 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7047 [(set (attr "far_jump")
7049 (eq_attr "length" "8")
7050 (const_string "yes")
7051 (const_string "no")))
7052 (set (attr "length")
7054 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7055 (le (minus (match_dup 2) (pc)) (const_int 256)))
7058 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7059 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7064 (define_insn "*andsi3_cbranch"
7067 (match_operator 5 "equality_operator"
7068 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7069 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7071 (label_ref (match_operand 4 "" ""))
7073 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7074 (and:SI (match_dup 2) (match_dup 3)))
7075 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7079 if (which_alternative == 0)
7080 output_asm_insn (\"and\\t%0, %3\", operands);
7081 else if (which_alternative == 1)
7083 output_asm_insn (\"and\\t%1, %3\", operands);
7084 output_asm_insn (\"mov\\t%0, %1\", operands);
7088 output_asm_insn (\"and\\t%1, %3\", operands);
7089 output_asm_insn (\"str\\t%1, %0\", operands);
7092 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7094 case 4: return \"b%d5\\t%l4\";
7095 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7096 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7099 [(set (attr "far_jump")
7101 (ior (and (eq (symbol_ref ("which_alternative"))
7103 (eq_attr "length" "8"))
7104 (eq_attr "length" "10"))
7105 (const_string "yes")
7106 (const_string "no")))
7107 (set (attr "length")
7109 (eq (symbol_ref ("which_alternative"))
7112 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7113 (le (minus (match_dup 4) (pc)) (const_int 256)))
7116 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7117 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7121 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7122 (le (minus (match_dup 4) (pc)) (const_int 256)))
7125 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7126 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7131 (define_insn "*orrsi3_cbranch_scratch"
7134 (match_operator 4 "equality_operator"
7135 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7136 (match_operand:SI 2 "s_register_operand" "l"))
7138 (label_ref (match_operand 3 "" ""))
7140 (clobber (match_scratch:SI 0 "=l"))]
7144 output_asm_insn (\"orr\\t%0, %2\", operands);
7145 switch (get_attr_length (insn))
7147 case 4: return \"b%d4\\t%l3\";
7148 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7149 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7152 [(set (attr "far_jump")
7154 (eq_attr "length" "8")
7155 (const_string "yes")
7156 (const_string "no")))
7157 (set (attr "length")
7159 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7160 (le (minus (match_dup 3) (pc)) (const_int 256)))
7163 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7164 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7169 (define_insn "*orrsi3_cbranch"
7172 (match_operator 5 "equality_operator"
7173 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7174 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7176 (label_ref (match_operand 4 "" ""))
7178 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7179 (ior:SI (match_dup 2) (match_dup 3)))
7180 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7184 if (which_alternative == 0)
7185 output_asm_insn (\"orr\\t%0, %3\", operands);
7186 else if (which_alternative == 1)
7188 output_asm_insn (\"orr\\t%1, %3\", operands);
7189 output_asm_insn (\"mov\\t%0, %1\", operands);
7193 output_asm_insn (\"orr\\t%1, %3\", operands);
7194 output_asm_insn (\"str\\t%1, %0\", operands);
7197 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7199 case 4: return \"b%d5\\t%l4\";
7200 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7201 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7204 [(set (attr "far_jump")
7206 (ior (and (eq (symbol_ref ("which_alternative"))
7208 (eq_attr "length" "8"))
7209 (eq_attr "length" "10"))
7210 (const_string "yes")
7211 (const_string "no")))
7212 (set (attr "length")
7214 (eq (symbol_ref ("which_alternative"))
7217 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7218 (le (minus (match_dup 4) (pc)) (const_int 256)))
7221 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7222 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7226 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7227 (le (minus (match_dup 4) (pc)) (const_int 256)))
7230 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7231 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7236 (define_insn "*xorsi3_cbranch_scratch"
7239 (match_operator 4 "equality_operator"
7240 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7241 (match_operand:SI 2 "s_register_operand" "l"))
7243 (label_ref (match_operand 3 "" ""))
7245 (clobber (match_scratch:SI 0 "=l"))]
7249 output_asm_insn (\"eor\\t%0, %2\", operands);
7250 switch (get_attr_length (insn))
7252 case 4: return \"b%d4\\t%l3\";
7253 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7254 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7257 [(set (attr "far_jump")
7259 (eq_attr "length" "8")
7260 (const_string "yes")
7261 (const_string "no")))
7262 (set (attr "length")
7264 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7265 (le (minus (match_dup 3) (pc)) (const_int 256)))
7268 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7269 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7274 (define_insn "*xorsi3_cbranch"
7277 (match_operator 5 "equality_operator"
7278 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7279 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7281 (label_ref (match_operand 4 "" ""))
7283 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7284 (xor:SI (match_dup 2) (match_dup 3)))
7285 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7289 if (which_alternative == 0)
7290 output_asm_insn (\"eor\\t%0, %3\", operands);
7291 else if (which_alternative == 1)
7293 output_asm_insn (\"eor\\t%1, %3\", operands);
7294 output_asm_insn (\"mov\\t%0, %1\", operands);
7298 output_asm_insn (\"eor\\t%1, %3\", operands);
7299 output_asm_insn (\"str\\t%1, %0\", operands);
7302 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7304 case 4: return \"b%d5\\t%l4\";
7305 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7306 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7309 [(set (attr "far_jump")
7311 (ior (and (eq (symbol_ref ("which_alternative"))
7313 (eq_attr "length" "8"))
7314 (eq_attr "length" "10"))
7315 (const_string "yes")
7316 (const_string "no")))
7317 (set (attr "length")
7319 (eq (symbol_ref ("which_alternative"))
7322 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7323 (le (minus (match_dup 4) (pc)) (const_int 256)))
7326 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7327 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7331 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7332 (le (minus (match_dup 4) (pc)) (const_int 256)))
7335 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7336 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7341 (define_insn "*bicsi3_cbranch_scratch"
7344 (match_operator 4 "equality_operator"
7345 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7346 (match_operand:SI 1 "s_register_operand" "0"))
7348 (label_ref (match_operand 3 "" ""))
7350 (clobber (match_scratch:SI 0 "=l"))]
7354 output_asm_insn (\"bic\\t%0, %2\", operands);
7355 switch (get_attr_length (insn))
7357 case 4: return \"b%d4\\t%l3\";
7358 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7359 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7362 [(set (attr "far_jump")
7364 (eq_attr "length" "8")
7365 (const_string "yes")
7366 (const_string "no")))
7367 (set (attr "length")
7369 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7370 (le (minus (match_dup 3) (pc)) (const_int 256)))
7373 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7374 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7379 (define_insn "*bicsi3_cbranch"
7382 (match_operator 5 "equality_operator"
7383 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7384 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7386 (label_ref (match_operand 4 "" ""))
7388 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7389 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7390 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7394 if (which_alternative == 0)
7395 output_asm_insn (\"bic\\t%0, %3\", operands);
7396 else if (which_alternative <= 2)
7398 output_asm_insn (\"bic\\t%1, %3\", operands);
7399 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7400 conditions again, since we're only testing for equality. */
7401 output_asm_insn (\"mov\\t%0, %1\", operands);
7405 output_asm_insn (\"bic\\t%1, %3\", operands);
7406 output_asm_insn (\"str\\t%1, %0\", operands);
7409 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7411 case 4: return \"b%d5\\t%l4\";
7412 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7413 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7416 [(set (attr "far_jump")
7418 (ior (and (eq (symbol_ref ("which_alternative"))
7420 (eq_attr "length" "8"))
7421 (eq_attr "length" "10"))
7422 (const_string "yes")
7423 (const_string "no")))
7424 (set (attr "length")
7426 (eq (symbol_ref ("which_alternative"))
7429 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7430 (le (minus (match_dup 4) (pc)) (const_int 256)))
7433 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7434 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7438 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7439 (le (minus (match_dup 4) (pc)) (const_int 256)))
7442 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7443 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7448 (define_insn "*cbranchne_decr1"
7450 (if_then_else (match_operator 3 "equality_operator"
7451 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7453 (label_ref (match_operand 4 "" ""))
7455 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7456 (plus:SI (match_dup 2) (const_int -1)))
7457 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7462 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7464 VOIDmode, operands[2], const1_rtx);
7465 cond[1] = operands[4];
7467 if (which_alternative == 0)
7468 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7469 else if (which_alternative == 1)
7471 /* We must provide an alternative for a hi reg because reload
7472 cannot handle output reloads on a jump instruction, but we
7473 can't subtract into that. Fortunately a mov from lo to hi
7474 does not clobber the condition codes. */
7475 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7476 output_asm_insn (\"mov\\t%0, %1\", operands);
7480 /* Similarly, but the target is memory. */
7481 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7482 output_asm_insn (\"str\\t%1, %0\", operands);
7485 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7488 output_asm_insn (\"b%d0\\t%l1\", cond);
7491 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7492 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7494 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7495 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7499 [(set (attr "far_jump")
7501 (ior (and (eq (symbol_ref ("which_alternative"))
7503 (eq_attr "length" "8"))
7504 (eq_attr "length" "10"))
7505 (const_string "yes")
7506 (const_string "no")))
7507 (set_attr_alternative "length"
7511 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7512 (le (minus (match_dup 4) (pc)) (const_int 256)))
7515 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7516 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7521 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7522 (le (minus (match_dup 4) (pc)) (const_int 256)))
7525 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7526 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7531 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7532 (le (minus (match_dup 4) (pc)) (const_int 256)))
7535 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7536 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7541 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7542 (le (minus (match_dup 4) (pc)) (const_int 256)))
7545 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7546 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7551 (define_insn "*addsi3_cbranch"
7554 (match_operator 4 "arm_comparison_operator"
7556 (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
7557 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
7559 (label_ref (match_operand 5 "" ""))
7562 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7563 (plus:SI (match_dup 2) (match_dup 3)))
7564 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7566 && (GET_CODE (operands[4]) == EQ
7567 || GET_CODE (operands[4]) == NE
7568 || GET_CODE (operands[4]) == GE
7569 || GET_CODE (operands[4]) == LT)"
7574 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7575 cond[1] = operands[2];
7576 cond[2] = operands[3];
7578 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7579 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7581 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7583 if (which_alternative >= 2
7584 && which_alternative < 4)
7585 output_asm_insn (\"mov\\t%0, %1\", operands);
7586 else if (which_alternative >= 4)
7587 output_asm_insn (\"str\\t%1, %0\", operands);
7589 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7592 return \"b%d4\\t%l5\";
7594 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7596 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7600 [(set (attr "far_jump")
7602 (ior (and (lt (symbol_ref ("which_alternative"))
7604 (eq_attr "length" "8"))
7605 (eq_attr "length" "10"))
7606 (const_string "yes")
7607 (const_string "no")))
7608 (set (attr "length")
7610 (lt (symbol_ref ("which_alternative"))
7613 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7614 (le (minus (match_dup 5) (pc)) (const_int 256)))
7617 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7618 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7622 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7623 (le (minus (match_dup 5) (pc)) (const_int 256)))
7626 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7627 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7632 (define_insn "*addsi3_cbranch_scratch"
7635 (match_operator 3 "arm_comparison_operator"
7637 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7638 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7640 (label_ref (match_operand 4 "" ""))
7642 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7644 && (GET_CODE (operands[3]) == EQ
7645 || GET_CODE (operands[3]) == NE
7646 || GET_CODE (operands[3]) == GE
7647 || GET_CODE (operands[3]) == LT)"
7650 switch (which_alternative)
7653 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7656 output_asm_insn (\"cmn\t%1, %2\", operands);
7659 if (INTVAL (operands[2]) < 0)
7660 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7662 output_asm_insn (\"add\t%0, %1, %2\", operands);
7665 if (INTVAL (operands[2]) < 0)
7666 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7668 output_asm_insn (\"add\t%0, %0, %2\", operands);
7672 switch (get_attr_length (insn))
7675 return \"b%d3\\t%l4\";
7677 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7679 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7683 [(set (attr "far_jump")
7685 (eq_attr "length" "8")
7686 (const_string "yes")
7687 (const_string "no")))
7688 (set (attr "length")
7690 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7691 (le (minus (match_dup 4) (pc)) (const_int 256)))
7694 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7695 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7700 (define_insn "*subsi3_cbranch"
7703 (match_operator 4 "arm_comparison_operator"
7705 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7706 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7708 (label_ref (match_operand 5 "" ""))
7710 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7711 (minus:SI (match_dup 2) (match_dup 3)))
7712 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7714 && (GET_CODE (operands[4]) == EQ
7715 || GET_CODE (operands[4]) == NE
7716 || GET_CODE (operands[4]) == GE
7717 || GET_CODE (operands[4]) == LT)"
7720 if (which_alternative == 0)
7721 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7722 else if (which_alternative == 1)
7724 /* We must provide an alternative for a hi reg because reload
7725 cannot handle output reloads on a jump instruction, but we
7726 can't subtract into that. Fortunately a mov from lo to hi
7727 does not clobber the condition codes. */
7728 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7729 output_asm_insn (\"mov\\t%0, %1\", operands);
7733 /* Similarly, but the target is memory. */
7734 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7735 output_asm_insn (\"str\\t%1, %0\", operands);
7738 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7741 return \"b%d4\\t%l5\";
7743 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7745 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7749 [(set (attr "far_jump")
7751 (ior (and (eq (symbol_ref ("which_alternative"))
7753 (eq_attr "length" "8"))
7754 (eq_attr "length" "10"))
7755 (const_string "yes")
7756 (const_string "no")))
7757 (set (attr "length")
7759 (eq (symbol_ref ("which_alternative"))
7762 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7763 (le (minus (match_dup 5) (pc)) (const_int 256)))
7766 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7767 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7771 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7772 (le (minus (match_dup 5) (pc)) (const_int 256)))
7775 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7776 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7781 (define_insn "*subsi3_cbranch_scratch"
7784 (match_operator 0 "arm_comparison_operator"
7785 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7786 (match_operand:SI 2 "nonmemory_operand" "l"))
7788 (label_ref (match_operand 3 "" ""))
7791 && (GET_CODE (operands[0]) == EQ
7792 || GET_CODE (operands[0]) == NE
7793 || GET_CODE (operands[0]) == GE
7794 || GET_CODE (operands[0]) == LT)"
7796 output_asm_insn (\"cmp\\t%1, %2\", operands);
7797 switch (get_attr_length (insn))
7799 case 4: return \"b%d0\\t%l3\";
7800 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7801 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7804 [(set (attr "far_jump")
7806 (eq_attr "length" "8")
7807 (const_string "yes")
7808 (const_string "no")))
7809 (set (attr "length")
7811 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7812 (le (minus (match_dup 3) (pc)) (const_int 256)))
7815 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7816 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7821 ;; Comparison and test insns
7823 (define_insn "*arm_cmpsi_insn"
7824 [(set (reg:CC CC_REGNUM)
7825 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7826 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7831 [(set_attr "conds" "set")]
7834 (define_insn "*arm_cmpsi_shiftsi"
7835 [(set (reg:CC CC_REGNUM)
7836 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7837 (match_operator:SI 3 "shift_operator"
7838 [(match_operand:SI 1 "s_register_operand" "r")
7839 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7842 [(set_attr "conds" "set")
7843 (set_attr "shift" "1")
7844 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7845 (const_string "alu_shift")
7846 (const_string "alu_shift_reg")))]
7849 (define_insn "*arm_cmpsi_shiftsi_swp"
7850 [(set (reg:CC_SWP CC_REGNUM)
7851 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7852 [(match_operand:SI 1 "s_register_operand" "r")
7853 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7854 (match_operand:SI 0 "s_register_operand" "r")))]
7857 [(set_attr "conds" "set")
7858 (set_attr "shift" "1")
7859 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7860 (const_string "alu_shift")
7861 (const_string "alu_shift_reg")))]
7864 (define_insn "*arm_cmpsi_negshiftsi_si"
7865 [(set (reg:CC_Z CC_REGNUM)
7867 (neg:SI (match_operator:SI 1 "shift_operator"
7868 [(match_operand:SI 2 "s_register_operand" "r")
7869 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7870 (match_operand:SI 0 "s_register_operand" "r")))]
7873 [(set_attr "conds" "set")
7874 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7875 (const_string "alu_shift")
7876 (const_string "alu_shift_reg")))]
7879 ;; Cirrus SF compare instruction
7880 (define_insn "*cirrus_cmpsf"
7881 [(set (reg:CCFP CC_REGNUM)
7882 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7883 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7884 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7885 "cfcmps%?\\tr15, %V0, %V1"
7886 [(set_attr "type" "mav_farith")
7887 (set_attr "cirrus" "compare")]
7890 ;; Cirrus DF compare instruction
7891 (define_insn "*cirrus_cmpdf"
7892 [(set (reg:CCFP CC_REGNUM)
7893 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7894 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7895 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7896 "cfcmpd%?\\tr15, %V0, %V1"
7897 [(set_attr "type" "mav_farith")
7898 (set_attr "cirrus" "compare")]
7901 (define_insn "*cirrus_cmpdi"
7902 [(set (reg:CC CC_REGNUM)
7903 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7904 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7905 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7906 "cfcmp64%?\\tr15, %V0, %V1"
7907 [(set_attr "type" "mav_farith")
7908 (set_attr "cirrus" "compare")]
7911 ; This insn allows redundant compares to be removed by cse, nothing should
7912 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7913 ; is deleted later on. The match_dup will match the mode here, so that
7914 ; mode changes of the condition codes aren't lost by this even though we don't
7915 ; specify what they are.
7917 (define_insn "*deleted_compare"
7918 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7920 "\\t%@ deleted compare"
7921 [(set_attr "conds" "set")
7922 (set_attr "length" "0")]
7926 ;; Conditional branch insns
7928 (define_expand "cbranch_cc"
7930 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7931 (match_operand 2 "" "")])
7932 (label_ref (match_operand 3 "" ""))
7935 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7936 operands[1], operands[2]);
7937 operands[2] = const0_rtx;"
7941 ;; Patterns to match conditional branch insns.
7944 (define_insn "*arm_cond_branch"
7946 (if_then_else (match_operator 1 "arm_comparison_operator"
7947 [(match_operand 2 "cc_register" "") (const_int 0)])
7948 (label_ref (match_operand 0 "" ""))
7952 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7954 arm_ccfsm_state += 2;
7957 return \"b%d1\\t%l0\";
7959 [(set_attr "conds" "use")
7960 (set_attr "type" "branch")]
7963 (define_insn "*arm_cond_branch_reversed"
7965 (if_then_else (match_operator 1 "arm_comparison_operator"
7966 [(match_operand 2 "cc_register" "") (const_int 0)])
7968 (label_ref (match_operand 0 "" ""))))]
7971 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7973 arm_ccfsm_state += 2;
7976 return \"b%D1\\t%l0\";
7978 [(set_attr "conds" "use")
7979 (set_attr "type" "branch")]
7986 (define_expand "cstore_cc"
7987 [(set (match_operand:SI 0 "s_register_operand" "")
7988 (match_operator:SI 1 "" [(match_operand 2 "" "")
7989 (match_operand 3 "" "")]))]
7991 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7992 operands[2], operands[3]);
7993 operands[3] = const0_rtx;"
7996 (define_insn "*mov_scc"
7997 [(set (match_operand:SI 0 "s_register_operand" "=r")
7998 (match_operator:SI 1 "arm_comparison_operator"
7999 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8001 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8002 [(set_attr "conds" "use")
8003 (set_attr "length" "8")]
8006 (define_insn "*mov_negscc"
8007 [(set (match_operand:SI 0 "s_register_operand" "=r")
8008 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8009 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8011 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8012 [(set_attr "conds" "use")
8013 (set_attr "length" "8")]
8016 (define_insn "*mov_notscc"
8017 [(set (match_operand:SI 0 "s_register_operand" "=r")
8018 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8019 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8021 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8022 [(set_attr "conds" "use")
8023 (set_attr "length" "8")]
8026 (define_expand "cstoresi4"
8027 [(set (match_operand:SI 0 "s_register_operand" "")
8028 (match_operator:SI 1 "arm_comparison_operator"
8029 [(match_operand:SI 2 "s_register_operand" "")
8030 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8031 "TARGET_32BIT || TARGET_THUMB1"
8033 rtx op3, scratch, scratch2;
8037 if (!arm_add_operand (operands[3], SImode))
8038 operands[3] = force_reg (SImode, operands[3]);
8039 emit_insn (gen_cstore_cc (operands[0], operands[1],
8040 operands[2], operands[3]));
8044 if (operands[3] == const0_rtx)
8046 switch (GET_CODE (operands[1]))
8049 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8053 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8057 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8058 NULL_RTX, 0, OPTAB_WIDEN);
8059 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8060 NULL_RTX, 0, OPTAB_WIDEN);
8061 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8062 operands[0], 1, OPTAB_WIDEN);
8066 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8068 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8069 NULL_RTX, 1, OPTAB_WIDEN);
8073 scratch = expand_binop (SImode, ashr_optab, operands[2],
8074 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8075 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8076 NULL_RTX, 0, OPTAB_WIDEN);
8077 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8081 /* LT is handled by generic code. No need for unsigned with 0. */
8088 switch (GET_CODE (operands[1]))
8091 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8092 NULL_RTX, 0, OPTAB_WIDEN);
8093 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8097 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8098 NULL_RTX, 0, OPTAB_WIDEN);
8099 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8103 op3 = force_reg (SImode, operands[3]);
8105 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8106 NULL_RTX, 1, OPTAB_WIDEN);
8107 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8108 NULL_RTX, 0, OPTAB_WIDEN);
8109 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8115 if (!thumb1_cmp_operand (op3, SImode))
8116 op3 = force_reg (SImode, op3);
8117 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8118 NULL_RTX, 0, OPTAB_WIDEN);
8119 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8120 NULL_RTX, 1, OPTAB_WIDEN);
8121 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8126 op3 = force_reg (SImode, operands[3]);
8127 scratch = force_reg (SImode, const0_rtx);
8128 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8134 if (!thumb1_cmp_operand (op3, SImode))
8135 op3 = force_reg (SImode, op3);
8136 scratch = force_reg (SImode, const0_rtx);
8137 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8143 if (!thumb1_cmp_operand (op3, SImode))
8144 op3 = force_reg (SImode, op3);
8145 scratch = gen_reg_rtx (SImode);
8146 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8150 op3 = force_reg (SImode, operands[3]);
8151 scratch = gen_reg_rtx (SImode);
8152 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8155 /* No good sequences for GT, LT. */
8162 (define_expand "cstoresf4"
8163 [(set (match_operand:SI 0 "s_register_operand" "")
8164 (match_operator:SI 1 "arm_comparison_operator"
8165 [(match_operand:SF 2 "s_register_operand" "")
8166 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8167 "TARGET_32BIT && TARGET_HARD_FLOAT"
8168 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8169 operands[2], operands[3])); DONE;"
8172 (define_expand "cstoredf4"
8173 [(set (match_operand:SI 0 "s_register_operand" "")
8174 (match_operator:SI 1 "arm_comparison_operator"
8175 [(match_operand:DF 2 "s_register_operand" "")
8176 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8177 "TARGET_32BIT && TARGET_HARD_FLOAT"
8178 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8179 operands[2], operands[3])); DONE;"
8182 ;; this uses the Cirrus DI compare instruction
8183 (define_expand "cstoredi4"
8184 [(set (match_operand:SI 0 "s_register_operand" "")
8185 (match_operator:SI 1 "arm_comparison_operator"
8186 [(match_operand:DI 2 "cirrus_fp_register" "")
8187 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8188 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8189 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8190 operands[2], operands[3])); DONE;"
8194 (define_expand "cstoresi_eq0_thumb1"
8196 [(set (match_operand:SI 0 "s_register_operand" "")
8197 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8199 (clobber (match_dup:SI 2))])]
8201 "operands[2] = gen_reg_rtx (SImode);"
8204 (define_expand "cstoresi_ne0_thumb1"
8206 [(set (match_operand:SI 0 "s_register_operand" "")
8207 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8209 (clobber (match_dup:SI 2))])]
8211 "operands[2] = gen_reg_rtx (SImode);"
8214 (define_insn "*cstoresi_eq0_thumb1_insn"
8215 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8216 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8218 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8221 neg\\t%0, %1\;adc\\t%0, %0, %1
8222 neg\\t%2, %1\;adc\\t%0, %1, %2"
8223 [(set_attr "length" "4")]
8226 (define_insn "*cstoresi_ne0_thumb1_insn"
8227 [(set (match_operand:SI 0 "s_register_operand" "=l")
8228 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8230 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8232 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8233 [(set_attr "length" "4")]
8236 ;; Used as part of the expansion of thumb ltu and gtu sequences
8237 (define_insn "cstoresi_nltu_thumb1"
8238 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8239 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8240 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8242 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8243 [(set_attr "length" "4")]
8246 (define_insn_and_split "cstoresi_ltu_thumb1"
8247 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8248 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8249 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8254 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8255 (set (match_dup 0) (neg:SI (match_dup 3)))]
8256 "operands[3] = gen_reg_rtx (SImode);"
8257 [(set_attr "length" "4")]
8260 ;; Used as part of the expansion of thumb les sequence.
8261 (define_insn "thumb1_addsi3_addgeu"
8262 [(set (match_operand:SI 0 "s_register_operand" "=l")
8263 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8264 (match_operand:SI 2 "s_register_operand" "l"))
8265 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8266 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8268 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8269 [(set_attr "length" "4")]
8273 ;; Conditional move insns
8275 (define_expand "movsicc"
8276 [(set (match_operand:SI 0 "s_register_operand" "")
8277 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8278 (match_operand:SI 2 "arm_not_operand" "")
8279 (match_operand:SI 3 "arm_not_operand" "")))]
8283 enum rtx_code code = GET_CODE (operands[1]);
8286 if (code == UNEQ || code == LTGT)
8289 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8290 XEXP (operands[1], 1));
8291 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8295 (define_expand "movsfcc"
8296 [(set (match_operand:SF 0 "s_register_operand" "")
8297 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8298 (match_operand:SF 2 "s_register_operand" "")
8299 (match_operand:SF 3 "nonmemory_operand" "")))]
8300 "TARGET_32BIT && TARGET_HARD_FLOAT"
8303 enum rtx_code code = GET_CODE (operands[1]);
8306 if (code == UNEQ || code == LTGT)
8309 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8310 Otherwise, ensure it is a valid FP add operand */
8311 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8312 || (!arm_float_add_operand (operands[3], SFmode)))
8313 operands[3] = force_reg (SFmode, operands[3]);
8315 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8316 XEXP (operands[1], 1));
8317 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8321 (define_expand "movdfcc"
8322 [(set (match_operand:DF 0 "s_register_operand" "")
8323 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8324 (match_operand:DF 2 "s_register_operand" "")
8325 (match_operand:DF 3 "arm_float_add_operand" "")))]
8326 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8329 enum rtx_code code = GET_CODE (operands[1]);
8332 if (code == UNEQ || code == LTGT)
8335 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8336 XEXP (operands[1], 1));
8337 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8341 (define_insn "*movsicc_insn"
8342 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8344 (match_operator 3 "arm_comparison_operator"
8345 [(match_operand 4 "cc_register" "") (const_int 0)])
8346 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8347 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8354 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8355 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8356 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8357 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8358 [(set_attr "length" "4,4,4,4,8,8,8,8")
8359 (set_attr "conds" "use")]
8362 (define_insn "*movsfcc_soft_insn"
8363 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8364 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8365 [(match_operand 4 "cc_register" "") (const_int 0)])
8366 (match_operand:SF 1 "s_register_operand" "0,r")
8367 (match_operand:SF 2 "s_register_operand" "r,0")))]
8368 "TARGET_ARM && TARGET_SOFT_FLOAT"
8372 [(set_attr "conds" "use")]
8376 ;; Jump and linkage insns
8378 (define_expand "jump"
8380 (label_ref (match_operand 0 "" "")))]
8385 (define_insn "*arm_jump"
8387 (label_ref (match_operand 0 "" "")))]
8391 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8393 arm_ccfsm_state += 2;
8396 return \"b%?\\t%l0\";
8399 [(set_attr "predicable" "yes")]
8402 (define_insn "*thumb_jump"
8404 (label_ref (match_operand 0 "" "")))]
8407 if (get_attr_length (insn) == 2)
8409 return \"bl\\t%l0\\t%@ far jump\";
8411 [(set (attr "far_jump")
8413 (eq_attr "length" "4")
8414 (const_string "yes")
8415 (const_string "no")))
8416 (set (attr "length")
8418 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8419 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8424 (define_expand "call"
8425 [(parallel [(call (match_operand 0 "memory_operand" "")
8426 (match_operand 1 "general_operand" ""))
8427 (use (match_operand 2 "" ""))
8428 (clobber (reg:SI LR_REGNUM))])]
8434 /* In an untyped call, we can get NULL for operand 2. */
8435 if (operands[2] == NULL_RTX)
8436 operands[2] = const0_rtx;
8438 /* Decide if we should generate indirect calls by loading the
8439 32-bit address of the callee into a register before performing the
8441 callee = XEXP (operands[0], 0);
8442 if (GET_CODE (callee) == SYMBOL_REF
8443 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8445 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8447 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8448 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8453 (define_expand "call_internal"
8454 [(parallel [(call (match_operand 0 "memory_operand" "")
8455 (match_operand 1 "general_operand" ""))
8456 (use (match_operand 2 "" ""))
8457 (clobber (reg:SI LR_REGNUM))])])
8459 (define_insn "*call_reg_armv5"
8460 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8461 (match_operand 1 "" ""))
8462 (use (match_operand 2 "" ""))
8463 (clobber (reg:SI LR_REGNUM))]
8464 "TARGET_ARM && arm_arch5"
8466 [(set_attr "type" "call")]
8469 (define_insn "*call_reg_arm"
8470 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8471 (match_operand 1 "" ""))
8472 (use (match_operand 2 "" ""))
8473 (clobber (reg:SI LR_REGNUM))]
8474 "TARGET_ARM && !arm_arch5"
8476 return output_call (operands);
8478 ;; length is worst case, normally it is only two
8479 [(set_attr "length" "12")
8480 (set_attr "type" "call")]
8484 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8485 ;; considered a function call by the branch predictor of some cores (PR40887).
8486 ;; Falls back to blx rN (*call_reg_armv5).
8488 (define_insn "*call_mem"
8489 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8490 (match_operand 1 "" ""))
8491 (use (match_operand 2 "" ""))
8492 (clobber (reg:SI LR_REGNUM))]
8493 "TARGET_ARM && !arm_arch5"
8495 return output_call_mem (operands);
8497 [(set_attr "length" "12")
8498 (set_attr "type" "call")]
8501 (define_insn "*call_reg_thumb1_v5"
8502 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8503 (match_operand 1 "" ""))
8504 (use (match_operand 2 "" ""))
8505 (clobber (reg:SI LR_REGNUM))]
8506 "TARGET_THUMB1 && arm_arch5"
8508 [(set_attr "length" "2")
8509 (set_attr "type" "call")]
8512 (define_insn "*call_reg_thumb1"
8513 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8514 (match_operand 1 "" ""))
8515 (use (match_operand 2 "" ""))
8516 (clobber (reg:SI LR_REGNUM))]
8517 "TARGET_THUMB1 && !arm_arch5"
8520 if (!TARGET_CALLER_INTERWORKING)
8521 return thumb_call_via_reg (operands[0]);
8522 else if (operands[1] == const0_rtx)
8523 return \"bl\\t%__interwork_call_via_%0\";
8524 else if (frame_pointer_needed)
8525 return \"bl\\t%__interwork_r7_call_via_%0\";
8527 return \"bl\\t%__interwork_r11_call_via_%0\";
8529 [(set_attr "type" "call")]
8532 (define_expand "call_value"
8533 [(parallel [(set (match_operand 0 "" "")
8534 (call (match_operand 1 "memory_operand" "")
8535 (match_operand 2 "general_operand" "")))
8536 (use (match_operand 3 "" ""))
8537 (clobber (reg:SI LR_REGNUM))])]
8543 /* In an untyped call, we can get NULL for operand 2. */
8544 if (operands[3] == 0)
8545 operands[3] = const0_rtx;
8547 /* Decide if we should generate indirect calls by loading the
8548 32-bit address of the callee into a register before performing the
8550 callee = XEXP (operands[1], 0);
8551 if (GET_CODE (callee) == SYMBOL_REF
8552 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8554 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8556 pat = gen_call_value_internal (operands[0], operands[1],
8557 operands[2], operands[3]);
8558 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8563 (define_expand "call_value_internal"
8564 [(parallel [(set (match_operand 0 "" "")
8565 (call (match_operand 1 "memory_operand" "")
8566 (match_operand 2 "general_operand" "")))
8567 (use (match_operand 3 "" ""))
8568 (clobber (reg:SI LR_REGNUM))])])
8570 (define_insn "*call_value_reg_armv5"
8571 [(set (match_operand 0 "" "")
8572 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8573 (match_operand 2 "" "")))
8574 (use (match_operand 3 "" ""))
8575 (clobber (reg:SI LR_REGNUM))]
8576 "TARGET_ARM && arm_arch5"
8578 [(set_attr "type" "call")]
8581 (define_insn "*call_value_reg_arm"
8582 [(set (match_operand 0 "" "")
8583 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8584 (match_operand 2 "" "")))
8585 (use (match_operand 3 "" ""))
8586 (clobber (reg:SI LR_REGNUM))]
8587 "TARGET_ARM && !arm_arch5"
8589 return output_call (&operands[1]);
8591 [(set_attr "length" "12")
8592 (set_attr "type" "call")]
8595 ;; Note: see *call_mem
8597 (define_insn "*call_value_mem"
8598 [(set (match_operand 0 "" "")
8599 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8600 (match_operand 2 "" "")))
8601 (use (match_operand 3 "" ""))
8602 (clobber (reg:SI LR_REGNUM))]
8603 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8605 return output_call_mem (&operands[1]);
8607 [(set_attr "length" "12")
8608 (set_attr "type" "call")]
8611 (define_insn "*call_value_reg_thumb1_v5"
8612 [(set (match_operand 0 "" "")
8613 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8614 (match_operand 2 "" "")))
8615 (use (match_operand 3 "" ""))
8616 (clobber (reg:SI LR_REGNUM))]
8617 "TARGET_THUMB1 && arm_arch5"
8619 [(set_attr "length" "2")
8620 (set_attr "type" "call")]
8623 (define_insn "*call_value_reg_thumb1"
8624 [(set (match_operand 0 "" "")
8625 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8626 (match_operand 2 "" "")))
8627 (use (match_operand 3 "" ""))
8628 (clobber (reg:SI LR_REGNUM))]
8629 "TARGET_THUMB1 && !arm_arch5"
8632 if (!TARGET_CALLER_INTERWORKING)
8633 return thumb_call_via_reg (operands[1]);
8634 else if (operands[2] == const0_rtx)
8635 return \"bl\\t%__interwork_call_via_%1\";
8636 else if (frame_pointer_needed)
8637 return \"bl\\t%__interwork_r7_call_via_%1\";
8639 return \"bl\\t%__interwork_r11_call_via_%1\";
8641 [(set_attr "type" "call")]
8644 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8645 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8647 (define_insn "*call_symbol"
8648 [(call (mem:SI (match_operand:SI 0 "" ""))
8649 (match_operand 1 "" ""))
8650 (use (match_operand 2 "" ""))
8651 (clobber (reg:SI LR_REGNUM))]
8653 && (GET_CODE (operands[0]) == SYMBOL_REF)
8654 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8657 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8659 [(set_attr "type" "call")]
8662 (define_insn "*call_value_symbol"
8663 [(set (match_operand 0 "" "")
8664 (call (mem:SI (match_operand:SI 1 "" ""))
8665 (match_operand:SI 2 "" "")))
8666 (use (match_operand 3 "" ""))
8667 (clobber (reg:SI LR_REGNUM))]
8669 && (GET_CODE (operands[1]) == SYMBOL_REF)
8670 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8673 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8675 [(set_attr "type" "call")]
8678 (define_insn "*call_insn"
8679 [(call (mem:SI (match_operand:SI 0 "" ""))
8680 (match_operand:SI 1 "" ""))
8681 (use (match_operand 2 "" ""))
8682 (clobber (reg:SI LR_REGNUM))]
8684 && GET_CODE (operands[0]) == SYMBOL_REF
8685 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8687 [(set_attr "length" "4")
8688 (set_attr "type" "call")]
8691 (define_insn "*call_value_insn"
8692 [(set (match_operand 0 "" "")
8693 (call (mem:SI (match_operand 1 "" ""))
8694 (match_operand 2 "" "")))
8695 (use (match_operand 3 "" ""))
8696 (clobber (reg:SI LR_REGNUM))]
8698 && GET_CODE (operands[1]) == SYMBOL_REF
8699 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8701 [(set_attr "length" "4")
8702 (set_attr "type" "call")]
8705 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8706 (define_expand "sibcall"
8707 [(parallel [(call (match_operand 0 "memory_operand" "")
8708 (match_operand 1 "general_operand" ""))
8710 (use (match_operand 2 "" ""))])]
8714 if (operands[2] == NULL_RTX)
8715 operands[2] = const0_rtx;
8719 (define_expand "sibcall_value"
8720 [(parallel [(set (match_operand 0 "" "")
8721 (call (match_operand 1 "memory_operand" "")
8722 (match_operand 2 "general_operand" "")))
8724 (use (match_operand 3 "" ""))])]
8728 if (operands[3] == NULL_RTX)
8729 operands[3] = const0_rtx;
8733 (define_insn "*sibcall_insn"
8734 [(call (mem:SI (match_operand:SI 0 "" "X"))
8735 (match_operand 1 "" ""))
8737 (use (match_operand 2 "" ""))]
8738 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8740 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8742 [(set_attr "type" "call")]
8745 (define_insn "*sibcall_value_insn"
8746 [(set (match_operand 0 "" "")
8747 (call (mem:SI (match_operand:SI 1 "" "X"))
8748 (match_operand 2 "" "")))
8750 (use (match_operand 3 "" ""))]
8751 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8753 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8755 [(set_attr "type" "call")]
8758 (define_expand "return"
8760 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8763 ;; Often the return insn will be the same as loading from memory, so set attr
8764 (define_insn "*arm_return"
8766 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8769 if (arm_ccfsm_state == 2)
8771 arm_ccfsm_state += 2;
8774 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8776 [(set_attr "type" "load1")
8777 (set_attr "length" "12")
8778 (set_attr "predicable" "yes")]
8781 (define_insn "*cond_return"
8783 (if_then_else (match_operator 0 "arm_comparison_operator"
8784 [(match_operand 1 "cc_register" "") (const_int 0)])
8787 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8790 if (arm_ccfsm_state == 2)
8792 arm_ccfsm_state += 2;
8795 return output_return_instruction (operands[0], TRUE, FALSE);
8797 [(set_attr "conds" "use")
8798 (set_attr "length" "12")
8799 (set_attr "type" "load1")]
8802 (define_insn "*cond_return_inverted"
8804 (if_then_else (match_operator 0 "arm_comparison_operator"
8805 [(match_operand 1 "cc_register" "") (const_int 0)])
8808 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8811 if (arm_ccfsm_state == 2)
8813 arm_ccfsm_state += 2;
8816 return output_return_instruction (operands[0], TRUE, TRUE);
8818 [(set_attr "conds" "use")
8819 (set_attr "length" "12")
8820 (set_attr "type" "load1")]
8823 ;; Generate a sequence of instructions to determine if the processor is
8824 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8827 (define_expand "return_addr_mask"
8829 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8831 (set (match_operand:SI 0 "s_register_operand" "")
8832 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8834 (const_int 67108860)))] ; 0x03fffffc
8837 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8840 (define_insn "*check_arch2"
8841 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8842 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8845 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8846 [(set_attr "length" "8")
8847 (set_attr "conds" "set")]
8850 ;; Call subroutine returning any type.
8852 (define_expand "untyped_call"
8853 [(parallel [(call (match_operand 0 "" "")
8855 (match_operand 1 "" "")
8856 (match_operand 2 "" "")])]
8861 rtx par = gen_rtx_PARALLEL (VOIDmode,
8862 rtvec_alloc (XVECLEN (operands[2], 0)));
8863 rtx addr = gen_reg_rtx (Pmode);
8867 emit_move_insn (addr, XEXP (operands[1], 0));
8868 mem = change_address (operands[1], BLKmode, addr);
8870 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8872 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8874 /* Default code only uses r0 as a return value, but we could
8875 be using anything up to 4 registers. */
8876 if (REGNO (src) == R0_REGNUM)
8877 src = gen_rtx_REG (TImode, R0_REGNUM);
8879 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8881 size += GET_MODE_SIZE (GET_MODE (src));
8884 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8889 for (i = 0; i < XVECLEN (par, 0); i++)
8891 HOST_WIDE_INT offset = 0;
8892 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8895 emit_move_insn (addr, plus_constant (addr, size));
8897 mem = change_address (mem, GET_MODE (reg), NULL);
8898 if (REGNO (reg) == R0_REGNUM)
8900 /* On thumb we have to use a write-back instruction. */
8901 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8902 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8903 size = TARGET_ARM ? 16 : 0;
8907 emit_move_insn (mem, reg);
8908 size = GET_MODE_SIZE (GET_MODE (reg));
8912 /* The optimizer does not know that the call sets the function value
8913 registers we stored in the result block. We avoid problems by
8914 claiming that all hard registers are used and clobbered at this
8916 emit_insn (gen_blockage ());
8922 (define_expand "untyped_return"
8923 [(match_operand:BLK 0 "memory_operand" "")
8924 (match_operand 1 "" "")]
8929 rtx addr = gen_reg_rtx (Pmode);
8933 emit_move_insn (addr, XEXP (operands[0], 0));
8934 mem = change_address (operands[0], BLKmode, addr);
8936 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8938 HOST_WIDE_INT offset = 0;
8939 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8942 emit_move_insn (addr, plus_constant (addr, size));
8944 mem = change_address (mem, GET_MODE (reg), NULL);
8945 if (REGNO (reg) == R0_REGNUM)
8947 /* On thumb we have to use a write-back instruction. */
8948 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8949 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8950 size = TARGET_ARM ? 16 : 0;
8954 emit_move_insn (reg, mem);
8955 size = GET_MODE_SIZE (GET_MODE (reg));
8959 /* Emit USE insns before the return. */
8960 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8961 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8963 /* Construct the return. */
8964 expand_naked_return ();
8970 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8971 ;; all of memory. This blocks insns from being moved across this point.
8973 (define_insn "blockage"
8974 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8977 [(set_attr "length" "0")
8978 (set_attr "type" "block")]
8981 (define_expand "casesi"
8982 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8983 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8984 (match_operand:SI 2 "const_int_operand" "") ; total range
8985 (match_operand:SI 3 "" "") ; table label
8986 (match_operand:SI 4 "" "")] ; Out of range label
8987 "TARGET_32BIT || optimize_size || flag_pic"
8990 enum insn_code code;
8991 if (operands[1] != const0_rtx)
8993 rtx reg = gen_reg_rtx (SImode);
8995 emit_insn (gen_addsi3 (reg, operands[0],
8996 GEN_INT (-INTVAL (operands[1]))));
9001 code = CODE_FOR_arm_casesi_internal;
9002 else if (TARGET_THUMB1)
9003 code = CODE_FOR_thumb1_casesi_internal_pic;
9005 code = CODE_FOR_thumb2_casesi_internal_pic;
9007 code = CODE_FOR_thumb2_casesi_internal;
9009 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9010 operands[2] = force_reg (SImode, operands[2]);
9012 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9013 operands[3], operands[4]));
9018 ;; The USE in this pattern is needed to tell flow analysis that this is
9019 ;; a CASESI insn. It has no other purpose.
9020 (define_insn "arm_casesi_internal"
9021 [(parallel [(set (pc)
9023 (leu (match_operand:SI 0 "s_register_operand" "r")
9024 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9025 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9026 (label_ref (match_operand 2 "" ""))))
9027 (label_ref (match_operand 3 "" ""))))
9028 (clobber (reg:CC CC_REGNUM))
9029 (use (label_ref (match_dup 2)))])]
9033 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9034 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9036 [(set_attr "conds" "clob")
9037 (set_attr "length" "12")]
9040 (define_expand "thumb1_casesi_internal_pic"
9041 [(match_operand:SI 0 "s_register_operand" "")
9042 (match_operand:SI 1 "thumb1_cmp_operand" "")
9043 (match_operand 2 "" "")
9044 (match_operand 3 "" "")]
9048 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9049 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9051 reg0 = gen_rtx_REG (SImode, 0);
9052 emit_move_insn (reg0, operands[0]);
9053 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9058 (define_insn "thumb1_casesi_dispatch"
9059 [(parallel [(set (pc) (unspec [(reg:SI 0)
9060 (label_ref (match_operand 0 "" ""))
9061 ;; (label_ref (match_operand 1 "" ""))
9063 UNSPEC_THUMB1_CASESI))
9064 (clobber (reg:SI IP_REGNUM))
9065 (clobber (reg:SI LR_REGNUM))])]
9067 "* return thumb1_output_casesi(operands);"
9068 [(set_attr "length" "4")]
9071 (define_expand "indirect_jump"
9073 (match_operand:SI 0 "s_register_operand" ""))]
9076 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9077 address and use bx. */
9081 tmp = gen_reg_rtx (SImode);
9082 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9088 ;; NB Never uses BX.
9089 (define_insn "*arm_indirect_jump"
9091 (match_operand:SI 0 "s_register_operand" "r"))]
9093 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9094 [(set_attr "predicable" "yes")]
9097 (define_insn "*load_indirect_jump"
9099 (match_operand:SI 0 "memory_operand" "m"))]
9101 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9102 [(set_attr "type" "load1")
9103 (set_attr "pool_range" "4096")
9104 (set_attr "neg_pool_range" "4084")
9105 (set_attr "predicable" "yes")]
9108 ;; NB Never uses BX.
9109 (define_insn "*thumb1_indirect_jump"
9111 (match_operand:SI 0 "register_operand" "l*r"))]
9114 [(set_attr "conds" "clob")
9115 (set_attr "length" "2")]
9125 if (TARGET_UNIFIED_ASM)
9128 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9129 return \"mov\\tr8, r8\";
9131 [(set (attr "length")
9132 (if_then_else (eq_attr "is_thumb" "yes")
9138 ;; Patterns to allow combination of arithmetic, cond code and shifts
9140 (define_insn "*arith_shiftsi"
9141 [(set (match_operand:SI 0 "s_register_operand" "=r")
9142 (match_operator:SI 1 "shiftable_operator"
9143 [(match_operator:SI 3 "shift_operator"
9144 [(match_operand:SI 4 "s_register_operand" "r")
9145 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9146 (match_operand:SI 2 "s_register_operand" "r")]))]
9148 "%i1%?\\t%0, %2, %4%S3"
9149 [(set_attr "predicable" "yes")
9150 (set_attr "shift" "4")
9151 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9152 (const_string "alu_shift")
9153 (const_string "alu_shift_reg")))]
9157 [(set (match_operand:SI 0 "s_register_operand" "")
9158 (match_operator:SI 1 "shiftable_operator"
9159 [(match_operator:SI 2 "shiftable_operator"
9160 [(match_operator:SI 3 "shift_operator"
9161 [(match_operand:SI 4 "s_register_operand" "")
9162 (match_operand:SI 5 "reg_or_int_operand" "")])
9163 (match_operand:SI 6 "s_register_operand" "")])
9164 (match_operand:SI 7 "arm_rhs_operand" "")]))
9165 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9168 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9171 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9174 (define_insn "*arith_shiftsi_compare0"
9175 [(set (reg:CC_NOOV CC_REGNUM)
9176 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9177 [(match_operator:SI 3 "shift_operator"
9178 [(match_operand:SI 4 "s_register_operand" "r")
9179 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9180 (match_operand:SI 2 "s_register_operand" "r")])
9182 (set (match_operand:SI 0 "s_register_operand" "=r")
9183 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9186 "%i1%.\\t%0, %2, %4%S3"
9187 [(set_attr "conds" "set")
9188 (set_attr "shift" "4")
9189 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9190 (const_string "alu_shift")
9191 (const_string "alu_shift_reg")))]
9194 (define_insn "*arith_shiftsi_compare0_scratch"
9195 [(set (reg:CC_NOOV CC_REGNUM)
9196 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9197 [(match_operator:SI 3 "shift_operator"
9198 [(match_operand:SI 4 "s_register_operand" "r")
9199 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9200 (match_operand:SI 2 "s_register_operand" "r")])
9202 (clobber (match_scratch:SI 0 "=r"))]
9204 "%i1%.\\t%0, %2, %4%S3"
9205 [(set_attr "conds" "set")
9206 (set_attr "shift" "4")
9207 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9208 (const_string "alu_shift")
9209 (const_string "alu_shift_reg")))]
9212 (define_insn "*sub_shiftsi"
9213 [(set (match_operand:SI 0 "s_register_operand" "=r")
9214 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9215 (match_operator:SI 2 "shift_operator"
9216 [(match_operand:SI 3 "s_register_operand" "r")
9217 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9219 "sub%?\\t%0, %1, %3%S2"
9220 [(set_attr "predicable" "yes")
9221 (set_attr "shift" "3")
9222 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9223 (const_string "alu_shift")
9224 (const_string "alu_shift_reg")))]
9227 (define_insn "*sub_shiftsi_compare0"
9228 [(set (reg:CC_NOOV CC_REGNUM)
9230 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9231 (match_operator:SI 2 "shift_operator"
9232 [(match_operand:SI 3 "s_register_operand" "r")
9233 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9235 (set (match_operand:SI 0 "s_register_operand" "=r")
9236 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9239 "sub%.\\t%0, %1, %3%S2"
9240 [(set_attr "conds" "set")
9241 (set_attr "shift" "3")
9242 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9243 (const_string "alu_shift")
9244 (const_string "alu_shift_reg")))]
9247 (define_insn "*sub_shiftsi_compare0_scratch"
9248 [(set (reg:CC_NOOV CC_REGNUM)
9250 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9251 (match_operator:SI 2 "shift_operator"
9252 [(match_operand:SI 3 "s_register_operand" "r")
9253 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9255 (clobber (match_scratch:SI 0 "=r"))]
9257 "sub%.\\t%0, %1, %3%S2"
9258 [(set_attr "conds" "set")
9259 (set_attr "shift" "3")
9260 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9261 (const_string "alu_shift")
9262 (const_string "alu_shift_reg")))]
9267 (define_insn "*and_scc"
9268 [(set (match_operand:SI 0 "s_register_operand" "=r")
9269 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9270 [(match_operand 3 "cc_register" "") (const_int 0)])
9271 (match_operand:SI 2 "s_register_operand" "r")))]
9273 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9274 [(set_attr "conds" "use")
9275 (set_attr "length" "8")]
9278 (define_insn "*ior_scc"
9279 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9280 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9281 [(match_operand 3 "cc_register" "") (const_int 0)])
9282 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9286 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9287 [(set_attr "conds" "use")
9288 (set_attr "length" "4,8")]
9291 (define_insn "*compare_scc"
9292 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9293 (match_operator:SI 1 "arm_comparison_operator"
9294 [(match_operand:SI 2 "s_register_operand" "r,r")
9295 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9296 (clobber (reg:CC CC_REGNUM))]
9299 if (operands[3] == const0_rtx)
9301 if (GET_CODE (operands[1]) == LT)
9302 return \"mov\\t%0, %2, lsr #31\";
9304 if (GET_CODE (operands[1]) == GE)
9305 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9307 if (GET_CODE (operands[1]) == EQ)
9308 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9311 if (GET_CODE (operands[1]) == NE)
9313 if (which_alternative == 1)
9314 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9315 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9317 if (which_alternative == 1)
9318 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9320 output_asm_insn (\"cmp\\t%2, %3\", operands);
9321 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9323 [(set_attr "conds" "clob")
9324 (set_attr "length" "12")]
9327 (define_insn "*cond_move"
9328 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9329 (if_then_else:SI (match_operator 3 "equality_operator"
9330 [(match_operator 4 "arm_comparison_operator"
9331 [(match_operand 5 "cc_register" "") (const_int 0)])
9333 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9334 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9337 if (GET_CODE (operands[3]) == NE)
9339 if (which_alternative != 1)
9340 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9341 if (which_alternative != 0)
9342 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9345 if (which_alternative != 0)
9346 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9347 if (which_alternative != 1)
9348 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9351 [(set_attr "conds" "use")
9352 (set_attr "length" "4,4,8")]
9355 (define_insn "*cond_arith"
9356 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9357 (match_operator:SI 5 "shiftable_operator"
9358 [(match_operator:SI 4 "arm_comparison_operator"
9359 [(match_operand:SI 2 "s_register_operand" "r,r")
9360 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9361 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9362 (clobber (reg:CC CC_REGNUM))]
9365 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9366 return \"%i5\\t%0, %1, %2, lsr #31\";
9368 output_asm_insn (\"cmp\\t%2, %3\", operands);
9369 if (GET_CODE (operands[5]) == AND)
9370 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9371 else if (GET_CODE (operands[5]) == MINUS)
9372 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9373 else if (which_alternative != 0)
9374 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9375 return \"%i5%d4\\t%0, %1, #1\";
9377 [(set_attr "conds" "clob")
9378 (set_attr "length" "12")]
9381 (define_insn "*cond_sub"
9382 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9383 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9384 (match_operator:SI 4 "arm_comparison_operator"
9385 [(match_operand:SI 2 "s_register_operand" "r,r")
9386 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9387 (clobber (reg:CC CC_REGNUM))]
9390 output_asm_insn (\"cmp\\t%2, %3\", operands);
9391 if (which_alternative != 0)
9392 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9393 return \"sub%d4\\t%0, %1, #1\";
9395 [(set_attr "conds" "clob")
9396 (set_attr "length" "8,12")]
9399 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9400 (define_insn "*cmp_ite0"
9401 [(set (match_operand 6 "dominant_cc_register" "")
9404 (match_operator 4 "arm_comparison_operator"
9405 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9406 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9407 (match_operator:SI 5 "arm_comparison_operator"
9408 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9409 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9415 static const char * const opcodes[4][2] =
9417 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9418 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9419 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9420 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9421 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9422 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9423 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9424 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9427 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9429 return opcodes[which_alternative][swap];
9431 [(set_attr "conds" "set")
9432 (set_attr "length" "8")]
9435 (define_insn "*cmp_ite1"
9436 [(set (match_operand 6 "dominant_cc_register" "")
9439 (match_operator 4 "arm_comparison_operator"
9440 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9441 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9442 (match_operator:SI 5 "arm_comparison_operator"
9443 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9444 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9450 static const char * const opcodes[4][2] =
9452 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9453 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9454 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9455 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9456 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9457 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9458 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9459 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9462 comparison_dominates_p (GET_CODE (operands[5]),
9463 reverse_condition (GET_CODE (operands[4])));
9465 return opcodes[which_alternative][swap];
9467 [(set_attr "conds" "set")
9468 (set_attr "length" "8")]
9471 (define_insn "*cmp_and"
9472 [(set (match_operand 6 "dominant_cc_register" "")
9475 (match_operator 4 "arm_comparison_operator"
9476 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9477 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9478 (match_operator:SI 5 "arm_comparison_operator"
9479 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9480 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9485 static const char *const opcodes[4][2] =
9487 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9488 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9489 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9490 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9491 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9492 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9493 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9494 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9497 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9499 return opcodes[which_alternative][swap];
9501 [(set_attr "conds" "set")
9502 (set_attr "predicable" "no")
9503 (set_attr "length" "8")]
9506 (define_insn "*cmp_ior"
9507 [(set (match_operand 6 "dominant_cc_register" "")
9510 (match_operator 4 "arm_comparison_operator"
9511 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9512 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9513 (match_operator:SI 5 "arm_comparison_operator"
9514 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9515 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9520 static const char *const opcodes[4][2] =
9522 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9523 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9524 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9525 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9526 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9527 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9528 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9529 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9532 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9534 return opcodes[which_alternative][swap];
9537 [(set_attr "conds" "set")
9538 (set_attr "length" "8")]
9541 (define_insn_and_split "*ior_scc_scc"
9542 [(set (match_operand:SI 0 "s_register_operand" "=r")
9543 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9544 [(match_operand:SI 1 "s_register_operand" "r")
9545 (match_operand:SI 2 "arm_add_operand" "rIL")])
9546 (match_operator:SI 6 "arm_comparison_operator"
9547 [(match_operand:SI 4 "s_register_operand" "r")
9548 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9549 (clobber (reg:CC CC_REGNUM))]
9551 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9554 "TARGET_ARM && reload_completed"
9558 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9559 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9561 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9563 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9566 [(set_attr "conds" "clob")
9567 (set_attr "length" "16")])
9569 ; If the above pattern is followed by a CMP insn, then the compare is
9570 ; redundant, since we can rework the conditional instruction that follows.
9571 (define_insn_and_split "*ior_scc_scc_cmp"
9572 [(set (match_operand 0 "dominant_cc_register" "")
9573 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9574 [(match_operand:SI 1 "s_register_operand" "r")
9575 (match_operand:SI 2 "arm_add_operand" "rIL")])
9576 (match_operator:SI 6 "arm_comparison_operator"
9577 [(match_operand:SI 4 "s_register_operand" "r")
9578 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9580 (set (match_operand:SI 7 "s_register_operand" "=r")
9581 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9582 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9585 "TARGET_ARM && reload_completed"
9589 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9590 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9592 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9594 [(set_attr "conds" "set")
9595 (set_attr "length" "16")])
9597 (define_insn_and_split "*and_scc_scc"
9598 [(set (match_operand:SI 0 "s_register_operand" "=r")
9599 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9600 [(match_operand:SI 1 "s_register_operand" "r")
9601 (match_operand:SI 2 "arm_add_operand" "rIL")])
9602 (match_operator:SI 6 "arm_comparison_operator"
9603 [(match_operand:SI 4 "s_register_operand" "r")
9604 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9605 (clobber (reg:CC CC_REGNUM))]
9607 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9610 "TARGET_ARM && reload_completed
9611 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9616 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9617 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9619 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9621 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9624 [(set_attr "conds" "clob")
9625 (set_attr "length" "16")])
9627 ; If the above pattern is followed by a CMP insn, then the compare is
9628 ; redundant, since we can rework the conditional instruction that follows.
9629 (define_insn_and_split "*and_scc_scc_cmp"
9630 [(set (match_operand 0 "dominant_cc_register" "")
9631 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9632 [(match_operand:SI 1 "s_register_operand" "r")
9633 (match_operand:SI 2 "arm_add_operand" "rIL")])
9634 (match_operator:SI 6 "arm_comparison_operator"
9635 [(match_operand:SI 4 "s_register_operand" "r")
9636 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9638 (set (match_operand:SI 7 "s_register_operand" "=r")
9639 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9640 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9643 "TARGET_ARM && reload_completed"
9647 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9648 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9650 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9652 [(set_attr "conds" "set")
9653 (set_attr "length" "16")])
9655 ;; If there is no dominance in the comparison, then we can still save an
9656 ;; instruction in the AND case, since we can know that the second compare
9657 ;; need only zero the value if false (if true, then the value is already
9659 (define_insn_and_split "*and_scc_scc_nodom"
9660 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9661 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9662 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9663 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9664 (match_operator:SI 6 "arm_comparison_operator"
9665 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9666 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9667 (clobber (reg:CC CC_REGNUM))]
9669 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9672 "TARGET_ARM && reload_completed"
9673 [(parallel [(set (match_dup 0)
9674 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9675 (clobber (reg:CC CC_REGNUM))])
9676 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9678 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9681 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9682 operands[4], operands[5]),
9684 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9686 [(set_attr "conds" "clob")
9687 (set_attr "length" "20")])
9690 [(set (reg:CC_NOOV CC_REGNUM)
9691 (compare:CC_NOOV (ior:SI
9692 (and:SI (match_operand:SI 0 "s_register_operand" "")
9694 (match_operator:SI 1 "arm_comparison_operator"
9695 [(match_operand:SI 2 "s_register_operand" "")
9696 (match_operand:SI 3 "arm_add_operand" "")]))
9698 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9701 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9703 (set (reg:CC_NOOV CC_REGNUM)
9704 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9709 [(set (reg:CC_NOOV CC_REGNUM)
9710 (compare:CC_NOOV (ior:SI
9711 (match_operator:SI 1 "arm_comparison_operator"
9712 [(match_operand:SI 2 "s_register_operand" "")
9713 (match_operand:SI 3 "arm_add_operand" "")])
9714 (and:SI (match_operand:SI 0 "s_register_operand" "")
9717 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9720 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9722 (set (reg:CC_NOOV CC_REGNUM)
9723 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9726 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9728 (define_insn "*negscc"
9729 [(set (match_operand:SI 0 "s_register_operand" "=r")
9730 (neg:SI (match_operator 3 "arm_comparison_operator"
9731 [(match_operand:SI 1 "s_register_operand" "r")
9732 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9733 (clobber (reg:CC CC_REGNUM))]
9736 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9737 return \"mov\\t%0, %1, asr #31\";
9739 if (GET_CODE (operands[3]) == NE)
9740 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9742 output_asm_insn (\"cmp\\t%1, %2\", operands);
9743 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9744 return \"mvn%d3\\t%0, #0\";
9746 [(set_attr "conds" "clob")
9747 (set_attr "length" "12")]
9750 (define_insn "movcond"
9751 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9753 (match_operator 5 "arm_comparison_operator"
9754 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9755 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9756 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9757 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9758 (clobber (reg:CC CC_REGNUM))]
9761 if (GET_CODE (operands[5]) == LT
9762 && (operands[4] == const0_rtx))
9764 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9766 if (operands[2] == const0_rtx)
9767 return \"and\\t%0, %1, %3, asr #31\";
9768 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9770 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9772 if (operands[1] == const0_rtx)
9773 return \"bic\\t%0, %2, %3, asr #31\";
9774 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9776 /* The only case that falls through to here is when both ops 1 & 2
9780 if (GET_CODE (operands[5]) == GE
9781 && (operands[4] == const0_rtx))
9783 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9785 if (operands[2] == const0_rtx)
9786 return \"bic\\t%0, %1, %3, asr #31\";
9787 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9789 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9791 if (operands[1] == const0_rtx)
9792 return \"and\\t%0, %2, %3, asr #31\";
9793 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9795 /* The only case that falls through to here is when both ops 1 & 2
9798 if (GET_CODE (operands[4]) == CONST_INT
9799 && !const_ok_for_arm (INTVAL (operands[4])))
9800 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9802 output_asm_insn (\"cmp\\t%3, %4\", operands);
9803 if (which_alternative != 0)
9804 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9805 if (which_alternative != 1)
9806 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9809 [(set_attr "conds" "clob")
9810 (set_attr "length" "8,8,12")]
9813 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9815 (define_insn "*ifcompare_plus_move"
9816 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9817 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9818 [(match_operand:SI 4 "s_register_operand" "r,r")
9819 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9821 (match_operand:SI 2 "s_register_operand" "r,r")
9822 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9823 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9824 (clobber (reg:CC CC_REGNUM))]
9827 [(set_attr "conds" "clob")
9828 (set_attr "length" "8,12")]
9831 (define_insn "*if_plus_move"
9832 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9834 (match_operator 4 "arm_comparison_operator"
9835 [(match_operand 5 "cc_register" "") (const_int 0)])
9837 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9838 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9839 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9843 sub%d4\\t%0, %2, #%n3
9844 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9845 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9846 [(set_attr "conds" "use")
9847 (set_attr "length" "4,4,8,8")
9848 (set_attr "type" "*,*,*,*")]
9851 (define_insn "*ifcompare_move_plus"
9852 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9853 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9854 [(match_operand:SI 4 "s_register_operand" "r,r")
9855 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9856 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9858 (match_operand:SI 2 "s_register_operand" "r,r")
9859 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9860 (clobber (reg:CC CC_REGNUM))]
9863 [(set_attr "conds" "clob")
9864 (set_attr "length" "8,12")]
9867 (define_insn "*if_move_plus"
9868 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9870 (match_operator 4 "arm_comparison_operator"
9871 [(match_operand 5 "cc_register" "") (const_int 0)])
9872 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9874 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9875 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9879 sub%D4\\t%0, %2, #%n3
9880 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9881 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9882 [(set_attr "conds" "use")
9883 (set_attr "length" "4,4,8,8")
9884 (set_attr "type" "*,*,*,*")]
9887 (define_insn "*ifcompare_arith_arith"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r")
9889 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9890 [(match_operand:SI 5 "s_register_operand" "r")
9891 (match_operand:SI 6 "arm_add_operand" "rIL")])
9892 (match_operator:SI 8 "shiftable_operator"
9893 [(match_operand:SI 1 "s_register_operand" "r")
9894 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9895 (match_operator:SI 7 "shiftable_operator"
9896 [(match_operand:SI 3 "s_register_operand" "r")
9897 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9898 (clobber (reg:CC CC_REGNUM))]
9901 [(set_attr "conds" "clob")
9902 (set_attr "length" "12")]
9905 (define_insn "*if_arith_arith"
9906 [(set (match_operand:SI 0 "s_register_operand" "=r")
9907 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9908 [(match_operand 8 "cc_register" "") (const_int 0)])
9909 (match_operator:SI 6 "shiftable_operator"
9910 [(match_operand:SI 1 "s_register_operand" "r")
9911 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9912 (match_operator:SI 7 "shiftable_operator"
9913 [(match_operand:SI 3 "s_register_operand" "r")
9914 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9916 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9917 [(set_attr "conds" "use")
9918 (set_attr "length" "8")]
9921 (define_insn "*ifcompare_arith_move"
9922 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9923 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9924 [(match_operand:SI 2 "s_register_operand" "r,r")
9925 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9926 (match_operator:SI 7 "shiftable_operator"
9927 [(match_operand:SI 4 "s_register_operand" "r,r")
9928 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9929 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9930 (clobber (reg:CC CC_REGNUM))]
9933 /* If we have an operation where (op x 0) is the identity operation and
9934 the conditional operator is LT or GE and we are comparing against zero and
9935 everything is in registers then we can do this in two instructions. */
9936 if (operands[3] == const0_rtx
9937 && GET_CODE (operands[7]) != AND
9938 && GET_CODE (operands[5]) == REG
9939 && GET_CODE (operands[1]) == REG
9940 && REGNO (operands[1]) == REGNO (operands[4])
9941 && REGNO (operands[4]) != REGNO (operands[0]))
9943 if (GET_CODE (operands[6]) == LT)
9944 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9945 else if (GET_CODE (operands[6]) == GE)
9946 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9948 if (GET_CODE (operands[3]) == CONST_INT
9949 && !const_ok_for_arm (INTVAL (operands[3])))
9950 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9952 output_asm_insn (\"cmp\\t%2, %3\", operands);
9953 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9954 if (which_alternative != 0)
9955 return \"mov%D6\\t%0, %1\";
9958 [(set_attr "conds" "clob")
9959 (set_attr "length" "8,12")]
9962 (define_insn "*if_arith_move"
9963 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9964 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9965 [(match_operand 6 "cc_register" "") (const_int 0)])
9966 (match_operator:SI 5 "shiftable_operator"
9967 [(match_operand:SI 2 "s_register_operand" "r,r")
9968 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9969 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9973 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9974 [(set_attr "conds" "use")
9975 (set_attr "length" "4,8")
9976 (set_attr "type" "*,*")]
9979 (define_insn "*ifcompare_move_arith"
9980 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9981 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9982 [(match_operand:SI 4 "s_register_operand" "r,r")
9983 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9984 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9985 (match_operator:SI 7 "shiftable_operator"
9986 [(match_operand:SI 2 "s_register_operand" "r,r")
9987 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9988 (clobber (reg:CC CC_REGNUM))]
9991 /* If we have an operation where (op x 0) is the identity operation and
9992 the conditional operator is LT or GE and we are comparing against zero and
9993 everything is in registers then we can do this in two instructions */
9994 if (operands[5] == const0_rtx
9995 && GET_CODE (operands[7]) != AND
9996 && GET_CODE (operands[3]) == REG
9997 && GET_CODE (operands[1]) == REG
9998 && REGNO (operands[1]) == REGNO (operands[2])
9999 && REGNO (operands[2]) != REGNO (operands[0]))
10001 if (GET_CODE (operands[6]) == GE)
10002 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10003 else if (GET_CODE (operands[6]) == LT)
10004 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10007 if (GET_CODE (operands[5]) == CONST_INT
10008 && !const_ok_for_arm (INTVAL (operands[5])))
10009 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10011 output_asm_insn (\"cmp\\t%4, %5\", operands);
10013 if (which_alternative != 0)
10014 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10015 return \"%I7%D6\\t%0, %2, %3\";
10017 [(set_attr "conds" "clob")
10018 (set_attr "length" "8,12")]
10021 (define_insn "*if_move_arith"
10022 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10024 (match_operator 4 "arm_comparison_operator"
10025 [(match_operand 6 "cc_register" "") (const_int 0)])
10026 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10027 (match_operator:SI 5 "shiftable_operator"
10028 [(match_operand:SI 2 "s_register_operand" "r,r")
10029 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10032 %I5%D4\\t%0, %2, %3
10033 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10034 [(set_attr "conds" "use")
10035 (set_attr "length" "4,8")
10036 (set_attr "type" "*,*")]
10039 (define_insn "*ifcompare_move_not"
10040 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10042 (match_operator 5 "arm_comparison_operator"
10043 [(match_operand:SI 3 "s_register_operand" "r,r")
10044 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10045 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10047 (match_operand:SI 2 "s_register_operand" "r,r"))))
10048 (clobber (reg:CC CC_REGNUM))]
10051 [(set_attr "conds" "clob")
10052 (set_attr "length" "8,12")]
10055 (define_insn "*if_move_not"
10056 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10058 (match_operator 4 "arm_comparison_operator"
10059 [(match_operand 3 "cc_register" "") (const_int 0)])
10060 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10061 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10065 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10066 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10067 [(set_attr "conds" "use")
10068 (set_attr "length" "4,8,8")]
10071 (define_insn "*ifcompare_not_move"
10072 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10074 (match_operator 5 "arm_comparison_operator"
10075 [(match_operand:SI 3 "s_register_operand" "r,r")
10076 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10078 (match_operand:SI 2 "s_register_operand" "r,r"))
10079 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10080 (clobber (reg:CC CC_REGNUM))]
10083 [(set_attr "conds" "clob")
10084 (set_attr "length" "8,12")]
10087 (define_insn "*if_not_move"
10088 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10090 (match_operator 4 "arm_comparison_operator"
10091 [(match_operand 3 "cc_register" "") (const_int 0)])
10092 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10093 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10097 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10098 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10099 [(set_attr "conds" "use")
10100 (set_attr "length" "4,8,8")]
10103 (define_insn "*ifcompare_shift_move"
10104 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10106 (match_operator 6 "arm_comparison_operator"
10107 [(match_operand:SI 4 "s_register_operand" "r,r")
10108 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10109 (match_operator:SI 7 "shift_operator"
10110 [(match_operand:SI 2 "s_register_operand" "r,r")
10111 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10112 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10113 (clobber (reg:CC CC_REGNUM))]
10116 [(set_attr "conds" "clob")
10117 (set_attr "length" "8,12")]
10120 (define_insn "*if_shift_move"
10121 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10123 (match_operator 5 "arm_comparison_operator"
10124 [(match_operand 6 "cc_register" "") (const_int 0)])
10125 (match_operator:SI 4 "shift_operator"
10126 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10127 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10128 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10132 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10133 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10134 [(set_attr "conds" "use")
10135 (set_attr "shift" "2")
10136 (set_attr "length" "4,8,8")
10137 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10138 (const_string "alu_shift")
10139 (const_string "alu_shift_reg")))]
10142 (define_insn "*ifcompare_move_shift"
10143 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10145 (match_operator 6 "arm_comparison_operator"
10146 [(match_operand:SI 4 "s_register_operand" "r,r")
10147 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10148 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10149 (match_operator:SI 7 "shift_operator"
10150 [(match_operand:SI 2 "s_register_operand" "r,r")
10151 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10152 (clobber (reg:CC CC_REGNUM))]
10155 [(set_attr "conds" "clob")
10156 (set_attr "length" "8,12")]
10159 (define_insn "*if_move_shift"
10160 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10162 (match_operator 5 "arm_comparison_operator"
10163 [(match_operand 6 "cc_register" "") (const_int 0)])
10164 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10165 (match_operator:SI 4 "shift_operator"
10166 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10167 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10171 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10172 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10173 [(set_attr "conds" "use")
10174 (set_attr "shift" "2")
10175 (set_attr "length" "4,8,8")
10176 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10177 (const_string "alu_shift")
10178 (const_string "alu_shift_reg")))]
10181 (define_insn "*ifcompare_shift_shift"
10182 [(set (match_operand:SI 0 "s_register_operand" "=r")
10184 (match_operator 7 "arm_comparison_operator"
10185 [(match_operand:SI 5 "s_register_operand" "r")
10186 (match_operand:SI 6 "arm_add_operand" "rIL")])
10187 (match_operator:SI 8 "shift_operator"
10188 [(match_operand:SI 1 "s_register_operand" "r")
10189 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10190 (match_operator:SI 9 "shift_operator"
10191 [(match_operand:SI 3 "s_register_operand" "r")
10192 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10193 (clobber (reg:CC CC_REGNUM))]
10196 [(set_attr "conds" "clob")
10197 (set_attr "length" "12")]
10200 (define_insn "*if_shift_shift"
10201 [(set (match_operand:SI 0 "s_register_operand" "=r")
10203 (match_operator 5 "arm_comparison_operator"
10204 [(match_operand 8 "cc_register" "") (const_int 0)])
10205 (match_operator:SI 6 "shift_operator"
10206 [(match_operand:SI 1 "s_register_operand" "r")
10207 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10208 (match_operator:SI 7 "shift_operator"
10209 [(match_operand:SI 3 "s_register_operand" "r")
10210 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10212 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10213 [(set_attr "conds" "use")
10214 (set_attr "shift" "1")
10215 (set_attr "length" "8")
10216 (set (attr "type") (if_then_else
10217 (and (match_operand 2 "const_int_operand" "")
10218 (match_operand 4 "const_int_operand" ""))
10219 (const_string "alu_shift")
10220 (const_string "alu_shift_reg")))]
10223 (define_insn "*ifcompare_not_arith"
10224 [(set (match_operand:SI 0 "s_register_operand" "=r")
10226 (match_operator 6 "arm_comparison_operator"
10227 [(match_operand:SI 4 "s_register_operand" "r")
10228 (match_operand:SI 5 "arm_add_operand" "rIL")])
10229 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10230 (match_operator:SI 7 "shiftable_operator"
10231 [(match_operand:SI 2 "s_register_operand" "r")
10232 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10233 (clobber (reg:CC CC_REGNUM))]
10236 [(set_attr "conds" "clob")
10237 (set_attr "length" "12")]
10240 (define_insn "*if_not_arith"
10241 [(set (match_operand:SI 0 "s_register_operand" "=r")
10243 (match_operator 5 "arm_comparison_operator"
10244 [(match_operand 4 "cc_register" "") (const_int 0)])
10245 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10246 (match_operator:SI 6 "shiftable_operator"
10247 [(match_operand:SI 2 "s_register_operand" "r")
10248 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10250 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10251 [(set_attr "conds" "use")
10252 (set_attr "length" "8")]
10255 (define_insn "*ifcompare_arith_not"
10256 [(set (match_operand:SI 0 "s_register_operand" "=r")
10258 (match_operator 6 "arm_comparison_operator"
10259 [(match_operand:SI 4 "s_register_operand" "r")
10260 (match_operand:SI 5 "arm_add_operand" "rIL")])
10261 (match_operator:SI 7 "shiftable_operator"
10262 [(match_operand:SI 2 "s_register_operand" "r")
10263 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10264 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10265 (clobber (reg:CC CC_REGNUM))]
10268 [(set_attr "conds" "clob")
10269 (set_attr "length" "12")]
10272 (define_insn "*if_arith_not"
10273 [(set (match_operand:SI 0 "s_register_operand" "=r")
10275 (match_operator 5 "arm_comparison_operator"
10276 [(match_operand 4 "cc_register" "") (const_int 0)])
10277 (match_operator:SI 6 "shiftable_operator"
10278 [(match_operand:SI 2 "s_register_operand" "r")
10279 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10280 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10282 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10283 [(set_attr "conds" "use")
10284 (set_attr "length" "8")]
10287 (define_insn "*ifcompare_neg_move"
10288 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10290 (match_operator 5 "arm_comparison_operator"
10291 [(match_operand:SI 3 "s_register_operand" "r,r")
10292 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10293 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10294 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10295 (clobber (reg:CC CC_REGNUM))]
10298 [(set_attr "conds" "clob")
10299 (set_attr "length" "8,12")]
10302 (define_insn "*if_neg_move"
10303 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10305 (match_operator 4 "arm_comparison_operator"
10306 [(match_operand 3 "cc_register" "") (const_int 0)])
10307 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10308 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10311 rsb%d4\\t%0, %2, #0
10312 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10313 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10314 [(set_attr "conds" "use")
10315 (set_attr "length" "4,8,8")]
10318 (define_insn "*ifcompare_move_neg"
10319 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10321 (match_operator 5 "arm_comparison_operator"
10322 [(match_operand:SI 3 "s_register_operand" "r,r")
10323 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10324 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10325 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10326 (clobber (reg:CC CC_REGNUM))]
10329 [(set_attr "conds" "clob")
10330 (set_attr "length" "8,12")]
10333 (define_insn "*if_move_neg"
10334 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10336 (match_operator 4 "arm_comparison_operator"
10337 [(match_operand 3 "cc_register" "") (const_int 0)])
10338 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10339 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10342 rsb%D4\\t%0, %2, #0
10343 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10344 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10345 [(set_attr "conds" "use")
10346 (set_attr "length" "4,8,8")]
10349 (define_insn "*arith_adjacentmem"
10350 [(set (match_operand:SI 0 "s_register_operand" "=r")
10351 (match_operator:SI 1 "shiftable_operator"
10352 [(match_operand:SI 2 "memory_operand" "m")
10353 (match_operand:SI 3 "memory_operand" "m")]))
10354 (clobber (match_scratch:SI 4 "=r"))]
10355 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10361 HOST_WIDE_INT val1 = 0, val2 = 0;
10363 if (REGNO (operands[0]) > REGNO (operands[4]))
10365 ldm[1] = operands[4];
10366 ldm[2] = operands[0];
10370 ldm[1] = operands[0];
10371 ldm[2] = operands[4];
10374 base_reg = XEXP (operands[2], 0);
10376 if (!REG_P (base_reg))
10378 val1 = INTVAL (XEXP (base_reg, 1));
10379 base_reg = XEXP (base_reg, 0);
10382 if (!REG_P (XEXP (operands[3], 0)))
10383 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10385 arith[0] = operands[0];
10386 arith[3] = operands[1];
10400 if (val1 !=0 && val2 != 0)
10404 if (val1 == 4 || val2 == 4)
10405 /* Other val must be 8, since we know they are adjacent and neither
10407 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10408 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10410 ldm[0] = ops[0] = operands[4];
10412 ops[2] = GEN_INT (val1);
10413 output_add_immediate (ops);
10415 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10417 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10421 /* Offset is out of range for a single add, so use two ldr. */
10424 ops[2] = GEN_INT (val1);
10425 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10427 ops[2] = GEN_INT (val2);
10428 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10431 else if (val1 != 0)
10434 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10436 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10441 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10443 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10445 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10448 [(set_attr "length" "12")
10449 (set_attr "predicable" "yes")
10450 (set_attr "type" "load1")]
10453 ; This pattern is never tried by combine, so do it as a peephole
10456 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10457 (match_operand:SI 1 "arm_general_register_operand" ""))
10458 (set (reg:CC CC_REGNUM)
10459 (compare:CC (match_dup 1) (const_int 0)))]
10461 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10462 (set (match_dup 0) (match_dup 1))])]
10466 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10467 ; reversed, check that the memory references aren't volatile.
10470 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10471 (match_operand:SI 4 "memory_operand" "m"))
10472 (set (match_operand:SI 1 "s_register_operand" "=rk")
10473 (match_operand:SI 5 "memory_operand" "m"))
10474 (set (match_operand:SI 2 "s_register_operand" "=rk")
10475 (match_operand:SI 6 "memory_operand" "m"))
10476 (set (match_operand:SI 3 "s_register_operand" "=rk")
10477 (match_operand:SI 7 "memory_operand" "m"))]
10478 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10480 return emit_ldm_seq (operands, 4);
10485 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10486 (match_operand:SI 3 "memory_operand" "m"))
10487 (set (match_operand:SI 1 "s_register_operand" "=rk")
10488 (match_operand:SI 4 "memory_operand" "m"))
10489 (set (match_operand:SI 2 "s_register_operand" "=rk")
10490 (match_operand:SI 5 "memory_operand" "m"))]
10491 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10493 return emit_ldm_seq (operands, 3);
10498 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10499 (match_operand:SI 2 "memory_operand" "m"))
10500 (set (match_operand:SI 1 "s_register_operand" "=rk")
10501 (match_operand:SI 3 "memory_operand" "m"))]
10502 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10504 return emit_ldm_seq (operands, 2);
10509 [(set (match_operand:SI 4 "memory_operand" "=m")
10510 (match_operand:SI 0 "s_register_operand" "rk"))
10511 (set (match_operand:SI 5 "memory_operand" "=m")
10512 (match_operand:SI 1 "s_register_operand" "rk"))
10513 (set (match_operand:SI 6 "memory_operand" "=m")
10514 (match_operand:SI 2 "s_register_operand" "rk"))
10515 (set (match_operand:SI 7 "memory_operand" "=m")
10516 (match_operand:SI 3 "s_register_operand" "rk"))]
10517 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10519 return emit_stm_seq (operands, 4);
10524 [(set (match_operand:SI 3 "memory_operand" "=m")
10525 (match_operand:SI 0 "s_register_operand" "rk"))
10526 (set (match_operand:SI 4 "memory_operand" "=m")
10527 (match_operand:SI 1 "s_register_operand" "rk"))
10528 (set (match_operand:SI 5 "memory_operand" "=m")
10529 (match_operand:SI 2 "s_register_operand" "rk"))]
10530 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10532 return emit_stm_seq (operands, 3);
10537 [(set (match_operand:SI 2 "memory_operand" "=m")
10538 (match_operand:SI 0 "s_register_operand" "rk"))
10539 (set (match_operand:SI 3 "memory_operand" "=m")
10540 (match_operand:SI 1 "s_register_operand" "rk"))]
10541 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10543 return emit_stm_seq (operands, 2);
10548 [(set (match_operand:SI 0 "s_register_operand" "")
10549 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10551 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10552 [(match_operand:SI 3 "s_register_operand" "")
10553 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10554 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10556 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10557 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10562 ;; This split can be used because CC_Z mode implies that the following
10563 ;; branch will be an equality, or an unsigned inequality, so the sign
10564 ;; extension is not needed.
10567 [(set (reg:CC_Z CC_REGNUM)
10569 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10571 (match_operand 1 "const_int_operand" "")))
10572 (clobber (match_scratch:SI 2 ""))]
10574 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10575 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10576 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10577 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10579 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10582 ;; ??? Check the patterns above for Thumb-2 usefulness
10584 (define_expand "prologue"
10585 [(clobber (const_int 0))]
10588 arm_expand_prologue ();
10590 thumb1_expand_prologue ();
10595 (define_expand "epilogue"
10596 [(clobber (const_int 0))]
10599 if (crtl->calls_eh_return)
10600 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10602 thumb1_expand_epilogue ();
10603 else if (USE_RETURN_INSN (FALSE))
10605 emit_jump_insn (gen_return ());
10608 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10610 gen_rtx_RETURN (VOIDmode)),
10611 VUNSPEC_EPILOGUE));
10616 ;; Note - although unspec_volatile's USE all hard registers,
10617 ;; USEs are ignored after relaod has completed. Thus we need
10618 ;; to add an unspec of the link register to ensure that flow
10619 ;; does not think that it is unused by the sibcall branch that
10620 ;; will replace the standard function epilogue.
10621 (define_insn "sibcall_epilogue"
10622 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10623 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10626 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10627 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10628 return arm_output_epilogue (next_nonnote_insn (insn));
10630 ;; Length is absolute worst case
10631 [(set_attr "length" "44")
10632 (set_attr "type" "block")
10633 ;; We don't clobber the conditions, but the potential length of this
10634 ;; operation is sufficient to make conditionalizing the sequence
10635 ;; unlikely to be profitable.
10636 (set_attr "conds" "clob")]
10639 (define_insn "*epilogue_insns"
10640 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10644 return arm_output_epilogue (NULL);
10645 else /* TARGET_THUMB1 */
10646 return thumb_unexpanded_epilogue ();
10648 ; Length is absolute worst case
10649 [(set_attr "length" "44")
10650 (set_attr "type" "block")
10651 ;; We don't clobber the conditions, but the potential length of this
10652 ;; operation is sufficient to make conditionalizing the sequence
10653 ;; unlikely to be profitable.
10654 (set_attr "conds" "clob")]
10657 (define_expand "eh_epilogue"
10658 [(use (match_operand:SI 0 "register_operand" ""))
10659 (use (match_operand:SI 1 "register_operand" ""))
10660 (use (match_operand:SI 2 "register_operand" ""))]
10664 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10665 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10667 rtx ra = gen_rtx_REG (Pmode, 2);
10669 emit_move_insn (ra, operands[2]);
10672 /* This is a hack -- we may have crystalized the function type too
10674 cfun->machine->func_type = 0;
10678 ;; This split is only used during output to reduce the number of patterns
10679 ;; that need assembler instructions adding to them. We allowed the setting
10680 ;; of the conditions to be implicit during rtl generation so that
10681 ;; the conditional compare patterns would work. However this conflicts to
10682 ;; some extent with the conditional data operations, so we have to split them
10685 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10686 ;; conditional execution sufficient?
10689 [(set (match_operand:SI 0 "s_register_operand" "")
10690 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10691 [(match_operand 2 "" "") (match_operand 3 "" "")])
10693 (match_operand 4 "" "")))
10694 (clobber (reg:CC CC_REGNUM))]
10695 "TARGET_ARM && reload_completed"
10696 [(set (match_dup 5) (match_dup 6))
10697 (cond_exec (match_dup 7)
10698 (set (match_dup 0) (match_dup 4)))]
10701 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10702 operands[2], operands[3]);
10703 enum rtx_code rc = GET_CODE (operands[1]);
10705 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10706 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10707 if (mode == CCFPmode || mode == CCFPEmode)
10708 rc = reverse_condition_maybe_unordered (rc);
10710 rc = reverse_condition (rc);
10712 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10717 [(set (match_operand:SI 0 "s_register_operand" "")
10718 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10719 [(match_operand 2 "" "") (match_operand 3 "" "")])
10720 (match_operand 4 "" "")
10722 (clobber (reg:CC CC_REGNUM))]
10723 "TARGET_ARM && reload_completed"
10724 [(set (match_dup 5) (match_dup 6))
10725 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10726 (set (match_dup 0) (match_dup 4)))]
10729 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10730 operands[2], operands[3]);
10732 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10733 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10738 [(set (match_operand:SI 0 "s_register_operand" "")
10739 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10740 [(match_operand 2 "" "") (match_operand 3 "" "")])
10741 (match_operand 4 "" "")
10742 (match_operand 5 "" "")))
10743 (clobber (reg:CC CC_REGNUM))]
10744 "TARGET_ARM && reload_completed"
10745 [(set (match_dup 6) (match_dup 7))
10746 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10747 (set (match_dup 0) (match_dup 4)))
10748 (cond_exec (match_dup 8)
10749 (set (match_dup 0) (match_dup 5)))]
10752 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10753 operands[2], operands[3]);
10754 enum rtx_code rc = GET_CODE (operands[1]);
10756 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10757 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10758 if (mode == CCFPmode || mode == CCFPEmode)
10759 rc = reverse_condition_maybe_unordered (rc);
10761 rc = reverse_condition (rc);
10763 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10768 [(set (match_operand:SI 0 "s_register_operand" "")
10769 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10770 [(match_operand:SI 2 "s_register_operand" "")
10771 (match_operand:SI 3 "arm_add_operand" "")])
10772 (match_operand:SI 4 "arm_rhs_operand" "")
10774 (match_operand:SI 5 "s_register_operand" ""))))
10775 (clobber (reg:CC CC_REGNUM))]
10776 "TARGET_ARM && reload_completed"
10777 [(set (match_dup 6) (match_dup 7))
10778 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10779 (set (match_dup 0) (match_dup 4)))
10780 (cond_exec (match_dup 8)
10781 (set (match_dup 0) (not:SI (match_dup 5))))]
10784 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10785 operands[2], operands[3]);
10786 enum rtx_code rc = GET_CODE (operands[1]);
10788 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10789 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10790 if (mode == CCFPmode || mode == CCFPEmode)
10791 rc = reverse_condition_maybe_unordered (rc);
10793 rc = reverse_condition (rc);
10795 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10799 (define_insn "*cond_move_not"
10800 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10801 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10802 [(match_operand 3 "cc_register" "") (const_int 0)])
10803 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10805 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10809 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10810 [(set_attr "conds" "use")
10811 (set_attr "length" "4,8")]
10814 ;; The next two patterns occur when an AND operation is followed by a
10815 ;; scc insn sequence
10817 (define_insn "*sign_extract_onebit"
10818 [(set (match_operand:SI 0 "s_register_operand" "=r")
10819 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10821 (match_operand:SI 2 "const_int_operand" "n")))
10822 (clobber (reg:CC CC_REGNUM))]
10825 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10826 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10827 return \"mvnne\\t%0, #0\";
10829 [(set_attr "conds" "clob")
10830 (set_attr "length" "8")]
10833 (define_insn "*not_signextract_onebit"
10834 [(set (match_operand:SI 0 "s_register_operand" "=r")
10836 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10838 (match_operand:SI 2 "const_int_operand" "n"))))
10839 (clobber (reg:CC CC_REGNUM))]
10842 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10843 output_asm_insn (\"tst\\t%1, %2\", operands);
10844 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10845 return \"movne\\t%0, #0\";
10847 [(set_attr "conds" "clob")
10848 (set_attr "length" "12")]
10850 ;; ??? The above patterns need auditing for Thumb-2
10852 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10853 ;; expressions. For simplicity, the first register is also in the unspec
10855 (define_insn "*push_multi"
10856 [(match_parallel 2 "multi_register_push"
10857 [(set (match_operand:BLK 0 "memory_operand" "=m")
10858 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10859 UNSPEC_PUSH_MULT))])]
10863 int num_saves = XVECLEN (operands[2], 0);
10865 /* For the StrongARM at least it is faster to
10866 use STR to store only a single register.
10867 In Thumb mode always use push, and the assembler will pick
10868 something appropriate. */
10869 if (num_saves == 1 && TARGET_ARM)
10870 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10877 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10879 strcpy (pattern, \"push\\t{%1\");
10881 for (i = 1; i < num_saves; i++)
10883 strcat (pattern, \", %|\");
10885 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10888 strcat (pattern, \"}\");
10889 output_asm_insn (pattern, operands);
10894 [(set_attr "type" "store4")]
10897 (define_insn "stack_tie"
10898 [(set (mem:BLK (scratch))
10899 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10900 (match_operand:SI 1 "s_register_operand" "rk")]
10904 [(set_attr "length" "0")]
10907 ;; Similarly for the floating point registers
10908 (define_insn "*push_fp_multi"
10909 [(match_parallel 2 "multi_register_push"
10910 [(set (match_operand:BLK 0 "memory_operand" "=m")
10911 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10912 UNSPEC_PUSH_MULT))])]
10913 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10918 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10919 output_asm_insn (pattern, operands);
10922 [(set_attr "type" "f_store")]
10925 ;; Special patterns for dealing with the constant pool
10927 (define_insn "align_4"
10928 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10931 assemble_align (32);
10936 (define_insn "align_8"
10937 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10940 assemble_align (64);
10945 (define_insn "consttable_end"
10946 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10949 making_const_table = FALSE;
10954 (define_insn "consttable_1"
10955 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10958 making_const_table = TRUE;
10959 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10960 assemble_zeros (3);
10963 [(set_attr "length" "4")]
10966 (define_insn "consttable_2"
10967 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10970 making_const_table = TRUE;
10971 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10972 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10973 assemble_zeros (2);
10976 [(set_attr "length" "4")]
10979 (define_insn "consttable_4"
10980 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10984 rtx x = operands[0];
10985 making_const_table = TRUE;
10986 switch (GET_MODE_CLASS (GET_MODE (x)))
10989 if (GET_MODE (x) == HFmode)
10990 arm_emit_fp16_const (x);
10994 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10995 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10999 /* XXX: Sometimes gcc does something really dumb and ends up with
11000 a HIGH in a constant pool entry, usually because it's trying to
11001 load into a VFP register. We know this will always be used in
11002 combination with a LO_SUM which ignores the high bits, so just
11003 strip off the HIGH. */
11004 if (GET_CODE (x) == HIGH)
11006 assemble_integer (x, 4, BITS_PER_WORD, 1);
11007 mark_symbol_refs_as_used (x);
11012 [(set_attr "length" "4")]
11015 (define_insn "consttable_8"
11016 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11020 making_const_table = TRUE;
11021 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11026 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11027 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11031 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11036 [(set_attr "length" "8")]
11039 (define_insn "consttable_16"
11040 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11044 making_const_table = TRUE;
11045 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11050 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11051 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11055 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11060 [(set_attr "length" "16")]
11063 ;; Miscellaneous Thumb patterns
11065 (define_expand "tablejump"
11066 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11067 (use (label_ref (match_operand 1 "" "")))])]
11072 /* Hopefully, CSE will eliminate this copy. */
11073 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11074 rtx reg2 = gen_reg_rtx (SImode);
11076 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11077 operands[0] = reg2;
11082 ;; NB never uses BX.
11083 (define_insn "*thumb1_tablejump"
11084 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11085 (use (label_ref (match_operand 1 "" "")))]
11088 [(set_attr "length" "2")]
11091 ;; V5 Instructions,
11093 (define_insn "clzsi2"
11094 [(set (match_operand:SI 0 "s_register_operand" "=r")
11095 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11096 "TARGET_32BIT && arm_arch5"
11098 [(set_attr "predicable" "yes")
11099 (set_attr "insn" "clz")])
11101 (define_insn "rbitsi2"
11102 [(set (match_operand:SI 0 "s_register_operand" "=r")
11103 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11104 "TARGET_32BIT && arm_arch_thumb2"
11106 [(set_attr "predicable" "yes")
11107 (set_attr "insn" "clz")])
11109 (define_expand "ctzsi2"
11110 [(set (match_operand:SI 0 "s_register_operand" "")
11111 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11112 "TARGET_32BIT && arm_arch_thumb2"
11115 rtx tmp = gen_reg_rtx (SImode);
11116 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11117 emit_insn (gen_clzsi2 (operands[0], tmp));
11123 ;; V5E instructions.
11125 (define_insn "prefetch"
11126 [(prefetch (match_operand:SI 0 "address_operand" "p")
11127 (match_operand:SI 1 "" "")
11128 (match_operand:SI 2 "" ""))]
11129 "TARGET_32BIT && arm_arch5e"
11132 ;; General predication pattern
11135 [(match_operator 0 "arm_comparison_operator"
11136 [(match_operand 1 "cc_register" "")
11142 (define_insn "prologue_use"
11143 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11145 "%@ %0 needed for prologue"
11146 [(set_attr "length" "0")]
11150 ;; Patterns for exception handling
11152 (define_expand "eh_return"
11153 [(use (match_operand 0 "general_operand" ""))]
11158 emit_insn (gen_arm_eh_return (operands[0]));
11160 emit_insn (gen_thumb_eh_return (operands[0]));
11165 ;; We can't expand this before we know where the link register is stored.
11166 (define_insn_and_split "arm_eh_return"
11167 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11169 (clobber (match_scratch:SI 1 "=&r"))]
11172 "&& reload_completed"
11176 arm_set_return_address (operands[0], operands[1]);
11181 (define_insn_and_split "thumb_eh_return"
11182 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11184 (clobber (match_scratch:SI 1 "=&l"))]
11187 "&& reload_completed"
11191 thumb_set_return_address (operands[0], operands[1]);
11199 (define_insn "load_tp_hard"
11200 [(set (match_operand:SI 0 "register_operand" "=r")
11201 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11203 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11204 [(set_attr "predicable" "yes")]
11207 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11208 (define_insn "load_tp_soft"
11209 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11210 (clobber (reg:SI LR_REGNUM))
11211 (clobber (reg:SI IP_REGNUM))
11212 (clobber (reg:CC CC_REGNUM))]
11214 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11215 [(set_attr "conds" "clob")]
11218 (define_insn "*arm_movtas_ze"
11219 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11222 (match_operand:SI 1 "const_int_operand" ""))]
11225 [(set_attr "predicable" "yes")
11226 (set_attr "length" "4")]
11229 (define_insn "arm_rev"
11230 [(set (match_operand:SI 0 "s_register_operand" "=r")
11231 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11232 "TARGET_EITHER && arm_arch6"
11234 [(set (attr "length")
11235 (if_then_else (eq_attr "is_thumb" "yes")
11240 (define_expand "arm_legacy_rev"
11241 [(set (match_operand:SI 2 "s_register_operand" "")
11242 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11246 (lshiftrt:SI (match_dup 2)
11248 (set (match_operand:SI 3 "s_register_operand" "")
11249 (rotatert:SI (match_dup 1)
11252 (and:SI (match_dup 2)
11253 (const_int -65281)))
11254 (set (match_operand:SI 0 "s_register_operand" "")
11255 (xor:SI (match_dup 3)
11261 ;; Reuse temporaries to keep register pressure down.
11262 (define_expand "thumb_legacy_rev"
11263 [(set (match_operand:SI 2 "s_register_operand" "")
11264 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11266 (set (match_operand:SI 3 "s_register_operand" "")
11267 (lshiftrt:SI (match_dup 1)
11270 (ior:SI (match_dup 3)
11272 (set (match_operand:SI 4 "s_register_operand" "")
11274 (set (match_operand:SI 5 "s_register_operand" "")
11275 (rotatert:SI (match_dup 1)
11278 (ashift:SI (match_dup 5)
11281 (lshiftrt:SI (match_dup 5)
11284 (ior:SI (match_dup 5)
11287 (rotatert:SI (match_dup 5)
11289 (set (match_operand:SI 0 "s_register_operand" "")
11290 (ior:SI (match_dup 5)
11296 (define_expand "bswapsi2"
11297 [(set (match_operand:SI 0 "s_register_operand" "=r")
11298 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11303 if (!optimize_size)
11305 rtx op2 = gen_reg_rtx (SImode);
11306 rtx op3 = gen_reg_rtx (SImode);
11310 rtx op4 = gen_reg_rtx (SImode);
11311 rtx op5 = gen_reg_rtx (SImode);
11313 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11314 op2, op3, op4, op5));
11318 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11330 ;; Load the FPA co-processor patterns
11332 ;; Load the Maverick co-processor patterns
11333 (include "cirrus.md")
11334 ;; Vector bits common to IWMMXT and Neon
11335 (include "vec-common.md")
11336 ;; Load the Intel Wireless Multimedia Extension patterns
11337 (include "iwmmxt.md")
11338 ;; Load the VFP co-processor patterns
11340 ;; Thumb-2 patterns
11341 (include "thumb2.md")
11343 (include "neon.md")