1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 ;; and Martin Simmons (@harleqn.co.uk).
6 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 ;; This file is part of GCC.
10 ;; GCC is free software; you can redistribute it and/or modify it
11 ;; under the terms of the GNU General Public License as published
12 ;; by the Free Software Foundation; either version 2, or (at your
13 ;; option) any later version.
15 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
16 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 ;; License for more details.
20 ;; You should have received a copy of the GNU General Public License
21 ;; along with GCC; see the file COPYING. If not, write to
22 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
23 ;; Boston, MA 02110-1301, USA.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
56 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
57 ; operand 0 is the result,
58 ; operand 1 the parameter.
59 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
60 ; operand 0 is the result,
61 ; operand 1 the parameter.
62 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
63 ; operand 0 is the first register,
64 ; subsequent registers are in parallel (use ...)
66 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
67 ; usage, that is, we will add the pic_register
68 ; value to it before trying to dereference it.
69 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
70 ; GLOBAL_OFFSET_TABLE. The operation is fully
71 ; described by the RTL but must be wrapped to
72 ; prevent combine from trying to rip it apart.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 20) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
101 ;; UNSPEC_VOLATILE Usage:
104 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
106 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
107 ; instruction epilogue sequence that isn't expanded
108 ; into normal RTL. Used for both normal and sibcall
110 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
111 ; for inlined constants.
112 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
114 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
116 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
118 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
120 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
122 (VUNSPEC_TMRC 8) ; Used by the iWMMXt TMRC instruction.
123 (VUNSPEC_TMCR 9) ; Used by the iWMMXt TMCR instruction.
124 (VUNSPEC_ALIGN8 10) ; 8-byte alignment version of VUNSPEC_ALIGN
125 (VUNSPEC_WCMP_EQ 11) ; Used by the iWMMXt WCMPEQ instructions
126 (VUNSPEC_WCMP_GTU 12) ; Used by the iWMMXt WCMPGTU instructions
127 (VUNSPEC_WCMP_GT 13) ; Used by the iwMMXT WCMPGT instructions
128 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
133 ;;---------------------------------------------------------------------------
136 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
137 ; generating ARM code. This is used to control the length of some insn
138 ; patterns that share the same RTL in both ARM and Thumb code.
139 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
141 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
142 ; scheduling decisions for the load unit and the multiplier.
143 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
145 ; IS_XSCALE is set to 'yes' when compiling for XScale.
146 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
148 ;; Operand number of an input operand that is shifted. Zero if the
149 ;; given instruction does not shift one of its input operands.
150 (define_attr "shift" "" (const_int 0))
152 ; Floating Point Unit. If we only have floating point emulation, then there
153 ; is no point in scheduling the floating point insns. (Well, for best
154 ; performance we should try and group them together).
155 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
156 (const (symbol_ref "arm_fpu_attr")))
158 ; LENGTH of an instruction (in bytes)
159 (define_attr "length" "" (const_int 4))
161 ; POOL_RANGE is how far away from a constant pool entry that this insn
162 ; can be placed. If the distance is zero, then this insn will never
163 ; reference the pool.
164 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
165 ; before its address.
166 (define_attr "pool_range" "" (const_int 0))
167 (define_attr "neg_pool_range" "" (const_int 0))
169 ; An assembler sequence may clobber the condition codes without us knowing.
170 ; If such an insn references the pool, then we have no way of knowing how,
171 ; so use the most conservative value for pool_range.
172 (define_asm_attributes
173 [(set_attr "conds" "clob")
174 (set_attr "length" "4")
175 (set_attr "pool_range" "250")])
177 ;; The instruction used to implement a particular pattern. This
178 ;; information is used by pipeline descriptions to provide accurate
179 ;; scheduling information.
182 "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,other"
183 (const_string "other"))
185 ; TYPE attribute is used to detect floating point instructions which, if
186 ; running on a co-processor can run in parallel with other, basic instructions
187 ; If write-buffer scheduling is enabled then it can also be used in the
188 ; scheduling of writes.
190 ; Classification of each insn
191 ; alu any alu instruction that doesn't hit memory or fp
192 ; regs or have a shifted source operand
193 ; alu_shift any data instruction that doesn't hit memory or fp
194 ; regs, but has a source operand shifted by a constant
195 ; alu_shift_reg any data instruction that doesn't hit memory or fp
196 ; regs, but has a source operand shifted by a register value
197 ; mult a multiply instruction
198 ; block blockage insn, this blocks all functional units
199 ; float a floating point arithmetic operation (subject to expansion)
200 ; fdivd DFmode floating point division
201 ; fdivs SFmode floating point division
202 ; fmul Floating point multiply
203 ; ffmul Fast floating point multiply
204 ; farith Floating point arithmetic (4 cycle)
205 ; ffarith Fast floating point arithmetic (2 cycle)
206 ; float_em a floating point arithmetic operation that is normally emulated
207 ; even on a machine with an fpa.
208 ; f_load a floating point load from memory
209 ; f_store a floating point store to memory
210 ; f_load[sd] single/double load from memory
211 ; f_store[sd] single/double store to memory
212 ; f_flag a transfer of co-processor flags to the CPSR
213 ; f_mem_r a transfer of a floating point register to a real reg via mem
214 ; r_mem_f the reverse of f_mem_r
215 ; f_2_r fast transfer float to arm (no memory needed)
216 ; r_2_f fast transfer arm to float
217 ; f_cvt convert floating<->integral
219 ; call a subroutine call
220 ; load_byte load byte(s) from memory to arm registers
221 ; load1 load 1 word from memory to arm registers
222 ; load2 load 2 words from memory to arm registers
223 ; load3 load 3 words from memory to arm registers
224 ; load4 load 4 words from memory to arm registers
225 ; store store 1 word to memory from arm registers
226 ; store2 store 2 words
227 ; store3 store 3 words
228 ; store4 store 4 (or more) words
229 ; Additions for Cirrus Maverick co-processor:
230 ; mav_farith Floating point arithmetic (4 cycle)
231 ; mav_dmult Double multiplies (7 cycle)
234 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
236 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
237 (const_string "mult")
238 (const_string "alu")))
240 ; Load scheduling, set from the arm_ld_sched variable
241 ; initialized by arm_override_options()
242 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
244 ; condition codes: this one is used by final_prescan_insn to speed up
245 ; conditionalizing instructions. It saves having to scan the rtl to see if
246 ; it uses or alters the condition codes.
248 ; USE means that the condition codes are used by the insn in the process of
249 ; outputting code, this means (at present) that we can't use the insn in
252 ; SET means that the purpose of the insn is to set the condition codes in a
253 ; well defined manner.
255 ; CLOB means that the condition codes are altered in an undefined manner, if
256 ; they are altered at all
258 ; JUMP_CLOB is used when the condition cannot be represented by a single
259 ; instruction (UNEQ and LTGT). These cannot be predicated.
261 ; NOCOND means that the condition codes are neither altered nor affect the
262 ; output of this insn
264 (define_attr "conds" "use,set,clob,jump_clob,nocond"
265 (if_then_else (eq_attr "type" "call")
266 (const_string "clob")
267 (const_string "nocond")))
269 ; Predicable means that the insn can be conditionally executed based on
270 ; an automatically added predicate (additional patterns are generated by
271 ; gen...). We default to 'no' because no Thumb patterns match this rule
272 ; and not all ARM patterns do.
273 (define_attr "predicable" "no,yes" (const_string "no"))
275 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
276 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
277 ; suffer blockages enough to warrant modelling this (and it can adversely
278 ; affect the schedule).
279 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
281 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
282 ; to stall the processor. Used with model_wbuf above.
283 (define_attr "write_conflict" "no,yes"
284 (if_then_else (eq_attr "type"
285 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
287 (const_string "no")))
289 ; Classify the insns into those that take one cycle and those that take more
290 ; than one on the main cpu execution unit.
291 (define_attr "core_cycles" "single,multi"
292 (if_then_else (eq_attr "type"
293 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
294 (const_string "single")
295 (const_string "multi")))
297 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
298 ;; distant label. Only applicable to Thumb code.
299 (define_attr "far_jump" "yes,no" (const_string "no"))
302 ;; The number of machine instructions this pattern expands to.
303 ;; Used for Thumb-2 conditional execution.
304 (define_attr "ce_count" "" (const_int 1))
306 ;;---------------------------------------------------------------------------
309 ; A list of modes that are exactly 64 bits in size. We use this to expand
310 ; some splits that are the same for all modes when operating on ARM
312 (define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF])
314 ;;---------------------------------------------------------------------------
317 (include "predicates.md")
318 (include "constraints.md")
320 ;;---------------------------------------------------------------------------
321 ;; Pipeline descriptions
323 ;; Processor type. This is created automatically from arm-cores.def.
324 (include "arm-tune.md")
326 ;; True if the generic scheduling description should be used.
328 (define_attr "generic_sched" "yes,no"
330 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs")
332 (const_string "yes"))))
334 (define_attr "generic_vfp" "yes,no"
336 (and (eq_attr "fpu" "vfp")
337 (eq_attr "tune" "!arm1020e,arm1022e"))
339 (const_string "no"))))
341 (include "arm-generic.md")
342 (include "arm926ejs.md")
343 (include "arm1020e.md")
344 (include "arm1026ejs.md")
345 (include "arm1136jfs.md")
348 ;;---------------------------------------------------------------------------
353 ;; Note: For DImode insns, there is normally no reason why operands should
354 ;; not be in the same register, what we don't want is for something being
355 ;; written to partially overlap something that is an input.
356 ;; Cirrus 64bit additions should not be split because we have a native
357 ;; 64bit addition instructions.
359 (define_expand "adddi3"
361 [(set (match_operand:DI 0 "s_register_operand" "")
362 (plus:DI (match_operand:DI 1 "s_register_operand" "")
363 (match_operand:DI 2 "s_register_operand" "")))
364 (clobber (reg:CC CC_REGNUM))])]
367 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
369 if (!cirrus_fp_register (operands[0], DImode))
370 operands[0] = force_reg (DImode, operands[0]);
371 if (!cirrus_fp_register (operands[1], DImode))
372 operands[1] = force_reg (DImode, operands[1]);
373 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
379 if (GET_CODE (operands[1]) != REG)
380 operands[1] = force_reg (SImode, operands[1]);
381 if (GET_CODE (operands[2]) != REG)
382 operands[2] = force_reg (SImode, operands[2]);
387 (define_insn "*thumb1_adddi3"
388 [(set (match_operand:DI 0 "register_operand" "=l")
389 (plus:DI (match_operand:DI 1 "register_operand" "%0")
390 (match_operand:DI 2 "register_operand" "l")))
391 (clobber (reg:CC CC_REGNUM))
394 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
395 [(set_attr "length" "4")]
398 (define_insn_and_split "*arm_adddi3"
399 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
400 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
401 (match_operand:DI 2 "s_register_operand" "r, 0")))
402 (clobber (reg:CC CC_REGNUM))]
403 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
405 "TARGET_32BIT && reload_completed"
406 [(parallel [(set (reg:CC_C CC_REGNUM)
407 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
409 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
410 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
411 (plus:SI (match_dup 4) (match_dup 5))))]
414 operands[3] = gen_highpart (SImode, operands[0]);
415 operands[0] = gen_lowpart (SImode, operands[0]);
416 operands[4] = gen_highpart (SImode, operands[1]);
417 operands[1] = gen_lowpart (SImode, operands[1]);
418 operands[5] = gen_highpart (SImode, operands[2]);
419 operands[2] = gen_lowpart (SImode, operands[2]);
421 [(set_attr "conds" "clob")
422 (set_attr "length" "8")]
425 (define_insn_and_split "*adddi_sesidi_di"
426 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
427 (plus:DI (sign_extend:DI
428 (match_operand:SI 2 "s_register_operand" "r,r"))
429 (match_operand:DI 1 "s_register_operand" "r,0")))
430 (clobber (reg:CC CC_REGNUM))]
431 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
433 "TARGET_32BIT && reload_completed"
434 [(parallel [(set (reg:CC_C CC_REGNUM)
435 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
437 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
438 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
439 (plus:SI (ashiftrt:SI (match_dup 2)
444 operands[3] = gen_highpart (SImode, operands[0]);
445 operands[0] = gen_lowpart (SImode, operands[0]);
446 operands[4] = gen_highpart (SImode, operands[1]);
447 operands[1] = gen_lowpart (SImode, operands[1]);
448 operands[2] = gen_lowpart (SImode, operands[2]);
450 [(set_attr "conds" "clob")
451 (set_attr "length" "8")]
454 (define_insn_and_split "*adddi_zesidi_di"
455 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
456 (plus:DI (zero_extend:DI
457 (match_operand:SI 2 "s_register_operand" "r,r"))
458 (match_operand:DI 1 "s_register_operand" "r,0")))
459 (clobber (reg:CC CC_REGNUM))]
460 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
462 "TARGET_32BIT && reload_completed"
463 [(parallel [(set (reg:CC_C CC_REGNUM)
464 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
466 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
467 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
468 (plus:SI (match_dup 4) (const_int 0))))]
471 operands[3] = gen_highpart (SImode, operands[0]);
472 operands[0] = gen_lowpart (SImode, operands[0]);
473 operands[4] = gen_highpart (SImode, operands[1]);
474 operands[1] = gen_lowpart (SImode, operands[1]);
475 operands[2] = gen_lowpart (SImode, operands[2]);
477 [(set_attr "conds" "clob")
478 (set_attr "length" "8")]
481 (define_expand "addsi3"
482 [(set (match_operand:SI 0 "s_register_operand" "")
483 (plus:SI (match_operand:SI 1 "s_register_operand" "")
484 (match_operand:SI 2 "reg_or_int_operand" "")))]
487 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
489 arm_split_constant (PLUS, SImode, NULL_RTX,
490 INTVAL (operands[2]), operands[0], operands[1],
491 optimize && !no_new_pseudos);
497 ; If there is a scratch available, this will be faster than synthesizing the
500 [(match_scratch:SI 3 "r")
501 (set (match_operand:SI 0 "arm_general_register_operand" "")
502 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
503 (match_operand:SI 2 "const_int_operand" "")))]
505 !(const_ok_for_arm (INTVAL (operands[2]))
506 || const_ok_for_arm (-INTVAL (operands[2])))
507 && const_ok_for_arm (~INTVAL (operands[2]))"
508 [(set (match_dup 3) (match_dup 2))
509 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
513 (define_insn_and_split "*arm_addsi3"
514 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
515 (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
516 (match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))]
523 GET_CODE (operands[2]) == CONST_INT
524 && !(const_ok_for_arm (INTVAL (operands[2]))
525 || const_ok_for_arm (-INTVAL (operands[2])))"
526 [(clobber (const_int 0))]
528 arm_split_constant (PLUS, SImode, curr_insn,
529 INTVAL (operands[2]), operands[0],
533 [(set_attr "length" "4,4,16")
534 (set_attr "predicable" "yes")]
537 ;; Register group 'k' is a single register group containing only the stack
538 ;; register. Trying to reload it will always fail catastrophically,
539 ;; so never allow those alternatives to match if reloading is needed.
541 (define_insn "*thumb1_addsi3"
542 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
543 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
544 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
547 static const char * const asms[] =
549 \"add\\t%0, %0, %2\",
550 \"sub\\t%0, %0, #%n2\",
551 \"add\\t%0, %1, %2\",
552 \"add\\t%0, %0, %2\",
553 \"add\\t%0, %0, %2\",
554 \"add\\t%0, %1, %2\",
557 if ((which_alternative == 2 || which_alternative == 6)
558 && GET_CODE (operands[2]) == CONST_INT
559 && INTVAL (operands[2]) < 0)
560 return \"sub\\t%0, %1, #%n2\";
561 return asms[which_alternative];
563 [(set_attr "length" "2")]
566 ;; Reloading and elimination of the frame pointer can
567 ;; sometimes cause this optimization to be missed.
569 [(set (match_operand:SI 0 "arm_general_register_operand" "")
570 (match_operand:SI 1 "const_int_operand" ""))
572 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
574 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
575 && (INTVAL (operands[1]) & 3) == 0"
576 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
580 ;; ??? Make Thumb-2 variants which prefer low regs
581 (define_insn "*addsi3_compare0"
582 [(set (reg:CC_NOOV CC_REGNUM)
584 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
585 (match_operand:SI 2 "arm_add_operand" "rI,L"))
587 (set (match_operand:SI 0 "s_register_operand" "=r,r")
588 (plus:SI (match_dup 1) (match_dup 2)))]
592 sub%.\\t%0, %1, #%n2"
593 [(set_attr "conds" "set")]
596 (define_insn "*addsi3_compare0_scratch"
597 [(set (reg:CC_NOOV CC_REGNUM)
599 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
600 (match_operand:SI 1 "arm_add_operand" "rI,L"))
606 [(set_attr "conds" "set")]
609 (define_insn "*compare_negsi_si"
610 [(set (reg:CC_Z CC_REGNUM)
612 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
613 (match_operand:SI 1 "s_register_operand" "r")))]
616 [(set_attr "conds" "set")]
619 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
620 ;; addend is a constant.
621 (define_insn "*cmpsi2_addneg"
622 [(set (reg:CC CC_REGNUM)
624 (match_operand:SI 1 "s_register_operand" "r,r")
625 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
626 (set (match_operand:SI 0 "s_register_operand" "=r,r")
627 (plus:SI (match_dup 1)
628 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
629 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
632 add%.\\t%0, %1, #%n2"
633 [(set_attr "conds" "set")]
636 ;; Convert the sequence
638 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
642 ;; bcs dest ((unsigned)rn >= 1)
643 ;; similarly for the beq variant using bcc.
644 ;; This is a common looping idiom (while (n--))
646 [(set (match_operand:SI 0 "arm_general_register_operand" "")
647 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
649 (set (match_operand 2 "cc_register" "")
650 (compare (match_dup 0) (const_int -1)))
652 (if_then_else (match_operator 3 "equality_operator"
653 [(match_dup 2) (const_int 0)])
654 (match_operand 4 "" "")
655 (match_operand 5 "" "")))]
656 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
660 (match_dup 1) (const_int 1)))
661 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
663 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
666 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
667 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
670 operands[2], const0_rtx);"
673 ;; The next four insns work because they compare the result with one of
674 ;; the operands, and we know that the use of the condition code is
675 ;; either GEU or LTU, so we can use the carry flag from the addition
676 ;; instead of doing the compare a second time.
677 (define_insn "*addsi3_compare_op1"
678 [(set (reg:CC_C CC_REGNUM)
680 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
681 (match_operand:SI 2 "arm_add_operand" "rI,L"))
683 (set (match_operand:SI 0 "s_register_operand" "=r,r")
684 (plus:SI (match_dup 1) (match_dup 2)))]
688 sub%.\\t%0, %1, #%n2"
689 [(set_attr "conds" "set")]
692 (define_insn "*addsi3_compare_op2"
693 [(set (reg:CC_C CC_REGNUM)
695 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
696 (match_operand:SI 2 "arm_add_operand" "rI,L"))
698 (set (match_operand:SI 0 "s_register_operand" "=r,r")
699 (plus:SI (match_dup 1) (match_dup 2)))]
703 sub%.\\t%0, %1, #%n2"
704 [(set_attr "conds" "set")]
707 (define_insn "*compare_addsi2_op0"
708 [(set (reg:CC_C CC_REGNUM)
710 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
711 (match_operand:SI 1 "arm_add_operand" "rI,L"))
717 [(set_attr "conds" "set")]
720 (define_insn "*compare_addsi2_op1"
721 [(set (reg:CC_C CC_REGNUM)
723 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
724 (match_operand:SI 1 "arm_add_operand" "rI,L"))
730 [(set_attr "conds" "set")]
733 (define_insn "*addsi3_carryin"
734 [(set (match_operand:SI 0 "s_register_operand" "=r")
735 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
736 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
737 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
740 [(set_attr "conds" "use")]
743 (define_insn "*addsi3_carryin_shift"
744 [(set (match_operand:SI 0 "s_register_operand" "=r")
745 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
747 (match_operator:SI 2 "shift_operator"
748 [(match_operand:SI 3 "s_register_operand" "r")
749 (match_operand:SI 4 "reg_or_int_operand" "rM")])
750 (match_operand:SI 1 "s_register_operand" "r"))))]
752 "adc%?\\t%0, %1, %3%S2"
753 [(set_attr "conds" "use")
754 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
755 (const_string "alu_shift")
756 (const_string "alu_shift_reg")))]
759 (define_insn "*addsi3_carryin_alt1"
760 [(set (match_operand:SI 0 "s_register_operand" "=r")
761 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
762 (match_operand:SI 2 "arm_rhs_operand" "rI"))
763 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
766 [(set_attr "conds" "use")]
769 (define_insn "*addsi3_carryin_alt2"
770 [(set (match_operand:SI 0 "s_register_operand" "=r")
771 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
772 (match_operand:SI 1 "s_register_operand" "r"))
773 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
776 [(set_attr "conds" "use")]
779 (define_insn "*addsi3_carryin_alt3"
780 [(set (match_operand:SI 0 "s_register_operand" "=r")
781 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
782 (match_operand:SI 2 "arm_rhs_operand" "rI"))
783 (match_operand:SI 1 "s_register_operand" "r")))]
786 [(set_attr "conds" "use")]
789 (define_expand "incscc"
790 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
791 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
792 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
793 (match_operand:SI 1 "s_register_operand" "0,?r")))]
798 (define_insn "*arm_incscc"
799 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
800 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
801 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
802 (match_operand:SI 1 "s_register_operand" "0,?r")))]
806 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
807 [(set_attr "conds" "use")
808 (set_attr "length" "4,8")]
811 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
813 [(set (match_operand:SI 0 "s_register_operand" "")
814 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
815 (match_operand:SI 2 "s_register_operand" ""))
817 (clobber (match_operand:SI 3 "s_register_operand" ""))]
819 [(set (match_dup 3) (match_dup 1))
820 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
822 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
825 (define_expand "addsf3"
826 [(set (match_operand:SF 0 "s_register_operand" "")
827 (plus:SF (match_operand:SF 1 "s_register_operand" "")
828 (match_operand:SF 2 "arm_float_add_operand" "")))]
829 "TARGET_32BIT && TARGET_HARD_FLOAT"
832 && !cirrus_fp_register (operands[2], SFmode))
833 operands[2] = force_reg (SFmode, operands[2]);
836 (define_expand "adddf3"
837 [(set (match_operand:DF 0 "s_register_operand" "")
838 (plus:DF (match_operand:DF 1 "s_register_operand" "")
839 (match_operand:DF 2 "arm_float_add_operand" "")))]
840 "TARGET_32BIT && TARGET_HARD_FLOAT"
843 && !cirrus_fp_register (operands[2], DFmode))
844 operands[2] = force_reg (DFmode, operands[2]);
847 (define_expand "subdi3"
849 [(set (match_operand:DI 0 "s_register_operand" "")
850 (minus:DI (match_operand:DI 1 "s_register_operand" "")
851 (match_operand:DI 2 "s_register_operand" "")))
852 (clobber (reg:CC CC_REGNUM))])]
855 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
857 && cirrus_fp_register (operands[0], DImode)
858 && cirrus_fp_register (operands[1], DImode))
860 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
866 if (GET_CODE (operands[1]) != REG)
867 operands[1] = force_reg (SImode, operands[1]);
868 if (GET_CODE (operands[2]) != REG)
869 operands[2] = force_reg (SImode, operands[2]);
874 (define_insn "*arm_subdi3"
875 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
876 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
877 (match_operand:DI 2 "s_register_operand" "r,0,0")))
878 (clobber (reg:CC CC_REGNUM))]
880 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
881 [(set_attr "conds" "clob")
882 (set_attr "length" "8")]
885 (define_insn "*thumb_subdi3"
886 [(set (match_operand:DI 0 "register_operand" "=l")
887 (minus:DI (match_operand:DI 1 "register_operand" "0")
888 (match_operand:DI 2 "register_operand" "l")))
889 (clobber (reg:CC CC_REGNUM))]
891 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
892 [(set_attr "length" "4")]
895 (define_insn "*subdi_di_zesidi"
896 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
897 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
899 (match_operand:SI 2 "s_register_operand" "r,r"))))
900 (clobber (reg:CC CC_REGNUM))]
902 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
903 [(set_attr "conds" "clob")
904 (set_attr "length" "8")]
907 (define_insn "*subdi_di_sesidi"
908 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
909 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
911 (match_operand:SI 2 "s_register_operand" "r,r"))))
912 (clobber (reg:CC CC_REGNUM))]
914 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
915 [(set_attr "conds" "clob")
916 (set_attr "length" "8")]
919 (define_insn "*subdi_zesidi_di"
920 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
921 (minus:DI (zero_extend:DI
922 (match_operand:SI 2 "s_register_operand" "r,r"))
923 (match_operand:DI 1 "s_register_operand" "?r,0")))
924 (clobber (reg:CC CC_REGNUM))]
926 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
927 [(set_attr "conds" "clob")
928 (set_attr "length" "8")]
931 (define_insn "*subdi_sesidi_di"
932 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
933 (minus:DI (sign_extend:DI
934 (match_operand:SI 2 "s_register_operand" "r,r"))
935 (match_operand:DI 1 "s_register_operand" "?r,0")))
936 (clobber (reg:CC CC_REGNUM))]
938 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
939 [(set_attr "conds" "clob")
940 (set_attr "length" "8")]
943 (define_insn "*subdi_zesidi_zesidi"
944 [(set (match_operand:DI 0 "s_register_operand" "=r")
945 (minus:DI (zero_extend:DI
946 (match_operand:SI 1 "s_register_operand" "r"))
948 (match_operand:SI 2 "s_register_operand" "r"))))
949 (clobber (reg:CC CC_REGNUM))]
951 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
952 [(set_attr "conds" "clob")
953 (set_attr "length" "8")]
956 (define_expand "subsi3"
957 [(set (match_operand:SI 0 "s_register_operand" "")
958 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
959 (match_operand:SI 2 "s_register_operand" "")))]
962 if (GET_CODE (operands[1]) == CONST_INT)
966 arm_split_constant (MINUS, SImode, NULL_RTX,
967 INTVAL (operands[1]), operands[0],
968 operands[2], optimize && !no_new_pseudos);
971 else /* TARGET_THUMB1 */
972 operands[1] = force_reg (SImode, operands[1]);
977 (define_insn "*thumb1_subsi3_insn"
978 [(set (match_operand:SI 0 "register_operand" "=l")
979 (minus:SI (match_operand:SI 1 "register_operand" "l")
980 (match_operand:SI 2 "register_operand" "l")))]
983 [(set_attr "length" "2")]
986 ; ??? Check Thumb-2 split length
987 (define_insn_and_split "*arm_subsi3_insn"
988 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
989 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n")
990 (match_operand:SI 2 "s_register_operand" "r,r")))]
996 && GET_CODE (operands[1]) == CONST_INT
997 && !const_ok_for_arm (INTVAL (operands[1]))"
998 [(clobber (const_int 0))]
1000 arm_split_constant (MINUS, SImode, curr_insn,
1001 INTVAL (operands[1]), operands[0], operands[2], 0);
1004 [(set_attr "length" "4,16")
1005 (set_attr "predicable" "yes")]
1009 [(match_scratch:SI 3 "r")
1010 (set (match_operand:SI 0 "arm_general_register_operand" "")
1011 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1012 (match_operand:SI 2 "arm_general_register_operand" "")))]
1014 && !const_ok_for_arm (INTVAL (operands[1]))
1015 && const_ok_for_arm (~INTVAL (operands[1]))"
1016 [(set (match_dup 3) (match_dup 1))
1017 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1021 (define_insn "*subsi3_compare0"
1022 [(set (reg:CC_NOOV CC_REGNUM)
1024 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1025 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1027 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1028 (minus:SI (match_dup 1) (match_dup 2)))]
1033 [(set_attr "conds" "set")]
1036 (define_expand "decscc"
1037 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1038 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1039 (match_operator:SI 2 "arm_comparison_operator"
1040 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1045 (define_insn "*arm_decscc"
1046 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1047 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1048 (match_operator:SI 2 "arm_comparison_operator"
1049 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1053 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1054 [(set_attr "conds" "use")
1055 (set_attr "length" "*,8")]
1058 (define_expand "subsf3"
1059 [(set (match_operand:SF 0 "s_register_operand" "")
1060 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1061 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1062 "TARGET_32BIT && TARGET_HARD_FLOAT"
1064 if (TARGET_MAVERICK)
1066 if (!cirrus_fp_register (operands[1], SFmode))
1067 operands[1] = force_reg (SFmode, operands[1]);
1068 if (!cirrus_fp_register (operands[2], SFmode))
1069 operands[2] = force_reg (SFmode, operands[2]);
1073 (define_expand "subdf3"
1074 [(set (match_operand:DF 0 "s_register_operand" "")
1075 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1076 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1077 "TARGET_32BIT && TARGET_HARD_FLOAT"
1079 if (TARGET_MAVERICK)
1081 if (!cirrus_fp_register (operands[1], DFmode))
1082 operands[1] = force_reg (DFmode, operands[1]);
1083 if (!cirrus_fp_register (operands[2], DFmode))
1084 operands[2] = force_reg (DFmode, operands[2]);
1089 ;; Multiplication insns
1091 (define_expand "mulsi3"
1092 [(set (match_operand:SI 0 "s_register_operand" "")
1093 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1094 (match_operand:SI 1 "s_register_operand" "")))]
1099 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1100 (define_insn "*arm_mulsi3"
1101 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1102 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1103 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1104 "TARGET_32BIT && !arm_arch6"
1105 "mul%?\\t%0, %2, %1"
1106 [(set_attr "insn" "mul")
1107 (set_attr "predicable" "yes")]
1110 (define_insn "*arm_mulsi3_v6"
1111 [(set (match_operand:SI 0 "s_register_operand" "=r")
1112 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1113 (match_operand:SI 2 "s_register_operand" "r")))]
1114 "TARGET_32BIT && arm_arch6"
1115 "mul%?\\t%0, %1, %2"
1116 [(set_attr "insn" "mul")
1117 (set_attr "predicable" "yes")]
1120 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1121 ; 1 and 2; are the same, because reload will make operand 0 match
1122 ; operand 1 without realizing that this conflicts with operand 2. We fix
1123 ; this by adding another alternative to match this case, and then `reload'
1124 ; it ourselves. This alternative must come first.
1125 (define_insn "*thumb_mulsi3"
1126 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1127 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1128 (match_operand:SI 2 "register_operand" "l,l,l")))]
1129 "TARGET_THUMB1 && !arm_arch6"
1131 if (which_alternative < 2)
1132 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1134 return \"mul\\t%0, %2\";
1136 [(set_attr "length" "4,4,2")
1137 (set_attr "insn" "mul")]
1140 (define_insn "*thumb_mulsi3_v6"
1141 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1142 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1143 (match_operand:SI 2 "register_operand" "l,0,0")))]
1144 "TARGET_THUMB1 && arm_arch6"
1149 [(set_attr "length" "2")
1150 (set_attr "insn" "mul")]
1153 (define_insn "*mulsi3_compare0"
1154 [(set (reg:CC_NOOV CC_REGNUM)
1155 (compare:CC_NOOV (mult:SI
1156 (match_operand:SI 2 "s_register_operand" "r,r")
1157 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1159 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1160 (mult:SI (match_dup 2) (match_dup 1)))]
1161 "TARGET_ARM && !arm_arch6"
1162 "mul%.\\t%0, %2, %1"
1163 [(set_attr "conds" "set")
1164 (set_attr "insn" "muls")]
1167 (define_insn "*mulsi3_compare0_v6"
1168 [(set (reg:CC_NOOV CC_REGNUM)
1169 (compare:CC_NOOV (mult:SI
1170 (match_operand:SI 2 "s_register_operand" "r")
1171 (match_operand:SI 1 "s_register_operand" "r"))
1173 (set (match_operand:SI 0 "s_register_operand" "=r")
1174 (mult:SI (match_dup 2) (match_dup 1)))]
1175 "TARGET_ARM && arm_arch6 && optimize_size"
1176 "mul%.\\t%0, %2, %1"
1177 [(set_attr "conds" "set")
1178 (set_attr "insn" "muls")]
1181 (define_insn "*mulsi_compare0_scratch"
1182 [(set (reg:CC_NOOV CC_REGNUM)
1183 (compare:CC_NOOV (mult:SI
1184 (match_operand:SI 2 "s_register_operand" "r,r")
1185 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1187 (clobber (match_scratch:SI 0 "=&r,&r"))]
1188 "TARGET_ARM && !arm_arch6"
1189 "mul%.\\t%0, %2, %1"
1190 [(set_attr "conds" "set")
1191 (set_attr "insn" "muls")]
1194 (define_insn "*mulsi_compare0_scratch_v6"
1195 [(set (reg:CC_NOOV CC_REGNUM)
1196 (compare:CC_NOOV (mult:SI
1197 (match_operand:SI 2 "s_register_operand" "r")
1198 (match_operand:SI 1 "s_register_operand" "r"))
1200 (clobber (match_scratch:SI 0 "=r"))]
1201 "TARGET_ARM && arm_arch6 && optimize_size"
1202 "mul%.\\t%0, %2, %1"
1203 [(set_attr "conds" "set")
1204 (set_attr "insn" "muls")]
1207 ;; Unnamed templates to match MLA instruction.
1209 (define_insn "*mulsi3addsi"
1210 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1212 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1213 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1214 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1215 "TARGET_32BIT && !arm_arch6"
1216 "mla%?\\t%0, %2, %1, %3"
1217 [(set_attr "insn" "mla")
1218 (set_attr "predicable" "yes")]
1221 (define_insn "*mulsi3addsi_v6"
1222 [(set (match_operand:SI 0 "s_register_operand" "=r")
1224 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1225 (match_operand:SI 1 "s_register_operand" "r"))
1226 (match_operand:SI 3 "s_register_operand" "r")))]
1227 "TARGET_32BIT && arm_arch6"
1228 "mla%?\\t%0, %2, %1, %3"
1229 [(set_attr "insn" "mla")
1230 (set_attr "predicable" "yes")]
1233 (define_insn "*mulsi3addsi_compare0"
1234 [(set (reg:CC_NOOV CC_REGNUM)
1237 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1238 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1239 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1241 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1242 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1244 "TARGET_ARM && arm_arch6"
1245 "mla%.\\t%0, %2, %1, %3"
1246 [(set_attr "conds" "set")
1247 (set_attr "insn" "mlas")]
1250 (define_insn "*mulsi3addsi_compare0_v6"
1251 [(set (reg:CC_NOOV CC_REGNUM)
1254 (match_operand:SI 2 "s_register_operand" "r")
1255 (match_operand:SI 1 "s_register_operand" "r"))
1256 (match_operand:SI 3 "s_register_operand" "r"))
1258 (set (match_operand:SI 0 "s_register_operand" "=r")
1259 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1261 "TARGET_ARM && arm_arch6 && optimize_size"
1262 "mla%.\\t%0, %2, %1, %3"
1263 [(set_attr "conds" "set")
1264 (set_attr "insn" "mlas")]
1267 (define_insn "*mulsi3addsi_compare0_scratch"
1268 [(set (reg:CC_NOOV CC_REGNUM)
1271 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1272 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1273 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1275 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1276 "TARGET_ARM && !arm_arch6"
1277 "mla%.\\t%0, %2, %1, %3"
1278 [(set_attr "conds" "set")
1279 (set_attr "insn" "mlas")]
1282 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1283 [(set (reg:CC_NOOV CC_REGNUM)
1286 (match_operand:SI 2 "s_register_operand" "r")
1287 (match_operand:SI 1 "s_register_operand" "r"))
1288 (match_operand:SI 3 "s_register_operand" "r"))
1290 (clobber (match_scratch:SI 0 "=r"))]
1291 "TARGET_ARM && arm_arch6 && optimize_size"
1292 "mla%.\\t%0, %2, %1, %3"
1293 [(set_attr "conds" "set")
1294 (set_attr "insn" "mlas")]
1297 (define_insn "*mulsi3subsi"
1298 [(set (match_operand:SI 0 "s_register_operand" "=r")
1300 (match_operand:SI 3 "s_register_operand" "r")
1301 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1302 (match_operand:SI 1 "s_register_operand" "r"))))]
1303 "TARGET_32BIT && arm_arch_thumb2"
1304 "mls%?\\t%0, %2, %1, %3"
1305 [(set_attr "insn" "mla")
1306 (set_attr "predicable" "yes")]
1309 ;; Unnamed template to match long long multiply-accumulate (smlal)
1311 (define_insn "*mulsidi3adddi"
1312 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1315 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1316 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1317 (match_operand:DI 1 "s_register_operand" "0")))]
1318 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1319 "smlal%?\\t%Q0, %R0, %3, %2"
1320 [(set_attr "insn" "smlal")
1321 (set_attr "predicable" "yes")]
1324 (define_insn "*mulsidi3adddi_v6"
1325 [(set (match_operand:DI 0 "s_register_operand" "=r")
1328 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1329 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1330 (match_operand:DI 1 "s_register_operand" "0")))]
1331 "TARGET_32BIT && arm_arch6"
1332 "smlal%?\\t%Q0, %R0, %3, %2"
1333 [(set_attr "insn" "smlal")
1334 (set_attr "predicable" "yes")]
1337 (define_insn "mulsidi3"
1338 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1340 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1341 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1342 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1343 "smull%?\\t%Q0, %R0, %1, %2"
1344 [(set_attr "insn" "smull")
1345 (set_attr "predicable" "yes")]
1348 (define_insn "mulsidi3_v6"
1349 [(set (match_operand:DI 0 "s_register_operand" "=r")
1351 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1352 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1353 "TARGET_32BIT && arm_arch6"
1354 "smull%?\\t%Q0, %R0, %1, %2"
1355 [(set_attr "insn" "smull")
1356 (set_attr "predicable" "yes")]
1359 (define_insn "umulsidi3"
1360 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1362 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1363 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1364 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1365 "umull%?\\t%Q0, %R0, %1, %2"
1366 [(set_attr "insn" "umull")
1367 (set_attr "predicable" "yes")]
1370 (define_insn "umulsidi3_v6"
1371 [(set (match_operand:DI 0 "s_register_operand" "=r")
1373 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1374 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1375 "TARGET_32BIT && arm_arch6"
1376 "umull%?\\t%Q0, %R0, %1, %2"
1377 [(set_attr "insn" "umull")
1378 (set_attr "predicable" "yes")]
1381 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1383 (define_insn "*umulsidi3adddi"
1384 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1387 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1388 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1389 (match_operand:DI 1 "s_register_operand" "0")))]
1390 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1391 "umlal%?\\t%Q0, %R0, %3, %2"
1392 [(set_attr "insn" "umlal")
1393 (set_attr "predicable" "yes")]
1396 (define_insn "*umulsidi3adddi_v6"
1397 [(set (match_operand:DI 0 "s_register_operand" "=r")
1400 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1401 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1402 (match_operand:DI 1 "s_register_operand" "0")))]
1403 "TARGET_32BIT && arm_arch6"
1404 "umlal%?\\t%Q0, %R0, %3, %2"
1405 [(set_attr "insn" "umlal")
1406 (set_attr "predicable" "yes")]
1409 (define_insn "smulsi3_highpart"
1410 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1414 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1415 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1417 (clobber (match_scratch:SI 3 "=&r,&r"))]
1418 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1419 "smull%?\\t%3, %0, %2, %1"
1420 [(set_attr "insn" "smull")
1421 (set_attr "predicable" "yes")]
1424 (define_insn "smulsi3_highpart_v6"
1425 [(set (match_operand:SI 0 "s_register_operand" "=r")
1429 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1430 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1432 (clobber (match_scratch:SI 3 "=r"))]
1433 "TARGET_32BIT && arm_arch6"
1434 "smull%?\\t%3, %0, %2, %1"
1435 [(set_attr "insn" "smull")
1436 (set_attr "predicable" "yes")]
1439 (define_insn "umulsi3_highpart"
1440 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1444 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1445 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1447 (clobber (match_scratch:SI 3 "=&r,&r"))]
1448 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1449 "umull%?\\t%3, %0, %2, %1"
1450 [(set_attr "insn" "umull")
1451 (set_attr "predicable" "yes")]
1454 (define_insn "umulsi3_highpart_v6"
1455 [(set (match_operand:SI 0 "s_register_operand" "=r")
1459 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1460 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1462 (clobber (match_scratch:SI 3 "=r"))]
1463 "TARGET_32BIT && arm_arch6"
1464 "umull%?\\t%3, %0, %2, %1"
1465 [(set_attr "insn" "umull")
1466 (set_attr "predicable" "yes")]
1469 (define_insn "mulhisi3"
1470 [(set (match_operand:SI 0 "s_register_operand" "=r")
1471 (mult:SI (sign_extend:SI
1472 (match_operand:HI 1 "s_register_operand" "%r"))
1474 (match_operand:HI 2 "s_register_operand" "r"))))]
1475 "TARGET_DSP_MULTIPLY"
1476 "smulbb%?\\t%0, %1, %2"
1477 [(set_attr "insn" "smulxy")
1478 (set_attr "predicable" "yes")]
1481 (define_insn "*mulhisi3tb"
1482 [(set (match_operand:SI 0 "s_register_operand" "=r")
1483 (mult:SI (ashiftrt:SI
1484 (match_operand:SI 1 "s_register_operand" "r")
1487 (match_operand:HI 2 "s_register_operand" "r"))))]
1488 "TARGET_DSP_MULTIPLY"
1489 "smultb%?\\t%0, %1, %2"
1490 [(set_attr "insn" "smulxy")
1491 (set_attr "predicable" "yes")]
1494 (define_insn "*mulhisi3bt"
1495 [(set (match_operand:SI 0 "s_register_operand" "=r")
1496 (mult:SI (sign_extend:SI
1497 (match_operand:HI 1 "s_register_operand" "r"))
1499 (match_operand:SI 2 "s_register_operand" "r")
1501 "TARGET_DSP_MULTIPLY"
1502 "smulbt%?\\t%0, %1, %2"
1503 [(set_attr "insn" "smulxy")
1504 (set_attr "predicable" "yes")]
1507 (define_insn "*mulhisi3tt"
1508 [(set (match_operand:SI 0 "s_register_operand" "=r")
1509 (mult:SI (ashiftrt:SI
1510 (match_operand:SI 1 "s_register_operand" "r")
1513 (match_operand:SI 2 "s_register_operand" "r")
1515 "TARGET_DSP_MULTIPLY"
1516 "smultt%?\\t%0, %1, %2"
1517 [(set_attr "insn" "smulxy")
1518 (set_attr "predicable" "yes")]
1521 (define_insn "*mulhisi3addsi"
1522 [(set (match_operand:SI 0 "s_register_operand" "=r")
1523 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1524 (mult:SI (sign_extend:SI
1525 (match_operand:HI 2 "s_register_operand" "%r"))
1527 (match_operand:HI 3 "s_register_operand" "r")))))]
1528 "TARGET_DSP_MULTIPLY"
1529 "smlabb%?\\t%0, %2, %3, %1"
1530 [(set_attr "insn" "smlaxy")
1531 (set_attr "predicable" "yes")]
1534 (define_insn "*mulhidi3adddi"
1535 [(set (match_operand:DI 0 "s_register_operand" "=r")
1537 (match_operand:DI 1 "s_register_operand" "0")
1538 (mult:DI (sign_extend:DI
1539 (match_operand:HI 2 "s_register_operand" "%r"))
1541 (match_operand:HI 3 "s_register_operand" "r")))))]
1542 "TARGET_DSP_MULTIPLY"
1543 "smlalbb%?\\t%Q0, %R0, %2, %3"
1544 [(set_attr "insn" "smlalxy")
1545 (set_attr "predicable" "yes")])
1547 (define_expand "mulsf3"
1548 [(set (match_operand:SF 0 "s_register_operand" "")
1549 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1550 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1551 "TARGET_32BIT && TARGET_HARD_FLOAT"
1554 && !cirrus_fp_register (operands[2], SFmode))
1555 operands[2] = force_reg (SFmode, operands[2]);
1558 (define_expand "muldf3"
1559 [(set (match_operand:DF 0 "s_register_operand" "")
1560 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1561 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1562 "TARGET_32BIT && TARGET_HARD_FLOAT"
1565 && !cirrus_fp_register (operands[2], DFmode))
1566 operands[2] = force_reg (DFmode, operands[2]);
1571 (define_expand "divsf3"
1572 [(set (match_operand:SF 0 "s_register_operand" "")
1573 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1574 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1575 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1578 (define_expand "divdf3"
1579 [(set (match_operand:DF 0 "s_register_operand" "")
1580 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1581 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1582 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1587 (define_expand "modsf3"
1588 [(set (match_operand:SF 0 "s_register_operand" "")
1589 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1590 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1591 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1594 (define_expand "moddf3"
1595 [(set (match_operand:DF 0 "s_register_operand" "")
1596 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1597 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1598 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1601 ;; Boolean and,ior,xor insns
1603 ;; Split up double word logical operations
1605 ;; Split up simple DImode logical operations. Simply perform the logical
1606 ;; operation on the upper and lower halves of the registers.
1608 [(set (match_operand:DI 0 "s_register_operand" "")
1609 (match_operator:DI 6 "logical_binary_operator"
1610 [(match_operand:DI 1 "s_register_operand" "")
1611 (match_operand:DI 2 "s_register_operand" "")]))]
1612 "TARGET_32BIT && reload_completed
1613 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1614 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1615 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1618 operands[3] = gen_highpart (SImode, operands[0]);
1619 operands[0] = gen_lowpart (SImode, operands[0]);
1620 operands[4] = gen_highpart (SImode, operands[1]);
1621 operands[1] = gen_lowpart (SImode, operands[1]);
1622 operands[5] = gen_highpart (SImode, operands[2]);
1623 operands[2] = gen_lowpart (SImode, operands[2]);
1628 [(set (match_operand:DI 0 "s_register_operand" "")
1629 (match_operator:DI 6 "logical_binary_operator"
1630 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1631 (match_operand:DI 1 "s_register_operand" "")]))]
1632 "TARGET_32BIT && reload_completed"
1633 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1634 (set (match_dup 3) (match_op_dup:SI 6
1635 [(ashiftrt:SI (match_dup 2) (const_int 31))
1639 operands[3] = gen_highpart (SImode, operands[0]);
1640 operands[0] = gen_lowpart (SImode, operands[0]);
1641 operands[4] = gen_highpart (SImode, operands[1]);
1642 operands[1] = gen_lowpart (SImode, operands[1]);
1643 operands[5] = gen_highpart (SImode, operands[2]);
1644 operands[2] = gen_lowpart (SImode, operands[2]);
1648 ;; The zero extend of operand 2 means we can just copy the high part of
1649 ;; operand1 into operand0.
1651 [(set (match_operand:DI 0 "s_register_operand" "")
1653 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1654 (match_operand:DI 1 "s_register_operand" "")))]
1655 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1656 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1657 (set (match_dup 3) (match_dup 4))]
1660 operands[4] = gen_highpart (SImode, operands[1]);
1661 operands[3] = gen_highpart (SImode, operands[0]);
1662 operands[0] = gen_lowpart (SImode, operands[0]);
1663 operands[1] = gen_lowpart (SImode, operands[1]);
1667 ;; The zero extend of operand 2 means we can just copy the high part of
1668 ;; operand1 into operand0.
1670 [(set (match_operand:DI 0 "s_register_operand" "")
1672 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1673 (match_operand:DI 1 "s_register_operand" "")))]
1674 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1675 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1676 (set (match_dup 3) (match_dup 4))]
1679 operands[4] = gen_highpart (SImode, operands[1]);
1680 operands[3] = gen_highpart (SImode, operands[0]);
1681 operands[0] = gen_lowpart (SImode, operands[0]);
1682 operands[1] = gen_lowpart (SImode, operands[1]);
1686 (define_insn "anddi3"
1687 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1688 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1689 (match_operand:DI 2 "s_register_operand" "r,r")))]
1690 "TARGET_32BIT && ! TARGET_IWMMXT"
1692 [(set_attr "length" "8")]
1695 (define_insn_and_split "*anddi_zesidi_di"
1696 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1697 (and:DI (zero_extend:DI
1698 (match_operand:SI 2 "s_register_operand" "r,r"))
1699 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1702 "TARGET_32BIT && reload_completed"
1703 ; The zero extend of operand 2 clears the high word of the output
1705 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1706 (set (match_dup 3) (const_int 0))]
1709 operands[3] = gen_highpart (SImode, operands[0]);
1710 operands[0] = gen_lowpart (SImode, operands[0]);
1711 operands[1] = gen_lowpart (SImode, operands[1]);
1713 [(set_attr "length" "8")]
1716 (define_insn "*anddi_sesdi_di"
1717 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1718 (and:DI (sign_extend:DI
1719 (match_operand:SI 2 "s_register_operand" "r,r"))
1720 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1723 [(set_attr "length" "8")]
1726 (define_expand "andsi3"
1727 [(set (match_operand:SI 0 "s_register_operand" "")
1728 (and:SI (match_operand:SI 1 "s_register_operand" "")
1729 (match_operand:SI 2 "reg_or_int_operand" "")))]
1734 if (GET_CODE (operands[2]) == CONST_INT)
1736 arm_split_constant (AND, SImode, NULL_RTX,
1737 INTVAL (operands[2]), operands[0],
1738 operands[1], optimize && !no_new_pseudos);
1743 else /* TARGET_THUMB1 */
1745 if (GET_CODE (operands[2]) != CONST_INT)
1746 operands[2] = force_reg (SImode, operands[2]);
1751 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1753 operands[2] = force_reg (SImode,
1754 GEN_INT (~INTVAL (operands[2])));
1756 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1761 for (i = 9; i <= 31; i++)
1763 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1765 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1769 else if ((((HOST_WIDE_INT) 1) << i) - 1
1770 == ~INTVAL (operands[2]))
1772 rtx shift = GEN_INT (i);
1773 rtx reg = gen_reg_rtx (SImode);
1775 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1776 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1782 operands[2] = force_reg (SImode, operands[2]);
1788 ; ??? Check split length for Thumb-2
1789 (define_insn_and_split "*arm_andsi3_insn"
1790 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1791 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1792 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1796 bic%?\\t%0, %1, #%B2
1799 && GET_CODE (operands[2]) == CONST_INT
1800 && !(const_ok_for_arm (INTVAL (operands[2]))
1801 || const_ok_for_arm (~INTVAL (operands[2])))"
1802 [(clobber (const_int 0))]
1804 arm_split_constant (AND, SImode, curr_insn,
1805 INTVAL (operands[2]), operands[0], operands[1], 0);
1808 [(set_attr "length" "4,4,16")
1809 (set_attr "predicable" "yes")]
1812 (define_insn "*thumb1_andsi3_insn"
1813 [(set (match_operand:SI 0 "register_operand" "=l")
1814 (and:SI (match_operand:SI 1 "register_operand" "%0")
1815 (match_operand:SI 2 "register_operand" "l")))]
1818 [(set_attr "length" "2")]
1821 (define_insn "*andsi3_compare0"
1822 [(set (reg:CC_NOOV CC_REGNUM)
1824 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1825 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1827 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1828 (and:SI (match_dup 1) (match_dup 2)))]
1832 bic%.\\t%0, %1, #%B2"
1833 [(set_attr "conds" "set")]
1836 (define_insn "*andsi3_compare0_scratch"
1837 [(set (reg:CC_NOOV CC_REGNUM)
1839 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1840 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1842 (clobber (match_scratch:SI 2 "=X,r"))]
1846 bic%.\\t%2, %0, #%B1"
1847 [(set_attr "conds" "set")]
1850 (define_insn "*zeroextractsi_compare0_scratch"
1851 [(set (reg:CC_NOOV CC_REGNUM)
1852 (compare:CC_NOOV (zero_extract:SI
1853 (match_operand:SI 0 "s_register_operand" "r")
1854 (match_operand 1 "const_int_operand" "n")
1855 (match_operand 2 "const_int_operand" "n"))
1858 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1859 && INTVAL (operands[1]) > 0
1860 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1861 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1863 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1864 << INTVAL (operands[2]));
1865 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1868 [(set_attr "conds" "set")]
1871 (define_insn_and_split "*ne_zeroextractsi"
1872 [(set (match_operand:SI 0 "s_register_operand" "=r")
1873 (ne:SI (zero_extract:SI
1874 (match_operand:SI 1 "s_register_operand" "r")
1875 (match_operand:SI 2 "const_int_operand" "n")
1876 (match_operand:SI 3 "const_int_operand" "n"))
1878 (clobber (reg:CC CC_REGNUM))]
1880 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1881 && INTVAL (operands[2]) > 0
1882 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1883 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1886 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1887 && INTVAL (operands[2]) > 0
1888 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1889 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1890 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1891 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1893 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1895 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1896 (match_dup 0) (const_int 1)))]
1898 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1899 << INTVAL (operands[3]));
1901 [(set_attr "conds" "clob")
1902 (set (attr "length")
1903 (if_then_else (eq_attr "is_thumb" "yes")
1908 (define_insn_and_split "*ne_zeroextractsi_shifted"
1909 [(set (match_operand:SI 0 "s_register_operand" "=r")
1910 (ne:SI (zero_extract:SI
1911 (match_operand:SI 1 "s_register_operand" "r")
1912 (match_operand:SI 2 "const_int_operand" "n")
1915 (clobber (reg:CC CC_REGNUM))]
1919 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1920 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1922 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1924 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1925 (match_dup 0) (const_int 1)))]
1927 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1929 [(set_attr "conds" "clob")
1930 (set_attr "length" "8")]
1933 (define_insn_and_split "*ite_ne_zeroextractsi"
1934 [(set (match_operand:SI 0 "s_register_operand" "=r")
1935 (if_then_else:SI (ne (zero_extract:SI
1936 (match_operand:SI 1 "s_register_operand" "r")
1937 (match_operand:SI 2 "const_int_operand" "n")
1938 (match_operand:SI 3 "const_int_operand" "n"))
1940 (match_operand:SI 4 "arm_not_operand" "rIK")
1942 (clobber (reg:CC CC_REGNUM))]
1944 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1945 && INTVAL (operands[2]) > 0
1946 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1947 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1948 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1951 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1952 && INTVAL (operands[2]) > 0
1953 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1954 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1955 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1956 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1957 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1959 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1961 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1962 (match_dup 0) (match_dup 4)))]
1964 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1965 << INTVAL (operands[3]));
1967 [(set_attr "conds" "clob")
1968 (set_attr "length" "8")]
1971 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
1972 [(set (match_operand:SI 0 "s_register_operand" "=r")
1973 (if_then_else:SI (ne (zero_extract:SI
1974 (match_operand:SI 1 "s_register_operand" "r")
1975 (match_operand:SI 2 "const_int_operand" "n")
1978 (match_operand:SI 3 "arm_not_operand" "rIK")
1980 (clobber (reg:CC CC_REGNUM))]
1981 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1983 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1984 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1985 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1987 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1989 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1990 (match_dup 0) (match_dup 3)))]
1992 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1994 [(set_attr "conds" "clob")
1995 (set_attr "length" "8")]
1999 [(set (match_operand:SI 0 "s_register_operand" "")
2000 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2001 (match_operand:SI 2 "const_int_operand" "")
2002 (match_operand:SI 3 "const_int_operand" "")))
2003 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2005 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2006 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2008 HOST_WIDE_INT temp = INTVAL (operands[2]);
2010 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2011 operands[3] = GEN_INT (32 - temp);
2015 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2017 [(set (match_operand:SI 0 "s_register_operand" "")
2018 (match_operator:SI 1 "shiftable_operator"
2019 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2020 (match_operand:SI 3 "const_int_operand" "")
2021 (match_operand:SI 4 "const_int_operand" ""))
2022 (match_operand:SI 5 "s_register_operand" "")]))
2023 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2025 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2028 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2031 HOST_WIDE_INT temp = INTVAL (operands[3]);
2033 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2034 operands[4] = GEN_INT (32 - temp);
2039 [(set (match_operand:SI 0 "s_register_operand" "")
2040 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2041 (match_operand:SI 2 "const_int_operand" "")
2042 (match_operand:SI 3 "const_int_operand" "")))]
2044 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2045 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2047 HOST_WIDE_INT temp = INTVAL (operands[2]);
2049 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2050 operands[3] = GEN_INT (32 - temp);
2055 [(set (match_operand:SI 0 "s_register_operand" "")
2056 (match_operator:SI 1 "shiftable_operator"
2057 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2058 (match_operand:SI 3 "const_int_operand" "")
2059 (match_operand:SI 4 "const_int_operand" ""))
2060 (match_operand:SI 5 "s_register_operand" "")]))
2061 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2063 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2066 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2069 HOST_WIDE_INT temp = INTVAL (operands[3]);
2071 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2072 operands[4] = GEN_INT (32 - temp);
2076 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2077 ;;; represented by the bitfield, then this will produce incorrect results.
2078 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2079 ;;; which have a real bit-field insert instruction, the truncation happens
2080 ;;; in the bit-field insert instruction itself. Since arm does not have a
2081 ;;; bit-field insert instruction, we would have to emit code here to truncate
2082 ;;; the value before we insert. This loses some of the advantage of having
2083 ;;; this insv pattern, so this pattern needs to be reevalutated.
2085 ; ??? Use Thumb-2 bitfield insert/extract instructions
2086 (define_expand "insv"
2087 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2088 (match_operand:SI 1 "general_operand" "")
2089 (match_operand:SI 2 "general_operand" ""))
2090 (match_operand:SI 3 "reg_or_int_operand" ""))]
2094 int start_bit = INTVAL (operands[2]);
2095 int width = INTVAL (operands[1]);
2096 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2097 rtx target, subtarget;
2099 target = operands[0];
2100 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2101 subreg as the final target. */
2102 if (GET_CODE (target) == SUBREG)
2104 subtarget = gen_reg_rtx (SImode);
2105 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2106 < GET_MODE_SIZE (SImode))
2107 target = SUBREG_REG (target);
2112 if (GET_CODE (operands[3]) == CONST_INT)
2114 /* Since we are inserting a known constant, we may be able to
2115 reduce the number of bits that we have to clear so that
2116 the mask becomes simple. */
2117 /* ??? This code does not check to see if the new mask is actually
2118 simpler. It may not be. */
2119 rtx op1 = gen_reg_rtx (SImode);
2120 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2121 start of this pattern. */
2122 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2123 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2125 emit_insn (gen_andsi3 (op1, operands[0],
2126 gen_int_mode (~mask2, SImode)));
2127 emit_insn (gen_iorsi3 (subtarget, op1,
2128 gen_int_mode (op3_value << start_bit, SImode)));
2130 else if (start_bit == 0
2131 && !(const_ok_for_arm (mask)
2132 || const_ok_for_arm (~mask)))
2134 /* A Trick, since we are setting the bottom bits in the word,
2135 we can shift operand[3] up, operand[0] down, OR them together
2136 and rotate the result back again. This takes 3 insns, and
2137 the third might be mergeable into another op. */
2138 /* The shift up copes with the possibility that operand[3] is
2139 wider than the bitfield. */
2140 rtx op0 = gen_reg_rtx (SImode);
2141 rtx op1 = gen_reg_rtx (SImode);
2143 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2144 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2145 emit_insn (gen_iorsi3 (op1, op1, op0));
2146 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2148 else if ((width + start_bit == 32)
2149 && !(const_ok_for_arm (mask)
2150 || const_ok_for_arm (~mask)))
2152 /* Similar trick, but slightly less efficient. */
2154 rtx op0 = gen_reg_rtx (SImode);
2155 rtx op1 = gen_reg_rtx (SImode);
2157 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2158 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2159 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2160 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2164 rtx op0 = gen_int_mode (mask, SImode);
2165 rtx op1 = gen_reg_rtx (SImode);
2166 rtx op2 = gen_reg_rtx (SImode);
2168 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2170 rtx tmp = gen_reg_rtx (SImode);
2172 emit_insn (gen_movsi (tmp, op0));
2176 /* Mask out any bits in operand[3] that are not needed. */
2177 emit_insn (gen_andsi3 (op1, operands[3], op0));
2179 if (GET_CODE (op0) == CONST_INT
2180 && (const_ok_for_arm (mask << start_bit)
2181 || const_ok_for_arm (~(mask << start_bit))))
2183 op0 = gen_int_mode (~(mask << start_bit), SImode);
2184 emit_insn (gen_andsi3 (op2, operands[0], op0));
2188 if (GET_CODE (op0) == CONST_INT)
2190 rtx tmp = gen_reg_rtx (SImode);
2192 emit_insn (gen_movsi (tmp, op0));
2197 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2199 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2203 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2205 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2208 if (subtarget != target)
2210 /* If TARGET is still a SUBREG, then it must be wider than a word,
2211 so we must be careful only to set the subword we were asked to. */
2212 if (GET_CODE (target) == SUBREG)
2213 emit_move_insn (target, subtarget);
2215 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2222 ; constants for op 2 will never be given to these patterns.
2223 (define_insn_and_split "*anddi_notdi_di"
2224 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2225 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2226 (match_operand:DI 2 "s_register_operand" "0,r")))]
2229 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2230 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2231 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2234 operands[3] = gen_highpart (SImode, operands[0]);
2235 operands[0] = gen_lowpart (SImode, operands[0]);
2236 operands[4] = gen_highpart (SImode, operands[1]);
2237 operands[1] = gen_lowpart (SImode, operands[1]);
2238 operands[5] = gen_highpart (SImode, operands[2]);
2239 operands[2] = gen_lowpart (SImode, operands[2]);
2241 [(set_attr "length" "8")
2242 (set_attr "predicable" "yes")]
2245 (define_insn_and_split "*anddi_notzesidi_di"
2246 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2247 (and:DI (not:DI (zero_extend:DI
2248 (match_operand:SI 2 "s_register_operand" "r,r")))
2249 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2252 bic%?\\t%Q0, %Q1, %2
2254 ; (not (zero_extend ...)) allows us to just copy the high word from
2255 ; operand1 to operand0.
2258 && operands[0] != operands[1]"
2259 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2260 (set (match_dup 3) (match_dup 4))]
2263 operands[3] = gen_highpart (SImode, operands[0]);
2264 operands[0] = gen_lowpart (SImode, operands[0]);
2265 operands[4] = gen_highpart (SImode, operands[1]);
2266 operands[1] = gen_lowpart (SImode, operands[1]);
2268 [(set_attr "length" "4,8")
2269 (set_attr "predicable" "yes")]
2272 (define_insn_and_split "*anddi_notsesidi_di"
2273 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2274 (and:DI (not:DI (sign_extend:DI
2275 (match_operand:SI 2 "s_register_operand" "r,r")))
2276 (match_operand:DI 1 "s_register_operand" "0,r")))]
2279 "TARGET_32BIT && reload_completed"
2280 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2281 (set (match_dup 3) (and:SI (not:SI
2282 (ashiftrt:SI (match_dup 2) (const_int 31)))
2286 operands[3] = gen_highpart (SImode, operands[0]);
2287 operands[0] = gen_lowpart (SImode, operands[0]);
2288 operands[4] = gen_highpart (SImode, operands[1]);
2289 operands[1] = gen_lowpart (SImode, operands[1]);
2291 [(set_attr "length" "8")
2292 (set_attr "predicable" "yes")]
2295 (define_insn "andsi_notsi_si"
2296 [(set (match_operand:SI 0 "s_register_operand" "=r")
2297 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2298 (match_operand:SI 1 "s_register_operand" "r")))]
2300 "bic%?\\t%0, %1, %2"
2301 [(set_attr "predicable" "yes")]
2304 (define_insn "bicsi3"
2305 [(set (match_operand:SI 0 "register_operand" "=l")
2306 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2307 (match_operand:SI 2 "register_operand" "0")))]
2310 [(set_attr "length" "2")]
2313 (define_insn "andsi_not_shiftsi_si"
2314 [(set (match_operand:SI 0 "s_register_operand" "=r")
2315 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2316 [(match_operand:SI 2 "s_register_operand" "r")
2317 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2318 (match_operand:SI 1 "s_register_operand" "r")))]
2320 "bic%?\\t%0, %1, %2%S4"
2321 [(set_attr "predicable" "yes")
2322 (set_attr "shift" "2")
2323 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2324 (const_string "alu_shift")
2325 (const_string "alu_shift_reg")))]
2328 (define_insn "*andsi_notsi_si_compare0"
2329 [(set (reg:CC_NOOV CC_REGNUM)
2331 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2332 (match_operand:SI 1 "s_register_operand" "r"))
2334 (set (match_operand:SI 0 "s_register_operand" "=r")
2335 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2337 "bic%.\\t%0, %1, %2"
2338 [(set_attr "conds" "set")]
2341 (define_insn "*andsi_notsi_si_compare0_scratch"
2342 [(set (reg:CC_NOOV CC_REGNUM)
2344 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2345 (match_operand:SI 1 "s_register_operand" "r"))
2347 (clobber (match_scratch:SI 0 "=r"))]
2349 "bic%.\\t%0, %1, %2"
2350 [(set_attr "conds" "set")]
2353 (define_insn "iordi3"
2354 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2355 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2356 (match_operand:DI 2 "s_register_operand" "r,r")))]
2357 "TARGET_32BIT && ! TARGET_IWMMXT"
2359 [(set_attr "length" "8")
2360 (set_attr "predicable" "yes")]
2363 (define_insn "*iordi_zesidi_di"
2364 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2365 (ior:DI (zero_extend:DI
2366 (match_operand:SI 2 "s_register_operand" "r,r"))
2367 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2370 orr%?\\t%Q0, %Q1, %2
2372 [(set_attr "length" "4,8")
2373 (set_attr "predicable" "yes")]
2376 (define_insn "*iordi_sesidi_di"
2377 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2378 (ior:DI (sign_extend:DI
2379 (match_operand:SI 2 "s_register_operand" "r,r"))
2380 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2383 [(set_attr "length" "8")
2384 (set_attr "predicable" "yes")]
2387 (define_expand "iorsi3"
2388 [(set (match_operand:SI 0 "s_register_operand" "")
2389 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2390 (match_operand:SI 2 "reg_or_int_operand" "")))]
2393 if (GET_CODE (operands[2]) == CONST_INT)
2397 arm_split_constant (IOR, SImode, NULL_RTX,
2398 INTVAL (operands[2]), operands[0], operands[1],
2399 optimize && !no_new_pseudos);
2402 else /* TARGET_THUMB1 */
2403 operands [2] = force_reg (SImode, operands [2]);
2408 (define_insn_and_split "*arm_iorsi3"
2409 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2410 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2411 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2417 && GET_CODE (operands[2]) == CONST_INT
2418 && !const_ok_for_arm (INTVAL (operands[2]))"
2419 [(clobber (const_int 0))]
2421 arm_split_constant (IOR, SImode, curr_insn,
2422 INTVAL (operands[2]), operands[0], operands[1], 0);
2425 [(set_attr "length" "4,16")
2426 (set_attr "predicable" "yes")]
2429 (define_insn "*thumb1_iorsi3"
2430 [(set (match_operand:SI 0 "register_operand" "=l")
2431 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2432 (match_operand:SI 2 "register_operand" "l")))]
2435 [(set_attr "length" "2")]
2439 [(match_scratch:SI 3 "r")
2440 (set (match_operand:SI 0 "arm_general_register_operand" "")
2441 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2442 (match_operand:SI 2 "const_int_operand" "")))]
2444 && !const_ok_for_arm (INTVAL (operands[2]))
2445 && const_ok_for_arm (~INTVAL (operands[2]))"
2446 [(set (match_dup 3) (match_dup 2))
2447 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2451 (define_insn "*iorsi3_compare0"
2452 [(set (reg:CC_NOOV CC_REGNUM)
2453 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2454 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2456 (set (match_operand:SI 0 "s_register_operand" "=r")
2457 (ior:SI (match_dup 1) (match_dup 2)))]
2459 "orr%.\\t%0, %1, %2"
2460 [(set_attr "conds" "set")]
2463 (define_insn "*iorsi3_compare0_scratch"
2464 [(set (reg:CC_NOOV CC_REGNUM)
2465 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2466 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2468 (clobber (match_scratch:SI 0 "=r"))]
2470 "orr%.\\t%0, %1, %2"
2471 [(set_attr "conds" "set")]
2474 (define_insn "xordi3"
2475 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2476 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2477 (match_operand:DI 2 "s_register_operand" "r,r")))]
2478 "TARGET_32BIT && !TARGET_IWMMXT"
2480 [(set_attr "length" "8")
2481 (set_attr "predicable" "yes")]
2484 (define_insn "*xordi_zesidi_di"
2485 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2486 (xor:DI (zero_extend:DI
2487 (match_operand:SI 2 "s_register_operand" "r,r"))
2488 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2491 eor%?\\t%Q0, %Q1, %2
2493 [(set_attr "length" "4,8")
2494 (set_attr "predicable" "yes")]
2497 (define_insn "*xordi_sesidi_di"
2498 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2499 (xor:DI (sign_extend:DI
2500 (match_operand:SI 2 "s_register_operand" "r,r"))
2501 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2504 [(set_attr "length" "8")
2505 (set_attr "predicable" "yes")]
2508 (define_expand "xorsi3"
2509 [(set (match_operand:SI 0 "s_register_operand" "")
2510 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2511 (match_operand:SI 2 "arm_rhs_operand" "")))]
2514 if (GET_CODE (operands[2]) == CONST_INT)
2515 operands[2] = force_reg (SImode, operands[2]);
2519 (define_insn "*arm_xorsi3"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2522 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2524 "eor%?\\t%0, %1, %2"
2525 [(set_attr "predicable" "yes")]
2528 (define_insn "*thumb1_xorsi3"
2529 [(set (match_operand:SI 0 "register_operand" "=l")
2530 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2531 (match_operand:SI 2 "register_operand" "l")))]
2534 [(set_attr "length" "2")]
2537 (define_insn "*xorsi3_compare0"
2538 [(set (reg:CC_NOOV CC_REGNUM)
2539 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2540 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2542 (set (match_operand:SI 0 "s_register_operand" "=r")
2543 (xor:SI (match_dup 1) (match_dup 2)))]
2545 "eor%.\\t%0, %1, %2"
2546 [(set_attr "conds" "set")]
2549 (define_insn "*xorsi3_compare0_scratch"
2550 [(set (reg:CC_NOOV CC_REGNUM)
2551 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2552 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2556 [(set_attr "conds" "set")]
2559 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2560 ; (NOT D) we can sometimes merge the final NOT into one of the following
2564 [(set (match_operand:SI 0 "s_register_operand" "")
2565 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2566 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2567 (match_operand:SI 3 "arm_rhs_operand" "")))
2568 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2570 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2571 (not:SI (match_dup 3))))
2572 (set (match_dup 0) (not:SI (match_dup 4)))]
2576 (define_insn "*andsi_iorsi3_notsi"
2577 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2578 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2579 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2580 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2582 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2583 [(set_attr "length" "8")
2584 (set_attr "ce_count" "2")
2585 (set_attr "predicable" "yes")]
2588 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2589 ; insns are available?
2591 [(set (match_operand:SI 0 "s_register_operand" "")
2592 (match_operator:SI 1 "logical_binary_operator"
2593 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2594 (match_operand:SI 3 "const_int_operand" "")
2595 (match_operand:SI 4 "const_int_operand" ""))
2596 (match_operator:SI 9 "logical_binary_operator"
2597 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2598 (match_operand:SI 6 "const_int_operand" ""))
2599 (match_operand:SI 7 "s_register_operand" "")])]))
2600 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2602 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2603 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2606 [(ashift:SI (match_dup 2) (match_dup 4))
2610 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2613 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2617 [(set (match_operand:SI 0 "s_register_operand" "")
2618 (match_operator:SI 1 "logical_binary_operator"
2619 [(match_operator:SI 9 "logical_binary_operator"
2620 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2621 (match_operand:SI 6 "const_int_operand" ""))
2622 (match_operand:SI 7 "s_register_operand" "")])
2623 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2624 (match_operand:SI 3 "const_int_operand" "")
2625 (match_operand:SI 4 "const_int_operand" ""))]))
2626 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2628 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2629 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2632 [(ashift:SI (match_dup 2) (match_dup 4))
2636 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2639 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2643 [(set (match_operand:SI 0 "s_register_operand" "")
2644 (match_operator:SI 1 "logical_binary_operator"
2645 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2646 (match_operand:SI 3 "const_int_operand" "")
2647 (match_operand:SI 4 "const_int_operand" ""))
2648 (match_operator:SI 9 "logical_binary_operator"
2649 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2650 (match_operand:SI 6 "const_int_operand" ""))
2651 (match_operand:SI 7 "s_register_operand" "")])]))
2652 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2654 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2655 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2658 [(ashift:SI (match_dup 2) (match_dup 4))
2662 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2665 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2669 [(set (match_operand:SI 0 "s_register_operand" "")
2670 (match_operator:SI 1 "logical_binary_operator"
2671 [(match_operator:SI 9 "logical_binary_operator"
2672 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2673 (match_operand:SI 6 "const_int_operand" ""))
2674 (match_operand:SI 7 "s_register_operand" "")])
2675 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2676 (match_operand:SI 3 "const_int_operand" "")
2677 (match_operand:SI 4 "const_int_operand" ""))]))
2678 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2680 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2681 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2684 [(ashift:SI (match_dup 2) (match_dup 4))
2688 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2691 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2695 ;; Minimum and maximum insns
2697 (define_expand "smaxsi3"
2699 (set (match_operand:SI 0 "s_register_operand" "")
2700 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2701 (match_operand:SI 2 "arm_rhs_operand" "")))
2702 (clobber (reg:CC CC_REGNUM))])]
2705 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2707 /* No need for a clobber of the condition code register here. */
2708 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2709 gen_rtx_SMAX (SImode, operands[1],
2715 (define_insn "*smax_0"
2716 [(set (match_operand:SI 0 "s_register_operand" "=r")
2717 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2720 "bic%?\\t%0, %1, %1, asr #31"
2721 [(set_attr "predicable" "yes")]
2724 (define_insn "*smax_m1"
2725 [(set (match_operand:SI 0 "s_register_operand" "=r")
2726 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2729 "orr%?\\t%0, %1, %1, asr #31"
2730 [(set_attr "predicable" "yes")]
2733 (define_insn "*arm_smax_insn"
2734 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2735 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2736 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2737 (clobber (reg:CC CC_REGNUM))]
2740 cmp\\t%1, %2\;movlt\\t%0, %2
2741 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2742 [(set_attr "conds" "clob")
2743 (set_attr "length" "8,12")]
2746 (define_expand "sminsi3"
2748 (set (match_operand:SI 0 "s_register_operand" "")
2749 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2750 (match_operand:SI 2 "arm_rhs_operand" "")))
2751 (clobber (reg:CC CC_REGNUM))])]
2754 if (operands[2] == const0_rtx)
2756 /* No need for a clobber of the condition code register here. */
2757 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2758 gen_rtx_SMIN (SImode, operands[1],
2764 (define_insn "*smin_0"
2765 [(set (match_operand:SI 0 "s_register_operand" "=r")
2766 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2769 "and%?\\t%0, %1, %1, asr #31"
2770 [(set_attr "predicable" "yes")]
2773 (define_insn "*arm_smin_insn"
2774 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2775 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2776 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2777 (clobber (reg:CC CC_REGNUM))]
2780 cmp\\t%1, %2\;movge\\t%0, %2
2781 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2782 [(set_attr "conds" "clob")
2783 (set_attr "length" "8,12")]
2786 (define_expand "umaxsi3"
2788 (set (match_operand:SI 0 "s_register_operand" "")
2789 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2790 (match_operand:SI 2 "arm_rhs_operand" "")))
2791 (clobber (reg:CC CC_REGNUM))])]
2796 (define_insn "*arm_umaxsi3"
2797 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2798 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2799 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2800 (clobber (reg:CC CC_REGNUM))]
2803 cmp\\t%1, %2\;movcc\\t%0, %2
2804 cmp\\t%1, %2\;movcs\\t%0, %1
2805 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2806 [(set_attr "conds" "clob")
2807 (set_attr "length" "8,8,12")]
2810 (define_expand "uminsi3"
2812 (set (match_operand:SI 0 "s_register_operand" "")
2813 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2814 (match_operand:SI 2 "arm_rhs_operand" "")))
2815 (clobber (reg:CC CC_REGNUM))])]
2820 (define_insn "*arm_uminsi3"
2821 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2822 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2823 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2824 (clobber (reg:CC CC_REGNUM))]
2827 cmp\\t%1, %2\;movcs\\t%0, %2
2828 cmp\\t%1, %2\;movcc\\t%0, %1
2829 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2830 [(set_attr "conds" "clob")
2831 (set_attr "length" "8,8,12")]
2834 (define_insn "*store_minmaxsi"
2835 [(set (match_operand:SI 0 "memory_operand" "=m")
2836 (match_operator:SI 3 "minmax_operator"
2837 [(match_operand:SI 1 "s_register_operand" "r")
2838 (match_operand:SI 2 "s_register_operand" "r")]))
2839 (clobber (reg:CC CC_REGNUM))]
2842 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2843 operands[1], operands[2]);
2844 output_asm_insn (\"cmp\\t%1, %2\", operands);
2846 output_asm_insn (\"ite\t%d3\", operands);
2847 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2848 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2851 [(set_attr "conds" "clob")
2852 (set (attr "length")
2853 (if_then_else (eq_attr "is_thumb" "yes")
2856 (set_attr "type" "store1")]
2859 ; Reject the frame pointer in operand[1], since reloading this after
2860 ; it has been eliminated can cause carnage.
2861 (define_insn "*minmax_arithsi"
2862 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2863 (match_operator:SI 4 "shiftable_operator"
2864 [(match_operator:SI 5 "minmax_operator"
2865 [(match_operand:SI 2 "s_register_operand" "r,r")
2866 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2867 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2868 (clobber (reg:CC CC_REGNUM))]
2869 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2872 enum rtx_code code = GET_CODE (operands[4]);
2875 if (which_alternative != 0 || operands[3] != const0_rtx
2876 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2881 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2882 operands[2], operands[3]);
2883 output_asm_insn (\"cmp\\t%2, %3\", operands);
2887 output_asm_insn (\"ite\\t%d5\", operands);
2889 output_asm_insn (\"it\\t%d5\", operands);
2891 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
2893 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
2896 [(set_attr "conds" "clob")
2897 (set (attr "length")
2898 (if_then_else (eq_attr "is_thumb" "yes")
2904 ;; Shift and rotation insns
2906 (define_expand "ashldi3"
2907 [(set (match_operand:DI 0 "s_register_operand" "")
2908 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
2909 (match_operand:SI 2 "reg_or_int_operand" "")))]
2912 if (GET_CODE (operands[2]) == CONST_INT)
2914 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2916 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
2919 /* Ideally we shouldn't fail here if we could know that operands[1]
2920 ends up already living in an iwmmxt register. Otherwise it's
2921 cheaper to have the alternate code being generated than moving
2922 values to iwmmxt regs and back. */
2925 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
2930 (define_insn "arm_ashldi3_1bit"
2931 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2932 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2934 (clobber (reg:CC CC_REGNUM))]
2936 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
2937 [(set_attr "conds" "clob")
2938 (set_attr "length" "8")]
2941 (define_expand "ashlsi3"
2942 [(set (match_operand:SI 0 "s_register_operand" "")
2943 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
2944 (match_operand:SI 2 "arm_rhs_operand" "")))]
2947 if (GET_CODE (operands[2]) == CONST_INT
2948 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2950 emit_insn (gen_movsi (operands[0], const0_rtx));
2956 (define_insn "*thumb1_ashlsi3"
2957 [(set (match_operand:SI 0 "register_operand" "=l,l")
2958 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
2959 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
2962 [(set_attr "length" "2")]
2965 (define_expand "ashrdi3"
2966 [(set (match_operand:DI 0 "s_register_operand" "")
2967 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
2968 (match_operand:SI 2 "reg_or_int_operand" "")))]
2971 if (GET_CODE (operands[2]) == CONST_INT)
2973 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2975 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
2978 /* Ideally we shouldn't fail here if we could know that operands[1]
2979 ends up already living in an iwmmxt register. Otherwise it's
2980 cheaper to have the alternate code being generated than moving
2981 values to iwmmxt regs and back. */
2984 else if (!TARGET_REALLY_IWMMXT)
2989 (define_insn "arm_ashrdi3_1bit"
2990 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2991 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2993 (clobber (reg:CC CC_REGNUM))]
2995 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
2996 [(set_attr "conds" "clob")
2997 (set_attr "length" "8")]
3000 (define_expand "ashrsi3"
3001 [(set (match_operand:SI 0 "s_register_operand" "")
3002 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3003 (match_operand:SI 2 "arm_rhs_operand" "")))]
3006 if (GET_CODE (operands[2]) == CONST_INT
3007 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3008 operands[2] = GEN_INT (31);
3012 (define_insn "*thumb1_ashrsi3"
3013 [(set (match_operand:SI 0 "register_operand" "=l,l")
3014 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3015 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3018 [(set_attr "length" "2")]
3021 (define_expand "lshrdi3"
3022 [(set (match_operand:DI 0 "s_register_operand" "")
3023 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3024 (match_operand:SI 2 "reg_or_int_operand" "")))]
3027 if (GET_CODE (operands[2]) == CONST_INT)
3029 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3031 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3034 /* Ideally we shouldn't fail here if we could know that operands[1]
3035 ends up already living in an iwmmxt register. Otherwise it's
3036 cheaper to have the alternate code being generated than moving
3037 values to iwmmxt regs and back. */
3040 else if (!TARGET_REALLY_IWMMXT)
3045 (define_insn "arm_lshrdi3_1bit"
3046 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3047 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3049 (clobber (reg:CC CC_REGNUM))]
3051 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3052 [(set_attr "conds" "clob")
3053 (set_attr "length" "8")]
3056 (define_expand "lshrsi3"
3057 [(set (match_operand:SI 0 "s_register_operand" "")
3058 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3059 (match_operand:SI 2 "arm_rhs_operand" "")))]
3062 if (GET_CODE (operands[2]) == CONST_INT
3063 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3065 emit_insn (gen_movsi (operands[0], const0_rtx));
3071 (define_insn "*thumb1_lshrsi3"
3072 [(set (match_operand:SI 0 "register_operand" "=l,l")
3073 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3074 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3077 [(set_attr "length" "2")]
3080 (define_expand "rotlsi3"
3081 [(set (match_operand:SI 0 "s_register_operand" "")
3082 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3083 (match_operand:SI 2 "reg_or_int_operand" "")))]
3086 if (GET_CODE (operands[2]) == CONST_INT)
3087 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3090 rtx reg = gen_reg_rtx (SImode);
3091 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3097 (define_expand "rotrsi3"
3098 [(set (match_operand:SI 0 "s_register_operand" "")
3099 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3100 (match_operand:SI 2 "arm_rhs_operand" "")))]
3105 if (GET_CODE (operands[2]) == CONST_INT
3106 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3107 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3109 else /* TARGET_THUMB1 */
3111 if (GET_CODE (operands [2]) == CONST_INT)
3112 operands [2] = force_reg (SImode, operands[2]);
3117 (define_insn "*thumb1_rotrsi3"
3118 [(set (match_operand:SI 0 "register_operand" "=l")
3119 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3120 (match_operand:SI 2 "register_operand" "l")))]
3123 [(set_attr "length" "2")]
3126 (define_insn "*arm_shiftsi3"
3127 [(set (match_operand:SI 0 "s_register_operand" "=r")
3128 (match_operator:SI 3 "shift_operator"
3129 [(match_operand:SI 1 "s_register_operand" "r")
3130 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3132 "* return arm_output_shift(operands, 0);"
3133 [(set_attr "predicable" "yes")
3134 (set_attr "shift" "1")
3135 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3136 (const_string "alu_shift")
3137 (const_string "alu_shift_reg")))]
3140 (define_insn "*shiftsi3_compare0"
3141 [(set (reg:CC_NOOV CC_REGNUM)
3142 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3143 [(match_operand:SI 1 "s_register_operand" "r")
3144 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3146 (set (match_operand:SI 0 "s_register_operand" "=r")
3147 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3149 "* return arm_output_shift(operands, 1);"
3150 [(set_attr "conds" "set")
3151 (set_attr "shift" "1")
3152 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3153 (const_string "alu_shift")
3154 (const_string "alu_shift_reg")))]
3157 (define_insn "*shiftsi3_compare0_scratch"
3158 [(set (reg:CC_NOOV CC_REGNUM)
3159 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3160 [(match_operand:SI 1 "s_register_operand" "r")
3161 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3163 (clobber (match_scratch:SI 0 "=r"))]
3165 "* return arm_output_shift(operands, 1);"
3166 [(set_attr "conds" "set")
3167 (set_attr "shift" "1")]
3170 (define_insn "*arm_notsi_shiftsi"
3171 [(set (match_operand:SI 0 "s_register_operand" "=r")
3172 (not:SI (match_operator:SI 3 "shift_operator"
3173 [(match_operand:SI 1 "s_register_operand" "r")
3174 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3177 [(set_attr "predicable" "yes")
3178 (set_attr "shift" "1")
3179 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3180 (const_string "alu_shift")
3181 (const_string "alu_shift_reg")))]
3184 (define_insn "*arm_notsi_shiftsi_compare0"
3185 [(set (reg:CC_NOOV CC_REGNUM)
3186 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3187 [(match_operand:SI 1 "s_register_operand" "r")
3188 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3190 (set (match_operand:SI 0 "s_register_operand" "=r")
3191 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3194 [(set_attr "conds" "set")
3195 (set_attr "shift" "1")
3196 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3197 (const_string "alu_shift")
3198 (const_string "alu_shift_reg")))]
3201 (define_insn "*arm_not_shiftsi_compare0_scratch"
3202 [(set (reg:CC_NOOV CC_REGNUM)
3203 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3204 [(match_operand:SI 1 "s_register_operand" "r")
3205 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3207 (clobber (match_scratch:SI 0 "=r"))]
3210 [(set_attr "conds" "set")
3211 (set_attr "shift" "1")
3212 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3213 (const_string "alu_shift")
3214 (const_string "alu_shift_reg")))]
3217 ;; We don't really have extzv, but defining this using shifts helps
3218 ;; to reduce register pressure later on.
3220 (define_expand "extzv"
3222 (ashift:SI (match_operand:SI 1 "register_operand" "")
3223 (match_operand:SI 2 "const_int_operand" "")))
3224 (set (match_operand:SI 0 "register_operand" "")
3225 (lshiftrt:SI (match_dup 4)
3226 (match_operand:SI 3 "const_int_operand" "")))]
3230 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3231 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3233 operands[3] = GEN_INT (rshift);
3237 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3241 operands[2] = GEN_INT (lshift);
3242 operands[4] = gen_reg_rtx (SImode);
3247 ;; Unary arithmetic insns
3249 (define_expand "negdi2"
3251 [(set (match_operand:DI 0 "s_register_operand" "")
3252 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3253 (clobber (reg:CC CC_REGNUM))])]
3258 if (GET_CODE (operands[1]) != REG)
3259 operands[1] = force_reg (SImode, operands[1]);
3264 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3265 ;; The second alternative is to allow the common case of a *full* overlap.
3266 (define_insn "*arm_negdi2"
3267 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3268 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3269 (clobber (reg:CC CC_REGNUM))]
3271 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3272 [(set_attr "conds" "clob")
3273 (set_attr "length" "8")]
3276 (define_insn "*thumb1_negdi2"
3277 [(set (match_operand:DI 0 "register_operand" "=&l")
3278 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3279 (clobber (reg:CC CC_REGNUM))]
3281 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3282 [(set_attr "length" "6")]
3285 (define_expand "negsi2"
3286 [(set (match_operand:SI 0 "s_register_operand" "")
3287 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3292 (define_insn "*arm_negsi2"
3293 [(set (match_operand:SI 0 "s_register_operand" "=r")
3294 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3296 "rsb%?\\t%0, %1, #0"
3297 [(set_attr "predicable" "yes")]
3300 (define_insn "*thumb1_negsi2"
3301 [(set (match_operand:SI 0 "register_operand" "=l")
3302 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3305 [(set_attr "length" "2")]
3308 (define_expand "negsf2"
3309 [(set (match_operand:SF 0 "s_register_operand" "")
3310 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3311 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3315 (define_expand "negdf2"
3316 [(set (match_operand:DF 0 "s_register_operand" "")
3317 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3318 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3321 ;; abssi2 doesn't really clobber the condition codes if a different register
3322 ;; is being set. To keep things simple, assume during rtl manipulations that
3323 ;; it does, but tell the final scan operator the truth. Similarly for
3326 (define_expand "abssi2"
3328 [(set (match_operand:SI 0 "s_register_operand" "")
3329 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3330 (clobber (match_dup 2))])]
3334 operands[2] = gen_rtx_SCRATCH (SImode);
3336 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3339 (define_insn "*arm_abssi2"
3340 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3341 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3342 (clobber (reg:CC CC_REGNUM))]
3345 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3346 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3347 [(set_attr "conds" "clob,*")
3348 (set_attr "shift" "1")
3349 ;; predicable can't be set based on the variant, so left as no
3350 (set_attr "length" "8")]
3353 (define_insn_and_split "*thumb1_abssi2"
3354 [(set (match_operand:SI 0 "s_register_operand" "=l")
3355 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3356 (clobber (match_scratch:SI 2 "=&l"))]
3359 "TARGET_THUMB1 && reload_completed"
3360 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3361 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3362 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3364 [(set_attr "length" "6")]
3367 (define_insn "*arm_neg_abssi2"
3368 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3369 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3370 (clobber (reg:CC CC_REGNUM))]
3373 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3374 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3375 [(set_attr "conds" "clob,*")
3376 (set_attr "shift" "1")
3377 ;; predicable can't be set based on the variant, so left as no
3378 (set_attr "length" "8")]
3381 (define_insn_and_split "*thumb1_neg_abssi2"
3382 [(set (match_operand:SI 0 "s_register_operand" "=l")
3383 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3384 (clobber (match_scratch:SI 2 "=&l"))]
3387 "TARGET_THUMB1 && reload_completed"
3388 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3389 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3390 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3392 [(set_attr "length" "6")]
3395 (define_expand "abssf2"
3396 [(set (match_operand:SF 0 "s_register_operand" "")
3397 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3398 "TARGET_32BIT && TARGET_HARD_FLOAT"
3401 (define_expand "absdf2"
3402 [(set (match_operand:DF 0 "s_register_operand" "")
3403 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3404 "TARGET_32BIT && TARGET_HARD_FLOAT"
3407 (define_expand "sqrtsf2"
3408 [(set (match_operand:SF 0 "s_register_operand" "")
3409 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3410 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3413 (define_expand "sqrtdf2"
3414 [(set (match_operand:DF 0 "s_register_operand" "")
3415 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3416 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3419 (define_insn_and_split "one_cmpldi2"
3420 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3421 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3424 "TARGET_32BIT && reload_completed"
3425 [(set (match_dup 0) (not:SI (match_dup 1)))
3426 (set (match_dup 2) (not:SI (match_dup 3)))]
3429 operands[2] = gen_highpart (SImode, operands[0]);
3430 operands[0] = gen_lowpart (SImode, operands[0]);
3431 operands[3] = gen_highpart (SImode, operands[1]);
3432 operands[1] = gen_lowpart (SImode, operands[1]);
3434 [(set_attr "length" "8")
3435 (set_attr "predicable" "yes")]
3438 (define_expand "one_cmplsi2"
3439 [(set (match_operand:SI 0 "s_register_operand" "")
3440 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3445 (define_insn "*arm_one_cmplsi2"
3446 [(set (match_operand:SI 0 "s_register_operand" "=r")
3447 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3450 [(set_attr "predicable" "yes")]
3453 (define_insn "*thumb1_one_cmplsi2"
3454 [(set (match_operand:SI 0 "register_operand" "=l")
3455 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3458 [(set_attr "length" "2")]
3461 (define_insn "*notsi_compare0"
3462 [(set (reg:CC_NOOV CC_REGNUM)
3463 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3465 (set (match_operand:SI 0 "s_register_operand" "=r")
3466 (not:SI (match_dup 1)))]
3469 [(set_attr "conds" "set")]
3472 (define_insn "*notsi_compare0_scratch"
3473 [(set (reg:CC_NOOV CC_REGNUM)
3474 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3476 (clobber (match_scratch:SI 0 "=r"))]
3479 [(set_attr "conds" "set")]
3482 ;; Fixed <--> Floating conversion insns
3484 (define_expand "floatsisf2"
3485 [(set (match_operand:SF 0 "s_register_operand" "")
3486 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3487 "TARGET_32BIT && TARGET_HARD_FLOAT"
3489 if (TARGET_MAVERICK)
3491 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3496 (define_expand "floatsidf2"
3497 [(set (match_operand:DF 0 "s_register_operand" "")
3498 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3499 "TARGET_32BIT && TARGET_HARD_FLOAT"
3501 if (TARGET_MAVERICK)
3503 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3508 (define_expand "fix_truncsfsi2"
3509 [(set (match_operand:SI 0 "s_register_operand" "")
3510 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3511 "TARGET_32BIT && TARGET_HARD_FLOAT"
3513 if (TARGET_MAVERICK)
3515 if (!cirrus_fp_register (operands[0], SImode))
3516 operands[0] = force_reg (SImode, operands[0]);
3517 if (!cirrus_fp_register (operands[1], SFmode))
3518 operands[1] = force_reg (SFmode, operands[0]);
3519 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3524 (define_expand "fix_truncdfsi2"
3525 [(set (match_operand:SI 0 "s_register_operand" "")
3526 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3527 "TARGET_32BIT && TARGET_HARD_FLOAT"
3529 if (TARGET_MAVERICK)
3531 if (!cirrus_fp_register (operands[1], DFmode))
3532 operands[1] = force_reg (DFmode, operands[0]);
3533 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3540 (define_expand "truncdfsf2"
3541 [(set (match_operand:SF 0 "s_register_operand" "")
3543 (match_operand:DF 1 "s_register_operand" "")))]
3544 "TARGET_32BIT && TARGET_HARD_FLOAT"
3548 ;; Zero and sign extension instructions.
3550 (define_expand "zero_extendsidi2"
3551 [(set (match_operand:DI 0 "s_register_operand" "")
3552 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3557 (define_insn "*arm_zero_extendsidi2"
3558 [(set (match_operand:DI 0 "s_register_operand" "=r")
3559 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3562 if (REGNO (operands[1])
3563 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3564 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3565 return \"mov%?\\t%R0, #0\";
3567 [(set_attr "length" "8")
3568 (set_attr "predicable" "yes")]
3571 (define_expand "zero_extendqidi2"
3572 [(set (match_operand:DI 0 "s_register_operand" "")
3573 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3578 (define_insn "*arm_zero_extendqidi2"
3579 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3580 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3583 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3584 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3585 [(set_attr "length" "8")
3586 (set_attr "predicable" "yes")
3587 (set_attr "type" "*,load_byte")
3588 (set_attr "pool_range" "*,4092")
3589 (set_attr "neg_pool_range" "*,4084")]
3592 (define_expand "extendsidi2"
3593 [(set (match_operand:DI 0 "s_register_operand" "")
3594 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3599 (define_insn "*arm_extendsidi2"
3600 [(set (match_operand:DI 0 "s_register_operand" "=r")
3601 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3604 if (REGNO (operands[1])
3605 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3606 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3607 return \"mov%?\\t%R0, %Q0, asr #31\";
3609 [(set_attr "length" "8")
3610 (set_attr "shift" "1")
3611 (set_attr "predicable" "yes")]
3614 (define_expand "zero_extendhisi2"
3616 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3618 (set (match_operand:SI 0 "s_register_operand" "")
3619 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3623 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3625 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3626 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3630 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3632 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3636 if (!s_register_operand (operands[1], HImode))
3637 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3641 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3642 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3646 operands[1] = gen_lowpart (SImode, operands[1]);
3647 operands[2] = gen_reg_rtx (SImode);
3651 (define_insn "*thumb1_zero_extendhisi2"
3652 [(set (match_operand:SI 0 "register_operand" "=l")
3653 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3654 "TARGET_THUMB1 && !arm_arch6"
3656 rtx mem = XEXP (operands[1], 0);
3658 if (GET_CODE (mem) == CONST)
3659 mem = XEXP (mem, 0);
3661 if (GET_CODE (mem) == LABEL_REF)
3662 return \"ldr\\t%0, %1\";
3664 if (GET_CODE (mem) == PLUS)
3666 rtx a = XEXP (mem, 0);
3667 rtx b = XEXP (mem, 1);
3669 /* This can happen due to bugs in reload. */
3670 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3673 ops[0] = operands[0];
3676 output_asm_insn (\"mov %0, %1\", ops);
3678 XEXP (mem, 0) = operands[0];
3681 else if ( GET_CODE (a) == LABEL_REF
3682 && GET_CODE (b) == CONST_INT)
3683 return \"ldr\\t%0, %1\";
3686 return \"ldrh\\t%0, %1\";
3688 [(set_attr "length" "4")
3689 (set_attr "type" "load_byte")
3690 (set_attr "pool_range" "60")]
3693 (define_insn "*thumb1_zero_extendhisi2_v6"
3694 [(set (match_operand:SI 0 "register_operand" "=l,l")
3695 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3696 "TARGET_THUMB1 && arm_arch6"
3700 if (which_alternative == 0)
3701 return \"uxth\\t%0, %1\";
3703 mem = XEXP (operands[1], 0);
3705 if (GET_CODE (mem) == CONST)
3706 mem = XEXP (mem, 0);
3708 if (GET_CODE (mem) == LABEL_REF)
3709 return \"ldr\\t%0, %1\";
3711 if (GET_CODE (mem) == PLUS)
3713 rtx a = XEXP (mem, 0);
3714 rtx b = XEXP (mem, 1);
3716 /* This can happen due to bugs in reload. */
3717 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3720 ops[0] = operands[0];
3723 output_asm_insn (\"mov %0, %1\", ops);
3725 XEXP (mem, 0) = operands[0];
3728 else if ( GET_CODE (a) == LABEL_REF
3729 && GET_CODE (b) == CONST_INT)
3730 return \"ldr\\t%0, %1\";
3733 return \"ldrh\\t%0, %1\";
3735 [(set_attr "length" "2,4")
3736 (set_attr "type" "alu_shift,load_byte")
3737 (set_attr "pool_range" "*,60")]
3740 (define_insn "*arm_zero_extendhisi2"
3741 [(set (match_operand:SI 0 "s_register_operand" "=r")
3742 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3743 "TARGET_ARM && arm_arch4 && !arm_arch6"
3745 [(set_attr "type" "load_byte")
3746 (set_attr "predicable" "yes")
3747 (set_attr "pool_range" "256")
3748 (set_attr "neg_pool_range" "244")]
3751 (define_insn "*arm_zero_extendhisi2_v6"
3752 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3753 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3754 "TARGET_ARM && arm_arch6"
3758 [(set_attr "type" "alu_shift,load_byte")
3759 (set_attr "predicable" "yes")
3760 (set_attr "pool_range" "*,256")
3761 (set_attr "neg_pool_range" "*,244")]
3764 (define_insn "*arm_zero_extendhisi2addsi"
3765 [(set (match_operand:SI 0 "s_register_operand" "=r")
3766 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3767 (match_operand:SI 2 "s_register_operand" "r")))]
3769 "uxtah%?\\t%0, %2, %1"
3770 [(set_attr "type" "alu_shift")
3771 (set_attr "predicable" "yes")]
3774 (define_expand "zero_extendqisi2"
3775 [(set (match_operand:SI 0 "s_register_operand" "")
3776 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3779 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3783 emit_insn (gen_andsi3 (operands[0],
3784 gen_lowpart (SImode, operands[1]),
3787 else /* TARGET_THUMB */
3789 rtx temp = gen_reg_rtx (SImode);
3792 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3793 operands[1] = gen_lowpart (SImode, operands[1]);
3796 ops[1] = operands[1];
3797 ops[2] = GEN_INT (24);
3799 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3800 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3802 ops[0] = operands[0];
3804 ops[2] = GEN_INT (24);
3806 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3807 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3814 (define_insn "*thumb1_zero_extendqisi2"
3815 [(set (match_operand:SI 0 "register_operand" "=l")
3816 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3817 "TARGET_THUMB1 && !arm_arch6"
3819 [(set_attr "length" "2")
3820 (set_attr "type" "load_byte")
3821 (set_attr "pool_range" "32")]
3824 (define_insn "*thumb1_zero_extendqisi2_v6"
3825 [(set (match_operand:SI 0 "register_operand" "=l,l")
3826 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3827 "TARGET_THUMB1 && arm_arch6"
3831 [(set_attr "length" "2,2")
3832 (set_attr "type" "alu_shift,load_byte")
3833 (set_attr "pool_range" "*,32")]
3836 (define_insn "*arm_zero_extendqisi2"
3837 [(set (match_operand:SI 0 "s_register_operand" "=r")
3838 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3839 "TARGET_ARM && !arm_arch6"
3840 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3841 [(set_attr "type" "load_byte")
3842 (set_attr "predicable" "yes")
3843 (set_attr "pool_range" "4096")
3844 (set_attr "neg_pool_range" "4084")]
3847 (define_insn "*arm_zero_extendqisi2_v6"
3848 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3849 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3850 "TARGET_ARM && arm_arch6"
3853 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3854 [(set_attr "type" "alu_shift,load_byte")
3855 (set_attr "predicable" "yes")
3856 (set_attr "pool_range" "*,4096")
3857 (set_attr "neg_pool_range" "*,4084")]
3860 (define_insn "*arm_zero_extendqisi2addsi"
3861 [(set (match_operand:SI 0 "s_register_operand" "=r")
3862 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
3863 (match_operand:SI 2 "s_register_operand" "r")))]
3865 "uxtab%?\\t%0, %2, %1"
3866 [(set_attr "predicable" "yes")
3867 (set_attr "type" "alu_shift")]
3871 [(set (match_operand:SI 0 "s_register_operand" "")
3872 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
3873 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3874 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
3875 [(set (match_dup 2) (match_dup 1))
3876 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
3881 [(set (match_operand:SI 0 "s_register_operand" "")
3882 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
3883 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3884 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
3885 [(set (match_dup 2) (match_dup 1))
3886 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
3890 (define_insn "*compareqi_eq0"
3891 [(set (reg:CC_Z CC_REGNUM)
3892 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
3896 [(set_attr "conds" "set")]
3899 (define_expand "extendhisi2"
3901 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3903 (set (match_operand:SI 0 "s_register_operand" "")
3904 (ashiftrt:SI (match_dup 2)
3909 if (GET_CODE (operands[1]) == MEM)
3913 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
3918 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3919 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3924 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3926 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
3930 if (!s_register_operand (operands[1], HImode))
3931 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3936 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
3938 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3939 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3944 operands[1] = gen_lowpart (SImode, operands[1]);
3945 operands[2] = gen_reg_rtx (SImode);
3949 (define_insn "thumb1_extendhisi2"
3950 [(set (match_operand:SI 0 "register_operand" "=l")
3951 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
3952 (clobber (match_scratch:SI 2 "=&l"))]
3953 "TARGET_THUMB1 && !arm_arch6"
3957 rtx mem = XEXP (operands[1], 0);
3959 /* This code used to try to use 'V', and fix the address only if it was
3960 offsettable, but this fails for e.g. REG+48 because 48 is outside the
3961 range of QImode offsets, and offsettable_address_p does a QImode
3964 if (GET_CODE (mem) == CONST)
3965 mem = XEXP (mem, 0);
3967 if (GET_CODE (mem) == LABEL_REF)
3968 return \"ldr\\t%0, %1\";
3970 if (GET_CODE (mem) == PLUS)
3972 rtx a = XEXP (mem, 0);
3973 rtx b = XEXP (mem, 1);
3975 if (GET_CODE (a) == LABEL_REF
3976 && GET_CODE (b) == CONST_INT)
3977 return \"ldr\\t%0, %1\";
3979 if (GET_CODE (b) == REG)
3980 return \"ldrsh\\t%0, %1\";
3988 ops[2] = const0_rtx;
3991 gcc_assert (GET_CODE (ops[1]) == REG);
3993 ops[0] = operands[0];
3994 ops[3] = operands[2];
3995 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
3998 [(set_attr "length" "4")
3999 (set_attr "type" "load_byte")
4000 (set_attr "pool_range" "1020")]
4003 ;; We used to have an early-clobber on the scratch register here.
4004 ;; However, there's a bug somewhere in reload which means that this
4005 ;; can be partially ignored during spill allocation if the memory
4006 ;; address also needs reloading; this causes us to die later on when
4007 ;; we try to verify the operands. Fortunately, we don't really need
4008 ;; the early-clobber: we can always use operand 0 if operand 2
4009 ;; overlaps the address.
4010 (define_insn "*thumb1_extendhisi2_insn_v6"
4011 [(set (match_operand:SI 0 "register_operand" "=l,l")
4012 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4013 (clobber (match_scratch:SI 2 "=X,l"))]
4014 "TARGET_THUMB1 && arm_arch6"
4020 if (which_alternative == 0)
4021 return \"sxth\\t%0, %1\";
4023 mem = XEXP (operands[1], 0);
4025 /* This code used to try to use 'V', and fix the address only if it was
4026 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4027 range of QImode offsets, and offsettable_address_p does a QImode
4030 if (GET_CODE (mem) == CONST)
4031 mem = XEXP (mem, 0);
4033 if (GET_CODE (mem) == LABEL_REF)
4034 return \"ldr\\t%0, %1\";
4036 if (GET_CODE (mem) == PLUS)
4038 rtx a = XEXP (mem, 0);
4039 rtx b = XEXP (mem, 1);
4041 if (GET_CODE (a) == LABEL_REF
4042 && GET_CODE (b) == CONST_INT)
4043 return \"ldr\\t%0, %1\";
4045 if (GET_CODE (b) == REG)
4046 return \"ldrsh\\t%0, %1\";
4054 ops[2] = const0_rtx;
4057 gcc_assert (GET_CODE (ops[1]) == REG);
4059 ops[0] = operands[0];
4060 if (reg_mentioned_p (operands[2], ops[1]))
4063 ops[3] = operands[2];
4064 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4067 [(set_attr "length" "2,4")
4068 (set_attr "type" "alu_shift,load_byte")
4069 (set_attr "pool_range" "*,1020")]
4072 ;; This pattern will only be used when ldsh is not available
4073 (define_expand "extendhisi2_mem"
4074 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4076 (zero_extend:SI (match_dup 7)))
4077 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4078 (set (match_operand:SI 0 "" "")
4079 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4084 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4086 mem1 = change_address (operands[1], QImode, addr);
4087 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4088 operands[0] = gen_lowpart (SImode, operands[0]);
4090 operands[2] = gen_reg_rtx (SImode);
4091 operands[3] = gen_reg_rtx (SImode);
4092 operands[6] = gen_reg_rtx (SImode);
4095 if (BYTES_BIG_ENDIAN)
4097 operands[4] = operands[2];
4098 operands[5] = operands[3];
4102 operands[4] = operands[3];
4103 operands[5] = operands[2];
4108 (define_insn "*arm_extendhisi2"
4109 [(set (match_operand:SI 0 "s_register_operand" "=r")
4110 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4111 "TARGET_ARM && arm_arch4 && !arm_arch6"
4112 "ldr%(sh%)\\t%0, %1"
4113 [(set_attr "type" "load_byte")
4114 (set_attr "predicable" "yes")
4115 (set_attr "pool_range" "256")
4116 (set_attr "neg_pool_range" "244")]
4119 ;; ??? Check Thumb-2 pool range
4120 (define_insn "*arm_extendhisi2_v6"
4121 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4122 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4123 "TARGET_32BIT && arm_arch6"
4127 [(set_attr "type" "alu_shift,load_byte")
4128 (set_attr "predicable" "yes")
4129 (set_attr "pool_range" "*,256")
4130 (set_attr "neg_pool_range" "*,244")]
4133 (define_insn "*arm_extendhisi2addsi"
4134 [(set (match_operand:SI 0 "s_register_operand" "=r")
4135 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4136 (match_operand:SI 2 "s_register_operand" "r")))]
4138 "sxtah%?\\t%0, %2, %1"
4141 (define_expand "extendqihi2"
4143 (ashift:SI (match_operand:QI 1 "general_operand" "")
4145 (set (match_operand:HI 0 "s_register_operand" "")
4146 (ashiftrt:SI (match_dup 2)
4151 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4153 emit_insn (gen_rtx_SET (VOIDmode,
4155 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4158 if (!s_register_operand (operands[1], QImode))
4159 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4160 operands[0] = gen_lowpart (SImode, operands[0]);
4161 operands[1] = gen_lowpart (SImode, operands[1]);
4162 operands[2] = gen_reg_rtx (SImode);
4166 (define_insn "*arm_extendqihi_insn"
4167 [(set (match_operand:HI 0 "s_register_operand" "=r")
4168 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4169 "TARGET_ARM && arm_arch4"
4170 "ldr%(sb%)\\t%0, %1"
4171 [(set_attr "type" "load_byte")
4172 (set_attr "predicable" "yes")
4173 (set_attr "pool_range" "256")
4174 (set_attr "neg_pool_range" "244")]
4177 (define_expand "extendqisi2"
4179 (ashift:SI (match_operand:QI 1 "general_operand" "")
4181 (set (match_operand:SI 0 "s_register_operand" "")
4182 (ashiftrt:SI (match_dup 2)
4187 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4189 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4190 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4194 if (!s_register_operand (operands[1], QImode))
4195 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4199 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4200 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4204 operands[1] = gen_lowpart (SImode, operands[1]);
4205 operands[2] = gen_reg_rtx (SImode);
4209 (define_insn "*arm_extendqisi"
4210 [(set (match_operand:SI 0 "s_register_operand" "=r")
4211 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4212 "TARGET_ARM && arm_arch4 && !arm_arch6"
4213 "ldr%(sb%)\\t%0, %1"
4214 [(set_attr "type" "load_byte")
4215 (set_attr "predicable" "yes")
4216 (set_attr "pool_range" "256")
4217 (set_attr "neg_pool_range" "244")]
4220 (define_insn "*arm_extendqisi_v6"
4221 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4222 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4223 "TARGET_ARM && arm_arch6"
4227 [(set_attr "type" "alu_shift,load_byte")
4228 (set_attr "predicable" "yes")
4229 (set_attr "pool_range" "*,256")
4230 (set_attr "neg_pool_range" "*,244")]
4233 (define_insn "*arm_extendqisi2addsi"
4234 [(set (match_operand:SI 0 "s_register_operand" "=r")
4235 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4236 (match_operand:SI 2 "s_register_operand" "r")))]
4238 "sxtab%?\\t%0, %2, %1"
4239 [(set_attr "type" "alu_shift")
4240 (set_attr "predicable" "yes")]
4243 (define_insn "*thumb1_extendqisi2"
4244 [(set (match_operand:SI 0 "register_operand" "=l,l")
4245 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4246 "TARGET_THUMB1 && !arm_arch6"
4250 rtx mem = XEXP (operands[1], 0);
4252 if (GET_CODE (mem) == CONST)
4253 mem = XEXP (mem, 0);
4255 if (GET_CODE (mem) == LABEL_REF)
4256 return \"ldr\\t%0, %1\";
4258 if (GET_CODE (mem) == PLUS
4259 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4260 return \"ldr\\t%0, %1\";
4262 if (which_alternative == 0)
4263 return \"ldrsb\\t%0, %1\";
4265 ops[0] = operands[0];
4267 if (GET_CODE (mem) == PLUS)
4269 rtx a = XEXP (mem, 0);
4270 rtx b = XEXP (mem, 1);
4275 if (GET_CODE (a) == REG)
4277 if (GET_CODE (b) == REG)
4278 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4279 else if (REGNO (a) == REGNO (ops[0]))
4281 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4282 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4283 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4286 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4290 gcc_assert (GET_CODE (b) == REG);
4291 if (REGNO (b) == REGNO (ops[0]))
4293 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4294 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4295 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4298 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4301 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4303 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4304 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4305 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4310 ops[2] = const0_rtx;
4312 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4316 [(set_attr "length" "2,6")
4317 (set_attr "type" "load_byte,load_byte")
4318 (set_attr "pool_range" "32,32")]
4321 (define_insn "*thumb1_extendqisi2_v6"
4322 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4323 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4324 "TARGET_THUMB1 && arm_arch6"
4330 if (which_alternative == 0)
4331 return \"sxtb\\t%0, %1\";
4333 mem = XEXP (operands[1], 0);
4335 if (GET_CODE (mem) == CONST)
4336 mem = XEXP (mem, 0);
4338 if (GET_CODE (mem) == LABEL_REF)
4339 return \"ldr\\t%0, %1\";
4341 if (GET_CODE (mem) == PLUS
4342 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4343 return \"ldr\\t%0, %1\";
4345 if (which_alternative == 0)
4346 return \"ldrsb\\t%0, %1\";
4348 ops[0] = operands[0];
4350 if (GET_CODE (mem) == PLUS)
4352 rtx a = XEXP (mem, 0);
4353 rtx b = XEXP (mem, 1);
4358 if (GET_CODE (a) == REG)
4360 if (GET_CODE (b) == REG)
4361 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4362 else if (REGNO (a) == REGNO (ops[0]))
4364 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4365 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4368 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4372 gcc_assert (GET_CODE (b) == REG);
4373 if (REGNO (b) == REGNO (ops[0]))
4375 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4376 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4379 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4382 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4384 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4385 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4390 ops[2] = const0_rtx;
4392 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4396 [(set_attr "length" "2,2,4")
4397 (set_attr "type" "alu_shift,load_byte,load_byte")
4398 (set_attr "pool_range" "*,32,32")]
4401 (define_expand "extendsfdf2"
4402 [(set (match_operand:DF 0 "s_register_operand" "")
4403 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4404 "TARGET_32BIT && TARGET_HARD_FLOAT"
4408 ;; Move insns (including loads and stores)
4410 ;; XXX Just some ideas about movti.
4411 ;; I don't think these are a good idea on the arm, there just aren't enough
4413 ;;(define_expand "loadti"
4414 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4415 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4418 ;;(define_expand "storeti"
4419 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4420 ;; (match_operand:TI 1 "s_register_operand" ""))]
4423 ;;(define_expand "movti"
4424 ;; [(set (match_operand:TI 0 "general_operand" "")
4425 ;; (match_operand:TI 1 "general_operand" ""))]
4431 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4432 ;; operands[1] = copy_to_reg (operands[1]);
4433 ;; if (GET_CODE (operands[0]) == MEM)
4434 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4435 ;; else if (GET_CODE (operands[1]) == MEM)
4436 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4440 ;; emit_insn (insn);
4444 ;; Recognize garbage generated above.
4447 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4448 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4452 ;; register mem = (which_alternative < 3);
4453 ;; register const char *template;
4455 ;; operands[mem] = XEXP (operands[mem], 0);
4456 ;; switch (which_alternative)
4458 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4459 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4460 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4461 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4462 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4463 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4465 ;; output_asm_insn (template, operands);
4469 (define_expand "movdi"
4470 [(set (match_operand:DI 0 "general_operand" "")
4471 (match_operand:DI 1 "general_operand" ""))]
4474 if (!no_new_pseudos)
4476 if (GET_CODE (operands[0]) != REG)
4477 operands[1] = force_reg (DImode, operands[1]);
4482 (define_insn "*arm_movdi"
4483 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4484 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4486 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4488 && ( register_operand (operands[0], DImode)
4489 || register_operand (operands[1], DImode))"
4491 switch (which_alternative)
4498 return output_move_double (operands);
4501 [(set_attr "length" "8,12,16,8,8")
4502 (set_attr "type" "*,*,*,load2,store2")
4503 (set_attr "pool_range" "*,*,*,1020,*")
4504 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4508 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4509 (match_operand:ANY64 1 "const_double_operand" ""))]
4512 && (arm_const_double_inline_cost (operands[1])
4513 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4516 arm_split_constant (SET, SImode, curr_insn,
4517 INTVAL (gen_lowpart (SImode, operands[1])),
4518 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4519 arm_split_constant (SET, SImode, curr_insn,
4520 INTVAL (gen_highpart_mode (SImode,
4521 GET_MODE (operands[0]),
4523 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4528 ; If optimizing for size, or if we have load delay slots, then
4529 ; we want to split the constant into two separate operations.
4530 ; In both cases this may split a trivial part into a single data op
4531 ; leaving a single complex constant to load. We can also get longer
4532 ; offsets in a LDR which means we get better chances of sharing the pool
4533 ; entries. Finally, we can normally do a better job of scheduling
4534 ; LDR instructions than we can with LDM.
4535 ; This pattern will only match if the one above did not.
4537 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4538 (match_operand:ANY64 1 "const_double_operand" ""))]
4539 "TARGET_ARM && reload_completed
4540 && arm_const_double_by_parts (operands[1])"
4541 [(set (match_dup 0) (match_dup 1))
4542 (set (match_dup 2) (match_dup 3))]
4544 operands[2] = gen_highpart (SImode, operands[0]);
4545 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4547 operands[0] = gen_lowpart (SImode, operands[0]);
4548 operands[1] = gen_lowpart (SImode, operands[1]);
4553 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4554 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4555 "TARGET_EITHER && reload_completed"
4556 [(set (match_dup 0) (match_dup 1))
4557 (set (match_dup 2) (match_dup 3))]
4559 operands[2] = gen_highpart (SImode, operands[0]);
4560 operands[3] = gen_highpart (SImode, operands[1]);
4561 operands[0] = gen_lowpart (SImode, operands[0]);
4562 operands[1] = gen_lowpart (SImode, operands[1]);
4564 /* Handle a partial overlap. */
4565 if (rtx_equal_p (operands[0], operands[3]))
4567 rtx tmp0 = operands[0];
4568 rtx tmp1 = operands[1];
4570 operands[0] = operands[2];
4571 operands[1] = operands[3];
4578 ;; We can't actually do base+index doubleword loads if the index and
4579 ;; destination overlap. Split here so that we at least have chance to
4582 [(set (match_operand:DI 0 "s_register_operand" "")
4583 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4584 (match_operand:SI 2 "s_register_operand" ""))))]
4586 && reg_overlap_mentioned_p (operands[0], operands[1])
4587 && reg_overlap_mentioned_p (operands[0], operands[2])"
4589 (plus:SI (match_dup 1)
4592 (mem:DI (match_dup 4)))]
4594 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4598 ;;; ??? This should have alternatives for constants.
4599 ;;; ??? This was originally identical to the movdf_insn pattern.
4600 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4601 ;;; thumb_reorg with a memory reference.
4602 (define_insn "*thumb1_movdi_insn"
4603 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4604 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4606 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4607 && ( register_operand (operands[0], DImode)
4608 || register_operand (operands[1], DImode))"
4611 switch (which_alternative)
4615 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4616 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4617 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4619 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4621 operands[1] = GEN_INT (- INTVAL (operands[1]));
4622 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4624 return \"ldmia\\t%1, {%0, %H0}\";
4626 return \"stmia\\t%0, {%1, %H1}\";
4628 return thumb_load_double_from_address (operands);
4630 operands[2] = gen_rtx_MEM (SImode,
4631 plus_constant (XEXP (operands[0], 0), 4));
4632 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4635 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4636 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4637 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4640 [(set_attr "length" "4,4,6,2,2,6,4,4")
4641 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4642 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4645 (define_expand "movsi"
4646 [(set (match_operand:SI 0 "general_operand" "")
4647 (match_operand:SI 1 "general_operand" ""))]
4652 /* Everything except mem = const or mem = mem can be done easily. */
4653 if (GET_CODE (operands[0]) == MEM)
4654 operands[1] = force_reg (SImode, operands[1]);
4655 if (arm_general_register_operand (operands[0], SImode)
4656 && GET_CODE (operands[1]) == CONST_INT
4657 && !(const_ok_for_arm (INTVAL (operands[1]))
4658 || const_ok_for_arm (~INTVAL (operands[1]))))
4660 arm_split_constant (SET, SImode, NULL_RTX,
4661 INTVAL (operands[1]), operands[0], NULL_RTX,
4662 optimize && !no_new_pseudos);
4666 else /* TARGET_THUMB1... */
4668 if (!no_new_pseudos)
4670 if (GET_CODE (operands[0]) != REG)
4671 operands[1] = force_reg (SImode, operands[1]);
4675 /* Recognize the case where operand[1] is a reference to thread-local
4676 data and load its address to a register. */
4677 if (arm_tls_referenced_p (operands[1]))
4679 rtx tmp = operands[1];
4682 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4684 addend = XEXP (XEXP (tmp, 0), 1);
4685 tmp = XEXP (XEXP (tmp, 0), 0);
4688 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4689 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4691 tmp = legitimize_tls_address (tmp, no_new_pseudos ? operands[0] : 0);
4694 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4695 tmp = force_operand (tmp, operands[0]);
4700 && (CONSTANT_P (operands[1])
4701 || symbol_mentioned_p (operands[1])
4702 || label_mentioned_p (operands[1])))
4703 operands[1] = legitimize_pic_address (operands[1], SImode,
4704 (no_new_pseudos ? operands[0] : 0));
4708 (define_insn "*arm_movsi_insn"
4709 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m")
4710 (match_operand:SI 1 "general_operand" "rI,K,N,mi,r"))]
4711 "TARGET_ARM && ! TARGET_IWMMXT
4712 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4713 && ( register_operand (operands[0], SImode)
4714 || register_operand (operands[1], SImode))"
4721 [(set_attr "type" "*,*,*,load1,store1")
4722 (set_attr "predicable" "yes")
4723 (set_attr "pool_range" "*,*,*,4096,*")
4724 (set_attr "neg_pool_range" "*,*,*,4084,*")]
4728 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4729 (match_operand:SI 1 "const_int_operand" ""))]
4731 && (!(const_ok_for_arm (INTVAL (operands[1]))
4732 || const_ok_for_arm (~INTVAL (operands[1]))))"
4733 [(clobber (const_int 0))]
4735 arm_split_constant (SET, SImode, NULL_RTX,
4736 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4741 (define_insn "*thumb1_movsi_insn"
4742 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh")
4743 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))]
4745 && ( register_operand (operands[0], SImode)
4746 || register_operand (operands[1], SImode))"
4757 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4758 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4759 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4763 [(set (match_operand:SI 0 "register_operand" "")
4764 (match_operand:SI 1 "const_int_operand" ""))]
4765 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4766 [(set (match_dup 0) (match_dup 1))
4767 (set (match_dup 0) (neg:SI (match_dup 0)))]
4768 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4772 [(set (match_operand:SI 0 "register_operand" "")
4773 (match_operand:SI 1 "const_int_operand" ""))]
4774 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4775 [(set (match_dup 0) (match_dup 1))
4776 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4779 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4780 unsigned HOST_WIDE_INT mask = 0xff;
4783 for (i = 0; i < 25; i++)
4784 if ((val & (mask << i)) == val)
4787 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4791 operands[1] = GEN_INT (val >> i);
4792 operands[2] = GEN_INT (i);
4796 ;; When generating pic, we need to load the symbol offset into a register.
4797 ;; So that the optimizer does not confuse this with a normal symbol load
4798 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4799 ;; since that is the only type of relocation we can use.
4801 ;; The rather odd constraints on the following are to force reload to leave
4802 ;; the insn alone, and to force the minipool generation pass to then move
4803 ;; the GOT symbol to memory.
4805 (define_insn "pic_load_addr_arm"
4806 [(set (match_operand:SI 0 "s_register_operand" "=r")
4807 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4808 "TARGET_ARM && flag_pic"
4810 [(set_attr "type" "load1")
4811 (set (attr "pool_range") (const_int 4096))
4812 (set (attr "neg_pool_range") (const_int 4084))]
4815 (define_insn "pic_load_addr_thumb1"
4816 [(set (match_operand:SI 0 "s_register_operand" "=l")
4817 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4818 "TARGET_THUMB1 && flag_pic"
4820 [(set_attr "type" "load1")
4821 (set (attr "pool_range") (const_int 1024))]
4824 ;; This variant is used for AOF assembly, since it needs to mention the
4825 ;; pic register in the rtl.
4826 (define_expand "pic_load_addr_based"
4827 [(set (match_operand:SI 0 "s_register_operand" "")
4828 (unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))]
4829 "TARGET_ARM && flag_pic"
4830 "operands[2] = cfun->machine->pic_reg;"
4833 (define_insn "*pic_load_addr_based_insn"
4834 [(set (match_operand:SI 0 "s_register_operand" "=r")
4835 (unspec:SI [(match_operand 1 "" "")
4836 (match_operand 2 "s_register_operand" "r")]
4838 "TARGET_EITHER && flag_pic && operands[2] == cfun->machine->pic_reg"
4840 #ifdef AOF_ASSEMBLER
4841 operands[1] = aof_pic_entry (operands[1]);
4843 output_asm_insn (\"ldr%?\\t%0, %a1\", operands);
4846 [(set_attr "type" "load1")
4847 (set (attr "pool_range")
4848 (if_then_else (eq_attr "is_thumb" "yes")
4851 (set (attr "neg_pool_range")
4852 (if_then_else (eq_attr "is_thumb" "yes")
4857 (define_insn "pic_add_dot_plus_four"
4858 [(set (match_operand:SI 0 "register_operand" "=r")
4859 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4860 (const (plus:SI (pc) (const_int 4))))]
4862 (use (match_operand 2 "" ""))]
4865 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4866 INTVAL (operands[2]));
4867 return \"add\\t%0, %|pc\";
4869 [(set_attr "length" "2")]
4872 (define_insn "pic_add_dot_plus_eight"
4873 [(set (match_operand:SI 0 "register_operand" "=r")
4874 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
4875 (const (plus:SI (pc) (const_int 8))))]
4877 (use (match_operand 2 "" ""))]
4880 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4881 INTVAL (operands[2]));
4882 return \"add%?\\t%0, %|pc, %1\";
4884 [(set_attr "predicable" "yes")]
4887 (define_insn "tls_load_dot_plus_eight"
4888 [(set (match_operand:SI 0 "register_operand" "+r")
4889 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
4890 (const (plus:SI (pc) (const_int 8))))]
4892 (use (match_operand 2 "" ""))]
4895 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4896 INTVAL (operands[2]));
4897 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
4899 [(set_attr "predicable" "yes")]
4902 ;; PIC references to local variables can generate pic_add_dot_plus_eight
4903 ;; followed by a load. These sequences can be crunched down to
4904 ;; tls_load_dot_plus_eight by a peephole.
4907 [(parallel [(set (match_operand:SI 0 "register_operand" "")
4908 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
4909 (const (plus:SI (pc) (const_int 8))))]
4911 (use (label_ref (match_operand 1 "" "")))])
4912 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
4913 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
4914 [(parallel [(set (match_dup 2)
4915 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
4916 (const (plus:SI (pc) (const_int 8))))]
4918 (use (label_ref (match_dup 1)))])]
4922 (define_expand "builtin_setjmp_receiver"
4923 [(label_ref (match_operand 0 "" ""))]
4927 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
4929 if (arm_pic_register != INVALID_REGNUM)
4930 arm_load_pic_register (1UL << 3);
4934 ;; If copying one reg to another we can set the condition codes according to
4935 ;; its value. Such a move is common after a return from subroutine and the
4936 ;; result is being tested against zero.
4938 (define_insn "*movsi_compare0"
4939 [(set (reg:CC CC_REGNUM)
4940 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
4942 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4948 [(set_attr "conds" "set")]
4951 ;; Subroutine to store a half word from a register into memory.
4952 ;; Operand 0 is the source register (HImode)
4953 ;; Operand 1 is the destination address in a register (SImode)
4955 ;; In both this routine and the next, we must be careful not to spill
4956 ;; a memory address of reg+large_const into a separate PLUS insn, since this
4957 ;; can generate unrecognizable rtl.
4959 (define_expand "storehi"
4960 [;; store the low byte
4961 (set (match_operand 1 "" "") (match_dup 3))
4962 ;; extract the high byte
4964 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4965 ;; store the high byte
4966 (set (match_dup 4) (match_dup 5))]
4970 rtx op1 = operands[1];
4971 rtx addr = XEXP (op1, 0);
4972 enum rtx_code code = GET_CODE (addr);
4974 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
4976 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
4978 operands[4] = adjust_address (op1, QImode, 1);
4979 operands[1] = adjust_address (operands[1], QImode, 0);
4980 operands[3] = gen_lowpart (QImode, operands[0]);
4981 operands[0] = gen_lowpart (SImode, operands[0]);
4982 operands[2] = gen_reg_rtx (SImode);
4983 operands[5] = gen_lowpart (QImode, operands[2]);
4987 (define_expand "storehi_bigend"
4988 [(set (match_dup 4) (match_dup 3))
4990 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4991 (set (match_operand 1 "" "") (match_dup 5))]
4995 rtx op1 = operands[1];
4996 rtx addr = XEXP (op1, 0);
4997 enum rtx_code code = GET_CODE (addr);
4999 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5001 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5003 operands[4] = adjust_address (op1, QImode, 1);
5004 operands[1] = adjust_address (operands[1], QImode, 0);
5005 operands[3] = gen_lowpart (QImode, operands[0]);
5006 operands[0] = gen_lowpart (SImode, operands[0]);
5007 operands[2] = gen_reg_rtx (SImode);
5008 operands[5] = gen_lowpart (QImode, operands[2]);
5012 ;; Subroutine to store a half word integer constant into memory.
5013 (define_expand "storeinthi"
5014 [(set (match_operand 0 "" "")
5015 (match_operand 1 "" ""))
5016 (set (match_dup 3) (match_dup 2))]
5020 HOST_WIDE_INT value = INTVAL (operands[1]);
5021 rtx addr = XEXP (operands[0], 0);
5022 rtx op0 = operands[0];
5023 enum rtx_code code = GET_CODE (addr);
5025 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5027 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5029 operands[1] = gen_reg_rtx (SImode);
5030 if (BYTES_BIG_ENDIAN)
5032 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5033 if ((value & 255) == ((value >> 8) & 255))
5034 operands[2] = operands[1];
5037 operands[2] = gen_reg_rtx (SImode);
5038 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5043 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5044 if ((value & 255) == ((value >> 8) & 255))
5045 operands[2] = operands[1];
5048 operands[2] = gen_reg_rtx (SImode);
5049 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5053 operands[3] = adjust_address (op0, QImode, 1);
5054 operands[0] = adjust_address (operands[0], QImode, 0);
5055 operands[2] = gen_lowpart (QImode, operands[2]);
5056 operands[1] = gen_lowpart (QImode, operands[1]);
5060 (define_expand "storehi_single_op"
5061 [(set (match_operand:HI 0 "memory_operand" "")
5062 (match_operand:HI 1 "general_operand" ""))]
5063 "TARGET_32BIT && arm_arch4"
5065 if (!s_register_operand (operands[1], HImode))
5066 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5070 (define_expand "movhi"
5071 [(set (match_operand:HI 0 "general_operand" "")
5072 (match_operand:HI 1 "general_operand" ""))]
5077 if (!no_new_pseudos)
5079 if (GET_CODE (operands[0]) == MEM)
5083 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5086 if (GET_CODE (operands[1]) == CONST_INT)
5087 emit_insn (gen_storeinthi (operands[0], operands[1]));
5090 if (GET_CODE (operands[1]) == MEM)
5091 operands[1] = force_reg (HImode, operands[1]);
5092 if (BYTES_BIG_ENDIAN)
5093 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5095 emit_insn (gen_storehi (operands[1], operands[0]));
5099 /* Sign extend a constant, and keep it in an SImode reg. */
5100 else if (GET_CODE (operands[1]) == CONST_INT)
5102 rtx reg = gen_reg_rtx (SImode);
5103 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5105 /* If the constant is already valid, leave it alone. */
5106 if (!const_ok_for_arm (val))
5108 /* If setting all the top bits will make the constant
5109 loadable in a single instruction, then set them.
5110 Otherwise, sign extend the number. */
5112 if (const_ok_for_arm (~(val | ~0xffff)))
5114 else if (val & 0x8000)
5118 emit_insn (gen_movsi (reg, GEN_INT (val)));
5119 operands[1] = gen_lowpart (HImode, reg);
5121 else if (arm_arch4 && optimize && !no_new_pseudos
5122 && GET_CODE (operands[1]) == MEM)
5124 rtx reg = gen_reg_rtx (SImode);
5126 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5127 operands[1] = gen_lowpart (HImode, reg);
5129 else if (!arm_arch4)
5131 if (GET_CODE (operands[1]) == MEM)
5134 rtx offset = const0_rtx;
5135 rtx reg = gen_reg_rtx (SImode);
5137 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5138 || (GET_CODE (base) == PLUS
5139 && (GET_CODE (offset = XEXP (base, 1))
5141 && ((INTVAL(offset) & 1) != 1)
5142 && GET_CODE (base = XEXP (base, 0)) == REG))
5143 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5147 new = widen_memory_access (operands[1], SImode,
5148 ((INTVAL (offset) & ~3)
5149 - INTVAL (offset)));
5150 emit_insn (gen_movsi (reg, new));
5151 if (((INTVAL (offset) & 2) != 0)
5152 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5154 rtx reg2 = gen_reg_rtx (SImode);
5156 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5161 emit_insn (gen_movhi_bytes (reg, operands[1]));
5163 operands[1] = gen_lowpart (HImode, reg);
5167 /* Handle loading a large integer during reload. */
5168 else if (GET_CODE (operands[1]) == CONST_INT
5169 && !const_ok_for_arm (INTVAL (operands[1]))
5170 && !const_ok_for_arm (~INTVAL (operands[1])))
5172 /* Writing a constant to memory needs a scratch, which should
5173 be handled with SECONDARY_RELOADs. */
5174 gcc_assert (GET_CODE (operands[0]) == REG);
5176 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5177 emit_insn (gen_movsi (operands[0], operands[1]));
5181 else if (TARGET_THUMB2)
5183 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5184 if (!no_new_pseudos)
5186 if (GET_CODE (operands[0]) != REG)
5187 operands[1] = force_reg (HImode, operands[1]);
5188 /* Zero extend a constant, and keep it in an SImode reg. */
5189 else if (GET_CODE (operands[1]) == CONST_INT)
5191 rtx reg = gen_reg_rtx (SImode);
5192 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5194 emit_insn (gen_movsi (reg, GEN_INT (val)));
5195 operands[1] = gen_lowpart (HImode, reg);
5199 else /* TARGET_THUMB1 */
5201 if (!no_new_pseudos)
5203 if (GET_CODE (operands[1]) == CONST_INT)
5205 rtx reg = gen_reg_rtx (SImode);
5207 emit_insn (gen_movsi (reg, operands[1]));
5208 operands[1] = gen_lowpart (HImode, reg);
5211 /* ??? We shouldn't really get invalid addresses here, but this can
5212 happen if we are passed a SP (never OK for HImode/QImode) or
5213 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5214 HImode/QImode) relative address. */
5215 /* ??? This should perhaps be fixed elsewhere, for instance, in
5216 fixup_stack_1, by checking for other kinds of invalid addresses,
5217 e.g. a bare reference to a virtual register. This may confuse the
5218 alpha though, which must handle this case differently. */
5219 if (GET_CODE (operands[0]) == MEM
5220 && !memory_address_p (GET_MODE (operands[0]),
5221 XEXP (operands[0], 0)))
5223 = replace_equiv_address (operands[0],
5224 copy_to_reg (XEXP (operands[0], 0)));
5226 if (GET_CODE (operands[1]) == MEM
5227 && !memory_address_p (GET_MODE (operands[1]),
5228 XEXP (operands[1], 0)))
5230 = replace_equiv_address (operands[1],
5231 copy_to_reg (XEXP (operands[1], 0)));
5233 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5235 rtx reg = gen_reg_rtx (SImode);
5237 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5238 operands[1] = gen_lowpart (HImode, reg);
5241 if (GET_CODE (operands[0]) == MEM)
5242 operands[1] = force_reg (HImode, operands[1]);
5244 else if (GET_CODE (operands[1]) == CONST_INT
5245 && !satisfies_constraint_I (operands[1]))
5247 /* Handle loading a large integer during reload. */
5249 /* Writing a constant to memory needs a scratch, which should
5250 be handled with SECONDARY_RELOADs. */
5251 gcc_assert (GET_CODE (operands[0]) == REG);
5253 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5254 emit_insn (gen_movsi (operands[0], operands[1]));
5261 (define_insn "*thumb1_movhi_insn"
5262 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5263 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5265 && ( register_operand (operands[0], HImode)
5266 || register_operand (operands[1], HImode))"
5268 switch (which_alternative)
5270 case 0: return \"add %0, %1, #0\";
5271 case 2: return \"strh %1, %0\";
5272 case 3: return \"mov %0, %1\";
5273 case 4: return \"mov %0, %1\";
5274 case 5: return \"mov %0, %1\";
5275 default: gcc_unreachable ();
5277 /* The stack pointer can end up being taken as an index register.
5278 Catch this case here and deal with it. */
5279 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5280 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5281 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5284 ops[0] = operands[0];
5285 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5287 output_asm_insn (\"mov %0, %1\", ops);
5289 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5292 return \"ldrh %0, %1\";
5294 [(set_attr "length" "2,4,2,2,2,2")
5295 (set_attr "type" "*,load1,store1,*,*,*")]
5299 (define_expand "movhi_bytes"
5300 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5302 (zero_extend:SI (match_dup 6)))
5303 (set (match_operand:SI 0 "" "")
5304 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5309 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5311 mem1 = change_address (operands[1], QImode, addr);
5312 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5313 operands[0] = gen_lowpart (SImode, operands[0]);
5315 operands[2] = gen_reg_rtx (SImode);
5316 operands[3] = gen_reg_rtx (SImode);
5319 if (BYTES_BIG_ENDIAN)
5321 operands[4] = operands[2];
5322 operands[5] = operands[3];
5326 operands[4] = operands[3];
5327 operands[5] = operands[2];
5332 (define_expand "movhi_bigend"
5334 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5337 (ashiftrt:SI (match_dup 2) (const_int 16)))
5338 (set (match_operand:HI 0 "s_register_operand" "")
5342 operands[2] = gen_reg_rtx (SImode);
5343 operands[3] = gen_reg_rtx (SImode);
5344 operands[4] = gen_lowpart (HImode, operands[3]);
5348 ;; Pattern to recognize insn generated default case above
5349 (define_insn "*movhi_insn_arch4"
5350 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5351 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5354 && (GET_CODE (operands[1]) != CONST_INT
5355 || const_ok_for_arm (INTVAL (operands[1]))
5356 || const_ok_for_arm (~INTVAL (operands[1])))"
5358 mov%?\\t%0, %1\\t%@ movhi
5359 mvn%?\\t%0, #%B1\\t%@ movhi
5360 str%(h%)\\t%1, %0\\t%@ movhi
5361 ldr%(h%)\\t%0, %1\\t%@ movhi"
5362 [(set_attr "type" "*,*,store1,load1")
5363 (set_attr "predicable" "yes")
5364 (set_attr "pool_range" "*,*,*,256")
5365 (set_attr "neg_pool_range" "*,*,*,244")]
5368 (define_insn "*movhi_bytes"
5369 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5370 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5373 mov%?\\t%0, %1\\t%@ movhi
5374 mvn%?\\t%0, #%B1\\t%@ movhi"
5375 [(set_attr "predicable" "yes")]
5378 (define_expand "thumb_movhi_clobber"
5379 [(set (match_operand:HI 0 "memory_operand" "")
5380 (match_operand:HI 1 "register_operand" ""))
5381 (clobber (match_operand:DI 2 "register_operand" ""))]
5384 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5385 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5387 emit_insn (gen_movhi (operands[0], operands[1]));
5390 /* XXX Fixme, need to handle other cases here as well. */
5395 ;; We use a DImode scratch because we may occasionally need an additional
5396 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5397 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5398 (define_expand "reload_outhi"
5399 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5400 (match_operand:HI 1 "s_register_operand" "r")
5401 (match_operand:DI 2 "s_register_operand" "=&l")])]
5404 arm_reload_out_hi (operands);
5406 thumb_reload_out_hi (operands);
5411 (define_expand "reload_inhi"
5412 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5413 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5414 (match_operand:DI 2 "s_register_operand" "=&r")])]
5418 arm_reload_in_hi (operands);
5420 thumb_reload_out_hi (operands);
5424 (define_expand "movqi"
5425 [(set (match_operand:QI 0 "general_operand" "")
5426 (match_operand:QI 1 "general_operand" ""))]
5429 /* Everything except mem = const or mem = mem can be done easily */
5431 if (!no_new_pseudos)
5433 if (GET_CODE (operands[1]) == CONST_INT)
5435 rtx reg = gen_reg_rtx (SImode);
5437 emit_insn (gen_movsi (reg, operands[1]));
5438 operands[1] = gen_lowpart (QImode, reg);
5443 /* ??? We shouldn't really get invalid addresses here, but this can
5444 happen if we are passed a SP (never OK for HImode/QImode) or
5445 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5446 HImode/QImode) relative address. */
5447 /* ??? This should perhaps be fixed elsewhere, for instance, in
5448 fixup_stack_1, by checking for other kinds of invalid addresses,
5449 e.g. a bare reference to a virtual register. This may confuse the
5450 alpha though, which must handle this case differently. */
5451 if (GET_CODE (operands[0]) == MEM
5452 && !memory_address_p (GET_MODE (operands[0]),
5453 XEXP (operands[0], 0)))
5455 = replace_equiv_address (operands[0],
5456 copy_to_reg (XEXP (operands[0], 0)));
5457 if (GET_CODE (operands[1]) == MEM
5458 && !memory_address_p (GET_MODE (operands[1]),
5459 XEXP (operands[1], 0)))
5461 = replace_equiv_address (operands[1],
5462 copy_to_reg (XEXP (operands[1], 0)));
5465 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5467 rtx reg = gen_reg_rtx (SImode);
5469 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5470 operands[1] = gen_lowpart (QImode, reg);
5473 if (GET_CODE (operands[0]) == MEM)
5474 operands[1] = force_reg (QImode, operands[1]);
5476 else if (TARGET_THUMB
5477 && GET_CODE (operands[1]) == CONST_INT
5478 && !satisfies_constraint_I (operands[1]))
5480 /* Handle loading a large integer during reload. */
5482 /* Writing a constant to memory needs a scratch, which should
5483 be handled with SECONDARY_RELOADs. */
5484 gcc_assert (GET_CODE (operands[0]) == REG);
5486 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5487 emit_insn (gen_movsi (operands[0], operands[1]));
5494 (define_insn "*arm_movqi_insn"
5495 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5496 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5498 && ( register_operand (operands[0], QImode)
5499 || register_operand (operands[1], QImode))"
5505 [(set_attr "type" "*,*,load1,store1")
5506 (set_attr "predicable" "yes")]
5509 (define_insn "*thumb1_movqi_insn"
5510 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5511 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5513 && ( register_operand (operands[0], QImode)
5514 || register_operand (operands[1], QImode))"
5522 [(set_attr "length" "2")
5523 (set_attr "type" "*,load1,store1,*,*,*")
5524 (set_attr "pool_range" "*,32,*,*,*,*")]
5527 (define_expand "movsf"
5528 [(set (match_operand:SF 0 "general_operand" "")
5529 (match_operand:SF 1 "general_operand" ""))]
5534 if (GET_CODE (operands[0]) == MEM)
5535 operands[1] = force_reg (SFmode, operands[1]);
5537 else /* TARGET_THUMB1 */
5539 if (!no_new_pseudos)
5541 if (GET_CODE (operands[0]) != REG)
5542 operands[1] = force_reg (SFmode, operands[1]);
5548 ;; Transform a floating-point move of a constant into a core register into
5549 ;; an SImode operation.
5551 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5552 (match_operand:SF 1 "immediate_operand" ""))]
5555 && GET_CODE (operands[1]) == CONST_DOUBLE"
5556 [(set (match_dup 2) (match_dup 3))]
5558 operands[2] = gen_lowpart (SImode, operands[0]);
5559 operands[3] = gen_lowpart (SImode, operands[1]);
5560 if (operands[2] == 0 || operands[3] == 0)
5565 (define_insn "*arm_movsf_soft_insn"
5566 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5567 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5569 && TARGET_SOFT_FLOAT
5570 && (GET_CODE (operands[0]) != MEM
5571 || register_operand (operands[1], SFmode))"
5574 ldr%?\\t%0, %1\\t%@ float
5575 str%?\\t%1, %0\\t%@ float"
5576 [(set_attr "length" "4,4,4")
5577 (set_attr "predicable" "yes")
5578 (set_attr "type" "*,load1,store1")
5579 (set_attr "pool_range" "*,4096,*")
5580 (set_attr "neg_pool_range" "*,4084,*")]
5583 ;;; ??? This should have alternatives for constants.
5584 (define_insn "*thumb1_movsf_insn"
5585 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5586 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5588 && ( register_operand (operands[0], SFmode)
5589 || register_operand (operands[1], SFmode))"
5598 [(set_attr "length" "2")
5599 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5600 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5603 (define_expand "movdf"
5604 [(set (match_operand:DF 0 "general_operand" "")
5605 (match_operand:DF 1 "general_operand" ""))]
5610 if (GET_CODE (operands[0]) == MEM)
5611 operands[1] = force_reg (DFmode, operands[1]);
5613 else /* TARGET_THUMB */
5615 if (!no_new_pseudos)
5617 if (GET_CODE (operands[0]) != REG)
5618 operands[1] = force_reg (DFmode, operands[1]);
5624 ;; Reloading a df mode value stored in integer regs to memory can require a
5626 (define_expand "reload_outdf"
5627 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5628 (match_operand:DF 1 "s_register_operand" "r")
5629 (match_operand:SI 2 "s_register_operand" "=&r")]
5633 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5636 operands[2] = XEXP (operands[0], 0);
5637 else if (code == POST_INC || code == PRE_DEC)
5639 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5640 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5641 emit_insn (gen_movdi (operands[0], operands[1]));
5644 else if (code == PRE_INC)
5646 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5648 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5651 else if (code == POST_DEC)
5652 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5654 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5655 XEXP (XEXP (operands[0], 0), 1)));
5657 emit_insn (gen_rtx_SET (VOIDmode,
5658 replace_equiv_address (operands[0], operands[2]),
5661 if (code == POST_DEC)
5662 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5668 (define_insn "*movdf_soft_insn"
5669 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5670 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5671 "TARGET_ARM && TARGET_SOFT_FLOAT
5672 && ( register_operand (operands[0], DFmode)
5673 || register_operand (operands[1], DFmode))"
5675 switch (which_alternative)
5682 return output_move_double (operands);
5685 [(set_attr "length" "8,12,16,8,8")
5686 (set_attr "type" "*,*,*,load2,store2")
5687 (set_attr "pool_range" "1020")
5688 (set_attr "neg_pool_range" "1008")]
5691 ;;; ??? This should have alternatives for constants.
5692 ;;; ??? This was originally identical to the movdi_insn pattern.
5693 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5694 ;;; thumb_reorg with a memory reference.
5695 (define_insn "*thumb_movdf_insn"
5696 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5697 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5699 && ( register_operand (operands[0], DFmode)
5700 || register_operand (operands[1], DFmode))"
5702 switch (which_alternative)
5706 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5707 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5708 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5710 return \"ldmia\\t%1, {%0, %H0}\";
5712 return \"stmia\\t%0, {%1, %H1}\";
5714 return thumb_load_double_from_address (operands);
5716 operands[2] = gen_rtx_MEM (SImode,
5717 plus_constant (XEXP (operands[0], 0), 4));
5718 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5721 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5722 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5723 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5726 [(set_attr "length" "4,2,2,6,4,4")
5727 (set_attr "type" "*,load2,store2,load2,store2,*")
5728 (set_attr "pool_range" "*,*,*,1020,*,*")]
5731 (define_expand "movxf"
5732 [(set (match_operand:XF 0 "general_operand" "")
5733 (match_operand:XF 1 "general_operand" ""))]
5734 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5736 if (GET_CODE (operands[0]) == MEM)
5737 operands[1] = force_reg (XFmode, operands[1]);
5742 (define_expand "movv2si"
5743 [(set (match_operand:V2SI 0 "nonimmediate_operand" "")
5744 (match_operand:V2SI 1 "general_operand" ""))]
5745 "TARGET_REALLY_IWMMXT"
5749 (define_expand "movv4hi"
5750 [(set (match_operand:V4HI 0 "nonimmediate_operand" "")
5751 (match_operand:V4HI 1 "general_operand" ""))]
5752 "TARGET_REALLY_IWMMXT"
5756 (define_expand "movv8qi"
5757 [(set (match_operand:V8QI 0 "nonimmediate_operand" "")
5758 (match_operand:V8QI 1 "general_operand" ""))]
5759 "TARGET_REALLY_IWMMXT"
5764 ;; load- and store-multiple insns
5765 ;; The arm can load/store any set of registers, provided that they are in
5766 ;; ascending order; but that is beyond GCC so stick with what it knows.
5768 (define_expand "load_multiple"
5769 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5770 (match_operand:SI 1 "" ""))
5771 (use (match_operand:SI 2 "" ""))])]
5774 HOST_WIDE_INT offset = 0;
5776 /* Support only fixed point registers. */
5777 if (GET_CODE (operands[2]) != CONST_INT
5778 || INTVAL (operands[2]) > 14
5779 || INTVAL (operands[2]) < 2
5780 || GET_CODE (operands[1]) != MEM
5781 || GET_CODE (operands[0]) != REG
5782 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5783 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5787 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5788 force_reg (SImode, XEXP (operands[1], 0)),
5789 TRUE, FALSE, operands[1], &offset);
5792 ;; Load multiple with write-back
5794 (define_insn "*ldmsi_postinc4"
5795 [(match_parallel 0 "load_multiple_operation"
5796 [(set (match_operand:SI 1 "s_register_operand" "=r")
5797 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5799 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5800 (mem:SI (match_dup 2)))
5801 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5802 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5803 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5804 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5805 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5806 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5807 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5808 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5809 [(set_attr "type" "load4")
5810 (set_attr "predicable" "yes")]
5813 (define_insn "*ldmsi_postinc4_thumb1"
5814 [(match_parallel 0 "load_multiple_operation"
5815 [(set (match_operand:SI 1 "s_register_operand" "=l")
5816 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5818 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5819 (mem:SI (match_dup 2)))
5820 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5821 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5822 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5823 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5824 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5825 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5826 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5827 "ldmia\\t%1!, {%3, %4, %5, %6}"
5828 [(set_attr "type" "load4")]
5831 (define_insn "*ldmsi_postinc3"
5832 [(match_parallel 0 "load_multiple_operation"
5833 [(set (match_operand:SI 1 "s_register_operand" "=r")
5834 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5836 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5837 (mem:SI (match_dup 2)))
5838 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5839 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5840 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5841 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5842 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5843 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5844 [(set_attr "type" "load3")
5845 (set_attr "predicable" "yes")]
5848 (define_insn "*ldmsi_postinc2"
5849 [(match_parallel 0 "load_multiple_operation"
5850 [(set (match_operand:SI 1 "s_register_operand" "=r")
5851 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5853 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5854 (mem:SI (match_dup 2)))
5855 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5856 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5857 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5858 "ldm%(ia%)\\t%1!, {%3, %4}"
5859 [(set_attr "type" "load2")
5860 (set_attr "predicable" "yes")]
5863 ;; Ordinary load multiple
5865 (define_insn "*ldmsi4"
5866 [(match_parallel 0 "load_multiple_operation"
5867 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5868 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5869 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5870 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5871 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5872 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5873 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5874 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5875 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5876 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
5877 [(set_attr "type" "load4")
5878 (set_attr "predicable" "yes")]
5881 (define_insn "*ldmsi3"
5882 [(match_parallel 0 "load_multiple_operation"
5883 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5884 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5885 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5886 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5887 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5888 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
5889 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5890 "ldm%(ia%)\\t%1, {%2, %3, %4}"
5891 [(set_attr "type" "load3")
5892 (set_attr "predicable" "yes")]
5895 (define_insn "*ldmsi2"
5896 [(match_parallel 0 "load_multiple_operation"
5897 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5898 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5899 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5900 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
5901 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
5902 "ldm%(ia%)\\t%1, {%2, %3}"
5903 [(set_attr "type" "load2")
5904 (set_attr "predicable" "yes")]
5907 (define_expand "store_multiple"
5908 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5909 (match_operand:SI 1 "" ""))
5910 (use (match_operand:SI 2 "" ""))])]
5913 HOST_WIDE_INT offset = 0;
5915 /* Support only fixed point registers. */
5916 if (GET_CODE (operands[2]) != CONST_INT
5917 || INTVAL (operands[2]) > 14
5918 || INTVAL (operands[2]) < 2
5919 || GET_CODE (operands[1]) != REG
5920 || GET_CODE (operands[0]) != MEM
5921 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
5922 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5926 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
5927 force_reg (SImode, XEXP (operands[0], 0)),
5928 TRUE, FALSE, operands[0], &offset);
5931 ;; Store multiple with write-back
5933 (define_insn "*stmsi_postinc4"
5934 [(match_parallel 0 "store_multiple_operation"
5935 [(set (match_operand:SI 1 "s_register_operand" "=r")
5936 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5938 (set (mem:SI (match_dup 2))
5939 (match_operand:SI 3 "arm_hard_register_operand" ""))
5940 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5941 (match_operand:SI 4 "arm_hard_register_operand" ""))
5942 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5943 (match_operand:SI 5 "arm_hard_register_operand" ""))
5944 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5945 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5946 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5947 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5948 [(set_attr "predicable" "yes")
5949 (set_attr "type" "store4")]
5952 (define_insn "*stmsi_postinc4_thumb1"
5953 [(match_parallel 0 "store_multiple_operation"
5954 [(set (match_operand:SI 1 "s_register_operand" "=l")
5955 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5957 (set (mem:SI (match_dup 2))
5958 (match_operand:SI 3 "arm_hard_register_operand" ""))
5959 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5960 (match_operand:SI 4 "arm_hard_register_operand" ""))
5961 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5962 (match_operand:SI 5 "arm_hard_register_operand" ""))
5963 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5964 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5965 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5966 "stmia\\t%1!, {%3, %4, %5, %6}"
5967 [(set_attr "type" "store4")]
5970 (define_insn "*stmsi_postinc3"
5971 [(match_parallel 0 "store_multiple_operation"
5972 [(set (match_operand:SI 1 "s_register_operand" "=r")
5973 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5975 (set (mem:SI (match_dup 2))
5976 (match_operand:SI 3 "arm_hard_register_operand" ""))
5977 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5978 (match_operand:SI 4 "arm_hard_register_operand" ""))
5979 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5980 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
5981 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5982 "stm%(ia%)\\t%1!, {%3, %4, %5}"
5983 [(set_attr "predicable" "yes")
5984 (set_attr "type" "store3")]
5987 (define_insn "*stmsi_postinc2"
5988 [(match_parallel 0 "store_multiple_operation"
5989 [(set (match_operand:SI 1 "s_register_operand" "=r")
5990 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5992 (set (mem:SI (match_dup 2))
5993 (match_operand:SI 3 "arm_hard_register_operand" ""))
5994 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5995 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
5996 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5997 "stm%(ia%)\\t%1!, {%3, %4}"
5998 [(set_attr "predicable" "yes")
5999 (set_attr "type" "store2")]
6002 ;; Ordinary store multiple
6004 (define_insn "*stmsi4"
6005 [(match_parallel 0 "store_multiple_operation"
6006 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6007 (match_operand:SI 2 "arm_hard_register_operand" ""))
6008 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6009 (match_operand:SI 3 "arm_hard_register_operand" ""))
6010 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6011 (match_operand:SI 4 "arm_hard_register_operand" ""))
6012 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6013 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6014 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6015 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6016 [(set_attr "predicable" "yes")
6017 (set_attr "type" "store4")]
6020 (define_insn "*stmsi3"
6021 [(match_parallel 0 "store_multiple_operation"
6022 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6023 (match_operand:SI 2 "arm_hard_register_operand" ""))
6024 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6025 (match_operand:SI 3 "arm_hard_register_operand" ""))
6026 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6027 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6028 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6029 "stm%(ia%)\\t%1, {%2, %3, %4}"
6030 [(set_attr "predicable" "yes")
6031 (set_attr "type" "store3")]
6034 (define_insn "*stmsi2"
6035 [(match_parallel 0 "store_multiple_operation"
6036 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6037 (match_operand:SI 2 "arm_hard_register_operand" ""))
6038 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6039 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6040 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6041 "stm%(ia%)\\t%1, {%2, %3}"
6042 [(set_attr "predicable" "yes")
6043 (set_attr "type" "store2")]
6046 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6047 ;; We could let this apply for blocks of less than this, but it clobbers so
6048 ;; many registers that there is then probably a better way.
6050 (define_expand "movmemqi"
6051 [(match_operand:BLK 0 "general_operand" "")
6052 (match_operand:BLK 1 "general_operand" "")
6053 (match_operand:SI 2 "const_int_operand" "")
6054 (match_operand:SI 3 "const_int_operand" "")]
6059 if (arm_gen_movmemqi (operands))
6063 else /* TARGET_THUMB1 */
6065 if ( INTVAL (operands[3]) != 4
6066 || INTVAL (operands[2]) > 48)
6069 thumb_expand_movmemqi (operands);
6075 ;; Thumb block-move insns
6077 (define_insn "movmem12b"
6078 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6079 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6080 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6081 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6082 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6083 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6084 (set (match_operand:SI 0 "register_operand" "=l")
6085 (plus:SI (match_dup 2) (const_int 12)))
6086 (set (match_operand:SI 1 "register_operand" "=l")
6087 (plus:SI (match_dup 3) (const_int 12)))
6088 (clobber (match_scratch:SI 4 "=&l"))
6089 (clobber (match_scratch:SI 5 "=&l"))
6090 (clobber (match_scratch:SI 6 "=&l"))]
6092 "* return thumb_output_move_mem_multiple (3, operands);"
6093 [(set_attr "length" "4")
6094 ; This isn't entirely accurate... It loads as well, but in terms of
6095 ; scheduling the following insn it is better to consider it as a store
6096 (set_attr "type" "store3")]
6099 (define_insn "movmem8b"
6100 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6101 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6102 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6103 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6104 (set (match_operand:SI 0 "register_operand" "=l")
6105 (plus:SI (match_dup 2) (const_int 8)))
6106 (set (match_operand:SI 1 "register_operand" "=l")
6107 (plus:SI (match_dup 3) (const_int 8)))
6108 (clobber (match_scratch:SI 4 "=&l"))
6109 (clobber (match_scratch:SI 5 "=&l"))]
6111 "* return thumb_output_move_mem_multiple (2, operands);"
6112 [(set_attr "length" "4")
6113 ; This isn't entirely accurate... It loads as well, but in terms of
6114 ; scheduling the following insn it is better to consider it as a store
6115 (set_attr "type" "store2")]
6120 ;; Compare & branch insns
6121 ;; The range calculations are based as follows:
6122 ;; For forward branches, the address calculation returns the address of
6123 ;; the next instruction. This is 2 beyond the branch instruction.
6124 ;; For backward branches, the address calculation returns the address of
6125 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6126 ;; instruction for the shortest sequence, and 4 before the branch instruction
6127 ;; if we have to jump around an unconditional branch.
6128 ;; To the basic branch range the PC offset must be added (this is +4).
6129 ;; So for forward branches we have
6130 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6131 ;; And for backward branches we have
6132 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6134 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6135 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6137 (define_expand "cbranchsi4"
6138 [(set (pc) (if_then_else
6139 (match_operator 0 "arm_comparison_operator"
6140 [(match_operand:SI 1 "s_register_operand" "")
6141 (match_operand:SI 2 "nonmemory_operand" "")])
6142 (label_ref (match_operand 3 "" ""))
6146 if (thumb1_cmpneg_operand (operands[2], SImode))
6148 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6149 operands[3], operands[0]));
6152 if (!thumb1_cmp_operand (operands[2], SImode))
6153 operands[2] = force_reg (SImode, operands[2]);
6156 (define_insn "*cbranchsi4_insn"
6157 [(set (pc) (if_then_else
6158 (match_operator 0 "arm_comparison_operator"
6159 [(match_operand:SI 1 "s_register_operand" "l,*h")
6160 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6161 (label_ref (match_operand 3 "" ""))
6165 output_asm_insn (\"cmp\\t%1, %2\", operands);
6167 switch (get_attr_length (insn))
6169 case 4: return \"b%d0\\t%l3\";
6170 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6171 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6174 [(set (attr "far_jump")
6176 (eq_attr "length" "8")
6177 (const_string "yes")
6178 (const_string "no")))
6179 (set (attr "length")
6181 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6182 (le (minus (match_dup 3) (pc)) (const_int 256)))
6185 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6186 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6191 (define_insn "cbranchsi4_scratch"
6192 [(set (pc) (if_then_else
6193 (match_operator 4 "arm_comparison_operator"
6194 [(match_operand:SI 1 "s_register_operand" "l,0")
6195 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6196 (label_ref (match_operand 3 "" ""))
6198 (clobber (match_scratch:SI 0 "=l,l"))]
6201 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6203 switch (get_attr_length (insn))
6205 case 4: return \"b%d4\\t%l3\";
6206 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6207 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6210 [(set (attr "far_jump")
6212 (eq_attr "length" "8")
6213 (const_string "yes")
6214 (const_string "no")))
6215 (set (attr "length")
6217 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6218 (le (minus (match_dup 3) (pc)) (const_int 256)))
6221 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6222 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6226 (define_insn "*movsi_cbranchsi4"
6229 (match_operator 3 "arm_comparison_operator"
6230 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6232 (label_ref (match_operand 2 "" ""))
6234 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6238 if (which_alternative == 0)
6239 output_asm_insn (\"cmp\t%0, #0\", operands);
6240 else if (which_alternative == 1)
6241 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6244 output_asm_insn (\"cmp\t%1, #0\", operands);
6245 if (which_alternative == 2)
6246 output_asm_insn (\"mov\t%0, %1\", operands);
6248 output_asm_insn (\"str\t%1, %0\", operands);
6250 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6252 case 4: return \"b%d3\\t%l2\";
6253 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6254 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6257 [(set (attr "far_jump")
6259 (ior (and (gt (symbol_ref ("which_alternative"))
6261 (eq_attr "length" "8"))
6262 (eq_attr "length" "10"))
6263 (const_string "yes")
6264 (const_string "no")))
6265 (set (attr "length")
6267 (le (symbol_ref ("which_alternative"))
6270 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6271 (le (minus (match_dup 2) (pc)) (const_int 256)))
6274 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6275 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6279 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6280 (le (minus (match_dup 2) (pc)) (const_int 256)))
6283 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6284 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6289 (define_insn "*negated_cbranchsi4"
6292 (match_operator 0 "equality_operator"
6293 [(match_operand:SI 1 "s_register_operand" "l")
6294 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6295 (label_ref (match_operand 3 "" ""))
6299 output_asm_insn (\"cmn\\t%1, %2\", operands);
6300 switch (get_attr_length (insn))
6302 case 4: return \"b%d0\\t%l3\";
6303 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6304 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6307 [(set (attr "far_jump")
6309 (eq_attr "length" "8")
6310 (const_string "yes")
6311 (const_string "no")))
6312 (set (attr "length")
6314 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6315 (le (minus (match_dup 3) (pc)) (const_int 256)))
6318 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6319 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6324 (define_insn "*tbit_cbranch"
6327 (match_operator 0 "equality_operator"
6328 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6330 (match_operand:SI 2 "const_int_operand" "i"))
6332 (label_ref (match_operand 3 "" ""))
6334 (clobber (match_scratch:SI 4 "=l"))]
6339 op[0] = operands[4];
6340 op[1] = operands[1];
6341 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6343 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6344 switch (get_attr_length (insn))
6346 case 4: return \"b%d0\\t%l3\";
6347 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6348 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6351 [(set (attr "far_jump")
6353 (eq_attr "length" "8")
6354 (const_string "yes")
6355 (const_string "no")))
6356 (set (attr "length")
6358 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6359 (le (minus (match_dup 3) (pc)) (const_int 256)))
6362 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6363 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6368 (define_insn "*tlobits_cbranch"
6371 (match_operator 0 "equality_operator"
6372 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6373 (match_operand:SI 2 "const_int_operand" "i")
6376 (label_ref (match_operand 3 "" ""))
6378 (clobber (match_scratch:SI 4 "=l"))]
6383 op[0] = operands[4];
6384 op[1] = operands[1];
6385 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6387 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6388 switch (get_attr_length (insn))
6390 case 4: return \"b%d0\\t%l3\";
6391 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6392 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6395 [(set (attr "far_jump")
6397 (eq_attr "length" "8")
6398 (const_string "yes")
6399 (const_string "no")))
6400 (set (attr "length")
6402 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6403 (le (minus (match_dup 3) (pc)) (const_int 256)))
6406 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6407 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6412 (define_insn "*tstsi3_cbranch"
6415 (match_operator 3 "equality_operator"
6416 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6417 (match_operand:SI 1 "s_register_operand" "l"))
6419 (label_ref (match_operand 2 "" ""))
6424 output_asm_insn (\"tst\\t%0, %1\", operands);
6425 switch (get_attr_length (insn))
6427 case 4: return \"b%d3\\t%l2\";
6428 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6429 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6432 [(set (attr "far_jump")
6434 (eq_attr "length" "8")
6435 (const_string "yes")
6436 (const_string "no")))
6437 (set (attr "length")
6439 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6440 (le (minus (match_dup 2) (pc)) (const_int 256)))
6443 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6444 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6449 (define_insn "*andsi3_cbranch"
6452 (match_operator 5 "equality_operator"
6453 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6454 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6456 (label_ref (match_operand 4 "" ""))
6458 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6459 (and:SI (match_dup 2) (match_dup 3)))
6460 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6464 if (which_alternative == 0)
6465 output_asm_insn (\"and\\t%0, %3\", operands);
6466 else if (which_alternative == 1)
6468 output_asm_insn (\"and\\t%1, %3\", operands);
6469 output_asm_insn (\"mov\\t%0, %1\", operands);
6473 output_asm_insn (\"and\\t%1, %3\", operands);
6474 output_asm_insn (\"str\\t%1, %0\", operands);
6477 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6479 case 4: return \"b%d5\\t%l4\";
6480 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6481 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6484 [(set (attr "far_jump")
6486 (ior (and (eq (symbol_ref ("which_alternative"))
6488 (eq_attr "length" "8"))
6489 (eq_attr "length" "10"))
6490 (const_string "yes")
6491 (const_string "no")))
6492 (set (attr "length")
6494 (eq (symbol_ref ("which_alternative"))
6497 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6498 (le (minus (match_dup 4) (pc)) (const_int 256)))
6501 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6502 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6506 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6507 (le (minus (match_dup 4) (pc)) (const_int 256)))
6510 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6511 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6516 (define_insn "*orrsi3_cbranch_scratch"
6519 (match_operator 4 "equality_operator"
6520 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6521 (match_operand:SI 2 "s_register_operand" "l"))
6523 (label_ref (match_operand 3 "" ""))
6525 (clobber (match_scratch:SI 0 "=l"))]
6529 output_asm_insn (\"orr\\t%0, %2\", operands);
6530 switch (get_attr_length (insn))
6532 case 4: return \"b%d4\\t%l3\";
6533 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6534 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6537 [(set (attr "far_jump")
6539 (eq_attr "length" "8")
6540 (const_string "yes")
6541 (const_string "no")))
6542 (set (attr "length")
6544 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6545 (le (minus (match_dup 3) (pc)) (const_int 256)))
6548 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6549 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6554 (define_insn "*orrsi3_cbranch"
6557 (match_operator 5 "equality_operator"
6558 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6559 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6561 (label_ref (match_operand 4 "" ""))
6563 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6564 (ior:SI (match_dup 2) (match_dup 3)))
6565 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6569 if (which_alternative == 0)
6570 output_asm_insn (\"orr\\t%0, %3\", operands);
6571 else if (which_alternative == 1)
6573 output_asm_insn (\"orr\\t%1, %3\", operands);
6574 output_asm_insn (\"mov\\t%0, %1\", operands);
6578 output_asm_insn (\"orr\\t%1, %3\", operands);
6579 output_asm_insn (\"str\\t%1, %0\", operands);
6582 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6584 case 4: return \"b%d5\\t%l4\";
6585 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6586 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6589 [(set (attr "far_jump")
6591 (ior (and (eq (symbol_ref ("which_alternative"))
6593 (eq_attr "length" "8"))
6594 (eq_attr "length" "10"))
6595 (const_string "yes")
6596 (const_string "no")))
6597 (set (attr "length")
6599 (eq (symbol_ref ("which_alternative"))
6602 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6603 (le (minus (match_dup 4) (pc)) (const_int 256)))
6606 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6607 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6611 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6612 (le (minus (match_dup 4) (pc)) (const_int 256)))
6615 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6616 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6621 (define_insn "*xorsi3_cbranch_scratch"
6624 (match_operator 4 "equality_operator"
6625 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6626 (match_operand:SI 2 "s_register_operand" "l"))
6628 (label_ref (match_operand 3 "" ""))
6630 (clobber (match_scratch:SI 0 "=l"))]
6634 output_asm_insn (\"eor\\t%0, %2\", operands);
6635 switch (get_attr_length (insn))
6637 case 4: return \"b%d4\\t%l3\";
6638 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6639 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6642 [(set (attr "far_jump")
6644 (eq_attr "length" "8")
6645 (const_string "yes")
6646 (const_string "no")))
6647 (set (attr "length")
6649 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6650 (le (minus (match_dup 3) (pc)) (const_int 256)))
6653 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6654 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6659 (define_insn "*xorsi3_cbranch"
6662 (match_operator 5 "equality_operator"
6663 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6664 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6666 (label_ref (match_operand 4 "" ""))
6668 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6669 (xor:SI (match_dup 2) (match_dup 3)))
6670 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6674 if (which_alternative == 0)
6675 output_asm_insn (\"eor\\t%0, %3\", operands);
6676 else if (which_alternative == 1)
6678 output_asm_insn (\"eor\\t%1, %3\", operands);
6679 output_asm_insn (\"mov\\t%0, %1\", operands);
6683 output_asm_insn (\"eor\\t%1, %3\", operands);
6684 output_asm_insn (\"str\\t%1, %0\", operands);
6687 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6689 case 4: return \"b%d5\\t%l4\";
6690 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6691 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6694 [(set (attr "far_jump")
6696 (ior (and (eq (symbol_ref ("which_alternative"))
6698 (eq_attr "length" "8"))
6699 (eq_attr "length" "10"))
6700 (const_string "yes")
6701 (const_string "no")))
6702 (set (attr "length")
6704 (eq (symbol_ref ("which_alternative"))
6707 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6708 (le (minus (match_dup 4) (pc)) (const_int 256)))
6711 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6712 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6716 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6717 (le (minus (match_dup 4) (pc)) (const_int 256)))
6720 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6721 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6726 (define_insn "*bicsi3_cbranch_scratch"
6729 (match_operator 4 "equality_operator"
6730 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6731 (match_operand:SI 1 "s_register_operand" "0"))
6733 (label_ref (match_operand 3 "" ""))
6735 (clobber (match_scratch:SI 0 "=l"))]
6739 output_asm_insn (\"bic\\t%0, %2\", operands);
6740 switch (get_attr_length (insn))
6742 case 4: return \"b%d4\\t%l3\";
6743 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6744 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6747 [(set (attr "far_jump")
6749 (eq_attr "length" "8")
6750 (const_string "yes")
6751 (const_string "no")))
6752 (set (attr "length")
6754 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6755 (le (minus (match_dup 3) (pc)) (const_int 256)))
6758 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6759 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6764 (define_insn "*bicsi3_cbranch"
6767 (match_operator 5 "equality_operator"
6768 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6769 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6771 (label_ref (match_operand 4 "" ""))
6773 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6774 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6775 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6779 if (which_alternative == 0)
6780 output_asm_insn (\"bic\\t%0, %3\", operands);
6781 else if (which_alternative <= 2)
6783 output_asm_insn (\"bic\\t%1, %3\", operands);
6784 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6785 conditions again, since we're only testing for equality. */
6786 output_asm_insn (\"mov\\t%0, %1\", operands);
6790 output_asm_insn (\"bic\\t%1, %3\", operands);
6791 output_asm_insn (\"str\\t%1, %0\", operands);
6794 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6796 case 4: return \"b%d5\\t%l4\";
6797 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6798 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6801 [(set (attr "far_jump")
6803 (ior (and (eq (symbol_ref ("which_alternative"))
6805 (eq_attr "length" "8"))
6806 (eq_attr "length" "10"))
6807 (const_string "yes")
6808 (const_string "no")))
6809 (set (attr "length")
6811 (eq (symbol_ref ("which_alternative"))
6814 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6815 (le (minus (match_dup 4) (pc)) (const_int 256)))
6818 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6819 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6823 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6824 (le (minus (match_dup 4) (pc)) (const_int 256)))
6827 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6828 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6833 (define_insn "*cbranchne_decr1"
6835 (if_then_else (match_operator 3 "equality_operator"
6836 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6838 (label_ref (match_operand 4 "" ""))
6840 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6841 (plus:SI (match_dup 2) (const_int -1)))
6842 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6847 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6849 VOIDmode, operands[2], const1_rtx);
6850 cond[1] = operands[4];
6852 if (which_alternative == 0)
6853 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6854 else if (which_alternative == 1)
6856 /* We must provide an alternative for a hi reg because reload
6857 cannot handle output reloads on a jump instruction, but we
6858 can't subtract into that. Fortunately a mov from lo to hi
6859 does not clobber the condition codes. */
6860 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6861 output_asm_insn (\"mov\\t%0, %1\", operands);
6865 /* Similarly, but the target is memory. */
6866 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6867 output_asm_insn (\"str\\t%1, %0\", operands);
6870 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6873 output_asm_insn (\"b%d0\\t%l1\", cond);
6876 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6877 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6879 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6880 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6884 [(set (attr "far_jump")
6886 (ior (and (eq (symbol_ref ("which_alternative"))
6888 (eq_attr "length" "8"))
6889 (eq_attr "length" "10"))
6890 (const_string "yes")
6891 (const_string "no")))
6892 (set_attr_alternative "length"
6896 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6897 (le (minus (match_dup 4) (pc)) (const_int 256)))
6900 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6901 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6906 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6907 (le (minus (match_dup 4) (pc)) (const_int 256)))
6910 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6911 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6916 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6917 (le (minus (match_dup 4) (pc)) (const_int 256)))
6920 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6921 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6926 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6927 (le (minus (match_dup 4) (pc)) (const_int 256)))
6930 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6931 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6936 (define_insn "*addsi3_cbranch"
6939 (match_operator 4 "comparison_operator"
6941 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
6942 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
6944 (label_ref (match_operand 5 "" ""))
6947 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6948 (plus:SI (match_dup 2) (match_dup 3)))
6949 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
6951 && (GET_CODE (operands[4]) == EQ
6952 || GET_CODE (operands[4]) == NE
6953 || GET_CODE (operands[4]) == GE
6954 || GET_CODE (operands[4]) == LT)"
6960 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
6961 cond[1] = operands[2];
6962 cond[2] = operands[3];
6964 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6965 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6967 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6969 if (which_alternative >= 3
6970 && which_alternative < 4)
6971 output_asm_insn (\"mov\\t%0, %1\", operands);
6972 else if (which_alternative >= 4)
6973 output_asm_insn (\"str\\t%1, %0\", operands);
6975 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
6978 return \"b%d4\\t%l5\";
6980 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6982 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
6986 [(set (attr "far_jump")
6988 (ior (and (lt (symbol_ref ("which_alternative"))
6990 (eq_attr "length" "8"))
6991 (eq_attr "length" "10"))
6992 (const_string "yes")
6993 (const_string "no")))
6994 (set (attr "length")
6996 (lt (symbol_ref ("which_alternative"))
6999 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7000 (le (minus (match_dup 5) (pc)) (const_int 256)))
7003 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7004 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7008 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7009 (le (minus (match_dup 5) (pc)) (const_int 256)))
7012 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7013 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7018 (define_insn "*addsi3_cbranch_scratch"
7021 (match_operator 3 "comparison_operator"
7023 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7024 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7026 (label_ref (match_operand 4 "" ""))
7028 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7030 && (GET_CODE (operands[3]) == EQ
7031 || GET_CODE (operands[3]) == NE
7032 || GET_CODE (operands[3]) == GE
7033 || GET_CODE (operands[3]) == LT)"
7036 switch (which_alternative)
7039 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7042 output_asm_insn (\"cmn\t%1, %2\", operands);
7045 if (INTVAL (operands[2]) < 0)
7046 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7048 output_asm_insn (\"add\t%0, %1, %2\", operands);
7051 if (INTVAL (operands[2]) < 0)
7052 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7054 output_asm_insn (\"add\t%0, %0, %2\", operands);
7058 switch (get_attr_length (insn))
7061 return \"b%d3\\t%l4\";
7063 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7065 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7069 [(set (attr "far_jump")
7071 (eq_attr "length" "8")
7072 (const_string "yes")
7073 (const_string "no")))
7074 (set (attr "length")
7076 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7077 (le (minus (match_dup 4) (pc)) (const_int 256)))
7080 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7081 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7086 (define_insn "*subsi3_cbranch"
7089 (match_operator 4 "comparison_operator"
7091 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7092 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7094 (label_ref (match_operand 5 "" ""))
7096 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7097 (minus:SI (match_dup 2) (match_dup 3)))
7098 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7100 && (GET_CODE (operands[4]) == EQ
7101 || GET_CODE (operands[4]) == NE
7102 || GET_CODE (operands[4]) == GE
7103 || GET_CODE (operands[4]) == LT)"
7106 if (which_alternative == 0)
7107 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7108 else if (which_alternative == 1)
7110 /* We must provide an alternative for a hi reg because reload
7111 cannot handle output reloads on a jump instruction, but we
7112 can't subtract into that. Fortunately a mov from lo to hi
7113 does not clobber the condition codes. */
7114 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7115 output_asm_insn (\"mov\\t%0, %1\", operands);
7119 /* Similarly, but the target is memory. */
7120 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7121 output_asm_insn (\"str\\t%1, %0\", operands);
7124 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7127 return \"b%d4\\t%l5\";
7129 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7131 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7135 [(set (attr "far_jump")
7137 (ior (and (eq (symbol_ref ("which_alternative"))
7139 (eq_attr "length" "8"))
7140 (eq_attr "length" "10"))
7141 (const_string "yes")
7142 (const_string "no")))
7143 (set (attr "length")
7145 (eq (symbol_ref ("which_alternative"))
7148 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7149 (le (minus (match_dup 5) (pc)) (const_int 256)))
7152 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7153 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7157 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7158 (le (minus (match_dup 5) (pc)) (const_int 256)))
7161 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7162 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7167 (define_insn "*subsi3_cbranch_scratch"
7170 (match_operator 0 "arm_comparison_operator"
7171 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7172 (match_operand:SI 2 "nonmemory_operand" "l"))
7174 (label_ref (match_operand 3 "" ""))
7177 && (GET_CODE (operands[0]) == EQ
7178 || GET_CODE (operands[0]) == NE
7179 || GET_CODE (operands[0]) == GE
7180 || GET_CODE (operands[0]) == LT)"
7182 output_asm_insn (\"cmp\\t%1, %2\", operands);
7183 switch (get_attr_length (insn))
7185 case 4: return \"b%d0\\t%l3\";
7186 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7187 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7190 [(set (attr "far_jump")
7192 (eq_attr "length" "8")
7193 (const_string "yes")
7194 (const_string "no")))
7195 (set (attr "length")
7197 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7198 (le (minus (match_dup 3) (pc)) (const_int 256)))
7201 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7202 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7207 ;; Comparison and test insns
7209 (define_expand "cmpsi"
7210 [(match_operand:SI 0 "s_register_operand" "")
7211 (match_operand:SI 1 "arm_add_operand" "")]
7214 arm_compare_op0 = operands[0];
7215 arm_compare_op1 = operands[1];
7220 (define_expand "cmpsf"
7221 [(match_operand:SF 0 "s_register_operand" "")
7222 (match_operand:SF 1 "arm_float_compare_operand" "")]
7223 "TARGET_32BIT && TARGET_HARD_FLOAT"
7225 arm_compare_op0 = operands[0];
7226 arm_compare_op1 = operands[1];
7231 (define_expand "cmpdf"
7232 [(match_operand:DF 0 "s_register_operand" "")
7233 (match_operand:DF 1 "arm_float_compare_operand" "")]
7234 "TARGET_32BIT && TARGET_HARD_FLOAT"
7236 arm_compare_op0 = operands[0];
7237 arm_compare_op1 = operands[1];
7242 (define_insn "*arm_cmpsi_insn"
7243 [(set (reg:CC CC_REGNUM)
7244 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7245 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7250 [(set_attr "conds" "set")]
7253 (define_insn "*arm_cmpsi_shiftsi"
7254 [(set (reg:CC CC_REGNUM)
7255 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7256 (match_operator:SI 3 "shift_operator"
7257 [(match_operand:SI 1 "s_register_operand" "r")
7258 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7261 [(set_attr "conds" "set")
7262 (set_attr "shift" "1")
7263 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7264 (const_string "alu_shift")
7265 (const_string "alu_shift_reg")))]
7268 (define_insn "*arm_cmpsi_shiftsi_swp"
7269 [(set (reg:CC_SWP CC_REGNUM)
7270 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7271 [(match_operand:SI 1 "s_register_operand" "r")
7272 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7273 (match_operand:SI 0 "s_register_operand" "r")))]
7276 [(set_attr "conds" "set")
7277 (set_attr "shift" "1")
7278 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7279 (const_string "alu_shift")
7280 (const_string "alu_shift_reg")))]
7283 (define_insn "*arm_cmpsi_negshiftsi_si"
7284 [(set (reg:CC_Z CC_REGNUM)
7286 (neg:SI (match_operator:SI 1 "shift_operator"
7287 [(match_operand:SI 2 "s_register_operand" "r")
7288 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7289 (match_operand:SI 0 "s_register_operand" "r")))]
7292 [(set_attr "conds" "set")
7293 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7294 (const_string "alu_shift")
7295 (const_string "alu_shift_reg")))]
7298 ;; Cirrus SF compare instruction
7299 (define_insn "*cirrus_cmpsf"
7300 [(set (reg:CCFP CC_REGNUM)
7301 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7302 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7303 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7304 "cfcmps%?\\tr15, %V0, %V1"
7305 [(set_attr "type" "mav_farith")
7306 (set_attr "cirrus" "compare")]
7309 ;; Cirrus DF compare instruction
7310 (define_insn "*cirrus_cmpdf"
7311 [(set (reg:CCFP CC_REGNUM)
7312 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7313 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7314 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7315 "cfcmpd%?\\tr15, %V0, %V1"
7316 [(set_attr "type" "mav_farith")
7317 (set_attr "cirrus" "compare")]
7320 ;; Cirrus DI compare instruction
7321 (define_expand "cmpdi"
7322 [(match_operand:DI 0 "cirrus_fp_register" "")
7323 (match_operand:DI 1 "cirrus_fp_register" "")]
7324 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7326 arm_compare_op0 = operands[0];
7327 arm_compare_op1 = operands[1];
7331 (define_insn "*cirrus_cmpdi"
7332 [(set (reg:CC CC_REGNUM)
7333 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7334 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7335 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7336 "cfcmp64%?\\tr15, %V0, %V1"
7337 [(set_attr "type" "mav_farith")
7338 (set_attr "cirrus" "compare")]
7341 ; This insn allows redundant compares to be removed by cse, nothing should
7342 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7343 ; is deleted later on. The match_dup will match the mode here, so that
7344 ; mode changes of the condition codes aren't lost by this even though we don't
7345 ; specify what they are.
7347 (define_insn "*deleted_compare"
7348 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7350 "\\t%@ deleted compare"
7351 [(set_attr "conds" "set")
7352 (set_attr "length" "0")]
7356 ;; Conditional branch insns
7358 (define_expand "beq"
7360 (if_then_else (eq (match_dup 1) (const_int 0))
7361 (label_ref (match_operand 0 "" ""))
7364 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7367 (define_expand "bne"
7369 (if_then_else (ne (match_dup 1) (const_int 0))
7370 (label_ref (match_operand 0 "" ""))
7373 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7376 (define_expand "bgt"
7378 (if_then_else (gt (match_dup 1) (const_int 0))
7379 (label_ref (match_operand 0 "" ""))
7382 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7385 (define_expand "ble"
7387 (if_then_else (le (match_dup 1) (const_int 0))
7388 (label_ref (match_operand 0 "" ""))
7391 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7394 (define_expand "bge"
7396 (if_then_else (ge (match_dup 1) (const_int 0))
7397 (label_ref (match_operand 0 "" ""))
7400 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7403 (define_expand "blt"
7405 (if_then_else (lt (match_dup 1) (const_int 0))
7406 (label_ref (match_operand 0 "" ""))
7409 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7412 (define_expand "bgtu"
7414 (if_then_else (gtu (match_dup 1) (const_int 0))
7415 (label_ref (match_operand 0 "" ""))
7418 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7421 (define_expand "bleu"
7423 (if_then_else (leu (match_dup 1) (const_int 0))
7424 (label_ref (match_operand 0 "" ""))
7427 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7430 (define_expand "bgeu"
7432 (if_then_else (geu (match_dup 1) (const_int 0))
7433 (label_ref (match_operand 0 "" ""))
7436 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7439 (define_expand "bltu"
7441 (if_then_else (ltu (match_dup 1) (const_int 0))
7442 (label_ref (match_operand 0 "" ""))
7445 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7448 (define_expand "bunordered"
7450 (if_then_else (unordered (match_dup 1) (const_int 0))
7451 (label_ref (match_operand 0 "" ""))
7453 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7454 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7458 (define_expand "bordered"
7460 (if_then_else (ordered (match_dup 1) (const_int 0))
7461 (label_ref (match_operand 0 "" ""))
7463 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7464 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7468 (define_expand "bungt"
7470 (if_then_else (ungt (match_dup 1) (const_int 0))
7471 (label_ref (match_operand 0 "" ""))
7473 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7474 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7477 (define_expand "bunlt"
7479 (if_then_else (unlt (match_dup 1) (const_int 0))
7480 (label_ref (match_operand 0 "" ""))
7482 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7483 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7486 (define_expand "bunge"
7488 (if_then_else (unge (match_dup 1) (const_int 0))
7489 (label_ref (match_operand 0 "" ""))
7491 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7492 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7495 (define_expand "bunle"
7497 (if_then_else (unle (match_dup 1) (const_int 0))
7498 (label_ref (match_operand 0 "" ""))
7500 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7501 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7504 ;; The following two patterns need two branch instructions, since there is
7505 ;; no single instruction that will handle all cases.
7506 (define_expand "buneq"
7508 (if_then_else (uneq (match_dup 1) (const_int 0))
7509 (label_ref (match_operand 0 "" ""))
7511 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7512 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7515 (define_expand "bltgt"
7517 (if_then_else (ltgt (match_dup 1) (const_int 0))
7518 (label_ref (match_operand 0 "" ""))
7520 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7521 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7525 ;; Patterns to match conditional branch insns.
7528 ; Special pattern to match UNEQ.
7529 (define_insn "*arm_buneq"
7531 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7532 (label_ref (match_operand 0 "" ""))
7534 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7536 gcc_assert (!arm_ccfsm_state);
7538 return \"bvs\\t%l0\;beq\\t%l0\";
7540 [(set_attr "conds" "jump_clob")
7541 (set_attr "length" "8")]
7544 ; Special pattern to match LTGT.
7545 (define_insn "*arm_bltgt"
7547 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7548 (label_ref (match_operand 0 "" ""))
7550 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7552 gcc_assert (!arm_ccfsm_state);
7554 return \"bmi\\t%l0\;bgt\\t%l0\";
7556 [(set_attr "conds" "jump_clob")
7557 (set_attr "length" "8")]
7560 (define_insn "*arm_cond_branch"
7562 (if_then_else (match_operator 1 "arm_comparison_operator"
7563 [(match_operand 2 "cc_register" "") (const_int 0)])
7564 (label_ref (match_operand 0 "" ""))
7568 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7570 arm_ccfsm_state += 2;
7573 return \"b%d1\\t%l0\";
7575 [(set_attr "conds" "use")
7576 (set_attr "type" "branch")]
7579 ; Special pattern to match reversed UNEQ.
7580 (define_insn "*arm_buneq_reversed"
7582 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7584 (label_ref (match_operand 0 "" ""))))]
7585 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7587 gcc_assert (!arm_ccfsm_state);
7589 return \"bmi\\t%l0\;bgt\\t%l0\";
7591 [(set_attr "conds" "jump_clob")
7592 (set_attr "length" "8")]
7595 ; Special pattern to match reversed LTGT.
7596 (define_insn "*arm_bltgt_reversed"
7598 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7600 (label_ref (match_operand 0 "" ""))))]
7601 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7603 gcc_assert (!arm_ccfsm_state);
7605 return \"bvs\\t%l0\;beq\\t%l0\";
7607 [(set_attr "conds" "jump_clob")
7608 (set_attr "length" "8")]
7611 (define_insn "*arm_cond_branch_reversed"
7613 (if_then_else (match_operator 1 "arm_comparison_operator"
7614 [(match_operand 2 "cc_register" "") (const_int 0)])
7616 (label_ref (match_operand 0 "" ""))))]
7619 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7621 arm_ccfsm_state += 2;
7624 return \"b%D1\\t%l0\";
7626 [(set_attr "conds" "use")
7627 (set_attr "type" "branch")]
7634 (define_expand "seq"
7635 [(set (match_operand:SI 0 "s_register_operand" "")
7636 (eq:SI (match_dup 1) (const_int 0)))]
7638 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7641 (define_expand "sne"
7642 [(set (match_operand:SI 0 "s_register_operand" "")
7643 (ne:SI (match_dup 1) (const_int 0)))]
7645 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7648 (define_expand "sgt"
7649 [(set (match_operand:SI 0 "s_register_operand" "")
7650 (gt:SI (match_dup 1) (const_int 0)))]
7652 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7655 (define_expand "sle"
7656 [(set (match_operand:SI 0 "s_register_operand" "")
7657 (le:SI (match_dup 1) (const_int 0)))]
7659 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7662 (define_expand "sge"
7663 [(set (match_operand:SI 0 "s_register_operand" "")
7664 (ge:SI (match_dup 1) (const_int 0)))]
7666 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7669 (define_expand "slt"
7670 [(set (match_operand:SI 0 "s_register_operand" "")
7671 (lt:SI (match_dup 1) (const_int 0)))]
7673 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7676 (define_expand "sgtu"
7677 [(set (match_operand:SI 0 "s_register_operand" "")
7678 (gtu:SI (match_dup 1) (const_int 0)))]
7680 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7683 (define_expand "sleu"
7684 [(set (match_operand:SI 0 "s_register_operand" "")
7685 (leu:SI (match_dup 1) (const_int 0)))]
7687 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7690 (define_expand "sgeu"
7691 [(set (match_operand:SI 0 "s_register_operand" "")
7692 (geu:SI (match_dup 1) (const_int 0)))]
7694 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7697 (define_expand "sltu"
7698 [(set (match_operand:SI 0 "s_register_operand" "")
7699 (ltu:SI (match_dup 1) (const_int 0)))]
7701 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7704 (define_expand "sunordered"
7705 [(set (match_operand:SI 0 "s_register_operand" "")
7706 (unordered:SI (match_dup 1) (const_int 0)))]
7707 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7708 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7712 (define_expand "sordered"
7713 [(set (match_operand:SI 0 "s_register_operand" "")
7714 (ordered:SI (match_dup 1) (const_int 0)))]
7715 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7716 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7720 (define_expand "sungt"
7721 [(set (match_operand:SI 0 "s_register_operand" "")
7722 (ungt:SI (match_dup 1) (const_int 0)))]
7723 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7724 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7728 (define_expand "sunge"
7729 [(set (match_operand:SI 0 "s_register_operand" "")
7730 (unge:SI (match_dup 1) (const_int 0)))]
7731 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7732 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7736 (define_expand "sunlt"
7737 [(set (match_operand:SI 0 "s_register_operand" "")
7738 (unlt:SI (match_dup 1) (const_int 0)))]
7739 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7740 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7744 (define_expand "sunle"
7745 [(set (match_operand:SI 0 "s_register_operand" "")
7746 (unle:SI (match_dup 1) (const_int 0)))]
7747 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7748 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7752 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7753 ;;; simple ARM instructions.
7755 ; (define_expand "suneq"
7756 ; [(set (match_operand:SI 0 "s_register_operand" "")
7757 ; (uneq:SI (match_dup 1) (const_int 0)))]
7758 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7759 ; "gcc_unreachable ();"
7762 ; (define_expand "sltgt"
7763 ; [(set (match_operand:SI 0 "s_register_operand" "")
7764 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7765 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7766 ; "gcc_unreachable ();"
7769 (define_insn "*mov_scc"
7770 [(set (match_operand:SI 0 "s_register_operand" "=r")
7771 (match_operator:SI 1 "arm_comparison_operator"
7772 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7774 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7775 [(set_attr "conds" "use")
7776 (set_attr "length" "8")]
7779 (define_insn "*mov_negscc"
7780 [(set (match_operand:SI 0 "s_register_operand" "=r")
7781 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7782 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7784 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7785 [(set_attr "conds" "use")
7786 (set_attr "length" "8")]
7789 (define_insn "*mov_notscc"
7790 [(set (match_operand:SI 0 "s_register_operand" "=r")
7791 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7792 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7794 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7795 [(set_attr "conds" "use")
7796 (set_attr "length" "8")]
7799 (define_expand "cstoresi4"
7800 [(set (match_operand:SI 0 "s_register_operand" "")
7801 (match_operator:SI 1 "arm_comparison_operator"
7802 [(match_operand:SI 2 "s_register_operand" "")
7803 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7806 rtx op3, scratch, scratch2;
7808 if (operands[3] == const0_rtx)
7810 switch (GET_CODE (operands[1]))
7813 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7817 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7821 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7822 NULL_RTX, 0, OPTAB_WIDEN);
7823 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7824 NULL_RTX, 0, OPTAB_WIDEN);
7825 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7826 operands[0], 1, OPTAB_WIDEN);
7830 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7832 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7833 NULL_RTX, 1, OPTAB_WIDEN);
7837 scratch = expand_binop (SImode, ashr_optab, operands[2],
7838 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7839 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7840 NULL_RTX, 0, OPTAB_WIDEN);
7841 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7845 /* LT is handled by generic code. No need for unsigned with 0. */
7852 switch (GET_CODE (operands[1]))
7855 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7856 NULL_RTX, 0, OPTAB_WIDEN);
7857 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7861 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7862 NULL_RTX, 0, OPTAB_WIDEN);
7863 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7867 op3 = force_reg (SImode, operands[3]);
7869 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7870 NULL_RTX, 1, OPTAB_WIDEN);
7871 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7872 NULL_RTX, 0, OPTAB_WIDEN);
7873 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7879 if (!thumb1_cmp_operand (op3, SImode))
7880 op3 = force_reg (SImode, op3);
7881 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7882 NULL_RTX, 0, OPTAB_WIDEN);
7883 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7884 NULL_RTX, 1, OPTAB_WIDEN);
7885 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7890 op3 = force_reg (SImode, operands[3]);
7891 scratch = force_reg (SImode, const0_rtx);
7892 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7898 if (!thumb1_cmp_operand (op3, SImode))
7899 op3 = force_reg (SImode, op3);
7900 scratch = force_reg (SImode, const0_rtx);
7901 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7907 if (!thumb1_cmp_operand (op3, SImode))
7908 op3 = force_reg (SImode, op3);
7909 scratch = gen_reg_rtx (SImode);
7910 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
7911 emit_insn (gen_negsi2 (operands[0], scratch));
7915 op3 = force_reg (SImode, operands[3]);
7916 scratch = gen_reg_rtx (SImode);
7917 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
7918 emit_insn (gen_negsi2 (operands[0], scratch));
7921 /* No good sequences for GT, LT. */
7928 (define_expand "cstoresi_eq0_thumb1"
7930 [(set (match_operand:SI 0 "s_register_operand" "")
7931 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7933 (clobber (match_dup:SI 2))])]
7935 "operands[2] = gen_reg_rtx (SImode);"
7938 (define_expand "cstoresi_ne0_thumb1"
7940 [(set (match_operand:SI 0 "s_register_operand" "")
7941 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7943 (clobber (match_dup:SI 2))])]
7945 "operands[2] = gen_reg_rtx (SImode);"
7948 (define_insn "*cstoresi_eq0_thumb1_insn"
7949 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7950 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7952 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7955 neg\\t%0, %1\;adc\\t%0, %0, %1
7956 neg\\t%2, %1\;adc\\t%0, %1, %2"
7957 [(set_attr "length" "4")]
7960 (define_insn "*cstoresi_ne0_thumb1_insn"
7961 [(set (match_operand:SI 0 "s_register_operand" "=l")
7962 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7964 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7966 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7967 [(set_attr "length" "4")]
7970 (define_insn "cstoresi_nltu_thumb1"
7971 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7972 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7973 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7975 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7976 [(set_attr "length" "4")]
7979 ;; Used as part of the expansion of thumb les sequence.
7980 (define_insn "thumb1_addsi3_addgeu"
7981 [(set (match_operand:SI 0 "s_register_operand" "=l")
7982 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7983 (match_operand:SI 2 "s_register_operand" "l"))
7984 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7985 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7987 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7988 [(set_attr "length" "4")]
7992 ;; Conditional move insns
7994 (define_expand "movsicc"
7995 [(set (match_operand:SI 0 "s_register_operand" "")
7996 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7997 (match_operand:SI 2 "arm_not_operand" "")
7998 (match_operand:SI 3 "arm_not_operand" "")))]
8002 enum rtx_code code = GET_CODE (operands[1]);
8005 if (code == UNEQ || code == LTGT)
8008 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8009 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8013 (define_expand "movsfcc"
8014 [(set (match_operand:SF 0 "s_register_operand" "")
8015 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8016 (match_operand:SF 2 "s_register_operand" "")
8017 (match_operand:SF 3 "nonmemory_operand" "")))]
8021 enum rtx_code code = GET_CODE (operands[1]);
8024 if (code == UNEQ || code == LTGT)
8027 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8028 Otherwise, ensure it is a valid FP add operand */
8029 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8030 || (!arm_float_add_operand (operands[3], SFmode)))
8031 operands[3] = force_reg (SFmode, operands[3]);
8033 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8034 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8038 (define_expand "movdfcc"
8039 [(set (match_operand:DF 0 "s_register_operand" "")
8040 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8041 (match_operand:DF 2 "s_register_operand" "")
8042 (match_operand:DF 3 "arm_float_add_operand" "")))]
8043 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8046 enum rtx_code code = GET_CODE (operands[1]);
8049 if (code == UNEQ || code == LTGT)
8052 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8053 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8057 (define_insn "*movsicc_insn"
8058 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8060 (match_operator 3 "arm_comparison_operator"
8061 [(match_operand 4 "cc_register" "") (const_int 0)])
8062 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8063 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8070 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8071 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8072 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8073 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8074 [(set_attr "length" "4,4,4,4,8,8,8,8")
8075 (set_attr "conds" "use")]
8078 (define_insn "*movsfcc_soft_insn"
8079 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8080 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8081 [(match_operand 4 "cc_register" "") (const_int 0)])
8082 (match_operand:SF 1 "s_register_operand" "0,r")
8083 (match_operand:SF 2 "s_register_operand" "r,0")))]
8084 "TARGET_ARM && TARGET_SOFT_FLOAT"
8088 [(set_attr "conds" "use")]
8092 ;; Jump and linkage insns
8094 (define_expand "jump"
8096 (label_ref (match_operand 0 "" "")))]
8101 (define_insn "*arm_jump"
8103 (label_ref (match_operand 0 "" "")))]
8107 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8109 arm_ccfsm_state += 2;
8112 return \"b%?\\t%l0\";
8115 [(set_attr "predicable" "yes")]
8118 (define_insn "*thumb_jump"
8120 (label_ref (match_operand 0 "" "")))]
8123 if (get_attr_length (insn) == 2)
8125 return \"bl\\t%l0\\t%@ far jump\";
8127 [(set (attr "far_jump")
8129 (eq_attr "length" "4")
8130 (const_string "yes")
8131 (const_string "no")))
8132 (set (attr "length")
8134 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8135 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8140 (define_expand "call"
8141 [(parallel [(call (match_operand 0 "memory_operand" "")
8142 (match_operand 1 "general_operand" ""))
8143 (use (match_operand 2 "" ""))
8144 (clobber (reg:SI LR_REGNUM))])]
8150 /* In an untyped call, we can get NULL for operand 2. */
8151 if (operands[2] == NULL_RTX)
8152 operands[2] = const0_rtx;
8154 /* This is to decide if we should generate indirect calls by loading the
8155 32-bit address of the callee into a register before performing the
8156 branch and link. operand[2] encodes the long_call/short_call
8157 attribute of the function being called. This attribute is set whenever
8158 __attribute__((long_call/short_call)) or #pragma long_call/no_long_call
8159 is used, and the short_call attribute can also be set if function is
8160 declared as static or if it has already been defined in the current
8161 compilation unit. See arm.c and arm.h for info about this. The third
8162 parameter to arm_is_longcall_p is used to tell it which pattern
8164 callee = XEXP (operands[0], 0);
8166 if ((GET_CODE (callee) == SYMBOL_REF
8167 && arm_is_longcall_p (operands[0], INTVAL (operands[2]), 0))
8168 || (GET_CODE (callee) != SYMBOL_REF
8169 && GET_CODE (callee) != REG))
8170 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8174 (define_insn "*call_reg_armv5"
8175 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8176 (match_operand 1 "" ""))
8177 (use (match_operand 2 "" ""))
8178 (clobber (reg:SI LR_REGNUM))]
8179 "TARGET_ARM && arm_arch5"
8181 [(set_attr "type" "call")]
8184 (define_insn "*call_reg_arm"
8185 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8186 (match_operand 1 "" ""))
8187 (use (match_operand 2 "" ""))
8188 (clobber (reg:SI LR_REGNUM))]
8189 "TARGET_ARM && !arm_arch5"
8191 return output_call (operands);
8193 ;; length is worst case, normally it is only two
8194 [(set_attr "length" "12")
8195 (set_attr "type" "call")]
8198 (define_insn "*call_mem"
8199 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8200 (match_operand 1 "" ""))
8201 (use (match_operand 2 "" ""))
8202 (clobber (reg:SI LR_REGNUM))]
8205 return output_call_mem (operands);
8207 [(set_attr "length" "12")
8208 (set_attr "type" "call")]
8211 (define_insn "*call_reg_thumb1_v5"
8212 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8213 (match_operand 1 "" ""))
8214 (use (match_operand 2 "" ""))
8215 (clobber (reg:SI LR_REGNUM))]
8216 "TARGET_THUMB1 && arm_arch5"
8218 [(set_attr "length" "2")
8219 (set_attr "type" "call")]
8222 (define_insn "*call_reg_thumb1"
8223 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8224 (match_operand 1 "" ""))
8225 (use (match_operand 2 "" ""))
8226 (clobber (reg:SI LR_REGNUM))]
8227 "TARGET_THUMB1 && !arm_arch5"
8230 if (!TARGET_CALLER_INTERWORKING)
8231 return thumb_call_via_reg (operands[0]);
8232 else if (operands[1] == const0_rtx)
8233 return \"bl\\t%__interwork_call_via_%0\";
8234 else if (frame_pointer_needed)
8235 return \"bl\\t%__interwork_r7_call_via_%0\";
8237 return \"bl\\t%__interwork_r11_call_via_%0\";
8239 [(set_attr "type" "call")]
8242 (define_expand "call_value"
8243 [(parallel [(set (match_operand 0 "" "")
8244 (call (match_operand 1 "memory_operand" "")
8245 (match_operand 2 "general_operand" "")))
8246 (use (match_operand 3 "" ""))
8247 (clobber (reg:SI LR_REGNUM))])]
8251 rtx callee = XEXP (operands[1], 0);
8253 /* In an untyped call, we can get NULL for operand 2. */
8254 if (operands[3] == 0)
8255 operands[3] = const0_rtx;
8257 /* See the comment in define_expand \"call\". */
8258 if ((GET_CODE (callee) == SYMBOL_REF
8259 && arm_is_longcall_p (operands[1], INTVAL (operands[3]), 0))
8260 || (GET_CODE (callee) != SYMBOL_REF
8261 && GET_CODE (callee) != REG))
8262 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8266 (define_insn "*call_value_reg_armv5"
8267 [(set (match_operand 0 "" "")
8268 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8269 (match_operand 2 "" "")))
8270 (use (match_operand 3 "" ""))
8271 (clobber (reg:SI LR_REGNUM))]
8272 "TARGET_ARM && arm_arch5"
8274 [(set_attr "type" "call")]
8277 (define_insn "*call_value_reg_arm"
8278 [(set (match_operand 0 "" "")
8279 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8280 (match_operand 2 "" "")))
8281 (use (match_operand 3 "" ""))
8282 (clobber (reg:SI LR_REGNUM))]
8283 "TARGET_ARM && !arm_arch5"
8285 return output_call (&operands[1]);
8287 [(set_attr "length" "12")
8288 (set_attr "type" "call")]
8291 (define_insn "*call_value_mem"
8292 [(set (match_operand 0 "" "")
8293 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8294 (match_operand 2 "" "")))
8295 (use (match_operand 3 "" ""))
8296 (clobber (reg:SI LR_REGNUM))]
8297 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8299 return output_call_mem (&operands[1]);
8301 [(set_attr "length" "12")
8302 (set_attr "type" "call")]
8305 (define_insn "*call_value_reg_thumb1_v5"
8306 [(set (match_operand 0 "" "")
8307 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8308 (match_operand 2 "" "")))
8309 (use (match_operand 3 "" ""))
8310 (clobber (reg:SI LR_REGNUM))]
8311 "TARGET_THUMB1 && arm_arch5"
8313 [(set_attr "length" "2")
8314 (set_attr "type" "call")]
8317 (define_insn "*call_value_reg_thumb1"
8318 [(set (match_operand 0 "" "")
8319 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8320 (match_operand 2 "" "")))
8321 (use (match_operand 3 "" ""))
8322 (clobber (reg:SI LR_REGNUM))]
8323 "TARGET_THUMB1 && !arm_arch5"
8326 if (!TARGET_CALLER_INTERWORKING)
8327 return thumb_call_via_reg (operands[1]);
8328 else if (operands[2] == const0_rtx)
8329 return \"bl\\t%__interwork_call_via_%1\";
8330 else if (frame_pointer_needed)
8331 return \"bl\\t%__interwork_r7_call_via_%1\";
8333 return \"bl\\t%__interwork_r11_call_via_%1\";
8335 [(set_attr "type" "call")]
8338 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8339 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8341 (define_insn "*call_symbol"
8342 [(call (mem:SI (match_operand:SI 0 "" ""))
8343 (match_operand 1 "" ""))
8344 (use (match_operand 2 "" ""))
8345 (clobber (reg:SI LR_REGNUM))]
8347 && (GET_CODE (operands[0]) == SYMBOL_REF)
8348 && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
8351 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8353 [(set_attr "type" "call")]
8356 (define_insn "*call_value_symbol"
8357 [(set (match_operand 0 "" "")
8358 (call (mem:SI (match_operand:SI 1 "" ""))
8359 (match_operand:SI 2 "" "")))
8360 (use (match_operand 3 "" ""))
8361 (clobber (reg:SI LR_REGNUM))]
8363 && (GET_CODE (operands[1]) == SYMBOL_REF)
8364 && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
8367 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8369 [(set_attr "type" "call")]
8372 (define_insn "*call_insn"
8373 [(call (mem:SI (match_operand:SI 0 "" ""))
8374 (match_operand:SI 1 "" ""))
8375 (use (match_operand 2 "" ""))
8376 (clobber (reg:SI LR_REGNUM))]
8378 && GET_CODE (operands[0]) == SYMBOL_REF
8379 && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
8381 [(set_attr "length" "4")
8382 (set_attr "type" "call")]
8385 (define_insn "*call_value_insn"
8386 [(set (match_operand 0 "" "")
8387 (call (mem:SI (match_operand 1 "" ""))
8388 (match_operand 2 "" "")))
8389 (use (match_operand 3 "" ""))
8390 (clobber (reg:SI LR_REGNUM))]
8392 && GET_CODE (operands[1]) == SYMBOL_REF
8393 && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
8395 [(set_attr "length" "4")
8396 (set_attr "type" "call")]
8399 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8400 (define_expand "sibcall"
8401 [(parallel [(call (match_operand 0 "memory_operand" "")
8402 (match_operand 1 "general_operand" ""))
8404 (use (match_operand 2 "" ""))])]
8408 if (operands[2] == NULL_RTX)
8409 operands[2] = const0_rtx;
8413 (define_expand "sibcall_value"
8414 [(parallel [(set (match_operand 0 "" "")
8415 (call (match_operand 1 "memory_operand" "")
8416 (match_operand 2 "general_operand" "")))
8418 (use (match_operand 3 "" ""))])]
8422 if (operands[3] == NULL_RTX)
8423 operands[3] = const0_rtx;
8427 (define_insn "*sibcall_insn"
8428 [(call (mem:SI (match_operand:SI 0 "" "X"))
8429 (match_operand 1 "" ""))
8431 (use (match_operand 2 "" ""))]
8432 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8434 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8436 [(set_attr "type" "call")]
8439 (define_insn "*sibcall_value_insn"
8440 [(set (match_operand 0 "" "")
8441 (call (mem:SI (match_operand:SI 1 "" "X"))
8442 (match_operand 2 "" "")))
8444 (use (match_operand 3 "" ""))]
8445 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8447 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8449 [(set_attr "type" "call")]
8452 ;; Often the return insn will be the same as loading from memory, so set attr
8453 (define_insn "return"
8455 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8458 if (arm_ccfsm_state == 2)
8460 arm_ccfsm_state += 2;
8463 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8465 [(set_attr "type" "load1")
8466 (set_attr "length" "12")
8467 (set_attr "predicable" "yes")]
8470 (define_insn "*cond_return"
8472 (if_then_else (match_operator 0 "arm_comparison_operator"
8473 [(match_operand 1 "cc_register" "") (const_int 0)])
8476 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8479 if (arm_ccfsm_state == 2)
8481 arm_ccfsm_state += 2;
8484 return output_return_instruction (operands[0], TRUE, FALSE);
8486 [(set_attr "conds" "use")
8487 (set_attr "length" "12")
8488 (set_attr "type" "load1")]
8491 (define_insn "*cond_return_inverted"
8493 (if_then_else (match_operator 0 "arm_comparison_operator"
8494 [(match_operand 1 "cc_register" "") (const_int 0)])
8497 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8500 if (arm_ccfsm_state == 2)
8502 arm_ccfsm_state += 2;
8505 return output_return_instruction (operands[0], TRUE, TRUE);
8507 [(set_attr "conds" "use")
8508 (set_attr "length" "12")
8509 (set_attr "type" "load1")]
8512 ;; Generate a sequence of instructions to determine if the processor is
8513 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8516 (define_expand "return_addr_mask"
8518 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8520 (set (match_operand:SI 0 "s_register_operand" "")
8521 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8523 (const_int 67108860)))] ; 0x03fffffc
8526 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8529 (define_insn "*check_arch2"
8530 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8531 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8534 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8535 [(set_attr "length" "8")
8536 (set_attr "conds" "set")]
8539 ;; Call subroutine returning any type.
8541 (define_expand "untyped_call"
8542 [(parallel [(call (match_operand 0 "" "")
8544 (match_operand 1 "" "")
8545 (match_operand 2 "" "")])]
8550 rtx par = gen_rtx_PARALLEL (VOIDmode,
8551 rtvec_alloc (XVECLEN (operands[2], 0)));
8552 rtx addr = gen_reg_rtx (Pmode);
8556 emit_move_insn (addr, XEXP (operands[1], 0));
8557 mem = change_address (operands[1], BLKmode, addr);
8559 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8561 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8563 /* Default code only uses r0 as a return value, but we could
8564 be using anything up to 4 registers. */
8565 if (REGNO (src) == R0_REGNUM)
8566 src = gen_rtx_REG (TImode, R0_REGNUM);
8568 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8570 size += GET_MODE_SIZE (GET_MODE (src));
8573 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8578 for (i = 0; i < XVECLEN (par, 0); i++)
8580 HOST_WIDE_INT offset = 0;
8581 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8584 emit_move_insn (addr, plus_constant (addr, size));
8586 mem = change_address (mem, GET_MODE (reg), NULL);
8587 if (REGNO (reg) == R0_REGNUM)
8589 /* On thumb we have to use a write-back instruction. */
8590 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8591 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8592 size = TARGET_ARM ? 16 : 0;
8596 emit_move_insn (mem, reg);
8597 size = GET_MODE_SIZE (GET_MODE (reg));
8601 /* The optimizer does not know that the call sets the function value
8602 registers we stored in the result block. We avoid problems by
8603 claiming that all hard registers are used and clobbered at this
8605 emit_insn (gen_blockage ());
8611 (define_expand "untyped_return"
8612 [(match_operand:BLK 0 "memory_operand" "")
8613 (match_operand 1 "" "")]
8618 rtx addr = gen_reg_rtx (Pmode);
8622 emit_move_insn (addr, XEXP (operands[0], 0));
8623 mem = change_address (operands[0], BLKmode, addr);
8625 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8627 HOST_WIDE_INT offset = 0;
8628 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8631 emit_move_insn (addr, plus_constant (addr, size));
8633 mem = change_address (mem, GET_MODE (reg), NULL);
8634 if (REGNO (reg) == R0_REGNUM)
8636 /* On thumb we have to use a write-back instruction. */
8637 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8638 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8639 size = TARGET_ARM ? 16 : 0;
8643 emit_move_insn (reg, mem);
8644 size = GET_MODE_SIZE (GET_MODE (reg));
8648 /* Emit USE insns before the return. */
8649 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8650 emit_insn (gen_rtx_USE (VOIDmode,
8651 SET_DEST (XVECEXP (operands[1], 0, i))));
8653 /* Construct the return. */
8654 expand_naked_return ();
8660 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8661 ;; all of memory. This blocks insns from being moved across this point.
8663 (define_insn "blockage"
8664 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8667 [(set_attr "length" "0")
8668 (set_attr "type" "block")]
8671 (define_expand "casesi"
8672 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8673 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8674 (match_operand:SI 2 "const_int_operand" "") ; total range
8675 (match_operand:SI 3 "" "") ; table label
8676 (match_operand:SI 4 "" "")] ; Out of range label
8681 if (operands[1] != const0_rtx)
8683 reg = gen_reg_rtx (SImode);
8685 emit_insn (gen_addsi3 (reg, operands[0],
8686 GEN_INT (-INTVAL (operands[1]))));
8690 if (!const_ok_for_arm (INTVAL (operands[2])))
8691 operands[2] = force_reg (SImode, operands[2]);
8695 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8696 operands[3], operands[4]));
8700 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8701 operands[2], operands[3], operands[4]));
8705 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8706 operands[3], operands[4]));
8712 ;; The USE in this pattern is needed to tell flow analysis that this is
8713 ;; a CASESI insn. It has no other purpose.
8714 (define_insn "arm_casesi_internal"
8715 [(parallel [(set (pc)
8717 (leu (match_operand:SI 0 "s_register_operand" "r")
8718 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8719 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8720 (label_ref (match_operand 2 "" ""))))
8721 (label_ref (match_operand 3 "" ""))))
8722 (clobber (reg:CC CC_REGNUM))
8723 (use (label_ref (match_dup 2)))])]
8727 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8728 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8730 [(set_attr "conds" "clob")
8731 (set_attr "length" "12")]
8734 (define_expand "indirect_jump"
8736 (match_operand:SI 0 "s_register_operand" ""))]
8739 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8740 address and use bx. */
8744 tmp = gen_reg_rtx (SImode);
8745 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8751 ;; NB Never uses BX.
8752 (define_insn "*arm_indirect_jump"
8754 (match_operand:SI 0 "s_register_operand" "r"))]
8756 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8757 [(set_attr "predicable" "yes")]
8760 (define_insn "*load_indirect_jump"
8762 (match_operand:SI 0 "memory_operand" "m"))]
8764 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8765 [(set_attr "type" "load1")
8766 (set_attr "pool_range" "4096")
8767 (set_attr "neg_pool_range" "4084")
8768 (set_attr "predicable" "yes")]
8771 ;; NB Never uses BX.
8772 (define_insn "*thumb1_indirect_jump"
8774 (match_operand:SI 0 "register_operand" "l*r"))]
8777 [(set_attr "conds" "clob")
8778 (set_attr "length" "2")]
8788 if (TARGET_UNIFIED_ASM)
8791 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8792 return \"mov\\tr8, r8\";
8794 [(set (attr "length")
8795 (if_then_else (eq_attr "is_thumb" "yes")
8801 ;; Patterns to allow combination of arithmetic, cond code and shifts
8803 (define_insn "*arith_shiftsi"
8804 [(set (match_operand:SI 0 "s_register_operand" "=r")
8805 (match_operator:SI 1 "shiftable_operator"
8806 [(match_operator:SI 3 "shift_operator"
8807 [(match_operand:SI 4 "s_register_operand" "r")
8808 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8809 (match_operand:SI 2 "s_register_operand" "r")]))]
8811 "%i1%?\\t%0, %2, %4%S3"
8812 [(set_attr "predicable" "yes")
8813 (set_attr "shift" "4")
8814 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8815 (const_string "alu_shift")
8816 (const_string "alu_shift_reg")))]
8820 [(set (match_operand:SI 0 "s_register_operand" "")
8821 (match_operator:SI 1 "shiftable_operator"
8822 [(match_operator:SI 2 "shiftable_operator"
8823 [(match_operator:SI 3 "shift_operator"
8824 [(match_operand:SI 4 "s_register_operand" "")
8825 (match_operand:SI 5 "reg_or_int_operand" "")])
8826 (match_operand:SI 6 "s_register_operand" "")])
8827 (match_operand:SI 7 "arm_rhs_operand" "")]))
8828 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8831 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8834 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8837 (define_insn "*arith_shiftsi_compare0"
8838 [(set (reg:CC_NOOV CC_REGNUM)
8839 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8840 [(match_operator:SI 3 "shift_operator"
8841 [(match_operand:SI 4 "s_register_operand" "r")
8842 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8843 (match_operand:SI 2 "s_register_operand" "r")])
8845 (set (match_operand:SI 0 "s_register_operand" "=r")
8846 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8849 "%i1%.\\t%0, %2, %4%S3"
8850 [(set_attr "conds" "set")
8851 (set_attr "shift" "4")
8852 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8853 (const_string "alu_shift")
8854 (const_string "alu_shift_reg")))]
8857 (define_insn "*arith_shiftsi_compare0_scratch"
8858 [(set (reg:CC_NOOV CC_REGNUM)
8859 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8860 [(match_operator:SI 3 "shift_operator"
8861 [(match_operand:SI 4 "s_register_operand" "r")
8862 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8863 (match_operand:SI 2 "s_register_operand" "r")])
8865 (clobber (match_scratch:SI 0 "=r"))]
8867 "%i1%.\\t%0, %2, %4%S3"
8868 [(set_attr "conds" "set")
8869 (set_attr "shift" "4")
8870 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8871 (const_string "alu_shift")
8872 (const_string "alu_shift_reg")))]
8875 (define_insn "*sub_shiftsi"
8876 [(set (match_operand:SI 0 "s_register_operand" "=r")
8877 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8878 (match_operator:SI 2 "shift_operator"
8879 [(match_operand:SI 3 "s_register_operand" "r")
8880 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
8882 "sub%?\\t%0, %1, %3%S2"
8883 [(set_attr "predicable" "yes")
8884 (set_attr "shift" "3")
8885 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8886 (const_string "alu_shift")
8887 (const_string "alu_shift_reg")))]
8890 (define_insn "*sub_shiftsi_compare0"
8891 [(set (reg:CC_NOOV CC_REGNUM)
8893 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8894 (match_operator:SI 2 "shift_operator"
8895 [(match_operand:SI 3 "s_register_operand" "r")
8896 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8898 (set (match_operand:SI 0 "s_register_operand" "=r")
8899 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
8902 "sub%.\\t%0, %1, %3%S2"
8903 [(set_attr "conds" "set")
8904 (set_attr "shift" "3")
8905 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8906 (const_string "alu_shift")
8907 (const_string "alu_shift_reg")))]
8910 (define_insn "*sub_shiftsi_compare0_scratch"
8911 [(set (reg:CC_NOOV CC_REGNUM)
8913 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8914 (match_operator:SI 2 "shift_operator"
8915 [(match_operand:SI 3 "s_register_operand" "r")
8916 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8918 (clobber (match_scratch:SI 0 "=r"))]
8920 "sub%.\\t%0, %1, %3%S2"
8921 [(set_attr "conds" "set")
8922 (set_attr "shift" "3")
8923 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8924 (const_string "alu_shift")
8925 (const_string "alu_shift_reg")))]
8930 (define_insn "*and_scc"
8931 [(set (match_operand:SI 0 "s_register_operand" "=r")
8932 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8933 [(match_operand 3 "cc_register" "") (const_int 0)])
8934 (match_operand:SI 2 "s_register_operand" "r")))]
8936 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8937 [(set_attr "conds" "use")
8938 (set_attr "length" "8")]
8941 (define_insn "*ior_scc"
8942 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8943 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8944 [(match_operand 3 "cc_register" "") (const_int 0)])
8945 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8949 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8950 [(set_attr "conds" "use")
8951 (set_attr "length" "4,8")]
8954 (define_insn "*compare_scc"
8955 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8956 (match_operator:SI 1 "arm_comparison_operator"
8957 [(match_operand:SI 2 "s_register_operand" "r,r")
8958 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8959 (clobber (reg:CC CC_REGNUM))]
8962 if (operands[3] == const0_rtx)
8964 if (GET_CODE (operands[1]) == LT)
8965 return \"mov\\t%0, %2, lsr #31\";
8967 if (GET_CODE (operands[1]) == GE)
8968 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
8970 if (GET_CODE (operands[1]) == EQ)
8971 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
8974 if (GET_CODE (operands[1]) == NE)
8976 if (which_alternative == 1)
8977 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
8978 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
8980 if (which_alternative == 1)
8981 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
8983 output_asm_insn (\"cmp\\t%2, %3\", operands);
8984 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
8986 [(set_attr "conds" "clob")
8987 (set_attr "length" "12")]
8990 (define_insn "*cond_move"
8991 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8992 (if_then_else:SI (match_operator 3 "equality_operator"
8993 [(match_operator 4 "arm_comparison_operator"
8994 [(match_operand 5 "cc_register" "") (const_int 0)])
8996 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8997 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9000 if (GET_CODE (operands[3]) == NE)
9002 if (which_alternative != 1)
9003 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9004 if (which_alternative != 0)
9005 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9008 if (which_alternative != 0)
9009 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9010 if (which_alternative != 1)
9011 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9014 [(set_attr "conds" "use")
9015 (set_attr "length" "4,4,8")]
9018 (define_insn "*cond_arith"
9019 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9020 (match_operator:SI 5 "shiftable_operator"
9021 [(match_operator:SI 4 "arm_comparison_operator"
9022 [(match_operand:SI 2 "s_register_operand" "r,r")
9023 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9024 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9025 (clobber (reg:CC CC_REGNUM))]
9028 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9029 return \"%i5\\t%0, %1, %2, lsr #31\";
9031 output_asm_insn (\"cmp\\t%2, %3\", operands);
9032 if (GET_CODE (operands[5]) == AND)
9033 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9034 else if (GET_CODE (operands[5]) == MINUS)
9035 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9036 else if (which_alternative != 0)
9037 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9038 return \"%i5%d4\\t%0, %1, #1\";
9040 [(set_attr "conds" "clob")
9041 (set_attr "length" "12")]
9044 (define_insn "*cond_sub"
9045 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9046 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9047 (match_operator:SI 4 "arm_comparison_operator"
9048 [(match_operand:SI 2 "s_register_operand" "r,r")
9049 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9050 (clobber (reg:CC CC_REGNUM))]
9053 output_asm_insn (\"cmp\\t%2, %3\", operands);
9054 if (which_alternative != 0)
9055 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9056 return \"sub%d4\\t%0, %1, #1\";
9058 [(set_attr "conds" "clob")
9059 (set_attr "length" "8,12")]
9062 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9063 (define_insn "*cmp_ite0"
9064 [(set (match_operand 6 "dominant_cc_register" "")
9067 (match_operator 4 "arm_comparison_operator"
9068 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9069 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9070 (match_operator:SI 5 "arm_comparison_operator"
9071 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9072 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9078 static const char * const opcodes[4][2] =
9080 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9081 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9082 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9083 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9084 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9085 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9086 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9087 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9090 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9092 return opcodes[which_alternative][swap];
9094 [(set_attr "conds" "set")
9095 (set_attr "length" "8")]
9098 (define_insn "*cmp_ite1"
9099 [(set (match_operand 6 "dominant_cc_register" "")
9102 (match_operator 4 "arm_comparison_operator"
9103 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9104 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9105 (match_operator:SI 5 "arm_comparison_operator"
9106 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9107 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9113 static const char * const opcodes[4][2] =
9115 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9116 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9117 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9118 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9119 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9120 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9121 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9122 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9125 comparison_dominates_p (GET_CODE (operands[5]),
9126 reverse_condition (GET_CODE (operands[4])));
9128 return opcodes[which_alternative][swap];
9130 [(set_attr "conds" "set")
9131 (set_attr "length" "8")]
9134 (define_insn "*cmp_and"
9135 [(set (match_operand 6 "dominant_cc_register" "")
9138 (match_operator 4 "arm_comparison_operator"
9139 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9140 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9141 (match_operator:SI 5 "arm_comparison_operator"
9142 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9143 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9148 static const char *const opcodes[4][2] =
9150 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9151 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9152 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9153 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9154 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9155 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9156 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9157 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9160 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9162 return opcodes[which_alternative][swap];
9164 [(set_attr "conds" "set")
9165 (set_attr "predicable" "no")
9166 (set_attr "length" "8")]
9169 (define_insn "*cmp_ior"
9170 [(set (match_operand 6 "dominant_cc_register" "")
9173 (match_operator 4 "arm_comparison_operator"
9174 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9175 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9176 (match_operator:SI 5 "arm_comparison_operator"
9177 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9178 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9183 static const char *const opcodes[4][2] =
9185 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9186 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9187 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9188 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9189 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9190 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9191 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9192 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9195 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9197 return opcodes[which_alternative][swap];
9200 [(set_attr "conds" "set")
9201 (set_attr "length" "8")]
9204 (define_insn_and_split "*ior_scc_scc"
9205 [(set (match_operand:SI 0 "s_register_operand" "=r")
9206 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9207 [(match_operand:SI 1 "s_register_operand" "r")
9208 (match_operand:SI 2 "arm_add_operand" "rIL")])
9209 (match_operator:SI 6 "arm_comparison_operator"
9210 [(match_operand:SI 4 "s_register_operand" "r")
9211 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9212 (clobber (reg:CC CC_REGNUM))]
9214 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9217 "TARGET_ARM && reload_completed"
9221 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9222 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9224 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9226 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9229 [(set_attr "conds" "clob")
9230 (set_attr "length" "16")])
9232 ; If the above pattern is followed by a CMP insn, then the compare is
9233 ; redundant, since we can rework the conditional instruction that follows.
9234 (define_insn_and_split "*ior_scc_scc_cmp"
9235 [(set (match_operand 0 "dominant_cc_register" "")
9236 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9237 [(match_operand:SI 1 "s_register_operand" "r")
9238 (match_operand:SI 2 "arm_add_operand" "rIL")])
9239 (match_operator:SI 6 "arm_comparison_operator"
9240 [(match_operand:SI 4 "s_register_operand" "r")
9241 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9243 (set (match_operand:SI 7 "s_register_operand" "=r")
9244 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9245 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9248 "TARGET_ARM && reload_completed"
9252 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9253 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9255 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9257 [(set_attr "conds" "set")
9258 (set_attr "length" "16")])
9260 (define_insn_and_split "*and_scc_scc"
9261 [(set (match_operand:SI 0 "s_register_operand" "=r")
9262 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9263 [(match_operand:SI 1 "s_register_operand" "r")
9264 (match_operand:SI 2 "arm_add_operand" "rIL")])
9265 (match_operator:SI 6 "arm_comparison_operator"
9266 [(match_operand:SI 4 "s_register_operand" "r")
9267 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9268 (clobber (reg:CC CC_REGNUM))]
9270 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9273 "TARGET_ARM && reload_completed
9274 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9279 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9280 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9282 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9284 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9287 [(set_attr "conds" "clob")
9288 (set_attr "length" "16")])
9290 ; If the above pattern is followed by a CMP insn, then the compare is
9291 ; redundant, since we can rework the conditional instruction that follows.
9292 (define_insn_and_split "*and_scc_scc_cmp"
9293 [(set (match_operand 0 "dominant_cc_register" "")
9294 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9295 [(match_operand:SI 1 "s_register_operand" "r")
9296 (match_operand:SI 2 "arm_add_operand" "rIL")])
9297 (match_operator:SI 6 "arm_comparison_operator"
9298 [(match_operand:SI 4 "s_register_operand" "r")
9299 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9301 (set (match_operand:SI 7 "s_register_operand" "=r")
9302 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9303 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9306 "TARGET_ARM && reload_completed"
9310 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9311 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9313 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9315 [(set_attr "conds" "set")
9316 (set_attr "length" "16")])
9318 ;; If there is no dominance in the comparison, then we can still save an
9319 ;; instruction in the AND case, since we can know that the second compare
9320 ;; need only zero the value if false (if true, then the value is already
9322 (define_insn_and_split "*and_scc_scc_nodom"
9323 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9324 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9325 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9326 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9327 (match_operator:SI 6 "arm_comparison_operator"
9328 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9329 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9330 (clobber (reg:CC CC_REGNUM))]
9332 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9335 "TARGET_ARM && reload_completed"
9336 [(parallel [(set (match_dup 0)
9337 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9338 (clobber (reg:CC CC_REGNUM))])
9339 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9341 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9344 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9345 operands[4], operands[5]),
9347 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9349 [(set_attr "conds" "clob")
9350 (set_attr "length" "20")])
9353 [(set (reg:CC_NOOV CC_REGNUM)
9354 (compare:CC_NOOV (ior:SI
9355 (and:SI (match_operand:SI 0 "s_register_operand" "")
9357 (match_operator:SI 1 "comparison_operator"
9358 [(match_operand:SI 2 "s_register_operand" "")
9359 (match_operand:SI 3 "arm_add_operand" "")]))
9361 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9364 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9366 (set (reg:CC_NOOV CC_REGNUM)
9367 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9372 [(set (reg:CC_NOOV CC_REGNUM)
9373 (compare:CC_NOOV (ior:SI
9374 (match_operator:SI 1 "comparison_operator"
9375 [(match_operand:SI 2 "s_register_operand" "")
9376 (match_operand:SI 3 "arm_add_operand" "")])
9377 (and:SI (match_operand:SI 0 "s_register_operand" "")
9380 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9383 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9385 (set (reg:CC_NOOV CC_REGNUM)
9386 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9389 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9391 (define_insn "*negscc"
9392 [(set (match_operand:SI 0 "s_register_operand" "=r")
9393 (neg:SI (match_operator 3 "arm_comparison_operator"
9394 [(match_operand:SI 1 "s_register_operand" "r")
9395 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9396 (clobber (reg:CC CC_REGNUM))]
9399 if (GET_CODE (operands[3]) == LT && operands[3] == const0_rtx)
9400 return \"mov\\t%0, %1, asr #31\";
9402 if (GET_CODE (operands[3]) == NE)
9403 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9405 if (GET_CODE (operands[3]) == GT)
9406 return \"subs\\t%0, %1, %2\;mvnne\\t%0, %0, asr #31\";
9408 output_asm_insn (\"cmp\\t%1, %2\", operands);
9409 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9410 return \"mvn%d3\\t%0, #0\";
9412 [(set_attr "conds" "clob")
9413 (set_attr "length" "12")]
9416 (define_insn "movcond"
9417 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9419 (match_operator 5 "arm_comparison_operator"
9420 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9421 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9422 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9423 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9424 (clobber (reg:CC CC_REGNUM))]
9427 if (GET_CODE (operands[5]) == LT
9428 && (operands[4] == const0_rtx))
9430 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9432 if (operands[2] == const0_rtx)
9433 return \"and\\t%0, %1, %3, asr #31\";
9434 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9436 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9438 if (operands[1] == const0_rtx)
9439 return \"bic\\t%0, %2, %3, asr #31\";
9440 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9442 /* The only case that falls through to here is when both ops 1 & 2
9446 if (GET_CODE (operands[5]) == GE
9447 && (operands[4] == const0_rtx))
9449 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9451 if (operands[2] == const0_rtx)
9452 return \"bic\\t%0, %1, %3, asr #31\";
9453 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9455 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9457 if (operands[1] == const0_rtx)
9458 return \"and\\t%0, %2, %3, asr #31\";
9459 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9461 /* The only case that falls through to here is when both ops 1 & 2
9464 if (GET_CODE (operands[4]) == CONST_INT
9465 && !const_ok_for_arm (INTVAL (operands[4])))
9466 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9468 output_asm_insn (\"cmp\\t%3, %4\", operands);
9469 if (which_alternative != 0)
9470 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9471 if (which_alternative != 1)
9472 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9475 [(set_attr "conds" "clob")
9476 (set_attr "length" "8,8,12")]
9479 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9481 (define_insn "*ifcompare_plus_move"
9482 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9483 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9484 [(match_operand:SI 4 "s_register_operand" "r,r")
9485 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9487 (match_operand:SI 2 "s_register_operand" "r,r")
9488 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9489 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9490 (clobber (reg:CC CC_REGNUM))]
9493 [(set_attr "conds" "clob")
9494 (set_attr "length" "8,12")]
9497 (define_insn "*if_plus_move"
9498 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9500 (match_operator 4 "arm_comparison_operator"
9501 [(match_operand 5 "cc_register" "") (const_int 0)])
9503 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9504 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9505 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9509 sub%d4\\t%0, %2, #%n3
9510 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9511 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9512 [(set_attr "conds" "use")
9513 (set_attr "length" "4,4,8,8")
9514 (set_attr "type" "*,*,*,*")]
9517 (define_insn "*ifcompare_move_plus"
9518 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9519 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9520 [(match_operand:SI 4 "s_register_operand" "r,r")
9521 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9522 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9524 (match_operand:SI 2 "s_register_operand" "r,r")
9525 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9526 (clobber (reg:CC CC_REGNUM))]
9529 [(set_attr "conds" "clob")
9530 (set_attr "length" "8,12")]
9533 (define_insn "*if_move_plus"
9534 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9536 (match_operator 4 "arm_comparison_operator"
9537 [(match_operand 5 "cc_register" "") (const_int 0)])
9538 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9540 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9541 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9545 sub%D4\\t%0, %2, #%n3
9546 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9547 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9548 [(set_attr "conds" "use")
9549 (set_attr "length" "4,4,8,8")
9550 (set_attr "type" "*,*,*,*")]
9553 (define_insn "*ifcompare_arith_arith"
9554 [(set (match_operand:SI 0 "s_register_operand" "=r")
9555 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9556 [(match_operand:SI 5 "s_register_operand" "r")
9557 (match_operand:SI 6 "arm_add_operand" "rIL")])
9558 (match_operator:SI 8 "shiftable_operator"
9559 [(match_operand:SI 1 "s_register_operand" "r")
9560 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9561 (match_operator:SI 7 "shiftable_operator"
9562 [(match_operand:SI 3 "s_register_operand" "r")
9563 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9564 (clobber (reg:CC CC_REGNUM))]
9567 [(set_attr "conds" "clob")
9568 (set_attr "length" "12")]
9571 (define_insn "*if_arith_arith"
9572 [(set (match_operand:SI 0 "s_register_operand" "=r")
9573 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9574 [(match_operand 8 "cc_register" "") (const_int 0)])
9575 (match_operator:SI 6 "shiftable_operator"
9576 [(match_operand:SI 1 "s_register_operand" "r")
9577 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9578 (match_operator:SI 7 "shiftable_operator"
9579 [(match_operand:SI 3 "s_register_operand" "r")
9580 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9582 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9583 [(set_attr "conds" "use")
9584 (set_attr "length" "8")]
9587 (define_insn "*ifcompare_arith_move"
9588 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9589 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9590 [(match_operand:SI 2 "s_register_operand" "r,r")
9591 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9592 (match_operator:SI 7 "shiftable_operator"
9593 [(match_operand:SI 4 "s_register_operand" "r,r")
9594 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9595 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9596 (clobber (reg:CC CC_REGNUM))]
9599 /* If we have an operation where (op x 0) is the identity operation and
9600 the conditional operator is LT or GE and we are comparing against zero and
9601 everything is in registers then we can do this in two instructions. */
9602 if (operands[3] == const0_rtx
9603 && GET_CODE (operands[7]) != AND
9604 && GET_CODE (operands[5]) == REG
9605 && GET_CODE (operands[1]) == REG
9606 && REGNO (operands[1]) == REGNO (operands[4])
9607 && REGNO (operands[4]) != REGNO (operands[0]))
9609 if (GET_CODE (operands[6]) == LT)
9610 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9611 else if (GET_CODE (operands[6]) == GE)
9612 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9614 if (GET_CODE (operands[3]) == CONST_INT
9615 && !const_ok_for_arm (INTVAL (operands[3])))
9616 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9618 output_asm_insn (\"cmp\\t%2, %3\", operands);
9619 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9620 if (which_alternative != 0)
9621 return \"mov%D6\\t%0, %1\";
9624 [(set_attr "conds" "clob")
9625 (set_attr "length" "8,12")]
9628 (define_insn "*if_arith_move"
9629 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9630 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9631 [(match_operand 6 "cc_register" "") (const_int 0)])
9632 (match_operator:SI 5 "shiftable_operator"
9633 [(match_operand:SI 2 "s_register_operand" "r,r")
9634 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9635 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9639 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9640 [(set_attr "conds" "use")
9641 (set_attr "length" "4,8")
9642 (set_attr "type" "*,*")]
9645 (define_insn "*ifcompare_move_arith"
9646 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9647 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9648 [(match_operand:SI 4 "s_register_operand" "r,r")
9649 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9650 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9651 (match_operator:SI 7 "shiftable_operator"
9652 [(match_operand:SI 2 "s_register_operand" "r,r")
9653 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9654 (clobber (reg:CC CC_REGNUM))]
9657 /* If we have an operation where (op x 0) is the identity operation and
9658 the conditional operator is LT or GE and we are comparing against zero and
9659 everything is in registers then we can do this in two instructions */
9660 if (operands[5] == const0_rtx
9661 && GET_CODE (operands[7]) != AND
9662 && GET_CODE (operands[3]) == REG
9663 && GET_CODE (operands[1]) == REG
9664 && REGNO (operands[1]) == REGNO (operands[2])
9665 && REGNO (operands[2]) != REGNO (operands[0]))
9667 if (GET_CODE (operands[6]) == GE)
9668 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9669 else if (GET_CODE (operands[6]) == LT)
9670 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9673 if (GET_CODE (operands[5]) == CONST_INT
9674 && !const_ok_for_arm (INTVAL (operands[5])))
9675 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9677 output_asm_insn (\"cmp\\t%4, %5\", operands);
9679 if (which_alternative != 0)
9680 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9681 return \"%I7%D6\\t%0, %2, %3\";
9683 [(set_attr "conds" "clob")
9684 (set_attr "length" "8,12")]
9687 (define_insn "*if_move_arith"
9688 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9690 (match_operator 4 "arm_comparison_operator"
9691 [(match_operand 6 "cc_register" "") (const_int 0)])
9692 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9693 (match_operator:SI 5 "shiftable_operator"
9694 [(match_operand:SI 2 "s_register_operand" "r,r")
9695 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9699 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9700 [(set_attr "conds" "use")
9701 (set_attr "length" "4,8")
9702 (set_attr "type" "*,*")]
9705 (define_insn "*ifcompare_move_not"
9706 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9708 (match_operator 5 "arm_comparison_operator"
9709 [(match_operand:SI 3 "s_register_operand" "r,r")
9710 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9711 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9713 (match_operand:SI 2 "s_register_operand" "r,r"))))
9714 (clobber (reg:CC CC_REGNUM))]
9717 [(set_attr "conds" "clob")
9718 (set_attr "length" "8,12")]
9721 (define_insn "*if_move_not"
9722 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9724 (match_operator 4 "arm_comparison_operator"
9725 [(match_operand 3 "cc_register" "") (const_int 0)])
9726 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9727 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9731 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9732 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9733 [(set_attr "conds" "use")
9734 (set_attr "length" "4,8,8")]
9737 (define_insn "*ifcompare_not_move"
9738 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9740 (match_operator 5 "arm_comparison_operator"
9741 [(match_operand:SI 3 "s_register_operand" "r,r")
9742 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9744 (match_operand:SI 2 "s_register_operand" "r,r"))
9745 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9746 (clobber (reg:CC CC_REGNUM))]
9749 [(set_attr "conds" "clob")
9750 (set_attr "length" "8,12")]
9753 (define_insn "*if_not_move"
9754 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9756 (match_operator 4 "arm_comparison_operator"
9757 [(match_operand 3 "cc_register" "") (const_int 0)])
9758 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9759 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9763 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9764 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9765 [(set_attr "conds" "use")
9766 (set_attr "length" "4,8,8")]
9769 (define_insn "*ifcompare_shift_move"
9770 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9772 (match_operator 6 "arm_comparison_operator"
9773 [(match_operand:SI 4 "s_register_operand" "r,r")
9774 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9775 (match_operator:SI 7 "shift_operator"
9776 [(match_operand:SI 2 "s_register_operand" "r,r")
9777 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9778 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9779 (clobber (reg:CC CC_REGNUM))]
9782 [(set_attr "conds" "clob")
9783 (set_attr "length" "8,12")]
9786 (define_insn "*if_shift_move"
9787 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9789 (match_operator 5 "arm_comparison_operator"
9790 [(match_operand 6 "cc_register" "") (const_int 0)])
9791 (match_operator:SI 4 "shift_operator"
9792 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9793 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9794 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9798 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9799 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9800 [(set_attr "conds" "use")
9801 (set_attr "shift" "2")
9802 (set_attr "length" "4,8,8")
9803 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9804 (const_string "alu_shift")
9805 (const_string "alu_shift_reg")))]
9808 (define_insn "*ifcompare_move_shift"
9809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9811 (match_operator 6 "arm_comparison_operator"
9812 [(match_operand:SI 4 "s_register_operand" "r,r")
9813 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9814 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9815 (match_operator:SI 7 "shift_operator"
9816 [(match_operand:SI 2 "s_register_operand" "r,r")
9817 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9818 (clobber (reg:CC CC_REGNUM))]
9821 [(set_attr "conds" "clob")
9822 (set_attr "length" "8,12")]
9825 (define_insn "*if_move_shift"
9826 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9828 (match_operator 5 "arm_comparison_operator"
9829 [(match_operand 6 "cc_register" "") (const_int 0)])
9830 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9831 (match_operator:SI 4 "shift_operator"
9832 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9833 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9837 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9838 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9839 [(set_attr "conds" "use")
9840 (set_attr "shift" "2")
9841 (set_attr "length" "4,8,8")
9842 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9843 (const_string "alu_shift")
9844 (const_string "alu_shift_reg")))]
9847 (define_insn "*ifcompare_shift_shift"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r")
9850 (match_operator 7 "arm_comparison_operator"
9851 [(match_operand:SI 5 "s_register_operand" "r")
9852 (match_operand:SI 6 "arm_add_operand" "rIL")])
9853 (match_operator:SI 8 "shift_operator"
9854 [(match_operand:SI 1 "s_register_operand" "r")
9855 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9856 (match_operator:SI 9 "shift_operator"
9857 [(match_operand:SI 3 "s_register_operand" "r")
9858 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9859 (clobber (reg:CC CC_REGNUM))]
9862 [(set_attr "conds" "clob")
9863 (set_attr "length" "12")]
9866 (define_insn "*if_shift_shift"
9867 [(set (match_operand:SI 0 "s_register_operand" "=r")
9869 (match_operator 5 "arm_comparison_operator"
9870 [(match_operand 8 "cc_register" "") (const_int 0)])
9871 (match_operator:SI 6 "shift_operator"
9872 [(match_operand:SI 1 "s_register_operand" "r")
9873 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9874 (match_operator:SI 7 "shift_operator"
9875 [(match_operand:SI 3 "s_register_operand" "r")
9876 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9878 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9879 [(set_attr "conds" "use")
9880 (set_attr "shift" "1")
9881 (set_attr "length" "8")
9882 (set (attr "type") (if_then_else
9883 (and (match_operand 2 "const_int_operand" "")
9884 (match_operand 4 "const_int_operand" ""))
9885 (const_string "alu_shift")
9886 (const_string "alu_shift_reg")))]
9889 (define_insn "*ifcompare_not_arith"
9890 [(set (match_operand:SI 0 "s_register_operand" "=r")
9892 (match_operator 6 "arm_comparison_operator"
9893 [(match_operand:SI 4 "s_register_operand" "r")
9894 (match_operand:SI 5 "arm_add_operand" "rIL")])
9895 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9896 (match_operator:SI 7 "shiftable_operator"
9897 [(match_operand:SI 2 "s_register_operand" "r")
9898 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9899 (clobber (reg:CC CC_REGNUM))]
9902 [(set_attr "conds" "clob")
9903 (set_attr "length" "12")]
9906 (define_insn "*if_not_arith"
9907 [(set (match_operand:SI 0 "s_register_operand" "=r")
9909 (match_operator 5 "arm_comparison_operator"
9910 [(match_operand 4 "cc_register" "") (const_int 0)])
9911 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9912 (match_operator:SI 6 "shiftable_operator"
9913 [(match_operand:SI 2 "s_register_operand" "r")
9914 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9916 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9917 [(set_attr "conds" "use")
9918 (set_attr "length" "8")]
9921 (define_insn "*ifcompare_arith_not"
9922 [(set (match_operand:SI 0 "s_register_operand" "=r")
9924 (match_operator 6 "arm_comparison_operator"
9925 [(match_operand:SI 4 "s_register_operand" "r")
9926 (match_operand:SI 5 "arm_add_operand" "rIL")])
9927 (match_operator:SI 7 "shiftable_operator"
9928 [(match_operand:SI 2 "s_register_operand" "r")
9929 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9930 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9931 (clobber (reg:CC CC_REGNUM))]
9934 [(set_attr "conds" "clob")
9935 (set_attr "length" "12")]
9938 (define_insn "*if_arith_not"
9939 [(set (match_operand:SI 0 "s_register_operand" "=r")
9941 (match_operator 5 "arm_comparison_operator"
9942 [(match_operand 4 "cc_register" "") (const_int 0)])
9943 (match_operator:SI 6 "shiftable_operator"
9944 [(match_operand:SI 2 "s_register_operand" "r")
9945 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9946 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9948 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9949 [(set_attr "conds" "use")
9950 (set_attr "length" "8")]
9953 (define_insn "*ifcompare_neg_move"
9954 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9956 (match_operator 5 "arm_comparison_operator"
9957 [(match_operand:SI 3 "s_register_operand" "r,r")
9958 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9959 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9960 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9961 (clobber (reg:CC CC_REGNUM))]
9964 [(set_attr "conds" "clob")
9965 (set_attr "length" "8,12")]
9968 (define_insn "*if_neg_move"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9971 (match_operator 4 "arm_comparison_operator"
9972 [(match_operand 3 "cc_register" "") (const_int 0)])
9973 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9974 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9978 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9979 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9980 [(set_attr "conds" "use")
9981 (set_attr "length" "4,8,8")]
9984 (define_insn "*ifcompare_move_neg"
9985 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9987 (match_operator 5 "arm_comparison_operator"
9988 [(match_operand:SI 3 "s_register_operand" "r,r")
9989 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9990 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9991 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9992 (clobber (reg:CC CC_REGNUM))]
9995 [(set_attr "conds" "clob")
9996 (set_attr "length" "8,12")]
9999 (define_insn "*if_move_neg"
10000 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10002 (match_operator 4 "arm_comparison_operator"
10003 [(match_operand 3 "cc_register" "") (const_int 0)])
10004 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10005 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10008 rsb%D4\\t%0, %2, #0
10009 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10010 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10011 [(set_attr "conds" "use")
10012 (set_attr "length" "4,8,8")]
10015 (define_insn "*arith_adjacentmem"
10016 [(set (match_operand:SI 0 "s_register_operand" "=r")
10017 (match_operator:SI 1 "shiftable_operator"
10018 [(match_operand:SI 2 "memory_operand" "m")
10019 (match_operand:SI 3 "memory_operand" "m")]))
10020 (clobber (match_scratch:SI 4 "=r"))]
10021 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10027 HOST_WIDE_INT val1 = 0, val2 = 0;
10029 if (REGNO (operands[0]) > REGNO (operands[4]))
10031 ldm[1] = operands[4];
10032 ldm[2] = operands[0];
10036 ldm[1] = operands[0];
10037 ldm[2] = operands[4];
10040 base_reg = XEXP (operands[2], 0);
10042 if (!REG_P (base_reg))
10044 val1 = INTVAL (XEXP (base_reg, 1));
10045 base_reg = XEXP (base_reg, 0);
10048 if (!REG_P (XEXP (operands[3], 0)))
10049 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10051 arith[0] = operands[0];
10052 arith[3] = operands[1];
10066 if (val1 !=0 && val2 != 0)
10070 if (val1 == 4 || val2 == 4)
10071 /* Other val must be 8, since we know they are adjacent and neither
10073 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10074 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10076 ldm[0] = ops[0] = operands[4];
10078 ops[2] = GEN_INT (val1);
10079 output_add_immediate (ops);
10081 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10083 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10087 /* Offset is out of range for a single add, so use two ldr. */
10090 ops[2] = GEN_INT (val1);
10091 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10093 ops[2] = GEN_INT (val2);
10094 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10097 else if (val1 != 0)
10100 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10102 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10107 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10109 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10111 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10114 [(set_attr "length" "12")
10115 (set_attr "predicable" "yes")
10116 (set_attr "type" "load1")]
10119 ; This pattern is never tried by combine, so do it as a peephole
10122 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10123 (match_operand:SI 1 "arm_general_register_operand" ""))
10124 (set (reg:CC CC_REGNUM)
10125 (compare:CC (match_dup 1) (const_int 0)))]
10127 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10128 (set (match_dup 0) (match_dup 1))])]
10132 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10133 ; reversed, check that the memory references aren't volatile.
10136 [(set (match_operand:SI 0 "s_register_operand" "=r")
10137 (match_operand:SI 4 "memory_operand" "m"))
10138 (set (match_operand:SI 1 "s_register_operand" "=r")
10139 (match_operand:SI 5 "memory_operand" "m"))
10140 (set (match_operand:SI 2 "s_register_operand" "=r")
10141 (match_operand:SI 6 "memory_operand" "m"))
10142 (set (match_operand:SI 3 "s_register_operand" "=r")
10143 (match_operand:SI 7 "memory_operand" "m"))]
10144 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10146 return emit_ldm_seq (operands, 4);
10151 [(set (match_operand:SI 0 "s_register_operand" "=r")
10152 (match_operand:SI 3 "memory_operand" "m"))
10153 (set (match_operand:SI 1 "s_register_operand" "=r")
10154 (match_operand:SI 4 "memory_operand" "m"))
10155 (set (match_operand:SI 2 "s_register_operand" "=r")
10156 (match_operand:SI 5 "memory_operand" "m"))]
10157 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10159 return emit_ldm_seq (operands, 3);
10164 [(set (match_operand:SI 0 "s_register_operand" "=r")
10165 (match_operand:SI 2 "memory_operand" "m"))
10166 (set (match_operand:SI 1 "s_register_operand" "=r")
10167 (match_operand:SI 3 "memory_operand" "m"))]
10168 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10170 return emit_ldm_seq (operands, 2);
10175 [(set (match_operand:SI 4 "memory_operand" "=m")
10176 (match_operand:SI 0 "s_register_operand" "r"))
10177 (set (match_operand:SI 5 "memory_operand" "=m")
10178 (match_operand:SI 1 "s_register_operand" "r"))
10179 (set (match_operand:SI 6 "memory_operand" "=m")
10180 (match_operand:SI 2 "s_register_operand" "r"))
10181 (set (match_operand:SI 7 "memory_operand" "=m")
10182 (match_operand:SI 3 "s_register_operand" "r"))]
10183 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10185 return emit_stm_seq (operands, 4);
10190 [(set (match_operand:SI 3 "memory_operand" "=m")
10191 (match_operand:SI 0 "s_register_operand" "r"))
10192 (set (match_operand:SI 4 "memory_operand" "=m")
10193 (match_operand:SI 1 "s_register_operand" "r"))
10194 (set (match_operand:SI 5 "memory_operand" "=m")
10195 (match_operand:SI 2 "s_register_operand" "r"))]
10196 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10198 return emit_stm_seq (operands, 3);
10203 [(set (match_operand:SI 2 "memory_operand" "=m")
10204 (match_operand:SI 0 "s_register_operand" "r"))
10205 (set (match_operand:SI 3 "memory_operand" "=m")
10206 (match_operand:SI 1 "s_register_operand" "r"))]
10207 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10209 return emit_stm_seq (operands, 2);
10214 [(set (match_operand:SI 0 "s_register_operand" "")
10215 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10217 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10218 [(match_operand:SI 3 "s_register_operand" "")
10219 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10220 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10222 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10223 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10228 ;; This split can be used because CC_Z mode implies that the following
10229 ;; branch will be an equality, or an unsigned inequality, so the sign
10230 ;; extension is not needed.
10233 [(set (reg:CC_Z CC_REGNUM)
10235 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10237 (match_operand 1 "const_int_operand" "")))
10238 (clobber (match_scratch:SI 2 ""))]
10240 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10241 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10242 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10243 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10245 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10248 ;; ??? Check the patterns above for Thumb-2 usefulness
10250 (define_expand "prologue"
10251 [(clobber (const_int 0))]
10254 arm_expand_prologue ();
10256 thumb1_expand_prologue ();
10261 (define_expand "epilogue"
10262 [(clobber (const_int 0))]
10265 if (current_function_calls_eh_return)
10266 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10268 thumb1_expand_epilogue ();
10269 else if (USE_RETURN_INSN (FALSE))
10271 emit_jump_insn (gen_return ());
10274 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10276 gen_rtx_RETURN (VOIDmode)),
10277 VUNSPEC_EPILOGUE));
10282 ;; Note - although unspec_volatile's USE all hard registers,
10283 ;; USEs are ignored after relaod has completed. Thus we need
10284 ;; to add an unspec of the link register to ensure that flow
10285 ;; does not think that it is unused by the sibcall branch that
10286 ;; will replace the standard function epilogue.
10287 (define_insn "sibcall_epilogue"
10288 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10289 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10292 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10293 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10294 return arm_output_epilogue (next_nonnote_insn (insn));
10296 ;; Length is absolute worst case
10297 [(set_attr "length" "44")
10298 (set_attr "type" "block")
10299 ;; We don't clobber the conditions, but the potential length of this
10300 ;; operation is sufficient to make conditionalizing the sequence
10301 ;; unlikely to be profitable.
10302 (set_attr "conds" "clob")]
10305 (define_insn "*epilogue_insns"
10306 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10310 return arm_output_epilogue (NULL);
10311 else /* TARGET_THUMB1 */
10312 return thumb_unexpanded_epilogue ();
10314 ; Length is absolute worst case
10315 [(set_attr "length" "44")
10316 (set_attr "type" "block")
10317 ;; We don't clobber the conditions, but the potential length of this
10318 ;; operation is sufficient to make conditionalizing the sequence
10319 ;; unlikely to be profitable.
10320 (set_attr "conds" "clob")]
10323 (define_expand "eh_epilogue"
10324 [(use (match_operand:SI 0 "register_operand" ""))
10325 (use (match_operand:SI 1 "register_operand" ""))
10326 (use (match_operand:SI 2 "register_operand" ""))]
10330 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10331 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10333 rtx ra = gen_rtx_REG (Pmode, 2);
10335 emit_move_insn (ra, operands[2]);
10338 /* This is a hack -- we may have crystalized the function type too
10340 cfun->machine->func_type = 0;
10344 ;; This split is only used during output to reduce the number of patterns
10345 ;; that need assembler instructions adding to them. We allowed the setting
10346 ;; of the conditions to be implicit during rtl generation so that
10347 ;; the conditional compare patterns would work. However this conflicts to
10348 ;; some extent with the conditional data operations, so we have to split them
10351 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10352 ;; conditional execution sufficient?
10355 [(set (match_operand:SI 0 "s_register_operand" "")
10356 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10357 [(match_operand 2 "" "") (match_operand 3 "" "")])
10359 (match_operand 4 "" "")))
10360 (clobber (reg:CC CC_REGNUM))]
10361 "TARGET_ARM && reload_completed"
10362 [(set (match_dup 5) (match_dup 6))
10363 (cond_exec (match_dup 7)
10364 (set (match_dup 0) (match_dup 4)))]
10367 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10368 operands[2], operands[3]);
10369 enum rtx_code rc = GET_CODE (operands[1]);
10371 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10372 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10373 if (mode == CCFPmode || mode == CCFPEmode)
10374 rc = reverse_condition_maybe_unordered (rc);
10376 rc = reverse_condition (rc);
10378 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10383 [(set (match_operand:SI 0 "s_register_operand" "")
10384 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10385 [(match_operand 2 "" "") (match_operand 3 "" "")])
10386 (match_operand 4 "" "")
10388 (clobber (reg:CC CC_REGNUM))]
10389 "TARGET_ARM && reload_completed"
10390 [(set (match_dup 5) (match_dup 6))
10391 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10392 (set (match_dup 0) (match_dup 4)))]
10395 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10396 operands[2], operands[3]);
10398 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10399 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10404 [(set (match_operand:SI 0 "s_register_operand" "")
10405 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10406 [(match_operand 2 "" "") (match_operand 3 "" "")])
10407 (match_operand 4 "" "")
10408 (match_operand 5 "" "")))
10409 (clobber (reg:CC CC_REGNUM))]
10410 "TARGET_ARM && reload_completed"
10411 [(set (match_dup 6) (match_dup 7))
10412 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10413 (set (match_dup 0) (match_dup 4)))
10414 (cond_exec (match_dup 8)
10415 (set (match_dup 0) (match_dup 5)))]
10418 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10419 operands[2], operands[3]);
10420 enum rtx_code rc = GET_CODE (operands[1]);
10422 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10423 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10424 if (mode == CCFPmode || mode == CCFPEmode)
10425 rc = reverse_condition_maybe_unordered (rc);
10427 rc = reverse_condition (rc);
10429 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10434 [(set (match_operand:SI 0 "s_register_operand" "")
10435 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10436 [(match_operand:SI 2 "s_register_operand" "")
10437 (match_operand:SI 3 "arm_add_operand" "")])
10438 (match_operand:SI 4 "arm_rhs_operand" "")
10440 (match_operand:SI 5 "s_register_operand" ""))))
10441 (clobber (reg:CC CC_REGNUM))]
10442 "TARGET_ARM && reload_completed"
10443 [(set (match_dup 6) (match_dup 7))
10444 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10445 (set (match_dup 0) (match_dup 4)))
10446 (cond_exec (match_dup 8)
10447 (set (match_dup 0) (not:SI (match_dup 5))))]
10450 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10451 operands[2], operands[3]);
10452 enum rtx_code rc = GET_CODE (operands[1]);
10454 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10455 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10456 if (mode == CCFPmode || mode == CCFPEmode)
10457 rc = reverse_condition_maybe_unordered (rc);
10459 rc = reverse_condition (rc);
10461 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10465 (define_insn "*cond_move_not"
10466 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10467 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10468 [(match_operand 3 "cc_register" "") (const_int 0)])
10469 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10471 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10475 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10476 [(set_attr "conds" "use")
10477 (set_attr "length" "4,8")]
10480 ;; The next two patterns occur when an AND operation is followed by a
10481 ;; scc insn sequence
10483 (define_insn "*sign_extract_onebit"
10484 [(set (match_operand:SI 0 "s_register_operand" "=r")
10485 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10487 (match_operand:SI 2 "const_int_operand" "n")))
10488 (clobber (reg:CC CC_REGNUM))]
10491 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10492 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10493 return \"mvnne\\t%0, #0\";
10495 [(set_attr "conds" "clob")
10496 (set_attr "length" "8")]
10499 (define_insn "*not_signextract_onebit"
10500 [(set (match_operand:SI 0 "s_register_operand" "=r")
10502 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10504 (match_operand:SI 2 "const_int_operand" "n"))))
10505 (clobber (reg:CC CC_REGNUM))]
10508 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10509 output_asm_insn (\"tst\\t%1, %2\", operands);
10510 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10511 return \"movne\\t%0, #0\";
10513 [(set_attr "conds" "clob")
10514 (set_attr "length" "12")]
10516 ;; ??? The above patterns need auditing for Thumb-2
10518 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10519 ;; expressions. For simplicity, the first register is also in the unspec
10521 (define_insn "*push_multi"
10522 [(match_parallel 2 "multi_register_push"
10523 [(set (match_operand:BLK 0 "memory_operand" "=m")
10524 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10525 UNSPEC_PUSH_MULT))])]
10529 int num_saves = XVECLEN (operands[2], 0);
10531 /* For the StrongARM at least it is faster to
10532 use STR to store only a single register.
10533 In Thumb mode always use push, and the assembler will pick
10534 something appropriate. */
10535 if (num_saves == 1 && TARGET_ARM)
10536 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10543 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10545 strcpy (pattern, \"push\\t{%1\");
10547 for (i = 1; i < num_saves; i++)
10549 strcat (pattern, \", %|\");
10551 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10554 strcat (pattern, \"}\");
10555 output_asm_insn (pattern, operands);
10560 [(set_attr "type" "store4")]
10563 (define_insn "stack_tie"
10564 [(set (mem:BLK (scratch))
10565 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "r")
10566 (match_operand:SI 1 "s_register_operand" "r")]
10570 [(set_attr "length" "0")]
10573 ;; Similarly for the floating point registers
10574 (define_insn "*push_fp_multi"
10575 [(match_parallel 2 "multi_register_push"
10576 [(set (match_operand:BLK 0 "memory_operand" "=m")
10577 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10578 UNSPEC_PUSH_MULT))])]
10579 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10584 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10585 output_asm_insn (pattern, operands);
10588 [(set_attr "type" "f_store")]
10591 ;; Special patterns for dealing with the constant pool
10593 (define_insn "align_4"
10594 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10597 assemble_align (32);
10602 (define_insn "align_8"
10603 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10606 assemble_align (64);
10611 (define_insn "consttable_end"
10612 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10615 making_const_table = FALSE;
10620 (define_insn "consttable_1"
10621 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10624 making_const_table = TRUE;
10625 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10626 assemble_zeros (3);
10629 [(set_attr "length" "4")]
10632 (define_insn "consttable_2"
10633 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10636 making_const_table = TRUE;
10637 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10638 assemble_zeros (2);
10641 [(set_attr "length" "4")]
10644 (define_insn "consttable_4"
10645 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10649 making_const_table = TRUE;
10650 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10655 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10656 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10660 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10665 [(set_attr "length" "4")]
10668 (define_insn "consttable_8"
10669 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10673 making_const_table = TRUE;
10674 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10679 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10680 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10684 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10689 [(set_attr "length" "8")]
10692 ;; Miscellaneous Thumb patterns
10694 (define_expand "tablejump"
10695 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10696 (use (label_ref (match_operand 1 "" "")))])]
10701 /* Hopefully, CSE will eliminate this copy. */
10702 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10703 rtx reg2 = gen_reg_rtx (SImode);
10705 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10706 operands[0] = reg2;
10711 ;; NB never uses BX.
10712 (define_insn "*thumb1_tablejump"
10713 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10714 (use (label_ref (match_operand 1 "" "")))]
10717 [(set_attr "length" "2")]
10720 ;; V5 Instructions,
10722 (define_insn "clzsi2"
10723 [(set (match_operand:SI 0 "s_register_operand" "=r")
10724 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10725 "TARGET_32BIT && arm_arch5"
10727 [(set_attr "predicable" "yes")])
10729 (define_expand "ffssi2"
10730 [(set (match_operand:SI 0 "s_register_operand" "")
10731 (ffs:SI (match_operand:SI 1 "s_register_operand" "")))]
10732 "TARGET_32BIT && arm_arch5"
10737 t1 = gen_reg_rtx (SImode);
10738 t2 = gen_reg_rtx (SImode);
10739 t3 = gen_reg_rtx (SImode);
10741 emit_insn (gen_negsi2 (t1, operands[1]));
10742 emit_insn (gen_andsi3 (t2, operands[1], t1));
10743 emit_insn (gen_clzsi2 (t3, t2));
10744 emit_insn (gen_subsi3 (operands[0], GEN_INT (32), t3));
10749 (define_expand "ctzsi2"
10750 [(set (match_operand:SI 0 "s_register_operand" "")
10751 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10752 "TARGET_32BIT && arm_arch5"
10757 t1 = gen_reg_rtx (SImode);
10758 t2 = gen_reg_rtx (SImode);
10759 t3 = gen_reg_rtx (SImode);
10761 emit_insn (gen_negsi2 (t1, operands[1]));
10762 emit_insn (gen_andsi3 (t2, operands[1], t1));
10763 emit_insn (gen_clzsi2 (t3, t2));
10764 emit_insn (gen_subsi3 (operands[0], GEN_INT (31), t3));
10769 ;; V5E instructions.
10771 (define_insn "prefetch"
10772 [(prefetch (match_operand:SI 0 "address_operand" "p")
10773 (match_operand:SI 1 "" "")
10774 (match_operand:SI 2 "" ""))]
10775 "TARGET_32BIT && arm_arch5e"
10778 ;; General predication pattern
10781 [(match_operator 0 "arm_comparison_operator"
10782 [(match_operand 1 "cc_register" "")
10788 (define_insn "prologue_use"
10789 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10791 "%@ %0 needed for prologue"
10795 ;; Patterns for exception handling
10797 (define_expand "eh_return"
10798 [(use (match_operand 0 "general_operand" ""))]
10803 emit_insn (gen_arm_eh_return (operands[0]));
10805 emit_insn (gen_thumb_eh_return (operands[0]));
10810 ;; We can't expand this before we know where the link register is stored.
10811 (define_insn_and_split "arm_eh_return"
10812 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10814 (clobber (match_scratch:SI 1 "=&r"))]
10817 "&& reload_completed"
10821 arm_set_return_address (operands[0], operands[1]);
10826 (define_insn_and_split "thumb_eh_return"
10827 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10829 (clobber (match_scratch:SI 1 "=&l"))]
10832 "&& reload_completed"
10836 thumb_set_return_address (operands[0], operands[1]);
10844 (define_insn "load_tp_hard"
10845 [(set (match_operand:SI 0 "register_operand" "=r")
10846 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10848 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10849 [(set_attr "predicable" "yes")]
10852 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10853 (define_insn "load_tp_soft"
10854 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10855 (clobber (reg:SI LR_REGNUM))
10856 (clobber (reg:SI IP_REGNUM))
10857 (clobber (reg:CC CC_REGNUM))]
10859 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10860 [(set_attr "conds" "clob")]
10863 ;; Load the FPA co-processor patterns
10865 ;; Load the Maverick co-processor patterns
10866 (include "cirrus.md")
10867 ;; Load the Intel Wireless Multimedia Extension patterns
10868 (include "iwmmxt.md")
10869 ;; Load the VFP co-processor patterns
10871 ;; Thumb-2 patterns
10872 (include "thumb2.md")