1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ;
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
45 ;; 3rd operand to select_dominance_cc_mode
52 ;; conditional compare combination
63 ;; Note: sin and cos are no longer used.
64 ;; Unspec enumerators for Neon are defined in neon.md.
66 (define_c_enum "unspec" [
67 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
68 ; operand 0 is the result,
69 ; operand 1 the parameter.
70 UNSPEC_COS ; `cos' operation (MODE_FLOAT):
71 ; operand 0 is the result,
72 ; operand 1 the parameter.
73 UNSPEC_PUSH_MULT ; `push multiple' operation:
74 ; operand 0 is the first register,
75 ; subsequent registers are in parallel (use ...)
77 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
78 ; usage, that is, we will add the pic_register
79 ; value to it before trying to dereference it.
80 UNSPEC_PIC_BASE ; Add PC and all but the last operand together,
81 ; The last operand is the number of a PIC_LABEL
82 ; that points at the containing instruction.
83 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
84 ; being scheduled before the stack adjustment insn.
85 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
86 ; this unspec is used to prevent the deletion of
87 ; instructions setting registers for EH handling
88 ; and stack frame generation. Operand 0 is the
90 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
91 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
92 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
93 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
94 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
95 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
96 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
97 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
98 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
99 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
100 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
101 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
102 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
103 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
104 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
105 ; instruction stream.
106 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
107 ; correctly for PIC usage.
108 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from a
109 ; a given symbolic address.
110 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
111 UNSPEC_RBIT ; rbit operation.
112 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
113 ; another symbolic address.
114 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
115 UNSPEC_UNALIGNED_LOAD ; Used to represent ldr/ldrh instructions that access
116 ; unaligned locations, on architectures which support
118 UNSPEC_UNALIGNED_STORE ; Same for str/strh.
119 UNSPEC_PIC_UNIFIED ; Create a common pic addressing form.
122 ;; UNSPEC_VOLATILE Usage:
124 (define_c_enum "unspecv" [
125 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
127 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
128 ; instruction epilogue sequence that isn't expanded
129 ; into normal RTL. Used for both normal and sibcall
131 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
132 ; modes from arm to thumb.
133 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
134 ; for inlined constants.
135 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
137 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
139 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
141 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
143 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
145 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
147 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
148 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
149 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
150 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
151 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
152 VUNSPEC_WCMP_GT ; Used by the iWMMXt WCMPGT instructions
153 VUNSPEC_EH_RETURN ; Use to override the return address for exception
155 VUNSPEC_ATOMIC_CAS ; Represent an atomic compare swap.
156 VUNSPEC_ATOMIC_XCHG ; Represent an atomic exchange.
157 VUNSPEC_ATOMIC_OP ; Represent an atomic operation.
158 VUNSPEC_LL ; Represent a load-register-exclusive.
159 VUNSPEC_SC ; Represent a store-register-exclusive.
162 ;;---------------------------------------------------------------------------
165 ;; Processor type. This is created automatically from arm-cores.def.
166 (include "arm-tune.md")
168 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
169 ; generating ARM code. This is used to control the length of some insn
170 ; patterns that share the same RTL in both ARM and Thumb code.
171 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code"))) ; mirrors the backend's thumb_code flag
173 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
174 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6"))) ; mirrors the backend's arm_arch6 flag
176 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
177 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code"))) ; mirrors the backend's thumb1_code flag
179 ;; Operand number of an input operand that is shifted. Zero if the
180 ;; given instruction does not shift one of its input operands.
181 (define_attr "shift" "" (const_int 0)) ; default 0: no shifted input operand
183 ; Floating Point Unit. If we only have floating point emulation, then there
184 ; is no point in scheduling the floating point insns. (Well, for best
185 ; performance we should try and group them together).
186 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp" ; which FP unit is in use
187 (const (symbol_ref "arm_fpu_attr"))) ; value comes from the arm_fpu_attr backend variable
189 ; LENGTH of an instruction (in bytes)
190 (define_attr "length" ""
193 ; The architecture which supports the instruction (or alternative).
194 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
195 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
196 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
197 ; arm_arch6. This attribute is used to compute attribute "enabled",
198 ; use type "any" to enable an alternative in all cases.
199 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,neon_onlya8,nota8,neon_nota8" ; per-alternative architecture tag; feeds "arch_enabled"
200 (const_string "any")) ; default: the alternative is valid for every target
202 (define_attr "arch_enabled" "no,yes"
203 (cond [(eq_attr "arch" "any")
206 (and (eq_attr "arch" "a")
207 (match_test "TARGET_ARM"))
210 (and (eq_attr "arch" "t")
211 (match_test "TARGET_THUMB"))
214 (and (eq_attr "arch" "t1")
215 (match_test "TARGET_THUMB1"))
218 (and (eq_attr "arch" "t2")
219 (match_test "TARGET_THUMB2"))
222 (and (eq_attr "arch" "32")
223 (match_test "TARGET_32BIT"))
226 (and (eq_attr "arch" "v6")
227 (match_test "TARGET_32BIT && arm_arch6"))
230 (and (eq_attr "arch" "nov6")
231 (match_test "TARGET_32BIT && !arm_arch6"))
234 (and (eq_attr "arch" "onlya8")
235 (eq_attr "tune" "cortexa8"))
238 (and (eq_attr "arch" "neon_onlya8")
239 (eq_attr "tune" "cortexa8")
240 (match_test "TARGET_NEON"))
243 (and (eq_attr "arch" "nota8")
244 (not (eq_attr "tune" "cortexa8")))
247 (and (eq_attr "arch" "neon_nota8")
248 (not (eq_attr "tune" "cortexa8"))
249 (match_test "TARGET_NEON"))
250 (const_string "yes")]
251 (const_string "no")))
253 ; Allows an insn to disable certain alternatives for reasons other than
255 (define_attr "insn_enabled" "no,yes" ; per-pattern hook to disable alternatives for non-arch reasons
256 (const_string "yes")) ; default: every alternative enabled
258 ; Enable all alternatives that are both arch_enabled and insn_enabled.
259 (define_attr "enabled" "no,yes"
260 (if_then_else (eq_attr "insn_enabled" "yes")
261 (if_then_else (eq_attr "arch_enabled" "yes")
264 (const_string "no")))
266 ; POOL_RANGE is how far away from a constant pool entry that this insn
267 ; can be placed. If the distance is zero, then this insn will never
268 ; reference the pool.
269 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
270 ; before its address. It is set to <max_range> - (8 + <data_size>).
271 (define_attr "arm_pool_range" "" (const_int 0)) ; forward pool range when in ARM state; 0 = never references the pool
272 (define_attr "thumb2_pool_range" "" (const_int 0)) ; forward pool range when in Thumb state
273 (define_attr "arm_neg_pool_range" "" (const_int 0)) ; backward (before-address) pool range, ARM state
274 (define_attr "thumb2_neg_pool_range" "" (const_int 0)) ; backward pool range, Thumb state
276 (define_attr "pool_range" "" ; effective forward pool range: picks the per-state variant
277 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")] ; Thumb code uses the thumb2 value
278 (attr "arm_pool_range"))) ; otherwise the ARM value
279 (define_attr "neg_pool_range" "" ; effective backward pool range: picks the per-state variant
280 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")] ; Thumb code uses the thumb2 value
281 (attr "arm_neg_pool_range"))) ; otherwise the ARM value
283 ; An assembler sequence may clobber the condition codes without us knowing.
284 ; If such an insn references the pool, then we have no way of knowing how,
285 ; so use the most conservative value for pool_range.
286 (define_asm_attributes
287 [(set_attr "conds" "clob") ; assume inline asm clobbers the condition codes
288 (set_attr "length" "4")
289 (set_attr "pool_range" "250")]) ; deliberately conservative pool range for asm
291 ;; The instruction used to implement a particular pattern. This
292 ;; information is used by pipeline descriptions to provide accurate
293 ;; scheduling information.
296 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,sat,other"
297 (const_string "other"))
299 ; TYPE attribute is used to detect floating point instructions which, if
300 ; running on a co-processor can run in parallel with other, basic instructions
301 ; If write-buffer scheduling is enabled then it can also be used in the
302 ; scheduling of writes.
304 ; Classification of each insn
305 ; Note: vfp.md has different meanings for some of these, and some further
306 ; types as well. See that file for details.
307 ; alu any alu instruction that doesn't hit memory or fp
308 ; regs or have a shifted source operand
309 ; alu_shift any data instruction that doesn't hit memory or fp
310 ; regs, but has a source operand shifted by a constant
311 ; alu_shift_reg any data instruction that doesn't hit memory or fp
312 ; regs, but has a source operand shifted by a register value
313 ; mult a multiply instruction
314 ; block blockage insn, this blocks all functional units
315 ; float a floating point arithmetic operation (subject to expansion)
316 ; fdivd DFmode floating point division
317 ; fdivs SFmode floating point division
318 ; fmul Floating point multiply
319 ; ffmul Fast floating point multiply
320 ; farith Floating point arithmetic (4 cycle)
321 ; ffarith Fast floating point arithmetic (2 cycle)
322 ; float_em a floating point arithmetic operation that is normally emulated
323 ; even on a machine with an fpa.
324 ; f_fpa_load a floating point load from memory. Only for the FPA.
325 ; f_fpa_store a floating point store to memory. Only for the FPA.
326 ; f_load[sd] A single/double load from memory. Used for VFP unit.
327 ; f_store[sd] A single/double store to memory. Used for VFP unit.
328 ; f_flag a transfer of co-processor flags to the CPSR
329 ; f_mem_r a transfer of a floating point register to a real reg via mem
330 ; r_mem_f the reverse of f_mem_r
331 ; f_2_r fast transfer float to arm (no memory needed)
332 ; r_2_f fast transfer arm to float
333 ; f_cvt convert floating<->integral
335 ; call a subroutine call
336 ; load_byte load byte(s) from memory to arm registers
337 ; load1 load 1 word from memory to arm registers
338 ; load2 load 2 words from memory to arm registers
339 ; load3 load 3 words from memory to arm registers
340 ; load4 load 4 words from memory to arm registers
341 ; store store 1 word to memory from arm registers
342 ; store2 store 2 words
343 ; store3 store 3 words
344 ; store4 store 4 (or more) words
345 ; Additions for Cirrus Maverick co-processor:
346 ; mav_farith Floating point arithmetic (4 cycle)
347 ; mav_dmult Double multiplies (7 cycle)
351 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
353 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
354 (const_string "mult")
355 (const_string "alu")))
357 ; Is this an (integer side) multiply with a 64-bit result?
358 (define_attr "mul64" "no,yes"
360 (eq_attr "insn" "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
362 (const_string "no")))
364 ; Load scheduling, set from the arm_ld_sched variable
365 ; initialized by arm_option_override()
366 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched"))) ; from arm_ld_sched, set by arm_option_override()
368 ;; Classification of NEON instructions for scheduling purposes.
369 ;; Do not set this attribute and the "type" attribute together in
370 ;; any one instruction pattern.
371 (define_attr "neon_type"
382 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
383 neon_mul_qqq_8_16_32_ddd_32,\
384 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
385 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
387 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
388 neon_mla_qqq_32_qqd_32_scalar,\
389 neon_mul_ddd_16_scalar_32_16_long_scalar,\
390 neon_mul_qqd_32_scalar,\
391 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
396 neon_vqshl_vrshl_vqrshl_qqq,\
398 neon_fp_vadd_ddd_vabs_dd,\
399 neon_fp_vadd_qqq_vabs_qq,\
405 neon_fp_vmla_ddd_scalar,\
406 neon_fp_vmla_qqq_scalar,\
407 neon_fp_vrecps_vrsqrts_ddd,\
408 neon_fp_vrecps_vrsqrts_qqq,\
416 neon_vld2_2_regs_vld1_vld2_all_lanes,\
419 neon_vst1_1_2_regs_vst2_2_regs,\
421 neon_vst2_4_regs_vst3_vst4,\
423 neon_vld1_vld2_lane,\
424 neon_vld3_vld4_lane,\
425 neon_vst1_vst2_lane,\
426 neon_vst3_vst4_lane,\
427 neon_vld3_vld4_all_lanes,\
435 (const_string "none"))
437 ; condition codes: this one is used by final_prescan_insn to speed up
438 ; conditionalizing instructions. It saves having to scan the rtl to see if
439 ; it uses or alters the condition codes.
441 ; USE means that the condition codes are used by the insn in the process of
442 ; outputting code, this means (at present) that we can't use the insn in
445 ; SET means that the purpose of the insn is to set the condition codes in a
446 ; well defined manner.
448 ; CLOB means that the condition codes are altered in an undefined manner, if
449 ; they are altered at all
451 ; UNCONDITIONAL means the instruction can not be conditionally executed and
452 ; that the instruction does not use or alter the condition codes.
454 ; NOCOND means that the instruction does not use or alter the condition
455 ; codes but can be converted into a conditionally executed instruction.
457 (define_attr "conds" "use,set,clob,unconditional,nocond"
459 (ior (eq_attr "is_thumb1" "yes")
460 (eq_attr "type" "call"))
461 (const_string "clob")
462 (if_then_else (eq_attr "neon_type" "none")
463 (const_string "nocond")
464 (const_string "unconditional"))))
466 ; Predicable means that the insn can be conditionally executed based on
467 ; an automatically added predicate (additional patterns are generated by
468 ; gen...). We default to 'no' because no Thumb patterns match this rule
469 ; and not all ARM patterns do.
470 (define_attr "predicable" "no,yes" (const_string "no")) ; default 'no': pattern not conditionally executable
472 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
473 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
474 ; suffer blockages enough to warrant modelling this (and it can adversely
475 ; affect the schedule).
476 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf"))) ; from the arm_tune_wbuf tuning flag
478 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
479 ; to stall the processor. Used with model_wbuf above.
480 (define_attr "write_conflict" "no,yes"
481 (if_then_else (eq_attr "type"
482 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
484 (const_string "no")))
486 ; Classify the insns into those that take one cycle and those that take more
487 ; than one on the main cpu execution unit.
488 (define_attr "core_cycles" "single,multi" ; one cycle vs. more on the main execution unit
489 (if_then_else (eq_attr "type"
490 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
491 (const_string "single") ; these types complete in a single cycle
492 (const_string "multi"))) ; everything else takes more than one
494 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
495 ;; distant label. Only applicable to Thumb code.
496 (define_attr "far_jump" "yes,no" (const_string "no")) ; default: branch target assumed in range (Thumb only)
499 ;; The number of machine instructions this pattern expands to.
500 ;; Used for Thumb-2 conditional execution.
501 (define_attr "ce_count" "" (const_int 1)) ; default: pattern expands to one machine instruction
503 ;;---------------------------------------------------------------------------
506 (include "iterators.md")
508 ;;---------------------------------------------------------------------------
511 (include "predicates.md")
512 (include "constraints.md")
514 ;;---------------------------------------------------------------------------
515 ;; Pipeline descriptions
517 (define_attr "tune_cortexr4" "yes,no"
519 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
521 (const_string "no"))))
523 ;; True if the generic scheduling description should be used.
525 (define_attr "generic_sched" "yes,no"
527 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexa15,cortexm4")
528 (eq_attr "tune_cortexr4" "yes"))
530 (const_string "yes"))))
532 (define_attr "generic_vfp" "yes,no"
534 (and (eq_attr "fpu" "vfp")
535 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
536 (eq_attr "tune_cortexr4" "no"))
538 (const_string "no"))))
540 (include "arm-generic.md")
541 (include "arm926ejs.md")
542 (include "arm1020e.md")
543 (include "arm1026ejs.md")
544 (include "arm1136jfs.md")
546 (include "fa606te.md")
547 (include "fa626te.md")
548 (include "fmp626.md")
549 (include "fa726te.md")
550 (include "cortex-a5.md")
551 (include "cortex-a8.md")
552 (include "cortex-a9.md")
553 (include "cortex-a15.md")
554 (include "cortex-r4.md")
555 (include "cortex-r4f.md")
556 (include "cortex-m4.md")
557 (include "cortex-m4-fpu.md")
561 ;;---------------------------------------------------------------------------
566 ;; Note: For DImode insns, there is normally no reason why operands should
567 ;; not be in the same register, what we don't want is for something being
568 ;; written to partially overlap something that is an input.
569 ;; Cirrus 64bit additions should not be split because we have a native
570 ;; 64bit addition instruction.
572 (define_expand "adddi3"
574 [(set (match_operand:DI 0 "s_register_operand" "")
575 (plus:DI (match_operand:DI 1 "s_register_operand" "")
576 (match_operand:DI 2 "s_register_operand" "")))
577 (clobber (reg:CC CC_REGNUM))])]
580 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
582 if (!cirrus_fp_register (operands[0], DImode))
583 operands[0] = force_reg (DImode, operands[0]);
584 if (!cirrus_fp_register (operands[1], DImode))
585 operands[1] = force_reg (DImode, operands[1]);
586 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
592 if (GET_CODE (operands[1]) != REG)
593 operands[1] = force_reg (DImode, operands[1]);
594 if (GET_CODE (operands[2]) != REG)
595 operands[2] = force_reg (DImode, operands[2]);
600 (define_insn "*thumb1_adddi3"
601 [(set (match_operand:DI 0 "register_operand" "=l")
602 (plus:DI (match_operand:DI 1 "register_operand" "%0")
603 (match_operand:DI 2 "register_operand" "l")))
604 (clobber (reg:CC CC_REGNUM))
607 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
608 [(set_attr "length" "4")]
611 (define_insn_and_split "*arm_adddi3"
612 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
613 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
614 (match_operand:DI 2 "s_register_operand" "r, 0")))
615 (clobber (reg:CC CC_REGNUM))]
616 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
618 "TARGET_32BIT && reload_completed
619 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
620 [(parallel [(set (reg:CC_C CC_REGNUM)
621 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
623 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
624 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
625 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
628 operands[3] = gen_highpart (SImode, operands[0]);
629 operands[0] = gen_lowpart (SImode, operands[0]);
630 operands[4] = gen_highpart (SImode, operands[1]);
631 operands[1] = gen_lowpart (SImode, operands[1]);
632 operands[5] = gen_highpart (SImode, operands[2]);
633 operands[2] = gen_lowpart (SImode, operands[2]);
635 [(set_attr "conds" "clob")
636 (set_attr "length" "8")]
639 (define_insn_and_split "*adddi_sesidi_di"
640 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
641 (plus:DI (sign_extend:DI
642 (match_operand:SI 2 "s_register_operand" "r,r"))
643 (match_operand:DI 1 "s_register_operand" "0,r")))
644 (clobber (reg:CC CC_REGNUM))]
645 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
647 "TARGET_32BIT && reload_completed"
648 [(parallel [(set (reg:CC_C CC_REGNUM)
649 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
651 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
652 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
655 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
658 operands[3] = gen_highpart (SImode, operands[0]);
659 operands[0] = gen_lowpart (SImode, operands[0]);
660 operands[4] = gen_highpart (SImode, operands[1]);
661 operands[1] = gen_lowpart (SImode, operands[1]);
662 operands[2] = gen_lowpart (SImode, operands[2]);
664 [(set_attr "conds" "clob")
665 (set_attr "length" "8")]
668 (define_insn_and_split "*adddi_zesidi_di"
669 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
670 (plus:DI (zero_extend:DI
671 (match_operand:SI 2 "s_register_operand" "r,r"))
672 (match_operand:DI 1 "s_register_operand" "0,r")))
673 (clobber (reg:CC CC_REGNUM))]
674 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
676 "TARGET_32BIT && reload_completed"
677 [(parallel [(set (reg:CC_C CC_REGNUM)
678 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
680 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
681 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
682 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
685 operands[3] = gen_highpart (SImode, operands[0]);
686 operands[0] = gen_lowpart (SImode, operands[0]);
687 operands[4] = gen_highpart (SImode, operands[1]);
688 operands[1] = gen_lowpart (SImode, operands[1]);
689 operands[2] = gen_lowpart (SImode, operands[2]);
691 [(set_attr "conds" "clob")
692 (set_attr "length" "8")]
695 (define_expand "addsi3"
696 [(set (match_operand:SI 0 "s_register_operand" "")
697 (plus:SI (match_operand:SI 1 "s_register_operand" "")
698 (match_operand:SI 2 "reg_or_int_operand" "")))]
701 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
703 arm_split_constant (PLUS, SImode, NULL_RTX,
704 INTVAL (operands[2]), operands[0], operands[1],
705 optimize && can_create_pseudo_p ());
711 ; If there is a scratch available, this will be faster than synthesizing the
714 [(match_scratch:SI 3 "r")
715 (set (match_operand:SI 0 "arm_general_register_operand" "")
716 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
717 (match_operand:SI 2 "const_int_operand" "")))]
719 !(const_ok_for_arm (INTVAL (operands[2]))
720 || const_ok_for_arm (-INTVAL (operands[2])))
721 && const_ok_for_arm (~INTVAL (operands[2]))"
722 [(set (match_dup 3) (match_dup 2))
723 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
727 ;; The r/r/k alternative is required when reloading the address
728 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
729 ;; put the duplicated register first, and not try the commutative version.
730 (define_insn_and_split "*arm_addsi3"
731 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
732 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
733 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
743 subw%?\\t%0, %1, #%n2
744 subw%?\\t%0, %1, #%n2
747 && GET_CODE (operands[2]) == CONST_INT
748 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
749 && (reload_completed || !arm_eliminable_register (operands[1]))"
750 [(clobber (const_int 0))]
752 arm_split_constant (PLUS, SImode, curr_insn,
753 INTVAL (operands[2]), operands[0],
757 [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
758 (set_attr "predicable" "yes")
759 (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
762 (define_insn_and_split "*thumb1_addsi3"
763 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
764 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
765 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
768 static const char * const asms[] =
770 \"add\\t%0, %0, %2\",
771 \"sub\\t%0, %0, #%n2\",
772 \"add\\t%0, %1, %2\",
773 \"add\\t%0, %0, %2\",
774 \"add\\t%0, %0, %2\",
775 \"add\\t%0, %1, %2\",
776 \"add\\t%0, %1, %2\",
781 if ((which_alternative == 2 || which_alternative == 6)
782 && GET_CODE (operands[2]) == CONST_INT
783 && INTVAL (operands[2]) < 0)
784 return \"sub\\t%0, %1, #%n2\";
785 return asms[which_alternative];
787 "&& reload_completed && CONST_INT_P (operands[2])
788 && ((operands[1] != stack_pointer_rtx
789 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
790 || (operands[1] == stack_pointer_rtx
791 && INTVAL (operands[2]) > 1020))"
792 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
793 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
795 HOST_WIDE_INT offset = INTVAL (operands[2]);
796 if (operands[1] == stack_pointer_rtx)
802 else if (offset < -255)
805 operands[3] = GEN_INT (offset);
806 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
808 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
811 ;; Reloading and elimination of the frame pointer can
812 ;; sometimes cause this optimization to be missed.
814 [(set (match_operand:SI 0 "arm_general_register_operand" "")
815 (match_operand:SI 1 "const_int_operand" ""))
817 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
819 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
820 && (INTVAL (operands[1]) & 3) == 0"
821 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
825 (define_insn "addsi3_compare0"
826 [(set (reg:CC_NOOV CC_REGNUM)
828 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
829 (match_operand:SI 2 "arm_add_operand" "rI,L"))
831 (set (match_operand:SI 0 "s_register_operand" "=r,r")
832 (plus:SI (match_dup 1) (match_dup 2)))]
836 sub%.\\t%0, %1, #%n2"
837 [(set_attr "conds" "set")]
840 (define_insn "*addsi3_compare0_scratch"
841 [(set (reg:CC_NOOV CC_REGNUM)
843 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
844 (match_operand:SI 1 "arm_add_operand" "rI,L"))
850 [(set_attr "conds" "set")
851 (set_attr "predicable" "yes")]
854 (define_insn "*compare_negsi_si"
855 [(set (reg:CC_Z CC_REGNUM)
857 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
858 (match_operand:SI 1 "s_register_operand" "r")))]
861 [(set_attr "conds" "set")
862 (set_attr "predicable" "yes")]
865 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
866 ;; addend is a constant.
867 (define_insn "*cmpsi2_addneg"
868 [(set (reg:CC CC_REGNUM)
870 (match_operand:SI 1 "s_register_operand" "r,r")
871 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
872 (set (match_operand:SI 0 "s_register_operand" "=r,r")
873 (plus:SI (match_dup 1)
874 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
875 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
878 sub%.\\t%0, %1, #%n3"
879 [(set_attr "conds" "set")]
882 ;; Convert the sequence
884 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
888 ;; bcs dest ((unsigned)rn >= 1)
889 ;; similarly for the beq variant using bcc.
890 ;; This is a common looping idiom (while (n--))
892 [(set (match_operand:SI 0 "arm_general_register_operand" "")
893 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
895 (set (match_operand 2 "cc_register" "")
896 (compare (match_dup 0) (const_int -1)))
898 (if_then_else (match_operator 3 "equality_operator"
899 [(match_dup 2) (const_int 0)])
900 (match_operand 4 "" "")
901 (match_operand 5 "" "")))]
902 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
906 (match_dup 1) (const_int 1)))
907 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
909 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
912 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
913 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
916 operands[2], const0_rtx);"
919 ;; The next four insns work because they compare the result with one of
920 ;; the operands, and we know that the use of the condition code is
921 ;; either GEU or LTU, so we can use the carry flag from the addition
922 ;; instead of doing the compare a second time.
923 (define_insn "*addsi3_compare_op1"
924 [(set (reg:CC_C CC_REGNUM)
926 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
927 (match_operand:SI 2 "arm_add_operand" "rI,L"))
929 (set (match_operand:SI 0 "s_register_operand" "=r,r")
930 (plus:SI (match_dup 1) (match_dup 2)))]
934 sub%.\\t%0, %1, #%n2"
935 [(set_attr "conds" "set")]
938 (define_insn "*addsi3_compare_op2"
939 [(set (reg:CC_C CC_REGNUM)
941 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
942 (match_operand:SI 2 "arm_add_operand" "rI,L"))
944 (set (match_operand:SI 0 "s_register_operand" "=r,r")
945 (plus:SI (match_dup 1) (match_dup 2)))]
949 sub%.\\t%0, %1, #%n2"
950 [(set_attr "conds" "set")]
953 (define_insn "*compare_addsi2_op0"
954 [(set (reg:CC_C CC_REGNUM)
956 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
957 (match_operand:SI 1 "arm_add_operand" "rI,L"))
963 [(set_attr "conds" "set")
964 (set_attr "predicable" "yes")]
967 (define_insn "*compare_addsi2_op1"
968 [(set (reg:CC_C CC_REGNUM)
970 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
971 (match_operand:SI 1 "arm_add_operand" "rI,L"))
977 [(set_attr "conds" "set")
978 (set_attr "predicable" "yes")]
981 (define_insn "*addsi3_carryin_<optab>"
982 [(set (match_operand:SI 0 "s_register_operand" "=r")
983 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
984 (match_operand:SI 2 "arm_rhs_operand" "rI"))
985 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
988 [(set_attr "conds" "use")]
991 (define_insn "*addsi3_carryin_alt2_<optab>"
992 [(set (match_operand:SI 0 "s_register_operand" "=r")
993 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
994 (match_operand:SI 1 "s_register_operand" "%r"))
995 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
998 [(set_attr "conds" "use")]
1001 (define_insn "*addsi3_carryin_shift_<optab>"
1002 [(set (match_operand:SI 0 "s_register_operand" "=r")
1004 (match_operator:SI 2 "shift_operator"
1005 [(match_operand:SI 3 "s_register_operand" "r")
1006 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1007 (match_operand:SI 1 "s_register_operand" "r"))
1008 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1010 "adc%?\\t%0, %1, %3%S2"
1011 [(set_attr "conds" "use")
1012 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1013 (const_string "alu_shift")
1014 (const_string "alu_shift_reg")))]
1017 (define_insn "*addsi3_carryin_clobercc_<optab>"
1018 [(set (match_operand:SI 0 "s_register_operand" "=r")
1019 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1020 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1021 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1022 (clobber (reg:CC CC_REGNUM))]
1024 "adc%.\\t%0, %1, %2"
1025 [(set_attr "conds" "set")]
1028 (define_expand "incscc"
1029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1030 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1031 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1032 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1037 (define_insn "*arm_incscc"
1038 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1039 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1040 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1041 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1045 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1046 [(set_attr "conds" "use")
1047 (set_attr "length" "4,8")]
1050 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
1052 [(set (match_operand:SI 0 "s_register_operand" "")
1053 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1054 (match_operand:SI 2 "s_register_operand" ""))
1056 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1058 [(set (match_dup 3) (match_dup 1))
1059 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1061 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1064 (define_expand "addsf3"
1065 [(set (match_operand:SF 0 "s_register_operand" "")
1066 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1067 (match_operand:SF 2 "arm_float_add_operand" "")))]
1068 "TARGET_32BIT && TARGET_HARD_FLOAT"
1071 && !cirrus_fp_register (operands[2], SFmode))
1072 operands[2] = force_reg (SFmode, operands[2]);
1075 (define_expand "adddf3"
1076 [(set (match_operand:DF 0 "s_register_operand" "")
1077 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1078 (match_operand:DF 2 "arm_float_add_operand" "")))]
1079 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1082 && !cirrus_fp_register (operands[2], DFmode))
1083 operands[2] = force_reg (DFmode, operands[2]);
1086 (define_expand "subdi3"
1088 [(set (match_operand:DI 0 "s_register_operand" "")
1089 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1090 (match_operand:DI 2 "s_register_operand" "")))
1091 (clobber (reg:CC CC_REGNUM))])]
1094 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1096 && cirrus_fp_register (operands[0], DImode)
1097 && cirrus_fp_register (operands[1], DImode))
1099 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1105 if (GET_CODE (operands[1]) != REG)
1106 operands[1] = force_reg (DImode, operands[1]);
1107 if (GET_CODE (operands[2]) != REG)
1108 operands[2] = force_reg (DImode, operands[2]);
1113 (define_insn "*arm_subdi3"
1114   [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1115 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1116    (match_operand:DI 2 "s_register_operand" "r,0,0")))
1117    (clobber (reg:CC CC_REGNUM))]
1118   "TARGET_32BIT && !TARGET_NEON"
;; 64-bit subtract as a two-insn sequence: SUBS on the low words sets the
;; carry (borrow) flag, which the SBC on the high words then consumes.
;; Because the flags are overwritten, CC_REGNUM is clobbered and the
;; "conds" attribute is "clob"; two 4-byte insns give length 8.
;; The "&" early-clobbers allow the output to be written before both
;; inputs are fully read, except in the tied ("0") alternatives.
1119   "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1120   [(set_attr "conds" "clob")
1121    (set_attr "length" "8")]
1124 (define_insn "*thumb_subdi3"
1125 [(set (match_operand:DI 0 "register_operand" "=l")
1126 (minus:DI (match_operand:DI 1 "register_operand" "0")
1127 (match_operand:DI 2 "register_operand" "l")))
1128 (clobber (reg:CC CC_REGNUM))]
1130 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1131 [(set_attr "length" "4")]
1134 (define_insn "*subdi_di_zesidi"
1135 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1136 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1138 (match_operand:SI 2 "s_register_operand" "r,r"))))
1139 (clobber (reg:CC CC_REGNUM))]
1141 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1142 [(set_attr "conds" "clob")
1143 (set_attr "length" "8")]
1146 (define_insn "*subdi_di_sesidi"
1147 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1148 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1150 (match_operand:SI 2 "s_register_operand" "r,r"))))
1151 (clobber (reg:CC CC_REGNUM))]
1153 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1154 [(set_attr "conds" "clob")
1155 (set_attr "length" "8")]
1158 (define_insn "*subdi_zesidi_di"
1159 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1160 (minus:DI (zero_extend:DI
1161 (match_operand:SI 2 "s_register_operand" "r,r"))
1162 (match_operand:DI 1 "s_register_operand" "0,r")))
1163 (clobber (reg:CC CC_REGNUM))]
1165 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1166 [(set_attr "conds" "clob")
1167 (set_attr "length" "8")]
1170 (define_insn "*subdi_sesidi_di"
1171 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1172 (minus:DI (sign_extend:DI
1173 (match_operand:SI 2 "s_register_operand" "r,r"))
1174 (match_operand:DI 1 "s_register_operand" "0,r")))
1175 (clobber (reg:CC CC_REGNUM))]
1177 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1178 [(set_attr "conds" "clob")
1179 (set_attr "length" "8")]
1182 (define_insn "*subdi_zesidi_zesidi"
1183 [(set (match_operand:DI 0 "s_register_operand" "=r")
1184 (minus:DI (zero_extend:DI
1185 (match_operand:SI 1 "s_register_operand" "r"))
1187 (match_operand:SI 2 "s_register_operand" "r"))))
1188 (clobber (reg:CC CC_REGNUM))]
1190 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1191 [(set_attr "conds" "clob")
1192 (set_attr "length" "8")]
1195 (define_expand "subsi3"
1196 [(set (match_operand:SI 0 "s_register_operand" "")
1197 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1198 (match_operand:SI 2 "s_register_operand" "")))]
1201 if (GET_CODE (operands[1]) == CONST_INT)
1205 arm_split_constant (MINUS, SImode, NULL_RTX,
1206 INTVAL (operands[1]), operands[0],
1207 operands[2], optimize && can_create_pseudo_p ());
1210 else /* TARGET_THUMB1 */
1211 operands[1] = force_reg (SImode, operands[1]);
1216 (define_insn "thumb1_subsi3_insn"
1217 [(set (match_operand:SI 0 "register_operand" "=l")
1218 (minus:SI (match_operand:SI 1 "register_operand" "l")
1219 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1222 [(set_attr "length" "2")
1223 (set_attr "conds" "set")])
1225 ; ??? Check Thumb-2 split length
1226 (define_insn_and_split "*arm_subsi3_insn"
1227 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r")
1228 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n")
1229 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r")))]
1236 "&& (GET_CODE (operands[1]) == CONST_INT
1237 && !const_ok_for_arm (INTVAL (operands[1])))"
1238 [(clobber (const_int 0))]
1240 arm_split_constant (MINUS, SImode, curr_insn,
1241 INTVAL (operands[1]), operands[0], operands[2], 0);
1244 [(set_attr "length" "4,4,4,16")
1245 (set_attr "predicable" "yes")]
1249 [(match_scratch:SI 3 "r")
1250 (set (match_operand:SI 0 "arm_general_register_operand" "")
1251 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1252 (match_operand:SI 2 "arm_general_register_operand" "")))]
1254 && !const_ok_for_arm (INTVAL (operands[1]))
1255 && const_ok_for_arm (~INTVAL (operands[1]))"
1256 [(set (match_dup 3) (match_dup 1))
1257 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1261 (define_insn "*subsi3_compare0"
1262 [(set (reg:CC_NOOV CC_REGNUM)
1264 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1265 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1267 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1268 (minus:SI (match_dup 1) (match_dup 2)))]
1273 [(set_attr "conds" "set")]
1276 (define_insn "*subsi3_compare"
1277 [(set (reg:CC CC_REGNUM)
1278 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1279 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1280 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1281 (minus:SI (match_dup 1) (match_dup 2)))]
1286 [(set_attr "conds" "set")]
1289 (define_expand "decscc"
1290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1291 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1292 (match_operator:SI 2 "arm_comparison_operator"
1293 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1298 (define_insn "*arm_decscc"
1299 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1300 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1301 (match_operator:SI 2 "arm_comparison_operator"
1302 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1306 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1307 [(set_attr "conds" "use")
1308 (set_attr "length" "*,8")]
1311 (define_expand "subsf3"
1312 [(set (match_operand:SF 0 "s_register_operand" "")
1313 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1314 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1315 "TARGET_32BIT && TARGET_HARD_FLOAT"
1317 if (TARGET_MAVERICK)
1319 if (!cirrus_fp_register (operands[1], SFmode))
1320 operands[1] = force_reg (SFmode, operands[1]);
1321 if (!cirrus_fp_register (operands[2], SFmode))
1322 operands[2] = force_reg (SFmode, operands[2]);
1326 (define_expand "subdf3"
1327 [(set (match_operand:DF 0 "s_register_operand" "")
1328 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1329 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1330 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1332 if (TARGET_MAVERICK)
1334 if (!cirrus_fp_register (operands[1], DFmode))
1335 operands[1] = force_reg (DFmode, operands[1]);
1336 if (!cirrus_fp_register (operands[2], DFmode))
1337 operands[2] = force_reg (DFmode, operands[2]);
1342 ;; Multiplication insns
1344 (define_expand "mulsi3"
1345 [(set (match_operand:SI 0 "s_register_operand" "")
1346 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1347 (match_operand:SI 1 "s_register_operand" "")))]
1352 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1353 (define_insn "*arm_mulsi3"
;; 32-bit multiply for pre-v6 cores (!arm_arch6).  Per the note above,
;; the early-clobber "&" on operand 0 plus the tied "0" alternative on
;; operand 1 keep the destination from overlapping operand 1 (presumably
;; the pre-ARMv6 MUL Rd != Rm restriction -- confirm against the ARM ARM).
1354   [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1355 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1356  (match_operand:SI 1 "s_register_operand" "%0,r")))]
1357   "TARGET_32BIT && !arm_arch6"
;; "%?" emits the conditional suffix when predicated (predicable "yes").
1358   "mul%?\\t%0, %2, %1"
1359   [(set_attr "insn" "mul")
1360    (set_attr "predicable" "yes")]
1363 (define_insn "*arm_mulsi3_v6"
;; 32-bit multiply for v6+ cores (arm_arch6): unlike *arm_mulsi3 above,
;; no early-clobber or tied alternative is needed, so plain "r"
;; constraints are used and the operands appear in natural order.
1364   [(set (match_operand:SI 0 "s_register_operand" "=r")
1365 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1366  (match_operand:SI 2 "s_register_operand" "r")))]
1367   "TARGET_32BIT && arm_arch6"
1368   "mul%?\\t%0, %1, %2"
1369   [(set_attr "insn" "mul")
1370    (set_attr "predicable" "yes")]
1373 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1374 ; 1 and 2 are the same, because reload will make operand 0 match
1375 ; operand 1 without realizing that this conflicts with operand 2. We fix
1376 ; this by adding another alternative to match this case, and then `reload'
1377 ; it ourselves. This alternative must come first.
1378 (define_insn "*thumb_mulsi3"
1379 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1380 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1381 (match_operand:SI 2 "register_operand" "l,l,l")))]
1382 "TARGET_THUMB1 && !arm_arch6"
1384 if (which_alternative < 2)
1385 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1387 return \"mul\\t%0, %2\";
1389 [(set_attr "length" "4,4,2")
1390 (set_attr "insn" "mul")]
1393 (define_insn "*thumb_mulsi3_v6"
1394 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1395 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1396 (match_operand:SI 2 "register_operand" "l,0,0")))]
1397 "TARGET_THUMB1 && arm_arch6"
1402 [(set_attr "length" "2")
1403 (set_attr "insn" "mul")]
1406 (define_insn "*mulsi3_compare0"
1407 [(set (reg:CC_NOOV CC_REGNUM)
1408 (compare:CC_NOOV (mult:SI
1409 (match_operand:SI 2 "s_register_operand" "r,r")
1410 (match_operand:SI 1 "s_register_operand" "%0,r"))
1412 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1413 (mult:SI (match_dup 2) (match_dup 1)))]
1414 "TARGET_ARM && !arm_arch6"
1415 "mul%.\\t%0, %2, %1"
1416 [(set_attr "conds" "set")
1417 (set_attr "insn" "muls")]
1420 (define_insn "*mulsi3_compare0_v6"
1421 [(set (reg:CC_NOOV CC_REGNUM)
1422 (compare:CC_NOOV (mult:SI
1423 (match_operand:SI 2 "s_register_operand" "r")
1424 (match_operand:SI 1 "s_register_operand" "r"))
1426 (set (match_operand:SI 0 "s_register_operand" "=r")
1427 (mult:SI (match_dup 2) (match_dup 1)))]
1428 "TARGET_ARM && arm_arch6 && optimize_size"
1429 "mul%.\\t%0, %2, %1"
1430 [(set_attr "conds" "set")
1431 (set_attr "insn" "muls")]
1434 (define_insn "*mulsi_compare0_scratch"
1435 [(set (reg:CC_NOOV CC_REGNUM)
1436 (compare:CC_NOOV (mult:SI
1437 (match_operand:SI 2 "s_register_operand" "r,r")
1438 (match_operand:SI 1 "s_register_operand" "%0,r"))
1440 (clobber (match_scratch:SI 0 "=&r,&r"))]
1441 "TARGET_ARM && !arm_arch6"
1442 "mul%.\\t%0, %2, %1"
1443 [(set_attr "conds" "set")
1444 (set_attr "insn" "muls")]
1447 (define_insn "*mulsi_compare0_scratch_v6"
1448 [(set (reg:CC_NOOV CC_REGNUM)
1449 (compare:CC_NOOV (mult:SI
1450 (match_operand:SI 2 "s_register_operand" "r")
1451 (match_operand:SI 1 "s_register_operand" "r"))
1453 (clobber (match_scratch:SI 0 "=r"))]
1454 "TARGET_ARM && arm_arch6 && optimize_size"
1455 "mul%.\\t%0, %2, %1"
1456 [(set_attr "conds" "set")
1457 (set_attr "insn" "muls")]
1460 ;; Unnamed templates to match MLA instruction.
1462 (define_insn "*mulsi3addsi"
1463 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1465 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1466 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1467 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1468 "TARGET_32BIT && !arm_arch6"
1469 "mla%?\\t%0, %2, %1, %3"
1470 [(set_attr "insn" "mla")
1471 (set_attr "predicable" "yes")]
1474 (define_insn "*mulsi3addsi_v6"
1475 [(set (match_operand:SI 0 "s_register_operand" "=r")
1477 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1478 (match_operand:SI 1 "s_register_operand" "r"))
1479 (match_operand:SI 3 "s_register_operand" "r")))]
1480 "TARGET_32BIT && arm_arch6"
1481 "mla%?\\t%0, %2, %1, %3"
1482 [(set_attr "insn" "mla")
1483 (set_attr "predicable" "yes")]
1486 (define_insn "*mulsi3addsi_compare0"
1487 [(set (reg:CC_NOOV CC_REGNUM)
1490 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1491 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1492 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1494 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1495 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1497 "TARGET_ARM && arm_arch6"
1498 "mla%.\\t%0, %2, %1, %3"
1499 [(set_attr "conds" "set")
1500 (set_attr "insn" "mlas")]
1503 (define_insn "*mulsi3addsi_compare0_v6"
1504 [(set (reg:CC_NOOV CC_REGNUM)
1507 (match_operand:SI 2 "s_register_operand" "r")
1508 (match_operand:SI 1 "s_register_operand" "r"))
1509 (match_operand:SI 3 "s_register_operand" "r"))
1511 (set (match_operand:SI 0 "s_register_operand" "=r")
1512 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1514 "TARGET_ARM && arm_arch6 && optimize_size"
1515 "mla%.\\t%0, %2, %1, %3"
1516 [(set_attr "conds" "set")
1517 (set_attr "insn" "mlas")]
1520 (define_insn "*mulsi3addsi_compare0_scratch"
1521 [(set (reg:CC_NOOV CC_REGNUM)
1524 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1525 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1526 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1528 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1529 "TARGET_ARM && !arm_arch6"
1530 "mla%.\\t%0, %2, %1, %3"
1531 [(set_attr "conds" "set")
1532 (set_attr "insn" "mlas")]
1535 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1536 [(set (reg:CC_NOOV CC_REGNUM)
1539 (match_operand:SI 2 "s_register_operand" "r")
1540 (match_operand:SI 1 "s_register_operand" "r"))
1541 (match_operand:SI 3 "s_register_operand" "r"))
1543 (clobber (match_scratch:SI 0 "=r"))]
1544 "TARGET_ARM && arm_arch6 && optimize_size"
1545 "mla%.\\t%0, %2, %1, %3"
1546 [(set_attr "conds" "set")
1547 (set_attr "insn" "mlas")]
1550 (define_insn "*mulsi3subsi"
1551 [(set (match_operand:SI 0 "s_register_operand" "=r")
1553 (match_operand:SI 3 "s_register_operand" "r")
1554 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1555 (match_operand:SI 1 "s_register_operand" "r"))))]
1556 "TARGET_32BIT && arm_arch_thumb2"
1557 "mls%?\\t%0, %2, %1, %3"
1558 [(set_attr "insn" "mla")
1559 (set_attr "predicable" "yes")]
1562 (define_expand "maddsidi4"
1563 [(set (match_operand:DI 0 "s_register_operand" "")
1566 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1567 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1568 (match_operand:DI 3 "s_register_operand" "")))]
1569 "TARGET_32BIT && arm_arch3m"
1572 (define_insn "*mulsidi3adddi"
1573 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1576 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1577 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1578 (match_operand:DI 1 "s_register_operand" "0")))]
1579 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1580 "smlal%?\\t%Q0, %R0, %3, %2"
1581 [(set_attr "insn" "smlal")
1582 (set_attr "predicable" "yes")]
1585 (define_insn "*mulsidi3adddi_v6"
1586 [(set (match_operand:DI 0 "s_register_operand" "=r")
1589 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1590 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1591 (match_operand:DI 1 "s_register_operand" "0")))]
1592 "TARGET_32BIT && arm_arch6"
1593 "smlal%?\\t%Q0, %R0, %3, %2"
1594 [(set_attr "insn" "smlal")
1595 (set_attr "predicable" "yes")]
1598 ;; 32x32->64 widening multiply.
1599 ;; As with mulsi3, the only difference between the v3-5 and v6+
1600 ;; versions of these patterns is the requirement that the output not
1601 ;; overlap the inputs, but that still means we have to have a named
1602 ;; expander and two different starred insns.
1604 (define_expand "mulsidi3"
1605 [(set (match_operand:DI 0 "s_register_operand" "")
1607 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1608 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1609 "TARGET_32BIT && arm_arch3m"
1613 (define_insn "*mulsidi3_nov6"
1614 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1616 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1617 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1618 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1619 "smull%?\\t%Q0, %R0, %1, %2"
1620 [(set_attr "insn" "smull")
1621 (set_attr "predicable" "yes")]
1624 (define_insn "*mulsidi3_v6"
1625 [(set (match_operand:DI 0 "s_register_operand" "=r")
1627 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1628 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1629 "TARGET_32BIT && arm_arch6"
1630 "smull%?\\t%Q0, %R0, %1, %2"
1631 [(set_attr "insn" "smull")
1632 (set_attr "predicable" "yes")]
1635 (define_expand "umulsidi3"
1636 [(set (match_operand:DI 0 "s_register_operand" "")
1638 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1639 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1640 "TARGET_32BIT && arm_arch3m"
1644 (define_insn "*umulsidi3_nov6"
1645 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1647 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1648 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1649 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1650 "umull%?\\t%Q0, %R0, %1, %2"
1651 [(set_attr "insn" "umull")
1652 (set_attr "predicable" "yes")]
1655 (define_insn "*umulsidi3_v6"
1656 [(set (match_operand:DI 0 "s_register_operand" "=r")
1658 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1659 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1660 "TARGET_32BIT && arm_arch6"
1661 "umull%?\\t%Q0, %R0, %1, %2"
1662 [(set_attr "insn" "umull")
1663 (set_attr "predicable" "yes")]
1666 (define_expand "umaddsidi4"
1667 [(set (match_operand:DI 0 "s_register_operand" "")
1670 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1671 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1672 (match_operand:DI 3 "s_register_operand" "")))]
1673 "TARGET_32BIT && arm_arch3m"
1676 (define_insn "*umulsidi3adddi"
1677 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1680 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1681 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1682 (match_operand:DI 1 "s_register_operand" "0")))]
1683 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1684 "umlal%?\\t%Q0, %R0, %3, %2"
1685 [(set_attr "insn" "umlal")
1686 (set_attr "predicable" "yes")]
1689 (define_insn "*umulsidi3adddi_v6"
1690 [(set (match_operand:DI 0 "s_register_operand" "=r")
1693 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1694 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1695 (match_operand:DI 1 "s_register_operand" "0")))]
1696 "TARGET_32BIT && arm_arch6"
1697 "umlal%?\\t%Q0, %R0, %3, %2"
1698 [(set_attr "insn" "umlal")
1699 (set_attr "predicable" "yes")]
1702 (define_expand "smulsi3_highpart"
1704 [(set (match_operand:SI 0 "s_register_operand" "")
1708 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1709 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1711 (clobber (match_scratch:SI 3 ""))])]
1712 "TARGET_32BIT && arm_arch3m"
1716 (define_insn "*smulsi3_highpart_nov6"
1717 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1721 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1722 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1724 (clobber (match_scratch:SI 3 "=&r,&r"))]
1725 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1726 "smull%?\\t%3, %0, %2, %1"
1727 [(set_attr "insn" "smull")
1728 (set_attr "predicable" "yes")]
1731 (define_insn "*smulsi3_highpart_v6"
1732 [(set (match_operand:SI 0 "s_register_operand" "=r")
1736 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1737 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1739 (clobber (match_scratch:SI 3 "=r"))]
1740 "TARGET_32BIT && arm_arch6"
1741 "smull%?\\t%3, %0, %2, %1"
1742 [(set_attr "insn" "smull")
1743 (set_attr "predicable" "yes")]
1746 (define_expand "umulsi3_highpart"
1748 [(set (match_operand:SI 0 "s_register_operand" "")
1752 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1753 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1755 (clobber (match_scratch:SI 3 ""))])]
1756 "TARGET_32BIT && arm_arch3m"
1760 (define_insn "*umulsi3_highpart_nov6"
1761 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1765 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1766 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1768 (clobber (match_scratch:SI 3 "=&r,&r"))]
1769 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1770 "umull%?\\t%3, %0, %2, %1"
1771 [(set_attr "insn" "umull")
1772 (set_attr "predicable" "yes")]
1775 (define_insn "*umulsi3_highpart_v6"
1776 [(set (match_operand:SI 0 "s_register_operand" "=r")
1780 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1781 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1783 (clobber (match_scratch:SI 3 "=r"))]
1784 "TARGET_32BIT && arm_arch6"
1785 "umull%?\\t%3, %0, %2, %1"
1786 [(set_attr "insn" "umull")
1787 (set_attr "predicable" "yes")]
1790 (define_insn "mulhisi3"
1791 [(set (match_operand:SI 0 "s_register_operand" "=r")
1792 (mult:SI (sign_extend:SI
1793 (match_operand:HI 1 "s_register_operand" "%r"))
1795 (match_operand:HI 2 "s_register_operand" "r"))))]
1796 "TARGET_DSP_MULTIPLY"
1797 "smulbb%?\\t%0, %1, %2"
1798 [(set_attr "insn" "smulxy")
1799 (set_attr "predicable" "yes")]
1802 (define_insn "*mulhisi3tb"
1803 [(set (match_operand:SI 0 "s_register_operand" "=r")
1804 (mult:SI (ashiftrt:SI
1805 (match_operand:SI 1 "s_register_operand" "r")
1808 (match_operand:HI 2 "s_register_operand" "r"))))]
1809 "TARGET_DSP_MULTIPLY"
1810 "smultb%?\\t%0, %1, %2"
1811 [(set_attr "insn" "smulxy")
1812 (set_attr "predicable" "yes")]
1815 (define_insn "*mulhisi3bt"
1816 [(set (match_operand:SI 0 "s_register_operand" "=r")
1817 (mult:SI (sign_extend:SI
1818 (match_operand:HI 1 "s_register_operand" "r"))
1820 (match_operand:SI 2 "s_register_operand" "r")
1822 "TARGET_DSP_MULTIPLY"
1823 "smulbt%?\\t%0, %1, %2"
1824 [(set_attr "insn" "smulxy")
1825 (set_attr "predicable" "yes")]
1828 (define_insn "*mulhisi3tt"
1829 [(set (match_operand:SI 0 "s_register_operand" "=r")
1830 (mult:SI (ashiftrt:SI
1831 (match_operand:SI 1 "s_register_operand" "r")
1834 (match_operand:SI 2 "s_register_operand" "r")
1836 "TARGET_DSP_MULTIPLY"
1837 "smultt%?\\t%0, %1, %2"
1838 [(set_attr "insn" "smulxy")
1839 (set_attr "predicable" "yes")]
1842 (define_insn "maddhisi4"
1843 [(set (match_operand:SI 0 "s_register_operand" "=r")
1844 (plus:SI (mult:SI (sign_extend:SI
1845 (match_operand:HI 1 "s_register_operand" "r"))
1847 (match_operand:HI 2 "s_register_operand" "r")))
1848 (match_operand:SI 3 "s_register_operand" "r")))]
1849 "TARGET_DSP_MULTIPLY"
1850 "smlabb%?\\t%0, %1, %2, %3"
1851 [(set_attr "insn" "smlaxy")
1852 (set_attr "predicable" "yes")]
1855 ;; Note: there is no maddhisi4ibt because this one is canonical form
1856 (define_insn "*maddhisi4tb"
1857 [(set (match_operand:SI 0 "s_register_operand" "=r")
1858 (plus:SI (mult:SI (ashiftrt:SI
1859 (match_operand:SI 1 "s_register_operand" "r")
1862 (match_operand:HI 2 "s_register_operand" "r")))
1863 (match_operand:SI 3 "s_register_operand" "r")))]
1864 "TARGET_DSP_MULTIPLY"
1865 "smlatb%?\\t%0, %1, %2, %3"
1866 [(set_attr "insn" "smlaxy")
1867 (set_attr "predicable" "yes")]
1870 (define_insn "*maddhisi4tt"
1871 [(set (match_operand:SI 0 "s_register_operand" "=r")
1872 (plus:SI (mult:SI (ashiftrt:SI
1873 (match_operand:SI 1 "s_register_operand" "r")
1876 (match_operand:SI 2 "s_register_operand" "r")
1878 (match_operand:SI 3 "s_register_operand" "r")))]
1879 "TARGET_DSP_MULTIPLY"
1880 "smlatt%?\\t%0, %1, %2, %3"
1881 [(set_attr "insn" "smlaxy")
1882 (set_attr "predicable" "yes")]
1885 (define_insn "maddhidi4"
1886 [(set (match_operand:DI 0 "s_register_operand" "=r")
1888 (mult:DI (sign_extend:DI
1889 (match_operand:HI 1 "s_register_operand" "r"))
1891 (match_operand:HI 2 "s_register_operand" "r")))
1892 (match_operand:DI 3 "s_register_operand" "0")))]
1893 "TARGET_DSP_MULTIPLY"
1894 "smlalbb%?\\t%Q0, %R0, %1, %2"
1895 [(set_attr "insn" "smlalxy")
1896 (set_attr "predicable" "yes")])
1898 ;; Note: there is no maddhidi4ibt because this one is canonical form
1899 (define_insn "*maddhidi4tb"
1900 [(set (match_operand:DI 0 "s_register_operand" "=r")
1902 (mult:DI (sign_extend:DI
1904 (match_operand:SI 1 "s_register_operand" "r")
1907 (match_operand:HI 2 "s_register_operand" "r")))
1908 (match_operand:DI 3 "s_register_operand" "0")))]
1909 "TARGET_DSP_MULTIPLY"
1910 "smlaltb%?\\t%Q0, %R0, %1, %2"
1911 [(set_attr "insn" "smlalxy")
1912 (set_attr "predicable" "yes")])
1914 (define_insn "*maddhidi4tt"
1915 [(set (match_operand:DI 0 "s_register_operand" "=r")
1917 (mult:DI (sign_extend:DI
1919 (match_operand:SI 1 "s_register_operand" "r")
1923 (match_operand:SI 2 "s_register_operand" "r")
1925 (match_operand:DI 3 "s_register_operand" "0")))]
1926 "TARGET_DSP_MULTIPLY"
1927 "smlaltt%?\\t%Q0, %R0, %1, %2"
1928 [(set_attr "insn" "smlalxy")
1929 (set_attr "predicable" "yes")])
1931 (define_expand "mulsf3"
1932 [(set (match_operand:SF 0 "s_register_operand" "")
1933 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1934 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1935 "TARGET_32BIT && TARGET_HARD_FLOAT"
1938 && !cirrus_fp_register (operands[2], SFmode))
1939 operands[2] = force_reg (SFmode, operands[2]);
1942 (define_expand "muldf3"
1943 [(set (match_operand:DF 0 "s_register_operand" "")
1944 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1945 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1946 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1949 && !cirrus_fp_register (operands[2], DFmode))
1950 operands[2] = force_reg (DFmode, operands[2]);
1955 (define_expand "divsf3"
1956 [(set (match_operand:SF 0 "s_register_operand" "")
1957 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1958 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1959 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1962 (define_expand "divdf3"
1963 [(set (match_operand:DF 0 "s_register_operand" "")
1964 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1965 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1966 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1971 (define_expand "modsf3"
1972 [(set (match_operand:SF 0 "s_register_operand" "")
1973 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1974 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1975 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1978 (define_expand "moddf3"
1979 [(set (match_operand:DF 0 "s_register_operand" "")
1980 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1981 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1982 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1985 ;; Boolean and,ior,xor insns
1987 ;; Split up double word logical operations
1989 ;; Split up simple DImode logical operations. Simply perform the logical
1990 ;; operation on the upper and lower halves of the registers.
1992 [(set (match_operand:DI 0 "s_register_operand" "")
1993 (match_operator:DI 6 "logical_binary_operator"
1994 [(match_operand:DI 1 "s_register_operand" "")
1995 (match_operand:DI 2 "s_register_operand" "")]))]
1996 "TARGET_32BIT && reload_completed
1997 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1998 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1999 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2000 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2003 operands[3] = gen_highpart (SImode, operands[0]);
2004 operands[0] = gen_lowpart (SImode, operands[0]);
2005 operands[4] = gen_highpart (SImode, operands[1]);
2006 operands[1] = gen_lowpart (SImode, operands[1]);
2007 operands[5] = gen_highpart (SImode, operands[2]);
2008 operands[2] = gen_lowpart (SImode, operands[2]);
2013 [(set (match_operand:DI 0 "s_register_operand" "")
2014 (match_operator:DI 6 "logical_binary_operator"
2015 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2016 (match_operand:DI 1 "s_register_operand" "")]))]
2017 "TARGET_32BIT && reload_completed"
2018 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2019 (set (match_dup 3) (match_op_dup:SI 6
2020 [(ashiftrt:SI (match_dup 2) (const_int 31))
2024 operands[3] = gen_highpart (SImode, operands[0]);
2025 operands[0] = gen_lowpart (SImode, operands[0]);
2026 operands[4] = gen_highpart (SImode, operands[1]);
2027 operands[1] = gen_lowpart (SImode, operands[1]);
2028 operands[5] = gen_highpart (SImode, operands[2]);
2029 operands[2] = gen_lowpart (SImode, operands[2]);
2033 ;; The zero extend of operand 2 means we can just copy the high part of
2034 ;; operand1 into operand0.
2036 [(set (match_operand:DI 0 "s_register_operand" "")
2038 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2039 (match_operand:DI 1 "s_register_operand" "")))]
2040 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2041 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2042 (set (match_dup 3) (match_dup 4))]
2045 operands[4] = gen_highpart (SImode, operands[1]);
2046 operands[3] = gen_highpart (SImode, operands[0]);
2047 operands[0] = gen_lowpart (SImode, operands[0]);
2048 operands[1] = gen_lowpart (SImode, operands[1]);
2052 ;; The zero extend of operand 2 means we can just copy the high part of
2053 ;; operand1 into operand0.
2055 [(set (match_operand:DI 0 "s_register_operand" "")
2057 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2058 (match_operand:DI 1 "s_register_operand" "")))]
2059 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2060 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2061 (set (match_dup 3) (match_dup 4))]
2064 operands[4] = gen_highpart (SImode, operands[1]);
2065 operands[3] = gen_highpart (SImode, operands[0]);
2066 operands[0] = gen_lowpart (SImode, operands[0]);
2067 operands[1] = gen_lowpart (SImode, operands[1]);
2071 (define_expand "anddi3"
2072 [(set (match_operand:DI 0 "s_register_operand" "")
2073 (and:DI (match_operand:DI 1 "s_register_operand" "")
2074 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
;; DImode bitwise AND of two core registers, performed as two 32-bit
;; operations (length 8).  Alternative 0 ties operand 1 to the
;; destination ("%0", commutative); the destination is early-clobbered
;; ("&") in both alternatives.  Disabled when iWMMXt or Neon provide
;; their own DImode AND patterns.
2079 (define_insn "*anddi3_insn"
2080 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2081 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2082 (match_operand:DI 2 "s_register_operand" "r,r")))]
2083 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2085 [(set_attr "length" "8")]
;; AND of a DImode register with a zero-extended SImode register.
;; After reload this splits into an SImode AND of the low words plus a
;; move of zero into the high word of the destination, since the
;; zero-extension guarantees the high half of the result is zero.
;; The preparation statements rewrite operands 0/1 as their low words
;; and set operand 3 to the destination's high word.
2088 (define_insn_and_split "*anddi_zesidi_di"
2089 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2090 (and:DI (zero_extend:DI
2091 (match_operand:SI 2 "s_register_operand" "r,r"))
2092 (match_operand:DI 1 "s_register_operand" "0,r")))]
2095 "TARGET_32BIT && reload_completed"
2096 ; The zero extend of operand 2 clears the high word of the output
2098 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2099 (set (match_dup 3) (const_int 0))]
2102 operands[3] = gen_highpart (SImode, operands[0]);
2103 operands[0] = gen_lowpart (SImode, operands[0]);
2104 operands[1] = gen_lowpart (SImode, operands[1]);
2106 [(set_attr "length" "8")]
;; AND of a DImode register with a sign-extended SImode register.
;; Two 32-bit operations (length 8): per the RTL, the high word of the
;; result is operand 1's high word ANDed with the replicated sign
;; (bit 31) of operand 2.
2109 (define_insn "*anddi_sesdi_di"
2110 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2111 (and:DI (sign_extend:DI
2112 (match_operand:SI 2 "s_register_operand" "r,r"))
2113 (match_operand:DI 1 "s_register_operand" "0,r")))]
2116 [(set_attr "length" "8")]
2119 (define_expand "andsi3"
2120 [(set (match_operand:SI 0 "s_register_operand" "")
2121 (and:SI (match_operand:SI 1 "s_register_operand" "")
2122 (match_operand:SI 2 "reg_or_int_operand" "")))]
2127 if (GET_CODE (operands[2]) == CONST_INT)
2129 if (INTVAL (operands[2]) == 255 && arm_arch6)
2131 operands[1] = convert_to_mode (QImode, operands[1], 1);
2132 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2136 arm_split_constant (AND, SImode, NULL_RTX,
2137 INTVAL (operands[2]), operands[0],
2139 optimize && can_create_pseudo_p ());
2144 else /* TARGET_THUMB1 */
2146 if (GET_CODE (operands[2]) != CONST_INT)
2148 rtx tmp = force_reg (SImode, operands[2]);
2149 if (rtx_equal_p (operands[0], operands[1]))
2153 operands[2] = operands[1];
2161 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2163 operands[2] = force_reg (SImode,
2164 GEN_INT (~INTVAL (operands[2])));
2166 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2171 for (i = 9; i <= 31; i++)
2173 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2175 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2179 else if ((((HOST_WIDE_INT) 1) << i) - 1
2180 == ~INTVAL (operands[2]))
2182 rtx shift = GEN_INT (i);
2183 rtx reg = gen_reg_rtx (SImode);
2185 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2186 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2192 operands[2] = force_reg (SImode, operands[2]);
2198 ; ??? Check split length for Thumb-2
2199 (define_insn_and_split "*arm_andsi3_insn"
2200 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2201 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2202 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2206 bic%?\\t%0, %1, #%B2
2209 && GET_CODE (operands[2]) == CONST_INT
2210 && !(const_ok_for_arm (INTVAL (operands[2]))
2211 || const_ok_for_arm (~INTVAL (operands[2])))"
2212 [(clobber (const_int 0))]
2214 arm_split_constant (AND, SImode, curr_insn,
2215 INTVAL (operands[2]), operands[0], operands[1], 0);
2218 [(set_attr "length" "4,4,16")
2219 (set_attr "predicable" "yes")]
;; Thumb-1 AND: both source operands must be low registers ("l"),
;; operand 1 is tied to the destination (and commutative, "%0"), the
;; encoding is 16 bits (length 2) and the condition codes are set.
2222 (define_insn "*thumb1_andsi3_insn"
2223 [(set (match_operand:SI 0 "register_operand" "=l")
2224 (and:SI (match_operand:SI 1 "register_operand" "%0")
2225 (match_operand:SI 2 "register_operand" "l")))]
2228 [(set_attr "length" "2")
2229 (set_attr "conds" "set")])
;; AND that also sets the condition codes from a compare of the result
;; with zero (CC_NOOV: overflow is not meaningful for logical ops).
;; The "K" alternative matches constants whose bitwise complement is a
;; valid immediate and emits BICS with the inverted constant (%B2).
2231 (define_insn "*andsi3_compare0"
2232 [(set (reg:CC_NOOV CC_REGNUM)
2234 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2235 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2237 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2238 (and:SI (match_dup 1) (match_dup 2)))]
2242 bic%.\\t%0, %1, #%B2"
2243 [(set_attr "conds" "set")]
;; As *andsi3_compare0, but only the condition codes are wanted: the
;; AND result itself is discarded into a scratch ("X" = no register
;; needed for the TST-style alternative).
2246 (define_insn "*andsi3_compare0_scratch"
2247 [(set (reg:CC_NOOV CC_REGNUM)
2249 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2250 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2252 (clobber (match_scratch:SI 2 "=X,r"))]
2256 bic%.\\t%2, %0, #%B1"
2257 [(set_attr "conds" "set")]
;; Compare a zero_extract'd bitfield against zero without keeping the
;; extracted value.  The insn condition restricts width (operand 1)
;; and position (operand 2) so that the equivalent AND mask is a valid
;; immediate; the output code folds them into that mask,
;; ((1 << width) - 1) << position, and emits TST.
2260 (define_insn "*zeroextractsi_compare0_scratch"
2261 [(set (reg:CC_NOOV CC_REGNUM)
2262 (compare:CC_NOOV (zero_extract:SI
2263 (match_operand:SI 0 "s_register_operand" "r")
2264 (match_operand 1 "const_int_operand" "n")
2265 (match_operand 2 "const_int_operand" "n"))
2268 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2269 && INTVAL (operands[1]) > 0
2270 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2271 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2273 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2274 << INTVAL (operands[2]));
2275 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2278 [(set_attr "conds" "set")
2279 (set_attr "predicable" "yes")]
2282 (define_insn_and_split "*ne_zeroextractsi"
2283 [(set (match_operand:SI 0 "s_register_operand" "=r")
2284 (ne:SI (zero_extract:SI
2285 (match_operand:SI 1 "s_register_operand" "r")
2286 (match_operand:SI 2 "const_int_operand" "n")
2287 (match_operand:SI 3 "const_int_operand" "n"))
2289 (clobber (reg:CC CC_REGNUM))]
2291 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2292 && INTVAL (operands[2]) > 0
2293 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2294 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2297 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2298 && INTVAL (operands[2]) > 0
2299 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2300 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2301 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2302 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2304 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2306 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2307 (match_dup 0) (const_int 1)))]
2309 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2310 << INTVAL (operands[3]));
2312 [(set_attr "conds" "clob")
2313 (set (attr "length")
2314 (if_then_else (eq_attr "is_thumb" "yes")
2319 (define_insn_and_split "*ne_zeroextractsi_shifted"
2320 [(set (match_operand:SI 0 "s_register_operand" "=r")
2321 (ne:SI (zero_extract:SI
2322 (match_operand:SI 1 "s_register_operand" "r")
2323 (match_operand:SI 2 "const_int_operand" "n")
2326 (clobber (reg:CC CC_REGNUM))]
2330 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2331 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2333 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2335 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2336 (match_dup 0) (const_int 1)))]
2338 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2340 [(set_attr "conds" "clob")
2341 (set_attr "length" "8")]
2344 (define_insn_and_split "*ite_ne_zeroextractsi"
2345 [(set (match_operand:SI 0 "s_register_operand" "=r")
2346 (if_then_else:SI (ne (zero_extract:SI
2347 (match_operand:SI 1 "s_register_operand" "r")
2348 (match_operand:SI 2 "const_int_operand" "n")
2349 (match_operand:SI 3 "const_int_operand" "n"))
2351 (match_operand:SI 4 "arm_not_operand" "rIK")
2353 (clobber (reg:CC CC_REGNUM))]
2355 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2356 && INTVAL (operands[2]) > 0
2357 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2358 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2359 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2362 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2363 && INTVAL (operands[2]) > 0
2364 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2365 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2366 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2367 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2368 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2370 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2372 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2373 (match_dup 0) (match_dup 4)))]
2375 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2376 << INTVAL (operands[3]));
2378 [(set_attr "conds" "clob")
2379 (set_attr "length" "8")]
2382 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2383 [(set (match_operand:SI 0 "s_register_operand" "=r")
2384 (if_then_else:SI (ne (zero_extract:SI
2385 (match_operand:SI 1 "s_register_operand" "r")
2386 (match_operand:SI 2 "const_int_operand" "n")
2389 (match_operand:SI 3 "arm_not_operand" "rIK")
2391 (clobber (reg:CC CC_REGNUM))]
2392 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2394 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2395 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2396 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2398 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2400 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2401 (match_dup 0) (match_dup 3)))]
2403 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2405 [(set_attr "conds" "clob")
2406 (set_attr "length" "8")]
2410 [(set (match_operand:SI 0 "s_register_operand" "")
2411 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2412 (match_operand:SI 2 "const_int_operand" "")
2413 (match_operand:SI 3 "const_int_operand" "")))
2414 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2416 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2417 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2419 HOST_WIDE_INT temp = INTVAL (operands[2]);
2421 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2422 operands[3] = GEN_INT (32 - temp);
2426 ;; ??? Use the Thumb-2 bitfield insert/extract instructions.
2428 [(set (match_operand:SI 0 "s_register_operand" "")
2429 (match_operator:SI 1 "shiftable_operator"
2430 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2431 (match_operand:SI 3 "const_int_operand" "")
2432 (match_operand:SI 4 "const_int_operand" ""))
2433 (match_operand:SI 5 "s_register_operand" "")]))
2434 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2436 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2439 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2442 HOST_WIDE_INT temp = INTVAL (operands[3]);
2444 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2445 operands[4] = GEN_INT (32 - temp);
2450 [(set (match_operand:SI 0 "s_register_operand" "")
2451 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2452 (match_operand:SI 2 "const_int_operand" "")
2453 (match_operand:SI 3 "const_int_operand" "")))]
2455 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2456 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2458 HOST_WIDE_INT temp = INTVAL (operands[2]);
2460 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2461 operands[3] = GEN_INT (32 - temp);
2466 [(set (match_operand:SI 0 "s_register_operand" "")
2467 (match_operator:SI 1 "shiftable_operator"
2468 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2469 (match_operand:SI 3 "const_int_operand" "")
2470 (match_operand:SI 4 "const_int_operand" ""))
2471 (match_operand:SI 5 "s_register_operand" "")]))
2472 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2474 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2477 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2480 HOST_WIDE_INT temp = INTVAL (operands[3]);
2482 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2483 operands[4] = GEN_INT (32 - temp);
2487 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2488 ;;; represented by the bitfield, then this will produce incorrect results.
2489 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2490 ;;; which have a real bit-field insert instruction, the truncation happens
2491 ;;; in the bit-field insert instruction itself. Since arm does not have a
2492 ;;; bit-field insert instruction, we would have to emit code here to truncate
2493 ;;; the value before we insert. This loses some of the advantage of having
2494 ;;; this insv pattern, so this pattern needs to be reevaluated.
2496 (define_expand "insv"
2497 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2498 (match_operand 1 "general_operand" "")
2499 (match_operand 2 "general_operand" ""))
2500 (match_operand 3 "reg_or_int_operand" ""))]
2501 "TARGET_ARM || arm_arch_thumb2"
2504 int start_bit = INTVAL (operands[2]);
2505 int width = INTVAL (operands[1]);
2506 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2507 rtx target, subtarget;
2509 if (arm_arch_thumb2)
2511 if (unaligned_access && MEM_P (operands[0])
2512 && s_register_operand (operands[3], GET_MODE (operands[3]))
2513 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2517 if (BYTES_BIG_ENDIAN)
2518 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2523 base_addr = adjust_address (operands[0], SImode,
2524 start_bit / BITS_PER_UNIT);
2525 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2529 rtx tmp = gen_reg_rtx (HImode);
2531 base_addr = adjust_address (operands[0], HImode,
2532 start_bit / BITS_PER_UNIT);
2533 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2534 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2538 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2540 bool use_bfi = TRUE;
2542 if (GET_CODE (operands[3]) == CONST_INT)
2544 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2548 emit_insn (gen_insv_zero (operands[0], operands[1],
2553 /* See if the set can be done with a single orr instruction. */
2554 if (val == mask && const_ok_for_arm (val << start_bit))
2560 if (GET_CODE (operands[3]) != REG)
2561 operands[3] = force_reg (SImode, operands[3]);
2563 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2572 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2575 target = copy_rtx (operands[0]);
2576 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2577 subreg as the final target. */
2578 if (GET_CODE (target) == SUBREG)
2580 subtarget = gen_reg_rtx (SImode);
2581 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2582 < GET_MODE_SIZE (SImode))
2583 target = SUBREG_REG (target);
2588 if (GET_CODE (operands[3]) == CONST_INT)
2590 /* Since we are inserting a known constant, we may be able to
2591 reduce the number of bits that we have to clear so that
2592 the mask becomes simple. */
2593 /* ??? This code does not check to see if the new mask is actually
2594 simpler. It may not be. */
2595 rtx op1 = gen_reg_rtx (SImode);
2596 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2597 start of this pattern. */
2598 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2599 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2601 emit_insn (gen_andsi3 (op1, operands[0],
2602 gen_int_mode (~mask2, SImode)));
2603 emit_insn (gen_iorsi3 (subtarget, op1,
2604 gen_int_mode (op3_value << start_bit, SImode)));
2606 else if (start_bit == 0
2607 && !(const_ok_for_arm (mask)
2608 || const_ok_for_arm (~mask)))
2610 /* A Trick, since we are setting the bottom bits in the word,
2611 we can shift operand[3] up, operand[0] down, OR them together
2612 and rotate the result back again. This takes 3 insns, and
2613 the third might be mergeable into another op. */
2614 /* The shift up copes with the possibility that operand[3] is
2615 wider than the bitfield. */
2616 rtx op0 = gen_reg_rtx (SImode);
2617 rtx op1 = gen_reg_rtx (SImode);
2619 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2620 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2621 emit_insn (gen_iorsi3 (op1, op1, op0));
2622 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2624 else if ((width + start_bit == 32)
2625 && !(const_ok_for_arm (mask)
2626 || const_ok_for_arm (~mask)))
2628 /* Similar trick, but slightly less efficient. */
2630 rtx op0 = gen_reg_rtx (SImode);
2631 rtx op1 = gen_reg_rtx (SImode);
2633 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2634 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2635 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2636 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2640 rtx op0 = gen_int_mode (mask, SImode);
2641 rtx op1 = gen_reg_rtx (SImode);
2642 rtx op2 = gen_reg_rtx (SImode);
2644 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2646 rtx tmp = gen_reg_rtx (SImode);
2648 emit_insn (gen_movsi (tmp, op0));
2652 /* Mask out any bits in operand[3] that are not needed. */
2653 emit_insn (gen_andsi3 (op1, operands[3], op0));
2655 if (GET_CODE (op0) == CONST_INT
2656 && (const_ok_for_arm (mask << start_bit)
2657 || const_ok_for_arm (~(mask << start_bit))))
2659 op0 = gen_int_mode (~(mask << start_bit), SImode);
2660 emit_insn (gen_andsi3 (op2, operands[0], op0));
2664 if (GET_CODE (op0) == CONST_INT)
2666 rtx tmp = gen_reg_rtx (SImode);
2668 emit_insn (gen_movsi (tmp, op0));
2673 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2675 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2679 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2681 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2684 if (subtarget != target)
2686 /* If TARGET is still a SUBREG, then it must be wider than a word,
2687 so we must be careful only to set the subword we were asked to. */
2688 if (GET_CODE (target) == SUBREG)
2689 emit_move_insn (target, subtarget);
2691 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield in a register in place: insert into a zero_extract
;; of width operand 1 at bit position operand 2 of operand 0 (which is
;; read-modify-write, "+r").  A single predicable 4-byte instruction.
2698 (define_insn "insv_zero"
2699 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2700 (match_operand:SI 1 "const_int_operand" "M")
2701 (match_operand:SI 2 "const_int_operand" "M"))
2705 [(set_attr "length" "4")
2706 (set_attr "predicable" "yes")]
;; Bit-field insert via BFI: copy the low operand-1 bits of register
;; operand 3 into operand 0 starting at bit position operand 2.
;; Operand 0 is read-modify-write ("+r"); single predicable 4-byte
;; instruction.
2709 (define_insn "insv_t2"
2710 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2711 (match_operand:SI 1 "const_int_operand" "M")
2712 (match_operand:SI 2 "const_int_operand" "M"))
2713 (match_operand:SI 3 "s_register_operand" "r"))]
2715 "bfi%?\t%0, %3, %2, %1"
2716 [(set_attr "length" "4")
2717 (set_attr "predicable" "yes")]
2720 ; constants for op 2 will never be given to these patterns.
;; DImode and-not: operand 2 AND (NOT operand 1).  After reload — and
;; only when the destination is not a Neon (VFP) or iWMMXt register —
;; this splits into two SImode and-not operations, one on the low word
;; pair and one on the high word pair.  The preparation statements
;; rewrite operands 0/1/2 as low words and create operands 3/4/5 as
;; the corresponding high words.
2721 (define_insn_and_split "*anddi_notdi_di"
2722 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2723 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2724 (match_operand:DI 2 "s_register_operand" "r,0")))]
2727 "TARGET_32BIT && reload_completed
2728 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2729 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2730 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2731 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2734 operands[3] = gen_highpart (SImode, operands[0]);
2735 operands[0] = gen_lowpart (SImode, operands[0]);
2736 operands[4] = gen_highpart (SImode, operands[1]);
2737 operands[1] = gen_lowpart (SImode, operands[1]);
2738 operands[5] = gen_highpart (SImode, operands[2]);
2739 operands[2] = gen_lowpart (SImode, operands[2]);
2741 [(set_attr "length" "8")
2742 (set_attr "predicable" "yes")]
;; DImode AND with the complement of a zero-extended SImode value.
;; (not (zero_extend x)) is all-ones in the high word, so the high
;; word of the result is just operand 1's high word: when operand 1 is
;; tied to the destination a single BIC on the low words suffices
;; (%Q0/%Q1 select the low word, length 4); the split for the untied
;; alternative adds a plain high-word copy (length 8).
2745 (define_insn_and_split "*anddi_notzesidi_di"
2746 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2747 (and:DI (not:DI (zero_extend:DI
2748 (match_operand:SI 2 "s_register_operand" "r,r")))
2749 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2752 bic%?\\t%Q0, %Q1, %2
2754 ; (not (zero_extend ...)) allows us to just copy the high word from
2755 ; operand1 to operand0.
2758 && operands[0] != operands[1]"
2759 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2760 (set (match_dup 3) (match_dup 4))]
2763 operands[3] = gen_highpart (SImode, operands[0]);
2764 operands[0] = gen_lowpart (SImode, operands[0]);
2765 operands[4] = gen_highpart (SImode, operands[1]);
2766 operands[1] = gen_lowpart (SImode, operands[1]);
2768 [(set_attr "length" "4,8")
2769 (set_attr "predicable" "yes")]
;; DImode AND with the complement of a sign-extended SImode value.
;; After reload this splits into a low-word and-not plus a high-word
;; and-not against the replicated sign of operand 2
;; (ashiftrt by 31).
2772 (define_insn_and_split "*anddi_notsesidi_di"
2773 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2774 (and:DI (not:DI (sign_extend:DI
2775 (match_operand:SI 2 "s_register_operand" "r,r")))
2776 (match_operand:DI 1 "s_register_operand" "0,r")))]
2779 "TARGET_32BIT && reload_completed"
2780 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2781 (set (match_dup 3) (and:SI (not:SI
2782 (ashiftrt:SI (match_dup 2) (const_int 31)))
2786 operands[3] = gen_highpart (SImode, operands[0]);
2787 operands[0] = gen_lowpart (SImode, operands[0]);
2788 operands[4] = gen_highpart (SImode, operands[1]);
2789 operands[1] = gen_lowpart (SImode, operands[1]);
2791 [(set_attr "length" "8")
2792 (set_attr "predicable" "yes")]
;; SImode and-not (operand 1 AND NOT operand 2): the ARM BIC
;; instruction.  This is a named pattern — the insv expander earlier
;; in the file generates it directly via gen_andsi_notsi_si.
2795 (define_insn "andsi_notsi_si"
2796 [(set (match_operand:SI 0 "s_register_operand" "=r")
2797 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2798 (match_operand:SI 1 "s_register_operand" "r")))]
2800 "bic%?\\t%0, %1, %2"
2801 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC.  Note the operand order: operand 1 is the value being
;; complemented, and operand 2 (the value being masked) is tied to the
;; destination ("0").  16-bit encoding; sets the condition codes.
2804 (define_insn "thumb1_bicsi3"
2805 [(set (match_operand:SI 0 "register_operand" "=l")
2806 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2807 (match_operand:SI 2 "register_operand" "0")))]
2810 [(set_attr "length" "2")
2811 (set_attr "conds" "set")])
;; BIC where the complemented operand is first shifted by a constant
;; or register amount (%S4 prints the shift operator and amount).
;; The "type" attribute distinguishes constant shifts (alu_shift) from
;; register-specified shifts (alu_shift_reg) for the scheduler.
2813 (define_insn "andsi_not_shiftsi_si"
2814 [(set (match_operand:SI 0 "s_register_operand" "=r")
2815 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2816 [(match_operand:SI 2 "s_register_operand" "r")
2817 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2818 (match_operand:SI 1 "s_register_operand" "r")))]
2820 "bic%?\\t%0, %1, %2%S4"
2821 [(set_attr "predicable" "yes")
2822 (set_attr "shift" "2")
2823 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2824 (const_string "alu_shift")
2825 (const_string "alu_shift_reg")))]
;; BIC that also sets the condition codes from a compare of the result
;; with zero (the %. suffix emits the flag-setting BICS form).
2828 (define_insn "*andsi_notsi_si_compare0"
2829 [(set (reg:CC_NOOV CC_REGNUM)
2831 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2832 (match_operand:SI 1 "s_register_operand" "r"))
2834 (set (match_operand:SI 0 "s_register_operand" "=r")
2835 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2837 "bic%.\\t%0, %1, %2"
2838 [(set_attr "conds" "set")]
;; Flag-setting BIC used purely for its condition-code effect; the
;; arithmetic result goes to a scratch register.
2841 (define_insn "*andsi_notsi_si_compare0_scratch"
2842 [(set (reg:CC_NOOV CC_REGNUM)
2844 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2845 (match_operand:SI 1 "s_register_operand" "r"))
2847 (clobber (match_scratch:SI 0 "=r"))]
2849 "bic%.\\t%0, %1, %2"
2850 [(set_attr "conds" "set")]
2853 (define_expand "iordi3"
2854 [(set (match_operand:DI 0 "s_register_operand" "")
2855 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2856 (match_operand:DI 2 "neon_logic_op2" "")))]
;; DImode inclusive OR of two core registers, as two 32-bit operations
;; (length 8).  Alternative 0 ties operand 1 to the early-clobbered
;; destination.  Disabled when iWMMXt or Neon handle DImode OR.
2861 (define_insn "*iordi3_insn"
2862 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2863 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2864 (match_operand:DI 2 "s_register_operand" "r,r")))]
2865 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2867 [(set_attr "length" "8")
2868 (set_attr "predicable" "yes")]
;; DImode OR with a zero-extended SImode operand.  The zero-extension
;; leaves the high word of the result equal to operand 1's high word,
;; so when operand 1 is tied to the destination a single ORR on the
;; low words suffices (%Q0/%Q1 select the low word, length 4);
;; otherwise two instructions are needed (length 8).
2871 (define_insn "*iordi_zesidi_di"
2872 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2873 (ior:DI (zero_extend:DI
2874 (match_operand:SI 2 "s_register_operand" "r,r"))
2875 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2878 orr%?\\t%Q0, %Q1, %2
2880 [(set_attr "length" "4,8")
2881 (set_attr "predicable" "yes")]
2884 (define_insn "*iordi_sesidi_di"
2885 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2886 (ior:DI (sign_extend:DI
2887 (match_operand:SI 2 "s_register_operand" "r,r"))
2888 (match_operand:DI 1 "s_register_operand" "0,r")))]
2891 [(set_attr "length" "8")
2892 (set_attr "predicable" "yes")]
2895 (define_expand "iorsi3"
2896 [(set (match_operand:SI 0 "s_register_operand" "")
2897 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2898 (match_operand:SI 2 "reg_or_int_operand" "")))]
2901 if (GET_CODE (operands[2]) == CONST_INT)
2905 arm_split_constant (IOR, SImode, NULL_RTX,
2906 INTVAL (operands[2]), operands[0], operands[1],
2907 optimize && can_create_pseudo_p ());
2910 else /* TARGET_THUMB1 */
2912 rtx tmp = force_reg (SImode, operands[2]);
2913 if (rtx_equal_p (operands[0], operands[1]))
2917 operands[2] = operands[1];
2925 (define_insn_and_split "*iorsi3_insn"
2926 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2927 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2928 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2932 orn%?\\t%0, %1, #%B2
2935 && GET_CODE (operands[2]) == CONST_INT
2936 && !(const_ok_for_arm (INTVAL (operands[2]))
2937 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2938 [(clobber (const_int 0))]
2940 arm_split_constant (IOR, SImode, curr_insn,
2941 INTVAL (operands[2]), operands[0], operands[1], 0);
2944 [(set_attr "length" "4,4,16")
2945 (set_attr "arch" "32,t2,32")
2946 (set_attr "predicable" "yes")])
2948 (define_insn "*thumb1_iorsi3_insn"
2949 [(set (match_operand:SI 0 "register_operand" "=l")
2950 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2951 (match_operand:SI 2 "register_operand" "l")))]
2954 [(set_attr "length" "2")
2955 (set_attr "conds" "set")])
2958 [(match_scratch:SI 3 "r")
2959 (set (match_operand:SI 0 "arm_general_register_operand" "")
2960 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2961 (match_operand:SI 2 "const_int_operand" "")))]
2963 && !const_ok_for_arm (INTVAL (operands[2]))
2964 && const_ok_for_arm (~INTVAL (operands[2]))"
2965 [(set (match_dup 3) (match_dup 2))
2966 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORR that also sets the condition codes from a compare of the result
;; with zero (the %. suffix emits the flag-setting ORRS form).
;; Operand 1 is commutative ("%r").
2970 (define_insn "*iorsi3_compare0"
2971 [(set (reg:CC_NOOV CC_REGNUM)
2972 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2973 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2975 (set (match_operand:SI 0 "s_register_operand" "=r")
2976 (ior:SI (match_dup 1) (match_dup 2)))]
2978 "orr%.\\t%0, %1, %2"
2979 [(set_attr "conds" "set")]
2982 (define_insn "*iorsi3_compare0_scratch"
2983 [(set (reg:CC_NOOV CC_REGNUM)
2984 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2985 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2987 (clobber (match_scratch:SI 0 "=r"))]
2989 "orr%.\\t%0, %1, %2"
2990 [(set_attr "conds" "set")]
2993 (define_expand "xordi3"
2994 [(set (match_operand:DI 0 "s_register_operand" "")
2995 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2996 (match_operand:DI 2 "s_register_operand" "")))]
;; DImode exclusive OR of two core registers, as two 32-bit operations
;; (length 8).  Disabled when iWMMXt or Neon provide DImode XOR.
3001 (define_insn "*xordi3_insn"
3002 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3003 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
3004 (match_operand:DI 2 "s_register_operand" "r,r")))]
3005 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
3007 [(set_attr "length" "8")
3008 (set_attr "predicable" "yes")]
3011 (define_insn "*xordi_zesidi_di"
3012 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3013 (xor:DI (zero_extend:DI
3014 (match_operand:SI 2 "s_register_operand" "r,r"))
3015 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3018 eor%?\\t%Q0, %Q1, %2
3020 [(set_attr "length" "4,8")
3021 (set_attr "predicable" "yes")]
3024 (define_insn "*xordi_sesidi_di"
3025 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3026 (xor:DI (sign_extend:DI
3027 (match_operand:SI 2 "s_register_operand" "r,r"))
3028 (match_operand:DI 1 "s_register_operand" "0,r")))]
3031 [(set_attr "length" "8")
3032 (set_attr "predicable" "yes")]
3035 (define_expand "xorsi3"
3036 [(set (match_operand:SI 0 "s_register_operand" "")
3037 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3038 (match_operand:SI 2 "reg_or_int_operand" "")))]
3040 "if (GET_CODE (operands[2]) == CONST_INT)
3044 arm_split_constant (XOR, SImode, NULL_RTX,
3045 INTVAL (operands[2]), operands[0], operands[1],
3046 optimize && can_create_pseudo_p ());
3049 else /* TARGET_THUMB1 */
3051 rtx tmp = force_reg (SImode, operands[2]);
3052 if (rtx_equal_p (operands[0], operands[1]))
3056 operands[2] = operands[1];
3063 (define_insn_and_split "*arm_xorsi3"
3064 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3065 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3066 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
3072 && GET_CODE (operands[2]) == CONST_INT
3073 && !const_ok_for_arm (INTVAL (operands[2]))"
3074 [(clobber (const_int 0))]
3076 arm_split_constant (XOR, SImode, curr_insn,
3077 INTVAL (operands[2]), operands[0], operands[1], 0);
3080 [(set_attr "length" "4,16")
3081 (set_attr "predicable" "yes")]
3084 (define_insn "*thumb1_xorsi3_insn"
3085 [(set (match_operand:SI 0 "register_operand" "=l")
3086 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3087 (match_operand:SI 2 "register_operand" "l")))]
3090 [(set_attr "length" "2")
3091 (set_attr "conds" "set")])
;; EOR that also sets the condition codes from a compare of the result
;; with zero (the %. suffix emits the flag-setting EORS form).
3093 (define_insn "*xorsi3_compare0"
3094 [(set (reg:CC_NOOV CC_REGNUM)
3095 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3096 (match_operand:SI 2 "arm_rhs_operand" "rI"))
3098 (set (match_operand:SI 0 "s_register_operand" "=r")
3099 (xor:SI (match_dup 1) (match_dup 2)))]
3101 "eor%.\\t%0, %1, %2"
3102 [(set_attr "conds" "set")]
3105 (define_insn "*xorsi3_compare0_scratch"
3106 [(set (reg:CC_NOOV CC_REGNUM)
3107 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
3108 (match_operand:SI 1 "arm_rhs_operand" "rI"))
3112 [(set_attr "conds" "set")]
3115 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3116 ; (NOT D) we can sometimes merge the final NOT into one of the following
3120 [(set (match_operand:SI 0 "s_register_operand" "")
3121 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3122 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3123 (match_operand:SI 3 "arm_rhs_operand" "")))
3124 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3126 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3127 (not:SI (match_dup 3))))
3128 (set (match_dup 0) (not:SI (match_dup 4)))]
;; Compute (operand1 | operand2) & ~operand3 as an ORR followed by a
;; BIC into the same (early-clobbered) destination.  Two instructions
;; (length 8), both conditionally executable (ce_count 2).
3132 (define_insn "*andsi_iorsi3_notsi"
3133 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3134 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3135 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3136 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3138 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3139 [(set_attr "length" "8")
3140 (set_attr "ce_count" "2")
3141 (set_attr "predicable" "yes")]
3144 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3145 ; insns are available?
;; Four near-identical splitters.  Each matches a logical op combining a
;; zero/sign-extract with (another logical op of a right shift), where the
;; extract width equals 32 minus the shift count, and rewrites the extract
;; as an explicit shift pair through scratch operand 8.  They differ only
;; in operand order (extract first vs. second) and in zero_extract/lshiftrt
;; vs. sign_extract/ashiftrt.
;; NOTE(review): the (define_split lines and parts of the replacement
;; patterns are missing from this extract.
;; Splitter 1: zero_extract as the first operand of the outer operator.
3147 [(set (match_operand:SI 0 "s_register_operand" "")
3148 (match_operator:SI 1 "logical_binary_operator"
3149 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3150 (match_operand:SI 3 "const_int_operand" "")
3151 (match_operand:SI 4 "const_int_operand" ""))
3152 (match_operator:SI 9 "logical_binary_operator"
3153 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3154 (match_operand:SI 6 "const_int_operand" ""))
3155 (match_operand:SI 7 "s_register_operand" "")])]))
3156 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3158 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3159 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3162 [(ashift:SI (match_dup 2) (match_dup 4))
3166 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3169 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 2: same as above with the zero_extract as the second operand.
3173 [(set (match_operand:SI 0 "s_register_operand" "")
3174 (match_operator:SI 1 "logical_binary_operator"
3175 [(match_operator:SI 9 "logical_binary_operator"
3176 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3177 (match_operand:SI 6 "const_int_operand" ""))
3178 (match_operand:SI 7 "s_register_operand" "")])
3179 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3180 (match_operand:SI 3 "const_int_operand" "")
3181 (match_operand:SI 4 "const_int_operand" ""))]))
3182 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3184 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3185 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3188 [(ashift:SI (match_dup 2) (match_dup 4))
3192 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3195 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 3: signed variant (sign_extract / ashiftrt), extract first.
3199 [(set (match_operand:SI 0 "s_register_operand" "")
3200 (match_operator:SI 1 "logical_binary_operator"
3201 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3202 (match_operand:SI 3 "const_int_operand" "")
3203 (match_operand:SI 4 "const_int_operand" ""))
3204 (match_operator:SI 9 "logical_binary_operator"
3205 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3206 (match_operand:SI 6 "const_int_operand" ""))
3207 (match_operand:SI 7 "s_register_operand" "")])]))
3208 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3210 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3211 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3214 [(ashift:SI (match_dup 2) (match_dup 4))
3218 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3221 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 4: signed variant with the sign_extract as the second operand.
3225 [(set (match_operand:SI 0 "s_register_operand" "")
3226 (match_operator:SI 1 "logical_binary_operator"
3227 [(match_operator:SI 9 "logical_binary_operator"
3228 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3229 (match_operand:SI 6 "const_int_operand" ""))
3230 (match_operand:SI 7 "s_register_operand" "")])
3231 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3232 (match_operand:SI 3 "const_int_operand" "")
3233 (match_operand:SI 4 "const_int_operand" ""))]))
3234 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3236 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3237 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3240 [(ashift:SI (match_dup 2) (match_dup 4))
3244 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3247 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3251 ;; Minimum and maximum insns
;; smax expander: smax(x, 0) and smax(x, -1) have branch-free single-insn
;; forms (see *smax_0 / *smax_m1 below) that do not clobber the flags, so
;; for those constants we emit a plain SET without the CC clobber.
3253 (define_expand "smaxsi3"
3255 (set (match_operand:SI 0 "s_register_operand" "")
3256 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3257 (match_operand:SI 2 "arm_rhs_operand" "")))
3258 (clobber (reg:CC CC_REGNUM))])]
3261 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3263 /* No need for a clobber of the condition code register here.  */
3264 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3265 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear all bits when the sign-extended x is negative.
3271 (define_insn "*smax_0"
3272 [(set (match_operand:SI 0 "s_register_operand" "=r")
3273 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3276 "bic%?\\t%0, %1, %1, asr #31"
3277 [(set_attr "predicable" "yes")]
;; max(x, -1): set all bits when x is negative.
3280 (define_insn "*smax_m1"
3281 [(set (match_operand:SI 0 "s_register_operand" "=r")
3282 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3285 "orr%?\\t%0, %1, %1, asr #31"
3286 [(set_attr "predicable" "yes")]
;; General signed max: compare then conditional move(s).  When the
;; destination is tied to operand 1 only the "lt" move is needed (8 bytes);
;; otherwise both conditional moves are emitted (12 bytes).  Clobbers CC.
3289 (define_insn "*arm_smax_insn"
3290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3291 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3292 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3293 (clobber (reg:CC CC_REGNUM))]
3296 cmp\\t%1, %2\;movlt\\t%0, %2
3297 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3298 [(set_attr "conds" "clob")
3299 (set_attr "length" "8,12")]
;; smin expander: smin(x, 0) has a flag-free single-insn form (*smin_0),
;; so that case is emitted without the CC clobber.
3302 (define_expand "sminsi3"
3304 (set (match_operand:SI 0 "s_register_operand" "")
3305 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "arm_rhs_operand" "")))
3307 (clobber (reg:CC CC_REGNUM))])]
3310 if (operands[2] == const0_rtx)
3312 /* No need for a clobber of the condition code register here.  */
3313 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3314 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when its sign bit is set, else 0.
3320 (define_insn "*smin_0"
3321 [(set (match_operand:SI 0 "s_register_operand" "=r")
3322 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3325 "and%?\\t%0, %1, %1, asr #31"
3326 [(set_attr "predicable" "yes")]
;; General signed min: mirror of *arm_smax_insn with ge/lt swapped.
3329 (define_insn "*arm_smin_insn"
3330 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3331 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3332 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3333 (clobber (reg:CC CC_REGNUM))]
3336 cmp\\t%1, %2\;movge\\t%0, %2
3337 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3338 [(set_attr "conds" "clob")
3339 (set_attr "length" "8,12")]
;; Unsigned max expander — no special-case constants, just exposes the
;; compare-and-conditional-move pattern below.  NOTE(review): the expander
;; condition line is missing from this extract.
3342 (define_expand "umaxsi3"
3344 (set (match_operand:SI 0 "s_register_operand" "")
3345 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3346 (match_operand:SI 2 "arm_rhs_operand" "")))
3347 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: cmp followed by movcc/movcs (unsigned condition codes).
;; Three alternatives: dest tied to op1, dest tied to op2, or disjoint
;; (which needs both conditional moves, length 12).
3352 (define_insn "*arm_umaxsi3"
3353 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3354 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3355 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3356 (clobber (reg:CC CC_REGNUM))]
3359 cmp\\t%1, %2\;movcc\\t%0, %2
3360 cmp\\t%1, %2\;movcs\\t%0, %1
3361 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3362 [(set_attr "conds" "clob")
3363 (set_attr "length" "8,8,12")]
;; Unsigned min expander, mirroring umaxsi3.
3366 (define_expand "uminsi3"
3368 (set (match_operand:SI 0 "s_register_operand" "")
3369 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3370 (match_operand:SI 2 "arm_rhs_operand" "")))
3371 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: same shape as *arm_umaxsi3 with cs/cc swapped.
3376 (define_insn "*arm_uminsi3"
3377 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3378 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3379 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3380 (clobber (reg:CC CC_REGNUM))]
3383 cmp\\t%1, %2\;movcs\\t%0, %2
3384 cmp\\t%1, %2\;movcc\\t%0, %1
3385 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3386 [(set_attr "conds" "clob")
3387 (set_attr "length" "8,8,12")]
;; Store min/max(r1, r2) straight to memory: cmp, then two conditional
;; stores (str%d3 / str%D3).  On Thumb-2 an IT (ite) instruction is emitted
;; before the conditional pair; length therefore depends on is_thumb.
3390 (define_insn "*store_minmaxsi"
3391 [(set (match_operand:SI 0 "memory_operand" "=m")
3392 (match_operator:SI 3 "minmax_operator"
3393 [(match_operand:SI 1 "s_register_operand" "r")
3394 (match_operand:SI 2 "s_register_operand" "r")]))
3395 (clobber (reg:CC CC_REGNUM))]
3398 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3399 operands[1], operands[2]);
3400 output_asm_insn (\"cmp\\t%1, %2\", operands);
3402 output_asm_insn (\"ite\t%d3\", operands);
3403 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3404 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3407 [(set_attr "conds" "clob")
3408 (set (attr "length")
3409 (if_then_else (eq_attr "is_thumb" "yes")
3412 (set_attr "type" "store1")]
3415 ; Reject the frame pointer in operand[1], since reloading this after
3416 ; it has been eliminated can cause carnage.
;; op4(minmax(op2, op3), op1): cmp then two conditional arithmetic insns
;; (%i4%d5 / %i4%D5).  The C body special-cases alternative 0 with op3 == 0
;; and a commutative op4 (PLUS/IOR/XOR); on Thumb-2 an it/ite prefix is
;; emitted as needed.  NOTE(review): several lines of the C output body
;; are missing from this extract.
3417 (define_insn "*minmax_arithsi"
3418 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3419 (match_operator:SI 4 "shiftable_operator"
3420 [(match_operator:SI 5 "minmax_operator"
3421 [(match_operand:SI 2 "s_register_operand" "r,r")
3422 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3423 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3424 (clobber (reg:CC CC_REGNUM))]
3425 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3428 enum rtx_code code = GET_CODE (operands[4]);
3431 if (which_alternative != 0 || operands[3] != const0_rtx
3432 || (code != PLUS && code != IOR && code != XOR))
3437 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3438 operands[2], operands[3]);
3439 output_asm_insn (\"cmp\\t%2, %3\", operands);
3443 output_asm_insn (\"ite\\t%d5\", operands);
3445 output_asm_insn (\"it\\t%d5\", operands);
3447 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3449 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3452 [(set_attr "conds" "clob")
3453 (set (attr "length")
3454 (if_then_else (eq_attr "is_thumb" "yes")
;; Saturation patterns.  SAT/SATrev iterate over {smin, smax}; the insn
;; condition <SAT:CODE> != <SATrev:CODE> ensures we only match the nested
;; smin(smax(...)) / smax(smin(...)) clamp forms.  SATlo/SAThi pick which
;; operand holds the low vs. high bound for each nesting order.
3459 (define_code_iterator SAT [smin smax])
3460 (define_code_iterator SATrev [smin smax])
3461 (define_code_attr SATlo [(smin "1") (smax "2")])
3462 (define_code_attr SAThi [(smin "2") (smax "1")])
;; Clamp of a register to a signed/unsigned range -> ssat/usat.
;; arm_sat_operator_match validates the two bounds and (in the output
;; body) returns the bit-width mask and whether the saturation is signed.
3464 (define_insn "*satsi_<SAT:code>"
3465 [(set (match_operand:SI 0 "s_register_operand" "=r")
3466 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
3467 (match_operand:SI 1 "const_int_operand" "i"))
3468 (match_operand:SI 2 "const_int_operand" "i")))]
3469 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3470 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3474 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3475 &mask, &signed_sat))
3478 operands[1] = GEN_INT (mask);
3480 return "ssat%?\t%0, %1, %3";
3482 return "usat%?\t%0, %1, %3";
3484 [(set_attr "predicable" "yes")
3485 (set_attr "insn" "sat")])
;; Same clamp applied to a shifted operand: ssat/usat accept an optional
;; shift on the source register (%4%S3 prints reg plus shift).
3487 (define_insn "*satsi_<SAT:code>_shift"
3488 [(set (match_operand:SI 0 "s_register_operand" "=r")
3489 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
3490 [(match_operand:SI 4 "s_register_operand" "r")
3491 (match_operand:SI 5 "const_int_operand" "i")])
3492 (match_operand:SI 1 "const_int_operand" "i"))
3493 (match_operand:SI 2 "const_int_operand" "i")))]
3494 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3495 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3499 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3500 &mask, &signed_sat))
3503 operands[1] = GEN_INT (mask);
3505 return "ssat%?\t%0, %1, %4%S3";
3507 return "usat%?\t%0, %1, %4%S3";
3509 [(set_attr "predicable" "yes")
3510 (set_attr "insn" "sat")
3511 (set_attr "shift" "3")
3512 (set_attr "type" "alu_shift")])
3514 ;; Shift and rotation insns
;; DI left shift: a shift by exactly 1 uses the dedicated movs/adc pair;
;; otherwise fall through to generic expansion unless iWMMXt (or
;; FPA+Maverick) handles DImode shifts natively.
3516 (define_expand "ashldi3"
3517 [(set (match_operand:DI 0 "s_register_operand" "")
3518 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3519 (match_operand:SI 2 "reg_or_int_operand" "")))]
3522 if (GET_CODE (operands[2]) == CONST_INT
3524 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3526 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3529 /* Ideally we shouldn't fail here if we could know that operands[1]
3530 ends up already living in an iwmmxt register.  Otherwise it's
3531 cheaper to have the alternate code being generated than moving
3532 values to iwmmxt regs and back.  */
3535 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DI << 1: shift the low word setting carry, then add-with-carry the high
;; word to itself (doubling it and folding in the carried-out bit).
3540 (define_insn "arm_ashldi3_1bit"
3541 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3542 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3544 (clobber (reg:CC CC_REGNUM))]
3546 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3547 [(set_attr "conds" "clob")
3548 (set_attr "length" "8")]
;; SI left shift: a constant shift of more than 31 is a guaranteed zero
;; result, so just emit a move of 0.
3551 (define_expand "ashlsi3"
3552 [(set (match_operand:SI 0 "s_register_operand" "")
3553 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3554 (match_operand:SI 2 "arm_rhs_operand" "")))]
3557 if (GET_CODE (operands[2]) == CONST_INT
3558 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3560 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 LSL (immediate or register form); sets the flags.
;; NOTE(review): the condition and template lines are missing from this
;; extract.
3566 (define_insn "*thumb1_ashlsi3"
3567 [(set (match_operand:SI 0 "register_operand" "=l,l")
3568 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3569 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3572 [(set_attr "length" "2")
3573 (set_attr "conds" "set")])
;; DI arithmetic right shift: shift-by-1 gets the dedicated movs/rrx pair;
;; otherwise generic expansion unless iWMMXt handles it.
3575 (define_expand "ashrdi3"
3576 [(set (match_operand:DI 0 "s_register_operand" "")
3577 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3578 (match_operand:SI 2 "reg_or_int_operand" "")))]
3581 if (GET_CODE (operands[2]) == CONST_INT)
3583 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3585 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3588 /* Ideally we shouldn't fail here if we could know that operands[1]
3589 ends up already living in an iwmmxt register.  Otherwise it's
3590 cheaper to have the alternate code being generated than moving
3591 values to iwmmxt regs and back.  */
3594 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (arithmetic): asr the high word (duplicating the sign bit,
;; carry = old bit 0), then rotate the low word right through carry (rrx).
3599 (define_insn "arm_ashrdi3_1bit"
3600 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3601 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3603 (clobber (reg:CC CC_REGNUM))]
3605 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3606 [(set_attr "conds" "clob")
3607 (set_attr "insn" "mov")
3608 (set_attr "length" "8")]
;; SI arithmetic right shift: shift counts above 31 are clamped to 31
;; (which still yields the all-sign-bits result).
3611 (define_expand "ashrsi3"
3612 [(set (match_operand:SI 0 "s_register_operand" "")
3613 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3614 (match_operand:SI 2 "arm_rhs_operand" "")))]
3617 if (GET_CODE (operands[2]) == CONST_INT
3618 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3619 operands[2] = GEN_INT (31);
;; Thumb-1 ASR; sets the flags.  NOTE(review): the condition and template
;; lines are missing from this extract.
3623 (define_insn "*thumb1_ashrsi3"
3624 [(set (match_operand:SI 0 "register_operand" "=l,l")
3625 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3626 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3629 [(set_attr "length" "2")
3630 (set_attr "conds" "set")])
;; DI logical right shift: same structure as ashrdi3 — shift-by-1 gets a
;; dedicated two-insn pattern, otherwise generic expansion unless iWMMXt.
3632 (define_expand "lshrdi3"
3633 [(set (match_operand:DI 0 "s_register_operand" "")
3634 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3635 (match_operand:SI 2 "reg_or_int_operand" "")))]
3638 if (GET_CODE (operands[2]) == CONST_INT)
3640 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3642 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3645 /* Ideally we shouldn't fail here if we could know that operands[1]
3646 ends up already living in an iwmmxt register.  Otherwise it's
3647 cheaper to have the alternate code being generated than moving
3648 values to iwmmxt regs and back.  */
3651 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (logical): lsr the high word (carry = old bit 0), then rotate
;; the low word right through carry (rrx).
3656 (define_insn "arm_lshrdi3_1bit"
3657 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3658 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3660 (clobber (reg:CC CC_REGNUM))]
3662 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3663 [(set_attr "conds" "clob")
3664 (set_attr "insn" "mov")
3665 (set_attr "length" "8")]
;; SI logical right shift: constant counts above 31 give zero, so emit a
;; move of 0 directly.
3668 (define_expand "lshrsi3"
3669 [(set (match_operand:SI 0 "s_register_operand" "")
3670 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3671 (match_operand:SI 2 "arm_rhs_operand" "")))]
3674 if (GET_CODE (operands[2]) == CONST_INT
3675 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3677 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 LSR; sets the flags.  NOTE(review): the condition and template
;; lines are missing from this extract.
3683 (define_insn "*thumb1_lshrsi3"
3684 [(set (match_operand:SI 0 "register_operand" "=l,l")
3685 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3686 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3689 [(set_attr "length" "2")
3690 (set_attr "conds" "set")])
;; Rotate left has no ARM instruction: implement as rotate right by
;; (32 - n) % 32 for constants, or by 32 - reg for register counts.
3692 (define_expand "rotlsi3"
3693 [(set (match_operand:SI 0 "s_register_operand" "")
3694 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3695 (match_operand:SI 2 "reg_or_int_operand" "")))]
3698 if (GET_CODE (operands[2]) == CONST_INT)
3699 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3702 rtx reg = gen_reg_rtx (SImode);
3703 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant counts are reduced mod 32 on 32-bit targets;
;; Thumb-1 only has the register form, so constants are forced to a reg.
3709 (define_expand "rotrsi3"
3710 [(set (match_operand:SI 0 "s_register_operand" "")
3711 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3712 (match_operand:SI 2 "arm_rhs_operand" "")))]
3717 if (GET_CODE (operands[2]) == CONST_INT
3718 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3719 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3721 else /* TARGET_THUMB1 */
3723 if (GET_CODE (operands [2]) == CONST_INT)
3724 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 ROR: register count only, destination tied to operand 1.
;; NOTE(review): the condition and template lines are missing from this
;; extract.
3729 (define_insn "*thumb1_rotrsi3"
3730 [(set (match_operand:SI 0 "register_operand" "=l")
3731 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3732 (match_operand:SI 2 "register_operand" "l")))]
3735 [(set_attr "length" "2")]
;; Generic shift insn covering all shift_operator codes; assembly is
;; produced by arm_output_shift (second argument 0 = non-flag-setting).
;; The type attribute distinguishes immediate vs. register shift counts.
3738 (define_insn "*arm_shiftsi3"
3739 [(set (match_operand:SI 0 "s_register_operand" "=r")
3740 (match_operator:SI 3 "shift_operator"
3741 [(match_operand:SI 1 "s_register_operand" "r")
3742 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3744 "* return arm_output_shift(operands, 0);"
3745 [(set_attr "predicable" "yes")
3746 (set_attr "shift" "1")
3747 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3748 (const_string "alu_shift")
3749 (const_string "alu_shift_reg")))]
;; Flag-setting variant: sets CC_NOOV from the shift result and also
;; writes the result (arm_output_shift called with 1 => "s" suffix).
3752 (define_insn "*shiftsi3_compare0"
3753 [(set (reg:CC_NOOV CC_REGNUM)
3754 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3755 [(match_operand:SI 1 "s_register_operand" "r")
3756 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3758 (set (match_operand:SI 0 "s_register_operand" "=r")
3759 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3761 "* return arm_output_shift(operands, 1);"
3762 [(set_attr "conds" "set")
3763 (set_attr "shift" "1")
3764 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3765 (const_string "alu_shift")
3766 (const_string "alu_shift_reg")))]
;; Flag-setting shift whose result is discarded into a scratch register.
3769 (define_insn "*shiftsi3_compare0_scratch"
3770 [(set (reg:CC_NOOV CC_REGNUM)
3771 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3772 [(match_operand:SI 1 "s_register_operand" "r")
3773 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3775 (clobber (match_scratch:SI 0 "=r"))]
3777 "* return arm_output_shift(operands, 1);"
3778 [(set_attr "conds" "set")
3779 (set_attr "shift" "1")]
;; MVN of a shifted operand.  The "arch" attribute limits the
;; register-shift alternative to ARM state ("a"); Thumb-2 only allows the
;; immediate-shift form.  NOTE(review): the condition and template lines
;; are missing from this extract.
3782 (define_insn "*not_shiftsi"
3783 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3784 (not:SI (match_operator:SI 3 "shift_operator"
3785 [(match_operand:SI 1 "s_register_operand" "r,r")
3786 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3789 [(set_attr "predicable" "yes")
3790 (set_attr "shift" "1")
3791 (set_attr "insn" "mvn")
3792 (set_attr "arch" "32,a")
3793 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVNS variant: sets the flags and writes the inverted shifted value.
3795 (define_insn "*not_shiftsi_compare0"
3796 [(set (reg:CC_NOOV CC_REGNUM)
3798 (not:SI (match_operator:SI 3 "shift_operator"
3799 [(match_operand:SI 1 "s_register_operand" "r,r")
3800 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3802 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3803 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3806 [(set_attr "conds" "set")
3807 (set_attr "shift" "1")
3808 (set_attr "insn" "mvn")
3809 (set_attr "arch" "32,a")
3810 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVNS variant whose result is discarded into a scratch register.
3812 (define_insn "*not_shiftsi_compare0_scratch"
3813 [(set (reg:CC_NOOV CC_REGNUM)
3815 (not:SI (match_operator:SI 3 "shift_operator"
3816 [(match_operand:SI 1 "s_register_operand" "r,r")
3817 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3819 (clobber (match_scratch:SI 0 "=r,r"))]
3822 [(set_attr "conds" "set")
3823 (set_attr "shift" "1")
3824 (set_attr "insn" "mvn")
3825 (set_attr "arch" "32,a")
3826 (set_attr "type" "alu_shift,alu_shift_reg")])
3828 ;; We don't really have extzv, but defining this using shifts helps
3829 ;; to reduce register pressure later on.
;; Zero-extract expander.  Strategy, in order:
;;  - Thumb-2 + unaligned_access, MEM source, 16/32-bit byte-aligned
;;    field: use the unaligned load insns (with a lowpart move and a
;;    paradoxical-subreg fixup for the 16-bit case);
;;  - Thumb-2, register source: single ubfx via extzv_t2;
;;  - otherwise: a left-shift/right-shift pair (extzv_t1), or a single
;;    lshrsi3 when the field reaches bit 31 (lshift == 0, presumably —
;;    the guarding condition lines are missing from this extract).
3831 (define_expand "extzv"
3832 [(set (match_operand 0 "s_register_operand" "")
3833 (zero_extract (match_operand 1 "nonimmediate_operand" "")
3834 (match_operand 2 "const_int_operand" "")
3835 (match_operand 3 "const_int_operand" "")))]
3836 "TARGET_THUMB1 || arm_arch_thumb2"
3839 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3840 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3842 if (arm_arch_thumb2)
3844 HOST_WIDE_INT width = INTVAL (operands[2]);
3845 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3847 if (unaligned_access && MEM_P (operands[1])
3848 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3852 if (BYTES_BIG_ENDIAN)
3853 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3858 base_addr = adjust_address (operands[1], SImode,
3859 bitpos / BITS_PER_UNIT);
3860 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3864 rtx dest = operands[0];
3865 rtx tmp = gen_reg_rtx (SImode);
3867 /* We may get a paradoxical subreg here.  Strip it off.  */
3868 if (GET_CODE (dest) == SUBREG
3869 && GET_MODE (dest) == SImode
3870 && GET_MODE (SUBREG_REG (dest)) == HImode)
3871 dest = SUBREG_REG (dest);
3873 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3876 base_addr = adjust_address (operands[1], HImode,
3877 bitpos / BITS_PER_UNIT);
3878 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3879 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3883 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3885 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3893 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3896 operands[3] = GEN_INT (rshift);
3900 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3904 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3905 operands[3], gen_reg_rtx (SImode)));
3910 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Shift left to drop high bits above the field, then shift right to
;; drop the low bits — operand 4 is the intermediate scratch.
3912 (define_expand "extzv_t1"
3913 [(set (match_operand:SI 4 "s_register_operand" "")
3914 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
3915 (match_operand:SI 2 "const_int_operand" "")))
3916 (set (match_operand:SI 0 "s_register_operand" "")
3917 (lshiftrt:SI (match_dup 4)
3918 (match_operand:SI 3 "const_int_operand" "")))]
;; Sign-extract expander, the signed sibling of extzv above:
;;  - unaligned_access + MEM source, 16/32-bit byte-aligned field:
;;    use the unaligned (signed) load insns;
;;  - register SImode source and destination: sbfx via extv_regsi;
;;  - anything else falls through / FAILs (some branches are on lines
;;    missing from this extract).
3922 (define_expand "extv"
3923 [(set (match_operand 0 "s_register_operand" "")
3924 (sign_extract (match_operand 1 "nonimmediate_operand" "")
3925 (match_operand 2 "const_int_operand" "")
3926 (match_operand 3 "const_int_operand" "")))]
3929 HOST_WIDE_INT width = INTVAL (operands[2]);
3930 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3932 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3933 && (bitpos % BITS_PER_UNIT) == 0)
3937 if (BYTES_BIG_ENDIAN)
3938 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3942 base_addr = adjust_address (operands[1], SImode,
3943 bitpos / BITS_PER_UNIT);
3944 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3948 rtx dest = operands[0];
3949 rtx tmp = gen_reg_rtx (SImode);
3951 /* We may get a paradoxical subreg here.  Strip it off.  */
3952 if (GET_CODE (dest) == SUBREG
3953 && GET_MODE (dest) == SImode
3954 && GET_MODE (SUBREG_REG (dest)) == HImode)
3955 dest = SUBREG_REG (dest);
3957 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3960 base_addr = adjust_address (operands[1], HImode,
3961 bitpos / BITS_PER_UNIT);
3962 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3963 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3968 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3970 else if (GET_MODE (operands[0]) == SImode
3971 && GET_MODE (operands[1]) == SImode)
3973 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3981 ; Helper to expand register forms of extv with the proper modes.
3983 (define_expand "extv_regsi"
3984 [(set (match_operand:SI 0 "s_register_operand" "")
3985 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
3986 (match_operand 2 "const_int_operand" "")
3987 (match_operand 3 "const_int_operand" "")))]
3992 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; The UNSPEC wrappers keep these from being combined with patterns that
;; assume naturally-aligned accesses.  In each, the first alternative is a
;; 16-bit Thumb-2 encoding ("Uw"/low regs, length 2); the second is the
;; generic 32-bit encoding.
3994 (define_insn "unaligned_loadsi"
3995 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3996 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
3997 UNSPEC_UNALIGNED_LOAD))]
3998 "unaligned_access && TARGET_32BIT"
3999 "ldr%?\t%0, %1\t@ unaligned"
4000 [(set_attr "arch" "t2,any")
4001 (set_attr "length" "2,4")
4002 (set_attr "predicable" "yes")
4003 (set_attr "type" "load1")])
;; Sign-extending unaligned halfword load (ldrsh).
4005 (define_insn "unaligned_loadhis"
4006 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4008 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4009 UNSPEC_UNALIGNED_LOAD)))]
4010 "unaligned_access && TARGET_32BIT"
4011 "ldr%(sh%)\t%0, %1\t@ unaligned"
4012 [(set_attr "arch" "t2,any")
4013 (set_attr "length" "2,4")
4014 (set_attr "predicable" "yes")
4015 (set_attr "type" "load_byte")])
;; Zero-extending unaligned halfword load (ldrh).
4017 (define_insn "unaligned_loadhiu"
4018 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4020 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4021 UNSPEC_UNALIGNED_LOAD)))]
4022 "unaligned_access && TARGET_32BIT"
4023 "ldr%(h%)\t%0, %1\t@ unaligned"
4024 [(set_attr "arch" "t2,any")
4025 (set_attr "length" "2,4")
4026 (set_attr "predicable" "yes")
4027 (set_attr "type" "load_byte")])
;; Unaligned word store.
4029 (define_insn "unaligned_storesi"
4030 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4031 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4032 UNSPEC_UNALIGNED_STORE))]
4033 "unaligned_access && TARGET_32BIT"
4034 "str%?\t%1, %0\t@ unaligned"
4035 [(set_attr "arch" "t2,any")
4036 (set_attr "length" "2,4")
4037 (set_attr "predicable" "yes")
4038 (set_attr "type" "store1")])
;; Unaligned halfword store (strh).
4040 (define_insn "unaligned_storehi"
4041 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4042 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4043 UNSPEC_UNALIGNED_STORE))]
4044 "unaligned_access && TARGET_32BIT"
4045 "str%(h%)\t%1, %0\t@ unaligned"
4046 [(set_attr "arch" "t2,any")
4047 (set_attr "length" "2,4")
4048 (set_attr "predicable" "yes")
4049 (set_attr "type" "store1")])
;; Signed bitfield extract: sbfx dest, src, lsb(%3), width(%2).
;; NOTE(review): the insn condition line is missing from this extract.
4051 (define_insn "*extv_reg"
4052 [(set (match_operand:SI 0 "s_register_operand" "=r")
4053 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4054 (match_operand:SI 2 "const_int_operand" "M")
4055 (match_operand:SI 3 "const_int_operand" "M")))]
4057 "sbfx%?\t%0, %1, %3, %2"
4058 [(set_attr "length" "4")
4059 (set_attr "predicable" "yes")]
;; Unsigned bitfield extract: ubfx dest, src, lsb(%3), width(%2).
4062 (define_insn "extzv_t2"
4063 [(set (match_operand:SI 0 "s_register_operand" "=r")
4064 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4065 (match_operand:SI 2 "const_int_operand" "M")
4066 (match_operand:SI 3 "const_int_operand" "M")))]
4068 "ubfx%?\t%0, %1, %3, %2"
4069 [(set_attr "length" "4")
4070 (set_attr "predicable" "yes")]
4074 ;; Division instructions
;; Hardware signed divide.  NOTE(review): the condition line (presumably
;; requiring TARGET_IDIV or similar) is missing from this extract.
4075 (define_insn "divsi3"
4076 [(set (match_operand:SI 0 "s_register_operand" "=r")
4077 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4078 (match_operand:SI 2 "s_register_operand" "r")))]
4080 "sdiv%?\t%0, %1, %2"
4081 [(set_attr "predicable" "yes")
4082 (set_attr "insn" "sdiv")]
;; Hardware unsigned divide.
4085 (define_insn "udivsi3"
4086 [(set (match_operand:SI 0 "s_register_operand" "=r")
4087 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4088 (match_operand:SI 2 "s_register_operand" "r")))]
4090 "udiv%?\t%0, %1, %2"
4091 [(set_attr "predicable" "yes")
4092 (set_attr "insn" "udiv")]
4096 ;; Unary arithmetic insns
;; DI negation: prefer the Neon pattern when available, otherwise fall
;; through to the ARM/Thumb patterns below (which clobber CC).
4098 (define_expand "negdi2"
4100 [(set (match_operand:DI 0 "s_register_operand" "")
4101 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4102 (clobber (reg:CC CC_REGNUM))])]
4107 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4113 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4114 ;; The first alternative allows the common case of a *full* overlap.
;; -x for DI: rsbs low, then rsc (reverse subtract with carry) high.
4115 (define_insn "*arm_negdi2"
4116 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4117 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4118 (clobber (reg:CC CC_REGNUM))]
4120 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4121 [(set_attr "conds" "clob")
4122 (set_attr "length" "8")]
;; Thumb-1 DI negation: zero the high word, negate the low word (setting
;; borrow), then subtract-with-carry the old high word.
4125 (define_insn "*thumb1_negdi2"
4126 [(set (match_operand:DI 0 "register_operand" "=&l")
4127 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4128 (clobber (reg:CC CC_REGNUM))]
4130 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4131 [(set_attr "length" "6")]
;; SI negation expander — straightforward, no special cases visible.
4134 (define_expand "negsi2"
4135 [(set (match_operand:SI 0 "s_register_operand" "")
4136 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; -x as reverse-subtract from zero.
4141 (define_insn "*arm_negsi2"
4142 [(set (match_operand:SI 0 "s_register_operand" "=r")
4143 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4145 "rsb%?\\t%0, %1, #0"
4146 [(set_attr "predicable" "yes")]
;; Thumb-1 neg.  NOTE(review): the condition and template lines are
;; missing from this extract.
4149 (define_insn "*thumb1_negsi2"
4150 [(set (match_operand:SI 0 "register_operand" "=l")
4151 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4154 [(set_attr "length" "2")]
;; FP negation expanders: gated on hard float with an FPA or VFP unit
;; (DF additionally requires double-precision VFP); the actual insns live
;; in the coprocessor-specific .md files.
4157 (define_expand "negsf2"
4158 [(set (match_operand:SF 0 "s_register_operand" "")
4159 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4160 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4164 (define_expand "negdf2"
4165 [(set (match_operand:DF 0 "s_register_operand" "")
4166 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4167 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
4170 ;; abssi2 doesn't really clobber the condition codes if a different register
4171 ;; is being set.  To keep things simple, assume during rtl manipulations that
4172 ;; it does, but tell the final scan operator the truth.  Similarly for
;; abs expander: operand 2 is either a harmless SCRATCH or the CC reg,
;; chosen by the C preparation code (selection condition is on a line
;; missing from this extract).
4175 (define_expand "abssi2"
4177 [(set (match_operand:SI 0 "s_register_operand" "")
4178 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4179 (clobber (match_dup 2))])]
4183 operands[2] = gen_rtx_SCRATCH (SImode);
4185 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; |x|: either cmp/rsblt (clobbers flags, dest tied to src) or the
;; branch-free eor/sub sign-mask trick (flag-free, predicable form).
4188 (define_insn "*arm_abssi2"
4189 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4190 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4191 (clobber (reg:CC CC_REGNUM))]
4194 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4195 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
4196 [(set_attr "conds" "clob,*")
4197 (set_attr "shift" "1")
4198 ;; predicable can't be set based on the variant, so left as no
4199 (set_attr "length" "8")]
;; Thumb-1 |x|, split after reload into the three-insn sign-mask
;; sequence: mask = x >> 31; t = x + mask; result = t ^ mask.
4202 (define_insn_and_split "*thumb1_abssi2"
4203 [(set (match_operand:SI 0 "s_register_operand" "=l")
4204 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4205 (clobber (match_scratch:SI 2 "=&l"))]
4208 "TARGET_THUMB1 && reload_completed"
4209 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4210 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4211 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4213 [(set_attr "length" "6")]
;; -|x|: mirror of *arm_abssi2 with rsbgt / rsb.
4216 (define_insn "*arm_neg_abssi2"
4217 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4218 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4219 (clobber (reg:CC CC_REGNUM))]
4222 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4223 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
4224 [(set_attr "conds" "clob,*")
4225 (set_attr "shift" "1")
4226 ;; predicable can't be set based on the variant, so left as no
4227 (set_attr "length" "8")]
;; Thumb-1 -|x|: mask = x >> 31; t = mask - x; result = t ^ mask.
4230 (define_insn_and_split "*thumb1_neg_abssi2"
4231 [(set (match_operand:SI 0 "s_register_operand" "=l")
4232 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4233 (clobber (match_scratch:SI 2 "=&l"))]
4236 "TARGET_THUMB1 && reload_completed"
4237 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4238 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4239 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4241 [(set_attr "length" "6")]
;; FP abs and sqrt expanders: conditions mirror the neg expanders above
;; (hard float; DF forms need double-precision support; sqrt needs FPA or
;; VFP).  The concrete insns live in the coprocessor .md files.
4244 (define_expand "abssf2"
4245 [(set (match_operand:SF 0 "s_register_operand" "")
4246 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4247 "TARGET_32BIT && TARGET_HARD_FLOAT"
4250 (define_expand "absdf2"
4251 [(set (match_operand:DF 0 "s_register_operand" "")
4252 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4253 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4256 (define_expand "sqrtsf2"
4257 [(set (match_operand:SF 0 "s_register_operand" "")
4258 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4259 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4262 (define_expand "sqrtdf2"
4263 [(set (match_operand:DF 0 "s_register_operand" "")
4264 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4265 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; One's complement (bitwise NOT) patterns.

;; DImode NOT: Neon alternatives (w) plus core-register alternatives that
;; split after reload into two SImode NOTs on the low and high halves.
4268 (define_insn_and_split "one_cmpldi2"
4269 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
4270 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
4277 "TARGET_32BIT && reload_completed
4278 && arm_general_register_operand (operands[0], DImode)"
4279 [(set (match_dup 0) (not:SI (match_dup 1)))
4280 (set (match_dup 2) (not:SI (match_dup 3)))]
4283 operands[2] = gen_highpart (SImode, operands[0]);
4284 operands[0] = gen_lowpart (SImode, operands[0]);
4285 operands[3] = gen_highpart (SImode, operands[1]);
4286 operands[1] = gen_lowpart (SImode, operands[1]);
4288 [(set_attr "length" "*,8,8,*")
4289 (set_attr "predicable" "no,yes,yes,no")
4290 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
4291 (set_attr "arch" "neon_nota8,*,*,neon_onlya8")]

;; Standard-named SImode NOT expander.
4294 (define_expand "one_cmplsi2"
4295 [(set (match_operand:SI 0 "s_register_operand" "")
4296 (not:SI (match_operand:SI 1 "s_register_operand" "")))]

;; 32-bit MVN.
4301 (define_insn "*arm_one_cmplsi2"
4302 [(set (match_operand:SI 0 "s_register_operand" "=r")
4303 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4306 [(set_attr "predicable" "yes")
4307 (set_attr "insn" "mvn")]

;; Thumb-1 MVN (2-byte encoding, low registers only).
4310 (define_insn "*thumb1_one_cmplsi2"
4311 [(set (match_operand:SI 0 "register_operand" "=l")
4312 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4315 [(set_attr "length" "2")
4316 (set_attr "insn" "mvn")]

;; MVN that also sets the condition codes (flag-setting compare of ~x),
;; keeping the inverted result in operand 0.
4319 (define_insn "*notsi_compare0"
4320 [(set (reg:CC_NOOV CC_REGNUM)
4321 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4323 (set (match_operand:SI 0 "s_register_operand" "=r")
4324 (not:SI (match_dup 1)))]
4327 [(set_attr "conds" "set")
4328 (set_attr "insn" "mvn")]

;; As above but the result is dead: only the flags are wanted.
4331 (define_insn "*notsi_compare0_scratch"
4332 [(set (reg:CC_NOOV CC_REGNUM)
4333 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4335 (clobber (match_scratch:SI 0 "=r"))]
4338 [(set_attr "conds" "set")
4339 (set_attr "insn" "mvn")]
4342 ;; Fixed <--> Floating conversion insns

;; int -> half float: no direct HF conversion, so go through SFmode
;; (expand_float to SF, then narrow SF -> HF).
4344 (define_expand "floatsihf2"
4345 [(set (match_operand:HF 0 "general_operand" "")
4346 (float:HF (match_operand:SI 1 "general_operand" "")))]
4350 rtx op1 = gen_reg_rtx (SFmode);
4351 expand_float (op1, operands[1], 0);
4352 op1 = convert_to_mode (HFmode, op1, 0);
4353 emit_move_insn (operands[0], op1);

;; DImode -> half float: same SFmode detour as floatsihf2.
4358 (define_expand "floatdihf2"
4359 [(set (match_operand:HF 0 "general_operand" "")
4360 (float:HF (match_operand:DI 1 "general_operand" "")))]
4364 rtx op1 = gen_reg_rtx (SFmode);
4365 expand_float (op1, operands[1], 0);
4366 op1 = convert_to_mode (HFmode, op1, 0);
4367 emit_move_insn (operands[0], op1);

;; int -> float; Maverick (Cirrus) coprocessor gets its own insn.
4372 (define_expand "floatsisf2"
4373 [(set (match_operand:SF 0 "s_register_operand" "")
4374 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4375 "TARGET_32BIT && TARGET_HARD_FLOAT"
4377 if (TARGET_MAVERICK)
4379 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));

;; int -> double; same Maverick special case.
4384 (define_expand "floatsidf2"
4385 [(set (match_operand:DF 0 "s_register_operand" "")
4386 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4387 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4389 if (TARGET_MAVERICK)
4391 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));

;; half float -> int: widen HF -> SF, then expand_fix.
4396 (define_expand "fix_trunchfsi2"
4397 [(set (match_operand:SI 0 "general_operand" "")
4398 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4402 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4403 expand_fix (operands[0], op1, 0);

;; half float -> DImode int, via SFmode as above.
4408 (define_expand "fix_trunchfdi2"
4409 [(set (match_operand:DI 0 "general_operand" "")
4410 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4414 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4415 expand_fix (operands[0], op1, 0);

;; float -> int; Maverick needs both operands in Cirrus registers.
4420 (define_expand "fix_truncsfsi2"
4421 [(set (match_operand:SI 0 "s_register_operand" "")
4422 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4423 "TARGET_32BIT && TARGET_HARD_FLOAT"
4425 if (TARGET_MAVERICK)
4427 if (!cirrus_fp_register (operands[0], SImode))
4428 operands[0] = force_reg (SImode, operands[0]);
4429 if (!cirrus_fp_register (operands[1], SFmode))
/* BUG(review): the value copied is operands[0], but the test and the
   assignment are about operands[1] -- this almost certainly should be
   force_reg (SFmode, operands[1]).  Confirm against upstream before
   changing; Maverick support is the only affected path.  */
4430 operands[1] = force_reg (SFmode, operands[0]);
4431 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));

;; double -> int; same Maverick path, with the same suspect copy.
4436 (define_expand "fix_truncdfsi2"
4437 [(set (match_operand:SI 0 "s_register_operand" "")
4438 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4439 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4441 if (TARGET_MAVERICK)
4443 if (!cirrus_fp_register (operands[1], DFmode))
/* BUG(review): as in fix_truncsfsi2 above, the source is operands[0]
   where operands[1] looks intended.  Confirm before fixing.  */
4444 operands[1] = force_reg (DFmode, operands[0]);
4445 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));

;; double -> float narrowing.
4452 (define_expand "truncdfsf2"
4453 [(set (match_operand:SF 0 "s_register_operand" "")
4455 (match_operand:DF 1 "s_register_operand" "")))]
4456 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

4460 /* DFmode -> HFmode conversions have to go through SFmode. */
4461 (define_expand "truncdfhf2"
4462 [(set (match_operand:HF 0 "general_operand" "")
4464 (match_operand:DF 1 "general_operand" "")))]
4469 op1 = convert_to_mode (SFmode, operands[1], 0);
4470 op1 = convert_to_mode (HFmode, op1, 0);
4471 emit_move_insn (operands[0], op1);
4476 ;; Zero and sign extension instructions.

;; QI/HI/SI -> DI zero extension; operand predicate/constraint/condition
;; come from per-mode iterator attributes (<qhs_zextenddi_*>).
4478 (define_insn "zero_extend<mode>di2"
4479 [(set (match_operand:DI 0 "s_register_operand" "=r")
4480 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4481 "<qhs_zextenddi_cstr>")))]
4482 "TARGET_32BIT <qhs_zextenddi_cond>"
4484 [(set_attr "length" "8")
4485 (set_attr "ce_count" "2")
4486 (set_attr "predicable" "yes")]

;; QI/HI/SI -> DI sign extension, same iterator scheme.
4489 (define_insn "extend<mode>di2"
4490 [(set (match_operand:DI 0 "s_register_operand" "=r")
4491 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4492 "<qhs_extenddi_cstr>")))]
4493 "TARGET_32BIT <qhs_sextenddi_cond>"
4495 [(set_attr "length" "8")
4496 (set_attr "ce_count" "2")
4497 (set_attr "shift" "1")
4498 (set_attr "predicable" "yes")]

4501 ;; Splits for all extensions to DImode

;; Zero extension to DI: extend (or move) into the low word, then set
;; the high word to zero.  The clobber of the full DI reg lets the
;; register allocator see the whole destination is being written.
4503 [(set (match_operand:DI 0 "s_register_operand" "")
4504 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4506 [(set (match_dup 0) (match_dup 1))]
4508 rtx lo_part = gen_lowpart (SImode, operands[0]);
4509 enum machine_mode src_mode = GET_MODE (operands[1]);
4511 if (REG_P (operands[0])
4512 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4513 emit_clobber (operands[0]);
4514 if (!REG_P (lo_part) || src_mode != SImode
4515 || !rtx_equal_p (lo_part, operands[1]))
4517 if (src_mode == SImode)
4518 emit_move_insn (lo_part, operands[1]);
4520 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4521 gen_rtx_ZERO_EXTEND (SImode, operands[1])))
4522 operands[1] = lo_part;
4524 operands[0] = gen_highpart (SImode, operands[0]);
4525 operands[1] = const0_rtx;

;; Sign extension to DI: extend into the low word, then fill the high
;; word with (low >> 31).
4529 [(set (match_operand:DI 0 "s_register_operand" "")
4530 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4532 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4534 rtx lo_part = gen_lowpart (SImode, operands[0]);
4535 enum machine_mode src_mode = GET_MODE (operands[1]);
4537 if (REG_P (operands[0])
4538 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4539 emit_clobber (operands[0]);
4541 if (!REG_P (lo_part) || src_mode != SImode
4542 || !rtx_equal_p (lo_part, operands[1]))
4544 if (src_mode == SImode)
4545 emit_move_insn (lo_part, operands[1]);
4547 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4548 gen_rtx_SIGN_EXTEND (SImode, operands[1])))
4549 operands[1] = lo_part;
4551 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero extension.  Pre-v4 ARM has no ldrh, so memory sources
;; go through movhi_bytes; pre-v6 register sources use shift-left-16 /
;; logical-shift-right-16 since there is no uxth.
4554 (define_expand "zero_extendhisi2"
4555 [(set (match_operand:SI 0 "s_register_operand" "")
4556 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4559 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4561 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4564 if (!arm_arch6 && !MEM_P (operands[1]))
4566 rtx t = gen_lowpart (SImode, operands[1]);
4567 rtx tmp = gen_reg_rtx (SImode);
4568 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4569 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));

;; Pre-v6 split: register HI zero-extend as shl 16 / lshr 16.
4575 [(set (match_operand:SI 0 "s_register_operand" "")
4576 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4577 "!TARGET_THUMB2 && !arm_arch6"
4578 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4579 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4581 operands[2] = gen_lowpart (SImode, operands[1]);

;; Thumb-1 HI zero extend: uxth on v6, otherwise ldrh from memory.  The
;; C body rewrites an SP-relative address into a low register first,
;; since Thumb-1 ldrh cannot address off SP.
4584 (define_insn "*thumb1_zero_extendhisi2"
4585 [(set (match_operand:SI 0 "register_operand" "=l,l")
4586 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4591 if (which_alternative == 0 && arm_arch6)
4592 return "uxth\t%0, %1";
4593 if (which_alternative == 0)
4596 mem = XEXP (operands[1], 0);
4598 if (GET_CODE (mem) == CONST)
4599 mem = XEXP (mem, 0);
4601 if (GET_CODE (mem) == PLUS)
4603 rtx a = XEXP (mem, 0);
4605 /* This can happen due to bugs in reload. */
4606 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4609 ops[0] = operands[0];
4612 output_asm_insn ("mov\t%0, %1", ops);
4614 XEXP (mem, 0) = operands[0];
4618 return "ldrh\t%0, %1";
4620 [(set_attr_alternative "length"
4621 [(if_then_else (eq_attr "is_arch6" "yes")
4622 (const_int 2) (const_int 4))
4624 (set_attr "type" "alu_shift,load_byte")]

;; ARM v4 (no v6): register alternative uses shifts, memory uses ldrh.
4627 (define_insn "*arm_zero_extendhisi2"
4628 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4629 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4630 "TARGET_ARM && arm_arch4 && !arm_arch6"
4634 [(set_attr "type" "alu_shift,load_byte")
4635 (set_attr "predicable" "yes")]

;; ARM v6+: uxth available for the register alternative.
4638 (define_insn "*arm_zero_extendhisi2_v6"
4639 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4640 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4641 "TARGET_ARM && arm_arch6"
4645 [(set_attr "type" "alu_shift,load_byte")
4646 (set_attr "predicable" "yes")]

;; Fused zero-extend-halfword-and-add: uxtah.
4649 (define_insn "*arm_zero_extendhisi2addsi"
4650 [(set (match_operand:SI 0 "s_register_operand" "=r")
4651 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4652 (match_operand:SI 2 "s_register_operand" "r")))]
4654 "uxtah%?\\t%0, %2, %1"
4655 [(set_attr "type" "alu_shift")
4656 (set_attr "predicable" "yes")]
;; QI -> SI zero extension.  Pre-v6 ARM register sources become an AND
;; with 255; pre-v6 Thumb-1 register sources use shl 24 / lshr 24.
4659 (define_expand "zero_extendqisi2"
4660 [(set (match_operand:SI 0 "s_register_operand" "")
4661 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4664 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4666 emit_insn (gen_andsi3 (operands[0],
4667 gen_lowpart (SImode, operands[1]),
4671 if (!arm_arch6 && !MEM_P (operands[1]))
4673 rtx t = gen_lowpart (SImode, operands[1]);
4674 rtx tmp = gen_reg_rtx (SImode);
4675 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4676 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));

;; Pre-v6 split: QI zero-extend of a register as shifts, or an AND #255
;; in the emitted-C path.
4682 [(set (match_operand:SI 0 "s_register_operand" "")
4683 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4685 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4686 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4688 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4691 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));

;; Thumb-1 pre-v6: register alternative is 4 bytes (shifts), memory is
;; a 2-byte ldrb.
4696 (define_insn "*thumb1_zero_extendqisi2"
4697 [(set (match_operand:SI 0 "register_operand" "=l,l")
4698 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4699 "TARGET_THUMB1 && !arm_arch6"
4703 [(set_attr "length" "4,2")
4704 (set_attr "type" "alu_shift,load_byte")
4705 (set_attr "pool_range" "*,32")]

;; Thumb-1 v6+: uxtb available, both alternatives 2 bytes.
4708 (define_insn "*thumb1_zero_extendqisi2_v6"
4709 [(set (match_operand:SI 0 "register_operand" "=l,l")
4710 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4711 "TARGET_THUMB1 && arm_arch6"
4715 [(set_attr "length" "2")
4716 (set_attr "type" "alu_shift,load_byte")]

;; ARM pre-v6: register via shifts (8 bytes), memory via ldrb.
4719 (define_insn "*arm_zero_extendqisi2"
4720 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4721 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4722 "TARGET_ARM && !arm_arch6"
4725 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4726 [(set_attr "length" "8,4")
4727 (set_attr "type" "alu_shift,load_byte")
4728 (set_attr "predicable" "yes")]

;; ARM v6+: uxtb for registers, ldrb for memory.
4731 (define_insn "*arm_zero_extendqisi2_v6"
4732 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4733 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4734 "TARGET_ARM && arm_arch6"
4737 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4738 [(set_attr "type" "alu_shift,load_byte")
4739 (set_attr "predicable" "yes")]

;; Fused zero-extend-byte-and-add: uxtab.
4742 (define_insn "*arm_zero_extendqisi2addsi"
4743 [(set (match_operand:SI 0 "s_register_operand" "=r")
4744 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4745 (match_operand:SI 2 "s_register_operand" "r")))]
4747 "uxtab%?\\t%0, %2, %1"
4748 [(set_attr "predicable" "yes")
4749 (set_attr "insn" "xtab")
4750 (set_attr "type" "alu_shift")]

;; Zero-extend of the low byte of an SI value: copy then AND #255.
;; Little-endian: byte 0 of the subreg is the low byte.
4754 [(set (match_operand:SI 0 "s_register_operand" "")
4755 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4756 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4757 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4758 [(set (match_dup 2) (match_dup 1))
4759 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]

;; Big-endian counterpart: the low byte is subreg byte 3.
4764 [(set (match_operand:SI 0 "s_register_operand" "")
4765 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4766 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4767 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4768 [(set (match_dup 2) (match_dup 1))
4769 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]

;; Fold (ior/xor (and (ashift x n) mask) (zero_extend y)) into the
;; ior/xor followed by a zero-extend of the combined low part, when the
;; AND mask is exactly the extended-mode mask shifted left by n.
4775 [(set (match_operand:SI 0 "s_register_operand" "")
4776 (ior_xor:SI (and:SI (ashift:SI
4777 (match_operand:SI 1 "s_register_operand" "")
4778 (match_operand:SI 2 "const_int_operand" ""))
4779 (match_operand:SI 3 "const_int_operand" ""))
4781 (match_operator 5 "subreg_lowpart_operator"
4782 [(match_operand:SI 4 "s_register_operand" "")]))))]
4784 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4785 == (GET_MODE_MASK (GET_MODE (operands[5]))
4786 & (GET_MODE_MASK (GET_MODE (operands[5]))
4787 << (INTVAL (operands[2])))))"
4788 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4790 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4791 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode value against zero for equality, setting only the
;; Z flag (CC_Z mode).
4794 (define_insn "*compareqi_eq0"
4795 [(set (reg:CC_Z CC_REGNUM)
4796 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4800 [(set_attr "conds" "set")
4801 (set_attr "predicable" "yes")]
;; HI -> SI sign extension.  Thumb-1 goes through thumb1_extendhisi2;
;; pre-v4 ARM memory sources use extendhisi2_mem (no ldrsh); pre-v6
;; register sources use shl 16 / ashr 16.
4804 (define_expand "extendhisi2"
4805 [(set (match_operand:SI 0 "s_register_operand" "")
4806 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4811 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4814 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4816 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4820 if (!arm_arch6 && !MEM_P (operands[1]))
4822 rtx t = gen_lowpart (SImode, operands[1]);
4823 rtx tmp = gen_reg_rtx (SImode);
4824 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4825 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));

;; Split a register HI sign-extend (with dead scratch) into
;; shl 16 / ashr 16.
4832 [(set (match_operand:SI 0 "register_operand" "")
4833 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4834 (clobber (match_scratch:SI 2 ""))])]
4836 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4837 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4839 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);

4842 ;; We used to have an early-clobber on the scratch register here.
4843 ;; However, there's a bug somewhere in reload which means that this
4844 ;; can be partially ignored during spill allocation if the memory
4845 ;; address also needs reloading; this causes us to die later on when
4846 ;; we try to verify the operands. Fortunately, we don't really need
4847 ;; the early-clobber: we can always use operand 0 if operand 2
4848 ;; overlaps the address.

;; Thumb-1 HI sign extend: sxth on v6, ldrsh when the address is
;; reg+reg; otherwise the C body materializes a reg+reg address
;; (Thumb-1 ldrsh supports only that form), using operand 0 or the
;; scratch as the index.
4849 (define_insn "thumb1_extendhisi2"
4850 [(set (match_operand:SI 0 "register_operand" "=l,l")
4851 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4852 (clobber (match_scratch:SI 2 "=X,l"))]
4859 if (which_alternative == 0 && !arm_arch6)
4861 if (which_alternative == 0)
4862 return \"sxth\\t%0, %1\";
4864 mem = XEXP (operands[1], 0);
4866 /* This code used to try to use 'V', and fix the address only if it was
4867 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4868 range of QImode offsets, and offsettable_address_p does a QImode
4871 if (GET_CODE (mem) == CONST)
4872 mem = XEXP (mem, 0);
4874 if (GET_CODE (mem) == LABEL_REF)
4875 return \"ldr\\t%0, %1\";
4877 if (GET_CODE (mem) == PLUS)
4879 rtx a = XEXP (mem, 0);
4880 rtx b = XEXP (mem, 1);
4882 if (GET_CODE (a) == LABEL_REF
4883 && GET_CODE (b) == CONST_INT)
4884 return \"ldr\\t%0, %1\";
4886 if (GET_CODE (b) == REG)
4887 return \"ldrsh\\t%0, %1\";
4895 ops[2] = const0_rtx;
4898 gcc_assert (GET_CODE (ops[1]) == REG);
4900 ops[0] = operands[0];
4901 if (reg_mentioned_p (operands[2], ops[1]))
4904 ops[3] = operands[2];
4905 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4908 [(set_attr_alternative "length"
4909 [(if_then_else (eq_attr "is_arch6" "yes")
4910 (const_int 2) (const_int 4))
4912 (set_attr "type" "alu_shift,load_byte")
4913 (set_attr "pool_range" "*,1020")]

4916 ;; This pattern will only be used when ldsh is not available
;; Synthesizes a signed halfword load from two byte loads: the
;; high byte is shifted left 24, arithmetic-shifted right 16, and
;; OR-ed with the low byte; byte order chosen by BYTES_BIG_ENDIAN.
4917 (define_expand "extendhisi2_mem"
4918 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4920 (zero_extend:SI (match_dup 7)))
4921 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4922 (set (match_operand:SI 0 "" "")
4923 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4928 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4930 mem1 = change_address (operands[1], QImode, addr);
4931 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4932 operands[0] = gen_lowpart (SImode, operands[0]);
4934 operands[2] = gen_reg_rtx (SImode);
4935 operands[3] = gen_reg_rtx (SImode);
4936 operands[6] = gen_reg_rtx (SImode);
4939 if (BYTES_BIG_ENDIAN)
4941 operands[4] = operands[2];
4942 operands[5] = operands[3];
4946 operands[4] = operands[3];
4947 operands[5] = operands[2];

;; Split a register HI sign-extend (no scratch) into shl 16 / ashr 16.
4953 [(set (match_operand:SI 0 "register_operand" "")
4954 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4956 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4957 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4959 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM v4 (no v6) HI sign extend: shifts for registers, ldrsh for memory.
4962 (define_insn "*arm_extendhisi2"
4963 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4964 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4965 "TARGET_ARM && arm_arch4 && !arm_arch6"
4969 [(set_attr "length" "8,4")
4970 (set_attr "type" "alu_shift,load_byte")
4971 (set_attr "predicable" "yes")
4972 (set_attr "pool_range" "*,256")
4973 (set_attr "neg_pool_range" "*,244")]

4976 ;; ??? Check Thumb-2 pool range
;; v6+ (ARM or Thumb-2): sxth for registers, ldrsh for memory.
4977 (define_insn "*arm_extendhisi2_v6"
4978 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4979 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4980 "TARGET_32BIT && arm_arch6"
4984 [(set_attr "type" "alu_shift,load_byte")
4985 (set_attr "predicable" "yes")
4986 (set_attr "pool_range" "*,256")
4987 (set_attr "neg_pool_range" "*,244")]

;; Fused sign-extend-halfword-and-add: sxtah.
4990 (define_insn "*arm_extendhisi2addsi"
4991 [(set (match_operand:SI 0 "s_register_operand" "=r")
4992 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4993 (match_operand:SI 2 "s_register_operand" "r")))]
4995 "sxtah%?\\t%0, %2, %1"
;; QI -> HI sign extension.  With v4 and a memory source this is a
;; direct ldrsb; otherwise the byte is forced into a register and
;; extended via shl 24 / ashr (in SImode, taking lowparts).
4998 (define_expand "extendqihi2"
5000 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5002 (set (match_operand:HI 0 "s_register_operand" "")
5003 (ashiftrt:SI (match_dup 2)
5008 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
5010 emit_insn (gen_rtx_SET (VOIDmode,
5012 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5015 if (!s_register_operand (operands[1], QImode))
5016 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5017 operands[0] = gen_lowpart (SImode, operands[0]);
5018 operands[1] = gen_lowpart (SImode, operands[1]);
5019 operands[2] = gen_reg_rtx (SImode);

;; Direct ldrsb for QI -> HI from memory ('Uq' address) on v4+.
5023 (define_insn "*arm_extendqihi_insn"
5024 [(set (match_operand:HI 0 "s_register_operand" "=r")
5025 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5026 "TARGET_ARM && arm_arch4"
5027 "ldr%(sb%)\\t%0, %1"
5028 [(set_attr "type" "load_byte")
5029 (set_attr "predicable" "yes")
5030 (set_attr "pool_range" "256")
5031 (set_attr "neg_pool_range" "244")]
;; QI -> SI sign extension.  Pre-v4 memory sources are loaded to a reg
;; first (no ldrsb); pre-v6 register sources use shl 24 / ashr 24.
5034 (define_expand "extendqisi2"
5035 [(set (match_operand:SI 0 "s_register_operand" "")
5036 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5039 if (!arm_arch4 && MEM_P (operands[1]))
5040 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5042 if (!arm_arch6 && !MEM_P (operands[1]))
5044 rtx t = gen_lowpart (SImode, operands[1]);
5045 rtx tmp = gen_reg_rtx (SImode);
5046 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5047 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));

;; Pre-v6 split: register QI sign-extend as shl 24 / ashr 24.
5053 [(set (match_operand:SI 0 "register_operand" "")
5054 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5056 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5057 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5059 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);

;; ARM v4 (no v6): shifts for registers (8 bytes), ldrsb for memory.
5062 (define_insn "*arm_extendqisi"
5063 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5064 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5065 "TARGET_ARM && arm_arch4 && !arm_arch6"
5069 [(set_attr "length" "8,4")
5070 (set_attr "type" "alu_shift,load_byte")
5071 (set_attr "predicable" "yes")
5072 (set_attr "pool_range" "*,256")
5073 (set_attr "neg_pool_range" "*,244")]

;; ARM v6+: sxtb for registers, ldrsb for memory.
5076 (define_insn "*arm_extendqisi_v6"
5077 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5079 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5080 "TARGET_ARM && arm_arch6"
5084 [(set_attr "type" "alu_shift,load_byte")
5085 (set_attr "predicable" "yes")
5086 (set_attr "pool_range" "*,256")
5087 (set_attr "neg_pool_range" "*,244")]

;; Fused sign-extend-byte-and-add: sxtab.
5090 (define_insn "*arm_extendqisi2addsi"
5091 [(set (match_operand:SI 0 "s_register_operand" "=r")
5092 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5093 (match_operand:SI 2 "s_register_operand" "r")))]
5095 "sxtab%?\\t%0, %2, %1"
5096 [(set_attr "type" "alu_shift")
5097 (set_attr "insn" "xtab")
5098 (set_attr "predicable" "yes")]
;; Thumb-1 post-reload split for a signed byte load whose address is
;; not the reg+reg form ldrsb requires.  The address is rebuilt as
;; reg+reg, borrowing operand 0 as one of the registers; if the
;; destination overlaps the address, fall back to a plain byte load
;; followed by a register sign-extend.
5102 [(set (match_operand:SI 0 "register_operand" "")
5103 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5104 "TARGET_THUMB1 && reload_completed"
5105 [(set (match_dup 0) (match_dup 2))
5106 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5108 rtx addr = XEXP (operands[1], 0);
5110 if (GET_CODE (addr) == CONST)
5111 addr = XEXP (addr, 0);
5113 if (GET_CODE (addr) == PLUS
5114 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5115 /* No split necessary. */
5118 if (GET_CODE (addr) == PLUS
5119 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5122 if (reg_overlap_mentioned_p (operands[0], addr))
5124 rtx t = gen_lowpart (QImode, operands[0]);
5125 emit_move_insn (t, operands[1]);
5126 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5132 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5133 operands[2] = const0_rtx;
5135 else if (GET_CODE (addr) != PLUS)
5137 else if (REG_P (XEXP (addr, 0)))
5139 operands[2] = XEXP (addr, 1);
5140 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5144 operands[2] = XEXP (addr, 0);
5145 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5148 operands[3] = change_address (operands[1], QImode, addr);

;; Peephole: (r0 += const; r2 = 0; r3 = sext(mem[r0 + r2])) becomes
;; (r2 = const; r3 = sext(mem[r0 + r2])), valid when r0/r2 die or are
;; the destination.
5152 [(set (match_operand:SI 0 "register_operand" "")
5153 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5154 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5155 (set (match_operand:SI 3 "register_operand" "")
5156 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5158 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5159 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5160 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5161 && (peep2_reg_dead_p (3, operands[0])
5162 || rtx_equal_p (operands[0], operands[3]))
5163 && (peep2_reg_dead_p (3, operands[2])
5164 || rtx_equal_p (operands[2], operands[3]))"
5165 [(set (match_dup 2) (match_dup 1))
5166 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5168 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5169 operands[4] = change_address (operands[4], QImode, addr);

;; Thumb-1 QI sign extend: sxtb on v6; ldrsb only for reg+reg
;; addresses ('V' alternative); other memory forms handled by the
;; split above.
5172 (define_insn "thumb1_extendqisi2"
5173 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5174 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5179 if (which_alternative == 0 && arm_arch6)
5180 return "sxtb\\t%0, %1";
5181 if (which_alternative == 0)
5184 addr = XEXP (operands[1], 0);
5185 if (GET_CODE (addr) == PLUS
5186 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5187 return "ldrsb\\t%0, %1";
5191 [(set_attr_alternative "length"
5192 [(if_then_else (eq_attr "is_arch6" "yes")
5193 (const_int 2) (const_int 4))
5195 (if_then_else (eq_attr "is_arch6" "yes")
5196 (const_int 4) (const_int 6))])
5197 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; float -> double widening.
5200 (define_expand "extendsfdf2"
5201 [(set (match_operand:DF 0 "s_register_operand" "")
5202 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5203 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

5207 /* HFmode -> DFmode conversions have to go through SFmode. */
5208 (define_expand "extendhfdf2"
5209 [(set (match_operand:DF 0 "general_operand" "")
5210 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5215 op1 = convert_to_mode (SFmode, operands[1], 0);
5216 op1 = convert_to_mode (DFmode, op1, 0);
5217 emit_insn (gen_movdf (operands[0], op1));
5222 ;; Move insns (including loads and stores)
5224 ;; XXX Just some ideas about movti.
5225 ;; I don't think these are a good idea on the arm, there just aren't enough
5227 ;;(define_expand "loadti"
5228 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5229 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5232 ;;(define_expand "storeti"
5233 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5234 ;; (match_operand:TI 1 "s_register_operand" ""))]
5237 ;;(define_expand "movti"
5238 ;; [(set (match_operand:TI 0 "general_operand" "")
5239 ;; (match_operand:TI 1 "general_operand" ""))]
5245 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
5246 ;; operands[1] = copy_to_reg (operands[1]);
5247 ;; if (GET_CODE (operands[0]) == MEM)
5248 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5249 ;; else if (GET_CODE (operands[1]) == MEM)
5250 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5254 ;; emit_insn (insn);
5258 ;; Recognize garbage generated above.
5261 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5262 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5266 ;; register mem = (which_alternative < 3);
5267 ;; register const char *template;
5269 ;; operands[mem] = XEXP (operands[mem], 0);
5270 ;; switch (which_alternative)
5272 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5273 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5274 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5275 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5276 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5277 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5279 ;; output_asm_insn (template, operands);
;; DImode move expander: before reload, force the source into a
;; register when the destination is not a register (no mem-to-mem or
;; constant-to-mem moves).
5283 (define_expand "movdi"
5284 [(set (match_operand:DI 0 "general_operand" "")
5285 (match_operand:DI 1 "general_operand" ""))]
5288 if (can_create_pseudo_p ())
5290 if (GET_CODE (operands[0]) != REG)
5291 operands[1] = force_reg (DImode, operands[1]);

;; Core-register DImode move (no Maverick/VFP).  Constant alternatives
;; Da/Db/Dc cost 8/12/16 bytes; loads/stores go through
;; output_move_double.
5296 (define_insn "*arm_movdi"
5297 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5298 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5300 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
5302 && ( register_operand (operands[0], DImode)
5303 || register_operand (operands[1], DImode))"
5305 switch (which_alternative)
5312 return output_move_double (operands, true, NULL);
5315 [(set_attr "length" "8,12,16,8,8")
5316 (set_attr "type" "*,*,*,load2,store2")
5317 (set_attr "arm_pool_range" "*,*,*,1020,*")
5318 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5319 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
5320 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]

;; Split a 64-bit constant move into two arm_split_constant calls (one
;; per 32-bit half) when inlining the constant is cheap enough.
5324 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5325 (match_operand:ANY64 1 "const_double_operand" ""))]
5328 && (arm_const_double_inline_cost (operands[1])
5329 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5332 arm_split_constant (SET, SImode, curr_insn,
5333 INTVAL (gen_lowpart (SImode, operands[1])),
5334 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5335 arm_split_constant (SET, SImode, curr_insn,
5336 INTVAL (gen_highpart_mode (SImode,
5337 GET_MODE (operands[0]),
5339 gen_highpart (SImode, operands[0]), NULL_RTX, 0);

5344 ; If optimizing for size, or if we have load delay slots, then
5345 ; we want to split the constant into two separate operations.
5346 ; In both cases this may split a trivial part into a single data op
5347 ; leaving a single complex constant to load. We can also get longer
5348 ; offsets in a LDR which means we get better chances of sharing the pool
5349 ; entries. Finally, we can normally do a better job of scheduling
5350 ; LDR instructions than we can with LDM.
5351 ; This pattern will only match if the one above did not.
5353 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5354 (match_operand:ANY64 1 "const_double_operand" ""))]
5355 "TARGET_ARM && reload_completed
5356 && arm_const_double_by_parts (operands[1])"
5357 [(set (match_dup 0) (match_dup 1))
5358 (set (match_dup 2) (match_dup 3))]
5360 operands[2] = gen_highpart (SImode, operands[0]);
5361 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5363 operands[0] = gen_lowpart (SImode, operands[0]);
5364 operands[1] = gen_lowpart (SImode, operands[1]);

;; Split a 64-bit reg-to-reg move into two 32-bit moves, swapping the
;; order if the low destination would clobber the high source.
5369 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5370 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5371 "TARGET_EITHER && reload_completed"
5372 [(set (match_dup 0) (match_dup 1))
5373 (set (match_dup 2) (match_dup 3))]
5375 operands[2] = gen_highpart (SImode, operands[0]);
5376 operands[3] = gen_highpart (SImode, operands[1]);
5377 operands[0] = gen_lowpart (SImode, operands[0]);
5378 operands[1] = gen_lowpart (SImode, operands[1]);
5380 /* Handle a partial overlap. */
5381 if (rtx_equal_p (operands[0], operands[3]))
5383 rtx tmp0 = operands[0];
5384 rtx tmp1 = operands[1];
5386 operands[0] = operands[2];
5387 operands[1] = operands[3];

5394 ;; We can't actually do base+index doubleword loads if the index and
5395 ;; destination overlap. Split here so that we at least have chance to
;; Compute base+index into the destination's first register, then do a
;; register-offset DI load from it.
5398 [(set (match_operand:DI 0 "s_register_operand" "")
5399 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5400 (match_operand:SI 2 "s_register_operand" ""))))]
5402 && reg_overlap_mentioned_p (operands[0], operands[1])
5403 && reg_overlap_mentioned_p (operands[0], operands[2])"
5405 (plus:SI (match_dup 1)
5408 (mem:DI (match_dup 4)))]
5410 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5414 ;;; ??? This should have alternatives for constants.
5415 ;;; ??? This was originally identical to the movdf_insn pattern.
5416 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5417 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move: eight alternatives covering reg-reg (via two
;; adds, ordered to survive register overlap), small constants, ldmia/
;; stmia, constant-pool loads and str pairs.  NOTE(review): the insn
;; condition line and the switch case labels are elided in this excerpt.
5418 (define_insn "*thumb1_movdi_insn"
5419 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5420 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5422 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5423 && ( register_operand (operands[0], DImode)
5424 || register_operand (operands[1], DImode))"
5427 switch (which_alternative)
5431 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5432 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5433 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5435 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5437 operands[1] = GEN_INT (- INTVAL (operands[1]));
5438 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5440 return \"ldmia\\t%1, {%0, %H0}\";
5442 return \"stmia\\t%0, {%1, %H1}\";
5444 return thumb_load_double_from_address (operands);
5446 operands[2] = gen_rtx_MEM (SImode,
5447 plus_constant (XEXP (operands[0], 0), 4));
5448 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5451 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5452 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5453 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5456 [(set_attr "length" "4,4,6,2,2,6,4,4")
5457 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5458 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5459 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles: forcing mem=const / mem=mem through a
;; register, synthesising awkward constants via arm_split_constant,
;; movw/movt pairs for plain symbols, section-anchor offsets, TLS symbol
;; legitimisation, and PIC address legitimisation.
;; NOTE(review): several brace/else lines are elided in this excerpt, so
;; the exact nesting of the TARGET_ARM/THUMB2 vs TARGET_THUMB1 arms is
;; only partially visible here.
5462 (define_expand "movsi"
5463 [(set (match_operand:SI 0 "general_operand" "")
5464 (match_operand:SI 1 "general_operand" ""))]
5468 rtx base, offset, tmp;
5472 /* Everything except mem = const or mem = mem can be done easily. */
5473 if (GET_CODE (operands[0]) == MEM)
5474 operands[1] = force_reg (SImode, operands[1]);
5475 if (arm_general_register_operand (operands[0], SImode)
5476 && GET_CODE (operands[1]) == CONST_INT
5477 && !(const_ok_for_arm (INTVAL (operands[1]))
5478 || const_ok_for_arm (~INTVAL (operands[1]))))
5480 arm_split_constant (SET, SImode, NULL_RTX,
5481 INTVAL (operands[1]), operands[0], NULL_RTX,
5482 optimize && can_create_pseudo_p ());
5486 if (TARGET_USE_MOVT && !target_word_relocations
5487 && GET_CODE (operands[1]) == SYMBOL_REF
5488 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5490 arm_emit_movpair (operands[0], operands[1]);
5494 else /* TARGET_THUMB1... */
5496 if (can_create_pseudo_p ())
5498 if (GET_CODE (operands[0]) != REG)
5499 operands[1] = force_reg (SImode, operands[1]);
5503 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5505 split_const (operands[1], &base, &offset);
5506 if (GET_CODE (base) == SYMBOL_REF
5507 && !offset_within_block_p (base, INTVAL (offset)))
5509 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5510 emit_move_insn (tmp, base);
5511 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5516 /* Recognize the case where operand[1] is a reference to thread-local
5517 data and load its address to a register. */
5518 if (arm_tls_referenced_p (operands[1]))
5520 rtx tmp = operands[1];
5523 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5525 addend = XEXP (XEXP (tmp, 0), 1);
5526 tmp = XEXP (XEXP (tmp, 0), 0);
5529 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5530 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5532 tmp = legitimize_tls_address (tmp,
5533 !can_create_pseudo_p () ? operands[0] : 0);
5536 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5537 tmp = force_operand (tmp, operands[0]);
5542 && (CONSTANT_P (operands[1])
5543 || symbol_mentioned_p (operands[1])
5544 || label_mentioned_p (operands[1])))
5545 operands[1] = legitimize_pic_address (operands[1], SImode,
5546 (!can_create_pseudo_p ()
5553 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5554 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5555 ;; so this does not matter.
;; movt: writes the upper 16 bits of operand 0 with the high half of the
;; symbolic constant (operand 2), keeping the low half (tied operand 1).
5556 (define_insn "*arm_movt"
5557 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5558 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5559 (match_operand:SI 2 "general_operand" "i")))]
5561 "movt%?\t%0, #:upper16:%c2"
5562 [(set_attr "predicable" "yes")
5563 (set_attr "length" "4")]
;; Basic ARM-state SImode move (no IWMMXT, no VFP): mov/mvn of
;; immediates, movw-class 'j' constants, and ldr/str with a 4 KB
;; literal-pool range.  NOTE(review): the output template lines are
;; elided in this excerpt.
5566 (define_insn "*arm_movsi_insn"
5567 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5568 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5569 "TARGET_ARM && ! TARGET_IWMMXT
5570 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5571 && ( register_operand (operands[0], SImode)
5572 || register_operand (operands[1], SImode))"
5580 [(set_attr "type" "*,*,*,*,load1,store1")
5581 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5582 (set_attr "predicable" "yes")
5583 (set_attr "pool_range" "*,*,*,*,4096,*")
5584 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split an SImode constant that cannot be encoded directly (neither the
;; value nor its complement is a valid ARM immediate) into a synthesised
;; sequence via arm_split_constant.  NOTE(review): the "(define_split"
;; opener and part of the condition are elided in this excerpt.
5588 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5589 (match_operand:SI 1 "const_int_operand" ""))]
5591 && (!(const_ok_for_arm (INTVAL (operands[1]))
5592 || const_ok_for_arm (~INTVAL (operands[1]))))"
5593 [(clobber (const_int 0))]
5595 arm_split_constant (SET, SImode, NULL_RTX,
5596 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move, nine alternatives (mov, immediates, ldmia/stmia,
;; pool loads, hi/lo register moves).  NOTE(review): the condition line
;; and the output templates are elided in this excerpt.
5601 (define_insn "*thumb1_movsi_insn"
5602 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5603 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5605 && ( register_operand (operands[0], SImode)
5606 || register_operand (operands[1], SImode))"
5617 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5618 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5619 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5620 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Thumb-1 constant synthesis split #1: a 'J' constant (negative range)
;; becomes "mov positive value; neg".  NOTE(review): the "(define_split"
;; opener is elided in this excerpt.
5623 [(set (match_operand:SI 0 "register_operand" "")
5624 (match_operand:SI 1 "const_int_operand" ""))]
5625 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5626 [(set (match_dup 2) (match_dup 1))
5627 (set (match_dup 0) (neg:SI (match_dup 2)))]
5630 operands[1] = GEN_INT (- INTVAL (operands[1]));
5631 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Split #2: a 'K' constant (a byte shifted left) becomes
;; "mov byte; lsl #i".  The loop finds the smallest shift i such that
;; the value is an 8-bit mask shifted by i; zero-shift cases are not
;; split (comment at 5651).  NOTE(review): opener elided.
5636 [(set (match_operand:SI 0 "register_operand" "")
5637 (match_operand:SI 1 "const_int_operand" ""))]
5638 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5639 [(set (match_dup 2) (match_dup 1))
5640 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5643 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5644 unsigned HOST_WIDE_INT mask = 0xff;
5647 for (i = 0; i < 25; i++)
5648 if ((val & (mask << i)) == val)
5651 /* Don't split if the shift is zero. */
5655 operands[1] = GEN_INT (val >> i);
5656 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5657 operands[3] = GEN_INT (i);
5661 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
5663 [(set (match_operand:SI 0 "register_operand" "")
5664 (match_operand:SI 1 "const_int_operand" ""))]
5665 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
5666 [(set (match_dup 2) (match_dup 1))
5667 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
5670 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
5671 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5672 operands[3] = GEN_INT (255);
5676 ;; When generating pic, we need to load the symbol offset into a register.
5677 ;; So that the optimizer does not confuse this with a normal symbol load
5678 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5679 ;; since that is the only type of relocation we can use.
5681 ;; Wrap calculation of the whole PIC address in a single pattern for the
5682 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5683 ;; a PIC address involves two loads from memory, so we want to CSE it
5684 ;; as often as possible.
5685 ;; This pattern will be split into one of the pic_load_addr_* patterns
5686 ;; and a move after GCSE optimizations.
5688 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5689 (define_expand "calculate_pic_address"
5690 [(set (match_operand:SI 0 "register_operand" "")
5691 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5692 (unspec:SI [(match_operand:SI 2 "" "")]
5697 ;; Split calculate_pic_address into pic_load_addr_* and a move.
;; NOTE(review): the "(define_split" opener and the unspec/condition tail
;; lines are elided in this excerpt.
5699 [(set (match_operand:SI 0 "register_operand" "")
5700 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5701 (unspec:SI [(match_operand:SI 2 "" "")]
5704 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5705 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5706 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5709 ;; operand1 is the memory address to go into
5710 ;; pic_load_addr_32bit.
5711 ;; operand2 is the PIC label to be emitted
5712 ;; from pic_add_dot_plus_eight.
5713 ;; We do this to allow hoisting of the entire insn.
;; Unified PIC-address load; after reload it splits into a literal load
;; (UNSPEC_PIC_SYM) plus a pc-relative add (UNSPEC_PIC_BASE).  The pc
;; offset is 4 for Thumb and 8 for ARM (pipeline offset), per line 5725.
5714 (define_insn_and_split "pic_load_addr_unified"
5715 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5716 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5717 (match_operand:SI 2 "" "")]
5718 UNSPEC_PIC_UNIFIED))]
5721 "&& reload_completed"
5722 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5723 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5724 (match_dup 2)] UNSPEC_PIC_BASE))]
5725 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5726 [(set_attr "type" "load1,load1,load1")
5727 (set_attr "pool_range" "4096,4096,1024")
5728 (set_attr "neg_pool_range" "4084,0,0")
5729 (set_attr "arch" "a,t2,t1")
5730 (set_attr "length" "8,6,4")]
5733 ;; The rather odd constraints on the following are to force reload to leave
5734 ;; the insn alone, and to force the minipool generation pass to then move
5735 ;; the GOT symbol to memory.
5737 (define_insn "pic_load_addr_32bit"
5738 [(set (match_operand:SI 0 "s_register_operand" "=r")
5739 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5740 "TARGET_32BIT && flag_pic"
5742 [(set_attr "type" "load1")
5743 (set_attr "pool_range" "4096")
5744 (set (attr "neg_pool_range")
5745 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: lo registers only, 1 KB forward-only pool range.
5750 (define_insn "pic_load_addr_thumb1"
5751 [(set (match_operand:SI 0 "s_register_operand" "=l")
5752 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5753 "TARGET_THUMB1 && flag_pic"
5755 [(set_attr "type" "load1")
5756 (set (attr "pool_range") (const_int 1024))]
;; Thumb pc-relative fixup: emits the LPICn label then "add %0, pc".
;; (+4 pipeline offset; the ARM-state sibling below uses +8.)
5759 (define_insn "pic_add_dot_plus_four"
5760 [(set (match_operand:SI 0 "register_operand" "=r")
5761 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5763 (match_operand 2 "" "")]
5767 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5768 INTVAL (operands[2]));
5769 return \"add\\t%0, %|pc\";
5771 [(set_attr "length" "2")]
;; ARM-state pc-relative fixup: label then "add %0, pc, %1".
5774 (define_insn "pic_add_dot_plus_eight"
5775 [(set (match_operand:SI 0 "register_operand" "=r")
5776 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5778 (match_operand 2 "" "")]
5782 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5783 INTVAL (operands[2]));
5784 return \"add%?\\t%0, %|pc, %1\";
5786 [(set_attr "predicable" "yes")]
;; Fused form: the pc-relative add plus the following load in one insn
;; ("ldr %0, [pc, %1]"); produced by the peephole below.
5789 (define_insn "tls_load_dot_plus_eight"
5790 [(set (match_operand:SI 0 "register_operand" "=r")
5791 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5793 (match_operand 2 "" "")]
5797 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5798 INTVAL (operands[2]));
5799 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5801 [(set_attr "predicable" "yes")]
5804 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5805 ;; followed by a load. These sequences can be crunched down to
5806 ;; tls_load_dot_plus_eight by a peephole.
;; NOTE(review): the "(define_peephole2" opener and replacement tail are
;; elided.  The fusion only fires when the intermediate address register
;; (operand 0) dies after the load (peep2_reg_dead_p check).
5809 [(set (match_operand:SI 0 "register_operand" "")
5810 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5812 (match_operand 1 "" "")]
5814 (set (match_operand:SI 2 "arm_general_register_operand" "")
5815 (mem:SI (match_dup 0)))]
5816 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5818 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load a PIC offset through the GOT base register.
5825 (define_insn "pic_offset_arm"
5826 [(set (match_operand:SI 0 "register_operand" "=r")
5827 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5828 (unspec:SI [(match_operand:SI 2 "" "X")]
5829 UNSPEC_PIC_OFFSET))))]
5830 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5831 "ldr%?\\t%0, [%1,%2]"
5832 [(set_attr "type" "load1")]
;; Re-establish the PIC register after a longjmp, using r3 as scratch
;; (1UL << 3 names r3 in the mask passed to arm_load_pic_register).
5835 (define_expand "builtin_setjmp_receiver"
5836 [(label_ref (match_operand 0 "" ""))]
5840 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5842 if (arm_pic_register != INVALID_REGNUM)
5843 arm_load_pic_register (1UL << 3);
5847 ;; If copying one reg to another we can set the condition codes according to
5848 ;; its value. Such a move is common after a return from subroutine and the
5849 ;; result is being tested against zero.
;; NOTE(review): the comparison-against-zero operand, output templates
;; ("cmp"/"subs" forms) and closing paren are elided in this excerpt.
5851 (define_insn "*movsi_compare0"
5852 [(set (reg:CC CC_REGNUM)
5853 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5855 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5861 [(set_attr "conds" "set")]
5864 ;; Subroutine to store a half word from a register into memory.
5865 ;; Operand 0 is the source register (HImode)
5866 ;; Operand 1 is the destination address in a register (SImode)
5868 ;; In both this routine and the next, we must be careful not to spill
5869 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5870 ;; can generate unrecognizable rtl.
;; Little-endian: store low byte at offset 0, high byte (operand 0 >> 8,
;; held in the operand-2 temporary) at offset 1.
5872 (define_expand "storehi"
5873 [;; store the low byte
5874 (set (match_operand 1 "" "") (match_dup 3))
5875 ;; extract the high byte
5877 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5878 ;; store the high byte
5879 (set (match_dup 4) (match_dup 5))]
5883 rtx op1 = operands[1];
5884 rtx addr = XEXP (op1, 0);
5885 enum rtx_code code = GET_CODE (addr);
5887 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5889 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5891 operands[4] = adjust_address (op1, QImode, 1);
5892 operands[1] = adjust_address (operands[1], QImode, 0);
5893 operands[3] = gen_lowpart (QImode, operands[0]);
5894 operands[0] = gen_lowpart (SImode, operands[0]);
5895 operands[2] = gen_reg_rtx (SImode);
5896 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of storehi: byte order of the two stores swapped.
5900 (define_expand "storehi_bigend"
5901 [(set (match_dup 4) (match_dup 3))
5903 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5904 (set (match_operand 1 "" "") (match_dup 5))]
5908 rtx op1 = operands[1];
5909 rtx addr = XEXP (op1, 0);
5910 enum rtx_code code = GET_CODE (addr);
5912 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5914 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5916 operands[4] = adjust_address (op1, QImode, 1);
5917 operands[1] = adjust_address (operands[1], QImode, 0);
5918 operands[3] = gen_lowpart (QImode, operands[0]);
5919 operands[0] = gen_lowpart (SImode, operands[0]);
5920 operands[2] = gen_reg_rtx (SImode);
5921 operands[5] = gen_lowpart (QImode, operands[2]);
5925 ;; Subroutine to store a half word integer constant into memory.
;; Stores the constant as two byte stores; reuses one temporary when the
;; high and low bytes of the constant are equal.
5926 (define_expand "storeinthi"
5927 [(set (match_operand 0 "" "")
5928 (match_operand 1 "" ""))
5929 (set (match_dup 3) (match_dup 2))]
5933 HOST_WIDE_INT value = INTVAL (operands[1]);
5934 rtx addr = XEXP (operands[0], 0);
5935 rtx op0 = operands[0];
5936 enum rtx_code code = GET_CODE (addr);
5938 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5940 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5942 operands[1] = gen_reg_rtx (SImode);
5943 if (BYTES_BIG_ENDIAN)
5945 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5946 if ((value & 255) == ((value >> 8) & 255))
5947 operands[2] = operands[1];
5950 operands[2] = gen_reg_rtx (SImode);
5951 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5956 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5957 if ((value & 255) == ((value >> 8) & 255))
5958 operands[2] = operands[1];
5961 operands[2] = gen_reg_rtx (SImode);
5962 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5966 operands[3] = adjust_address (op0, QImode, 1);
5967 operands[0] = adjust_address (operands[0], QImode, 0);
5968 operands[2] = gen_lowpart (QImode, operands[2]);
5969 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction halfword store, available when strh exists (armv4+).
5973 (define_expand "storehi_single_op"
5974 [(set (match_operand:HI 0 "memory_operand" "")
5975 (match_operand:HI 1 "general_operand" ""))]
5976 "TARGET_32BIT && arm_arch4"
5978 if (!s_register_operand (operands[1], HImode))
5979 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; HImode move expander.  Routes stores through storehi/storeinthi (or
;; the single-op strh on armv4+), widens constants and loads through
;; SImode registers, and on pre-armv4 synthesises halfword loads from
;; aligned word loads or byte pairs (movhi_bytes).
;; NOTE(review): many brace/else lines are elided, so the exact nesting
;; of the TARGET_ARM / TARGET_THUMB2 / TARGET_THUMB1 arms and the reload
;; fallback is only partially visible in this excerpt.
5983 (define_expand "movhi"
5984 [(set (match_operand:HI 0 "general_operand" "")
5985 (match_operand:HI 1 "general_operand" ""))]
5990 if (can_create_pseudo_p ())
5992 if (GET_CODE (operands[0]) == MEM)
5996 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5999 if (GET_CODE (operands[1]) == CONST_INT)
6000 emit_insn (gen_storeinthi (operands[0], operands[1]));
6003 if (GET_CODE (operands[1]) == MEM)
6004 operands[1] = force_reg (HImode, operands[1]);
6005 if (BYTES_BIG_ENDIAN)
6006 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6008 emit_insn (gen_storehi (operands[1], operands[0]));
6012 /* Sign extend a constant, and keep it in an SImode reg. */
6013 else if (GET_CODE (operands[1]) == CONST_INT)
6015 rtx reg = gen_reg_rtx (SImode);
6016 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6018 /* If the constant is already valid, leave it alone. */
6019 if (!const_ok_for_arm (val))
6021 /* If setting all the top bits will make the constant
6022 loadable in a single instruction, then set them.
6023 Otherwise, sign extend the number. */
6025 if (const_ok_for_arm (~(val | ~0xffff)))
6027 else if (val & 0x8000)
6031 emit_insn (gen_movsi (reg, GEN_INT (val)));
6032 operands[1] = gen_lowpart (HImode, reg);
6034 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6035 && GET_CODE (operands[1]) == MEM)
6037 rtx reg = gen_reg_rtx (SImode);
6039 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6040 operands[1] = gen_lowpart (HImode, reg);
6042 else if (!arm_arch4)
6044 if (GET_CODE (operands[1]) == MEM)
6047 rtx offset = const0_rtx;
6048 rtx reg = gen_reg_rtx (SImode);
6050 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
6051 || (GET_CODE (base) == PLUS
6052 && (GET_CODE (offset = XEXP (base, 1))
6054 && ((INTVAL(offset) & 1) != 1)
6055 && GET_CODE (base = XEXP (base, 0)) == REG))
6056 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6060 new_rtx = widen_memory_access (operands[1], SImode,
6061 ((INTVAL (offset) & ~3)
6062 - INTVAL (offset)));
6063 emit_insn (gen_movsi (reg, new_rtx));
6064 if (((INTVAL (offset) & 2) != 0)
6065 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6067 rtx reg2 = gen_reg_rtx (SImode);
6069 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6074 emit_insn (gen_movhi_bytes (reg, operands[1]));
6076 operands[1] = gen_lowpart (HImode, reg);
6080 /* Handle loading a large integer during reload. */
6081 else if (GET_CODE (operands[1]) == CONST_INT
6082 && !const_ok_for_arm (INTVAL (operands[1]))
6083 && !const_ok_for_arm (~INTVAL (operands[1])))
6085 /* Writing a constant to memory needs a scratch, which should
6086 be handled with SECONDARY_RELOADs. */
6087 gcc_assert (GET_CODE (operands[0]) == REG);
6089 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6090 emit_insn (gen_movsi (operands[0], operands[1]));
6094 else if (TARGET_THUMB2)
6096 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6097 if (can_create_pseudo_p ())
6099 if (GET_CODE (operands[0]) != REG)
6100 operands[1] = force_reg (HImode, operands[1]);
6101 /* Zero extend a constant, and keep it in an SImode reg. */
6102 else if (GET_CODE (operands[1]) == CONST_INT)
6104 rtx reg = gen_reg_rtx (SImode);
6105 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6107 emit_insn (gen_movsi (reg, GEN_INT (val)));
6108 operands[1] = gen_lowpart (HImode, reg);
6112 else /* TARGET_THUMB1 */
6114 if (can_create_pseudo_p ())
6116 if (GET_CODE (operands[1]) == CONST_INT)
6118 rtx reg = gen_reg_rtx (SImode);
6120 emit_insn (gen_movsi (reg, operands[1]));
6121 operands[1] = gen_lowpart (HImode, reg);
6124 /* ??? We shouldn't really get invalid addresses here, but this can
6125 happen if we are passed a SP (never OK for HImode/QImode) or
6126 virtual register (also rejected as illegitimate for HImode/QImode)
6127 relative address. */
6128 /* ??? This should perhaps be fixed elsewhere, for instance, in
6129 fixup_stack_1, by checking for other kinds of invalid addresses,
6130 e.g. a bare reference to a virtual register. This may confuse the
6131 alpha though, which must handle this case differently. */
6132 if (GET_CODE (operands[0]) == MEM
6133 && !memory_address_p (GET_MODE (operands[0]),
6134 XEXP (operands[0], 0)))
6136 = replace_equiv_address (operands[0],
6137 copy_to_reg (XEXP (operands[0], 0)));
6139 if (GET_CODE (operands[1]) == MEM
6140 && !memory_address_p (GET_MODE (operands[1]),
6141 XEXP (operands[1], 0)))
6143 = replace_equiv_address (operands[1],
6144 copy_to_reg (XEXP (operands[1], 0)));
6146 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6148 rtx reg = gen_reg_rtx (SImode);
6150 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6151 operands[1] = gen_lowpart (HImode, reg);
6154 if (GET_CODE (operands[0]) == MEM)
6155 operands[1] = force_reg (HImode, operands[1]);
6157 else if (GET_CODE (operands[1]) == CONST_INT
6158 && !satisfies_constraint_I (operands[1]))
6160 /* Handle loading a large integer during reload. */
6162 /* Writing a constant to memory needs a scratch, which should
6163 be handled with SECONDARY_RELOADs. */
6164 gcc_assert (GET_CODE (operands[0]) == REG);
6166 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6167 emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move.  The ldrh case (below the switch) special-cases
;; sp-relative addresses, since sp cannot be an ldrh base register: the
;; base is first copied into the destination register.
;; NOTE(review): the condition line and some case labels are elided.
6174 (define_insn "*thumb1_movhi_insn"
6175 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6176 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6178 && ( register_operand (operands[0], HImode)
6179 || register_operand (operands[1], HImode))"
6181 switch (which_alternative)
6183 case 0: return \"add %0, %1, #0\";
6184 case 2: return \"strh %1, %0\";
6185 case 3: return \"mov %0, %1\";
6186 case 4: return \"mov %0, %1\";
6187 case 5: return \"mov %0, %1\";
6188 default: gcc_unreachable ();
6190 /* The stack pointer can end up being taken as an index register.
6191 Catch this case here and deal with it. */
6192 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6193 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
6194 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6197 ops[0] = operands[0];
6198 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6200 output_asm_insn (\"mov %0, %1\", ops);
6202 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6205 return \"ldrh %0, %1\";
6207 [(set_attr "length" "2,4,2,2,2,2")
6208 (set_attr "type" "*,load1,store1,*,*,*")
6209 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Load an HImode value as two zero-extended byte loads combined with
;; shift+or; operands 4/5 are swapped for big-endian (see C body).
6212 (define_expand "movhi_bytes"
6213 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6215 (zero_extend:SI (match_dup 6)))
6216 (set (match_operand:SI 0 "" "")
6217 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6222 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6224 mem1 = change_address (operands[1], QImode, addr);
6225 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
6226 operands[0] = gen_lowpart (SImode, operands[0]);
6228 operands[2] = gen_reg_rtx (SImode);
6229 operands[3] = gen_reg_rtx (SImode);
6232 if (BYTES_BIG_ENDIAN)
6234 operands[4] = operands[2];
6235 operands[5] = operands[3];
6239 operands[4] = operands[3];
6240 operands[5] = operands[2];
;; Big-endian HImode load via word rotate then arithmetic shift right.
;; NOTE(review): several pattern lines are elided in this excerpt.
6245 (define_expand "movhi_bigend"
6247 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6250 (ashiftrt:SI (match_dup 2) (const_int 16)))
6251 (set (match_operand:HI 0 "s_register_operand" "")
6255 operands[2] = gen_reg_rtx (SImode);
6256 operands[3] = gen_reg_rtx (SImode);
6257 operands[4] = gen_lowpart (HImode, operands[3]);
6261 ;; Pattern to recognize insn generated default case above
;; armv4+ HImode move using strh/ldrh plus mov/mvn for constants.
6262 (define_insn "*movhi_insn_arch4"
6263 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6264 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6267 && (register_operand (operands[0], HImode)
6268 || register_operand (operands[1], HImode))"
6270 mov%?\\t%0, %1\\t%@ movhi
6271 mvn%?\\t%0, #%B1\\t%@ movhi
6272 str%(h%)\\t%1, %0\\t%@ movhi
6273 ldr%(h%)\\t%0, %1\\t%@ movhi"
6274 [(set_attr "type" "*,*,store1,load1")
6275 (set_attr "predicable" "yes")
6276 (set_attr "insn" "mov,mvn,*,*")
6277 (set_attr "pool_range" "*,*,*,256")
6278 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register/immediate HImode moves only (no memory alternatives).
6281 (define_insn "*movhi_bytes"
6282 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
6283 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
6286 mov%?\\t%0, %1\\t%@ movhi
6287 mvn%?\\t%0, #%B1\\t%@ movhi"
6288 [(set_attr "predicable" "yes")
6289 (set_attr "insn" "mov,mvn")]
;; Thumb HImode store with a DI scratch; only the easy case (valid
;; address, lo source register) is currently handled — see XXX below.
6292 (define_expand "thumb_movhi_clobber"
6293 [(set (match_operand:HI 0 "memory_operand" "")
6294 (match_operand:HI 1 "register_operand" ""))
6295 (clobber (match_operand:DI 2 "register_operand" ""))]
6298 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6299 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6301 emit_insn (gen_movhi (operands[0], operands[1]));
6304 /* XXX Fixme, need to handle other cases here as well. */
6309 ;; We use a DImode scratch because we may occasionally need an additional
6310 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6311 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6312 (define_expand "reload_outhi"
6313 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6314 (match_operand:HI 1 "s_register_operand" "r")
6315 (match_operand:DI 2 "s_register_operand" "=&l")])]
6318 arm_reload_out_hi (operands);
6320 thumb_reload_out_hi (operands);
;; Input reload counterpart.  NOTE(review): as in the original source,
;; the Thumb path calls thumb_reload_out_hi here as well (line 6334).
6325 (define_expand "reload_inhi"
6326 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6327 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6328 (match_operand:DI 2 "s_register_operand" "=&r")])]
6332 arm_reload_in_hi (operands);
6334 thumb_reload_out_hi (operands);
;; QImode move expander: widens constants through an SImode register
;; (masked to 0..255 for Thumb so a movs encoding is more likely),
;; repairs illegitimate sp/virtual-register-relative addresses, and
;; prefers zero-extending loads when optimizing.
;; NOTE(review): some brace/else lines are elided in this excerpt.
6338 (define_expand "movqi"
6339 [(set (match_operand:QI 0 "general_operand" "")
6340 (match_operand:QI 1 "general_operand" ""))]
6343 /* Everything except mem = const or mem = mem can be done easily */
6345 if (can_create_pseudo_p ())
6347 if (GET_CODE (operands[1]) == CONST_INT)
6349 rtx reg = gen_reg_rtx (SImode);
6351 /* For thumb we want an unsigned immediate, then we are more likely
6352 to be able to use a movs insn. */
6354 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6356 emit_insn (gen_movsi (reg, operands[1]));
6357 operands[1] = gen_lowpart (QImode, reg);
6362 /* ??? We shouldn't really get invalid addresses here, but this can
6363 happen if we are passed a SP (never OK for HImode/QImode) or
6364 virtual register (also rejected as illegitimate for HImode/QImode)
6365 relative address. */
6366 /* ??? This should perhaps be fixed elsewhere, for instance, in
6367 fixup_stack_1, by checking for other kinds of invalid addresses,
6368 e.g. a bare reference to a virtual register. This may confuse the
6369 alpha though, which must handle this case differently. */
6370 if (GET_CODE (operands[0]) == MEM
6371 && !memory_address_p (GET_MODE (operands[0]),
6372 XEXP (operands[0], 0)))
6374 = replace_equiv_address (operands[0],
6375 copy_to_reg (XEXP (operands[0], 0)));
6376 if (GET_CODE (operands[1]) == MEM
6377 && !memory_address_p (GET_MODE (operands[1]),
6378 XEXP (operands[1], 0)))
6380 = replace_equiv_address (operands[1],
6381 copy_to_reg (XEXP (operands[1], 0)));
6384 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6386 rtx reg = gen_reg_rtx (SImode);
6388 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6389 operands[1] = gen_lowpart (QImode, reg);
6392 if (GET_CODE (operands[0]) == MEM)
6393 operands[1] = force_reg (QImode, operands[1]);
6395 else if (TARGET_THUMB
6396 && GET_CODE (operands[1]) == CONST_INT
6397 && !satisfies_constraint_I (operands[1]))
6399 /* Handle loading a large integer during reload. */
6401 /* Writing a constant to memory needs a scratch, which should
6402 be handled with SECONDARY_RELOADs. */
6403 gcc_assert (GET_CODE (operands[0]) == REG);
6405 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6406 emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM/Thumb-2 QImode move; Uu alternatives are Thumb-2 16-bit
;; encodings.  NOTE(review): condition and templates elided.
6413 (define_insn "*arm_movqi_insn"
6414 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6415 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6417 && ( register_operand (operands[0], QImode)
6418 || register_operand (operands[1], QImode))"
6426 [(set_attr "type" "*,*,load1,store1,load1,store1")
6427 (set_attr "insn" "mov,mvn,*,*,*,*")
6428 (set_attr "predicable" "yes")
6429 (set_attr "arch" "any,any,t2,t2,any,any")
6430 (set_attr "length" "4,4,2,2,4,4")]
;; Thumb-1 QImode move.  NOTE(review): condition and templates elided.
6433 (define_insn "*thumb1_movqi_insn"
6434 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6435 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6437 && ( register_operand (operands[0], QImode)
6438 || register_operand (operands[1], QImode))"
6446 [(set_attr "length" "2")
6447 (set_attr "type" "*,load1,store1,*,*,*")
6448 (set_attr "insn" "*,*,*,mov,mov,mov")
6449 (set_attr "pool_range" "*,32,*,*,*,*")
6450 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode (half-precision float, __fp16) move expander: forces
;; mem destinations / non-register cases through a register.
6453 (define_expand "movhf"
6454 [(set (match_operand:HF 0 "general_operand" "")
6455 (match_operand:HF 1 "general_operand" ""))]
6460 if (GET_CODE (operands[0]) == MEM)
6461 operands[1] = force_reg (HFmode, operands[1]);
6463 else /* TARGET_THUMB1 */
6465 if (can_create_pseudo_p ())
6467 if (GET_CODE (operands[0]) != REG)
6468 operands[1] = force_reg (HFmode, operands[1]);
;; Soft-float __fp16 move for 32-bit targets without the FP16 extension:
;; ldrh/strh for memory, mov for registers, and for constants the raw
;; 16-bit image is materialised with movw (Thumb-2-capable cores) or a
;; mov/orr byte pair.
6474 (define_insn "*arm32_movhf"
6475 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6476 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6477 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6478 && ( s_register_operand (operands[0], HFmode)
6479 || s_register_operand (operands[1], HFmode))"
6481 switch (which_alternative)
6483 case 0: /* ARM register from memory */
6484 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6485 case 1: /* memory from ARM register */
6486 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6487 case 2: /* ARM register from ARM register */
6488 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6489 case 3: /* ARM register from constant */
6495 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6496 bits = real_to_target (NULL, &r, HFmode);
6497 ops[0] = operands[0];
6498 ops[1] = GEN_INT (bits);
6499 ops[2] = GEN_INT (bits & 0xff00);
6500 ops[3] = GEN_INT (bits & 0x00ff);
6502 if (arm_arch_thumb2)
6503 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6505 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6512 [(set_attr "conds" "unconditional")
6513 (set_attr "type" "load1,store1,*,*")
6514 (set_attr "insn" "*,*,mov,mov")
6515 (set_attr "length" "4,4,4,8")
6516 (set_attr "predicable" "yes")]
;; Thumb-1 __fp16 move.  Case 1 distinguishes a constant-pool reference
;; (label-relative address => word ldr) from an ordinary ldrh load.
6519 (define_insn "*thumb1_movhf"
6520 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6521 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6523 && ( s_register_operand (operands[0], HFmode)
6524 || s_register_operand (operands[1], HFmode))"
6526 switch (which_alternative)
6531 gcc_assert (GET_CODE(operands[1]) == MEM);
6532 addr = XEXP (operands[1], 0);
6533 if (GET_CODE (addr) == LABEL_REF
6534 || (GET_CODE (addr) == CONST
6535 && GET_CODE (XEXP (addr, 0)) == PLUS
6536 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6537 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6539 /* Constant pool entry. */
6540 return \"ldr\\t%0, %1\";
6542 return \"ldrh\\t%0, %1\";
6544 case 2: return \"strh\\t%1, %0\";
6545 default: return \"mov\\t%0, %1\";
6548 [(set_attr "length" "2")
6549 (set_attr "type" "*,load1,store1,*,*")
6550 (set_attr "insn" "mov,*,*,mov,mov")
6551 (set_attr "pool_range" "*,1020,*,*,*")
6552 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; SFmode move expander: legitimizes operands before the insn patterns below
;; match.  A store to memory forces the source into a register; on Thumb-1
;; any non-register destination additionally forces the source into a
;; register (while pseudos can still be created).
6554 (define_expand "movsf"
6555 [(set (match_operand:SF 0 "general_operand" "")
6556 (match_operand:SF 1 "general_operand" ""))]
6561 if (GET_CODE (operands[0]) == MEM)
6562 operands[1] = force_reg (SFmode, operands[1]);
6564 else /* TARGET_THUMB1 */
6566 if (can_create_pseudo_p ())
6568 if (GET_CODE (operands[0]) != REG)
6569 operands[1] = force_reg (SFmode, operands[1]);
6575 ;; Transform a floating-point move of a constant into a core register into
6576 ;; an SImode operation.
;; Split: rewrite the SF constant load as the equivalent SImode move by
;; taking the low parts of both operands; FAIL if a lowpart cannot be taken
;; (failure branch elided in this view).
6578 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6579 (match_operand:SF 1 "immediate_operand" ""))]
6582 && GET_CODE (operands[1]) == CONST_DOUBLE"
6583 [(set (match_dup 2) (match_dup 3))]
6585 operands[2] = gen_lowpart (SImode, operands[0]);
6586 operands[3] = gen_lowpart (SImode, operands[1]);
6587 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move on core registers: mov / ldr / str, predicable.
6592 (define_insn "*arm_movsf_soft_insn"
6593 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6594 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6596 && TARGET_SOFT_FLOAT
6597 && (GET_CODE (operands[0]) != MEM
6598 || register_operand (operands[1], SFmode))"
6601 ldr%?\\t%0, %1\\t%@ float
6602 str%?\\t%1, %0\\t%@ float"
6603 [(set_attr "predicable" "yes")
6604 (set_attr "type" "*,load1,store1")
6605 (set_attr "insn" "mov,*,*")
6606 (set_attr "pool_range" "*,4096,*")
6607 (set_attr "arm_neg_pool_range" "*,4084,*")
6608 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6611 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move (output template elided in this view).
6612 (define_insn "*thumb1_movsf_insn"
6613 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6614 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6616 && ( register_operand (operands[0], SFmode)
6617 || register_operand (operands[1], SFmode))"
6626 [(set_attr "length" "2")
6627 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6628 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6629 (set_attr "insn" "*,*,*,*,*,mov,mov")
6630 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; DFmode move expander: same legitimization strategy as movsf —
;; memory destinations force the source into a register, and on Thumb any
;; non-register destination does too while pseudos are still available.
6633 (define_expand "movdf"
6634 [(set (match_operand:DF 0 "general_operand" "")
6635 (match_operand:DF 1 "general_operand" ""))]
6640 if (GET_CODE (operands[0]) == MEM)
6641 operands[1] = force_reg (DFmode, operands[1]);
6643 else /* TARGET_THUMB */
6645 if (can_create_pseudo_p ())
6647 if (GET_CODE (operands[0]) != REG)
6648 operands[1] = force_reg (DFmode, operands[1]);
6654 ;; Reloading a df mode value stored in integer regs to memory can require a
;; Output-reload helper: stores a DFmode value held in core registers to a
;; tricky memory operand.  Simple side-effect addresses (POST_INC/PRE_DEC)
;; are handled as a DImode move; PRE_INC/POST_DEC adjust the base register
;; explicitly; otherwise the address is computed into scratch operand 2 and
;; the store rewritten through it.  (Several interior lines elided.)
6656 (define_expand "reload_outdf"
6657 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6658 (match_operand:DF 1 "s_register_operand" "r")
6659 (match_operand:SI 2 "s_register_operand" "=&r")]
6663 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6666 operands[2] = XEXP (operands[0], 0);
6667 else if (code == POST_INC || code == PRE_DEC)
6669 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6670 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6671 emit_insn (gen_movdi (operands[0], operands[1]));
6674 else if (code == PRE_INC)
6676 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6678 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6681 else if (code == POST_DEC)
6682 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6684 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6685 XEXP (XEXP (operands[0], 0), 1)));
6687 emit_insn (gen_rtx_SET (VOIDmode,
6688 replace_equiv_address (operands[0], operands[2]),
6691 if (code == POST_DEC)
6692 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move: all alternatives are emitted through
;; output_move_double (register pairs, constants, load/store).
6698 (define_insn "*movdf_soft_insn"
6699 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6700 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6701 "TARGET_32BIT && TARGET_SOFT_FLOAT
6702 && ( register_operand (operands[0], DFmode)
6703 || register_operand (operands[1], DFmode))"
6705 switch (which_alternative)
6712 return output_move_double (operands, true, NULL);
6715 [(set_attr "length" "8,12,16,8,8")
6716 (set_attr "type" "*,*,*,load2,store2")
6717 (set_attr "pool_range" "*,*,*,1020,*")
6718 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6719 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6722 ;;; ??? This should have alternatives for constants.
6723 ;;; ??? This was originally identical to the movdi_insn pattern.
6724 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6725 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DFmode move.  Register-pair copies are ordered by register number
;; to avoid clobbering a source half before it is read; loads/stores use
;; ldmia/stmia or a two-str sequence for general memory.
6726 (define_insn "*thumb_movdf_insn"
6727 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6728 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6730 && ( register_operand (operands[0], DFmode)
6731 || register_operand (operands[1], DFmode))"
6733 switch (which_alternative)
6737 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6738 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6739 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6741 return \"ldmia\\t%1, {%0, %H0}\";
6743 return \"stmia\\t%0, {%1, %H1}\";
6745 return thumb_load_double_from_address (operands);
6747 operands[2] = gen_rtx_MEM (SImode,
6748 plus_constant (XEXP (operands[0], 0), 4));
6749 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6752 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6753 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6754 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6757 [(set_attr "length" "4,2,2,6,4,4")
6758 (set_attr "type" "*,load2,store2,load2,store2,*")
6759 (set_attr "insn" "*,*,*,*,*,mov")
6760 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode (extended-precision) move — only available with the legacy FPA
;; coprocessor.  Stores to memory force the source into a register first.
6763 (define_expand "movxf"
6764 [(set (match_operand:XF 0 "general_operand" "")
6765 (match_operand:XF 1 "general_operand" ""))]
6766 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6768 if (GET_CODE (operands[0]) == MEM)
6769 operands[1] = force_reg (XFmode, operands[1]);
6775 ;; load- and store-multiple insns
6776 ;; The arm can load/store any set of registers, provided that they are in
6777 ;; ascending order, but these expanders assume a contiguous set.
;; ldm expander: accepts only a contiguous run of 2..14 core registers
;; starting below LAST_ARM_REGNUM and not running past it; anything else is
;; rejected (the FAIL branch is elided in this view).  The actual parallel
;; is built by arm_gen_load_multiple.
6779 (define_expand "load_multiple"
6780 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6781 (match_operand:SI 1 "" ""))
6782 (use (match_operand:SI 2 "" ""))])]
6785 HOST_WIDE_INT offset = 0;
6787 /* Support only fixed point registers. */
6788 if (GET_CODE (operands[2]) != CONST_INT
6789 || INTVAL (operands[2]) > 14
6790 || INTVAL (operands[2]) < 2
6791 || GET_CODE (operands[1]) != MEM
6792 || GET_CODE (operands[0]) != REG
6793 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6794 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6798 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6799 INTVAL (operands[2]),
6800 force_reg (SImode, XEXP (operands[1], 0)),
6801 FALSE, operands[1], &offset);
;; stm expander: mirror image of load_multiple with the same register-range
;; restrictions; built by arm_gen_store_multiple.
6804 (define_expand "store_multiple"
6805 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6806 (match_operand:SI 1 "" ""))
6807 (use (match_operand:SI 2 "" ""))])]
6810 HOST_WIDE_INT offset = 0;
6812 /* Support only fixed point registers. */
6813 if (GET_CODE (operands[2]) != CONST_INT
6814 || INTVAL (operands[2]) > 14
6815 || INTVAL (operands[2]) < 2
6816 || GET_CODE (operands[1]) != REG
6817 || GET_CODE (operands[0]) != MEM
6818 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6819 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6823 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6824 INTVAL (operands[2]),
6825 force_reg (SImode, XEXP (operands[0], 0)),
6826 FALSE, operands[0], &offset);
6830 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6831 ;; We could let this apply for blocks of less than this, but it clobbers so
6832 ;; many registers that there is then probably a better way.
;; Block-copy expander: operand 2 is the byte count, operand 3 the
;; alignment.  The 32-bit path defers to arm_gen_movmemqi; the Thumb-1 path
;; handles only word-aligned copies of at most 48 bytes via
;; thumb_expand_movmemqi.  (FAIL/DONE control lines elided in this view.)
6834 (define_expand "movmemqi"
6835 [(match_operand:BLK 0 "general_operand" "")
6836 (match_operand:BLK 1 "general_operand" "")
6837 (match_operand:SI 2 "const_int_operand" "")
6838 (match_operand:SI 3 "const_int_operand" "")]
6843 if (arm_gen_movmemqi (operands))
6847 else /* TARGET_THUMB1 */
6849 if ( INTVAL (operands[3]) != 4
6850 || INTVAL (operands[2]) > 48)
6853 thumb_expand_movmemqi (operands);
6859 ;; Thumb block-move insns
;; Copy three words and post-increment both pointers by 12; emitted via
;; thumb_output_move_mem_multiple using three low-register scratches.
6861 (define_insn "movmem12b"
6862 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6863 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6864 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6865 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6866 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6867 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6868 (set (match_operand:SI 0 "register_operand" "=l")
6869 (plus:SI (match_dup 2) (const_int 12)))
6870 (set (match_operand:SI 1 "register_operand" "=l")
6871 (plus:SI (match_dup 3) (const_int 12)))
6872 (clobber (match_scratch:SI 4 "=&l"))
6873 (clobber (match_scratch:SI 5 "=&l"))
6874 (clobber (match_scratch:SI 6 "=&l"))]
6876 "* return thumb_output_move_mem_multiple (3, operands);"
6877 [(set_attr "length" "4")
6878 ; This isn't entirely accurate... It loads as well, but in terms of
6879 ; scheduling the following insn it is better to consider it as a store
6880 (set_attr "type" "store3")]
;; Two-word variant of the above: copies two words, advances both pointers
;; by 8, and needs only two scratch registers.
6883 (define_insn "movmem8b"
6884 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6885 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6886 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6887 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6888 (set (match_operand:SI 0 "register_operand" "=l")
6889 (plus:SI (match_dup 2) (const_int 8)))
6890 (set (match_operand:SI 1 "register_operand" "=l")
6891 (plus:SI (match_dup 3) (const_int 8)))
6892 (clobber (match_scratch:SI 4 "=&l"))
6893 (clobber (match_scratch:SI 5 "=&l"))
6895 "* return thumb_output_move_mem_multiple (2, operands);"
6896 [(set_attr "length" "4")
6897 ; This isn't entirely accurate... It loads as well, but in terms of
6898 ; scheduling the following insn it is better to consider it as a store
6899 (set_attr "type" "store2")]
6904 ;; Compare & branch insns
6905 ;; The range calculations are based as follows:
6906 ;; For forward branches, the address calculation returns the address of
6907 ;; the next instruction. This is 2 beyond the branch instruction.
6908 ;; For backward branches, the address calculation returns the address of
6909 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6910 ;; instruction for the shortest sequence, and 4 before the branch instruction
6911 ;; if we have to jump around an unconditional branch.
6912 ;; To the basic branch range the PC offset must be added (this is +4).
6913 ;; So for forward branches we have
6914 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6915 ;; And for backward branches we have
6916 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6918 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6919 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch expander.  On 32-bit targets the comparison is
;; legitimized against arm_add_operand and emitted through cbranch_cc; on
;; Thumb-1, a negatable constant routes to cbranchsi4_scratch (which uses
;; add of the negated value), otherwise the operand is forced to satisfy
;; thumb1_cmp_operand.  (Branching structure partially elided in this view.)
6921 (define_expand "cbranchsi4"
6922 [(set (pc) (if_then_else
6923 (match_operator 0 "expandable_comparison_operator"
6924 [(match_operand:SI 1 "s_register_operand" "")
6925 (match_operand:SI 2 "nonmemory_operand" "")])
6926 (label_ref (match_operand 3 "" ""))
6928 "TARGET_THUMB1 || TARGET_32BIT"
6932 if (!arm_add_operand (operands[2], SImode))
6933 operands[2] = force_reg (SImode, operands[2]);
6934 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6938 if (thumb1_cmpneg_operand (operands[2], SImode))
6940 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6941 operands[3], operands[0]));
6944 if (!thumb1_cmp_operand (operands[2], SImode))
6945 operands[2] = force_reg (SImode, operands[2]);
6948 ;; A pattern to recognize a special situation and optimize for it.
6949 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6950 ;; due to the available addressing modes. Hence, convert a signed comparison
6951 ;; with zero into an unsigned comparison with 127 if possible.
;; QImode compare-and-branch against zero: rewrite the signed test as a
;; zero-extended SImode comparison with 127 (GE x,0 becomes LEU ext(x),127;
;; LT becomes GTU) and re-expand through cbranchsi4.
6952 (define_expand "cbranchqi4"
6953 [(set (pc) (if_then_else
6954 (match_operator 0 "lt_ge_comparison_operator"
6955 [(match_operand:QI 1 "memory_operand" "")
6956 (match_operand:QI 2 "const0_operand" "")])
6957 (label_ref (match_operand 3 "" ""))
6962 xops[1] = gen_reg_rtx (SImode);
6963 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6964 xops[2] = GEN_INT (127);
6965 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6966 VOIDmode, xops[1], xops[2]);
6967 xops[3] = operands[3];
6968 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode compare-and-branch for hard-float targets: delegate directly to
;; cbranch_cc, which emits the compare and conditional jump.
6972 (define_expand "cbranchsf4"
6973 [(set (pc) (if_then_else
6974 (match_operator 0 "expandable_comparison_operator"
6975 [(match_operand:SF 1 "s_register_operand" "")
6976 (match_operand:SF 2 "arm_float_compare_operand" "")])
6977 (label_ref (match_operand 3 "" ""))
6979 "TARGET_32BIT && TARGET_HARD_FLOAT"
6980 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6981 operands[3])); DONE;"
;; DFmode variant: additionally requires double-precision hardware
;; (excluded for single-precision-only VFP).
6984 (define_expand "cbranchdf4"
6985 [(set (pc) (if_then_else
6986 (match_operator 0 "expandable_comparison_operator"
6987 [(match_operand:DF 1 "s_register_operand" "")
6988 (match_operand:DF 2 "arm_float_compare_operand" "")])
6989 (label_ref (match_operand 3 "" ""))
6991 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6992 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6993 operands[3])); DONE;"
;; DImode compare-and-branch.  Comparison codes that arm_gen_compare_reg
;; cannot handle directly (GT/LT-family, per the swap table below) are
;; flipped by swapping the operands and using the reversed code; the result
;; is emitted through cbranch_cc either way.  (switch/case framing elided.)
6996 (define_expand "cbranchdi4"
6997 [(set (pc) (if_then_else
6998 (match_operator 0 "expandable_comparison_operator"
6999 [(match_operand:DI 1 "cmpdi_operand" "")
7000 (match_operand:DI 2 "cmpdi_operand" "")])
7001 (label_ref (match_operand 3 "" ""))
7005 rtx swap = NULL_RTX;
7006 enum rtx_code code = GET_CODE (operands[0]);
7008 /* We should not have two constants. */
7009 gcc_assert (GET_MODE (operands[1]) == DImode
7010 || GET_MODE (operands[2]) == DImode);
7012 /* Flip unimplemented DImode comparisons to a form that
7013 arm_gen_compare_reg can handle. */
7017 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
7019 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
7021 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
7023 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
7028 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
7031 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; Thumb-1 compare-and-branch.  The cmp is elided when the per-function CC
;; tracking state (cfun->machine->thumb1_cc_*) shows the flags already
;; reflect this operand pair in a usable mode; otherwise a cmp is emitted
;; and the tracking state updated.  The branch form depends on computed
;; length: short b<cond>, an inverted branch around a b, or around a bl for
;; far jumps.
7037 (define_insn "cbranchsi4_insn"
7038 [(set (pc) (if_then_else
7039 (match_operator 0 "arm_comparison_operator"
7040 [(match_operand:SI 1 "s_register_operand" "l,l*h")
7041 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7042 (label_ref (match_operand 3 "" ""))
7046 rtx t = cfun->machine->thumb1_cc_insn;
7049 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7050 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7052 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7054 if (!noov_comparison_operator (operands[0], VOIDmode))
7057 else if (cfun->machine->thumb1_cc_mode != CCmode)
7062 output_asm_insn ("cmp\t%1, %2", operands);
7063 cfun->machine->thumb1_cc_insn = insn;
7064 cfun->machine->thumb1_cc_op0 = operands[1];
7065 cfun->machine->thumb1_cc_op1 = operands[2];
7066 cfun->machine->thumb1_cc_mode = CCmode;
7069 /* Ensure we emit the right type of condition code on the jump. */
7070 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7073 switch (get_attr_length (insn))
7075 case 4: return \"b%d0\\t%l3\";
7076 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7077 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7080 [(set (attr "far_jump")
7082 (eq_attr "length" "8")
7083 (const_string "yes")
7084 (const_string "no")))
7085 (set (attr "length")
7087 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7088 (le (minus (match_dup 3) (pc)) (const_int 256)))
7091 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7092 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Compare-and-branch against a negatable constant: the comparison is done
;; by adding the negated constant (#%n2) into a scratch (or the input)
;; register, then branching on the resulting flags with the same
;; length-dependent short/long/far branch selection as cbranchsi4_insn.
7097 (define_insn "cbranchsi4_scratch"
7098 [(set (pc) (if_then_else
7099 (match_operator 4 "arm_comparison_operator"
7100 [(match_operand:SI 1 "s_register_operand" "l,0")
7101 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7102 (label_ref (match_operand 3 "" ""))
7104 (clobber (match_scratch:SI 0 "=l,l"))]
7107 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7109 switch (get_attr_length (insn))
7111 case 4: return \"b%d4\\t%l3\";
7112 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7113 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7116 [(set (attr "far_jump")
7118 (eq_attr "length" "8")
7119 (const_string "yes")
7120 (const_string "no")))
7121 (set (attr "length")
7123 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7124 (le (minus (match_dup 3) (pc)) (const_int 256)))
7127 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7128 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7133 ;; Two peepholes to generate subtract of 0 instead of a move if the
7134 ;; condition codes will be useful.
;; First peephole: a low-register copy followed by a conditional branch
;; testing the SOURCE against zero becomes "subs dst, src, #0" plus a branch
;; on dst — the subtract sets the flags the branch needs.
7136 [(set (match_operand:SI 0 "low_register_operand" "")
7137 (match_operand:SI 1 "low_register_operand" ""))
7139 (if_then_else (match_operator 2 "arm_comparison_operator"
7140 [(match_dup 1) (const_int 0)])
7141 (label_ref (match_operand 3 "" ""))
7144 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7146 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7147 (label_ref (match_dup 3))
7151 ;; Sigh! This variant shouldn't be needed, but combine often fails to
7152 ;; merge cases like this because the op1 is a hard register in
7153 ;; arm_class_likely_spilled_p.
;; Second peephole: identical transformation, but the branch tests the
;; DESTINATION of the copy rather than the source.
7155 [(set (match_operand:SI 0 "low_register_operand" "")
7156 (match_operand:SI 1 "low_register_operand" ""))
7158 (if_then_else (match_operator 2 "arm_comparison_operator"
7159 [(match_dup 0) (const_int 0)])
7160 (label_ref (match_operand 3 "" ""))
7163 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7165 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7166 (label_ref (match_dup 3))
;; Branch on (reg == -reg2) / (reg != -reg2): implemented as cmn (compare
;; negative), which adds the operands to set the flags, followed by the
;; usual length-selected short/long/far conditional branch.
7170 (define_insn "*negated_cbranchsi4"
7173 (match_operator 0 "equality_operator"
7174 [(match_operand:SI 1 "s_register_operand" "l")
7175 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7176 (label_ref (match_operand 3 "" ""))
7180 output_asm_insn (\"cmn\\t%1, %2\", operands);
7181 switch (get_attr_length (insn))
7183 case 4: return \"b%d0\\t%l3\";
7184 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7185 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7188 [(set (attr "far_jump")
7190 (eq_attr "length" "8")
7191 (const_string "yes")
7192 (const_string "no")))
7193 (set (attr "length")
7195 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7196 (le (minus (match_dup 3) (pc)) (const_int 256)))
7199 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7200 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single extracted bit: shift the tested bit into the sign
;; position of a scratch register (lsl by 31 - bitpos) so the flags set by
;; the shift drive the conditional branch.
7205 (define_insn "*tbit_cbranch"
7208 (match_operator 0 "equality_operator"
7209 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7211 (match_operand:SI 2 "const_int_operand" "i"))
7213 (label_ref (match_operand 3 "" ""))
7215 (clobber (match_scratch:SI 4 "=l"))]
7220 op[0] = operands[4];
7221 op[1] = operands[1];
7222 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7224 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7225 switch (get_attr_length (insn))
7227 case 4: return \"b%d0\\t%l3\";
7228 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7229 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7232 [(set (attr "far_jump")
7234 (eq_attr "length" "8")
7235 (const_string "yes")
7236 (const_string "no")))
7237 (set (attr "length")
7239 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7240 (le (minus (match_dup 3) (pc)) (const_int 256)))
7243 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7244 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the low N bits of a register: shift them to the top of a
;; scratch (lsl by 32 - N) so the shift's flag result feeds the branch.
;; Same shape as *tbit_cbranch but for a low-bit field rather than one bit.
7249 (define_insn "*tlobits_cbranch"
7252 (match_operator 0 "equality_operator"
7253 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7254 (match_operand:SI 2 "const_int_operand" "i")
7257 (label_ref (match_operand 3 "" ""))
7259 (clobber (match_scratch:SI 4 "=l"))]
7264 op[0] = operands[4];
7265 op[1] = operands[1];
7266 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7268 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7269 switch (get_attr_length (insn))
7271 case 4: return \"b%d0\\t%l3\";
7272 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7273 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7276 [(set (attr "far_jump")
7278 (eq_attr "length" "8")
7279 (const_string "yes")
7280 (const_string "no")))
7281 (set (attr "length")
7283 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7284 (le (minus (match_dup 3) (pc)) (const_int 256)))
7287 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7288 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on (a AND b) ==/!= 0: emitted as tst followed by the usual
;; length-selected conditional branch; no result register is written.
7293 (define_insn "*tstsi3_cbranch"
7296 (match_operator 3 "equality_operator"
7297 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7298 (match_operand:SI 1 "s_register_operand" "l"))
7300 (label_ref (match_operand 2 "" ""))
7305 output_asm_insn (\"tst\\t%0, %1\", operands);
7306 switch (get_attr_length (insn))
7308 case 4: return \"b%d3\\t%l2\";
7309 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7310 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7313 [(set (attr "far_jump")
7315 (eq_attr "length" "8")
7316 (const_string "yes")
7317 (const_string "no")))
7318 (set (attr "length")
7320 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7321 (le (minus (match_dup 2) (pc)) (const_int 256)))
7324 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7325 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Fused decrement-and-branch: subtract 1 from operand 2, store the result
;; in operand 0 (low reg, hi reg via mov, or memory via str — the latter two
;; need a low scratch because sub cannot target them directly), then branch
;; on the flags of the subtract.  The branch tests the pre-decrement value
;; against 1 with the comparison code derived from the original NE/EQ.
7330 (define_insn "*cbranchne_decr1"
7332 (if_then_else (match_operator 3 "equality_operator"
7333 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7335 (label_ref (match_operand 4 "" ""))
7337 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7338 (plus:SI (match_dup 2) (const_int -1)))
7339 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7344 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7346 VOIDmode, operands[2], const1_rtx);
7347 cond[1] = operands[4];
7349 if (which_alternative == 0)
7350 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7351 else if (which_alternative == 1)
7353 /* We must provide an alternative for a hi reg because reload
7354 cannot handle output reloads on a jump instruction, but we
7355 can't subtract into that. Fortunately a mov from lo to hi
7356 does not clobber the condition codes. */
7357 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7358 output_asm_insn (\"mov\\t%0, %1\", operands);
7362 /* Similarly, but the target is memory. */
7363 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7364 output_asm_insn (\"str\\t%1, %0\", operands);
7367 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7370 output_asm_insn (\"b%d0\\t%l1\", cond);
7373 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7374 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7376 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7377 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7381 [(set (attr "far_jump")
7383 (ior (and (eq (symbol_ref ("which_alternative"))
7385 (eq_attr "length" "8"))
7386 (eq_attr "length" "10"))
7387 (const_string "yes")
7388 (const_string "no")))
7389 (set_attr_alternative "length"
7393 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7394 (le (minus (match_dup 4) (pc)) (const_int 256)))
7397 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7398 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7403 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7404 (le (minus (match_dup 4) (pc)) (const_int 256)))
7407 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7408 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7413 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7414 (le (minus (match_dup 4) (pc)) (const_int 256)))
7417 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7418 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7423 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7424 (le (minus (match_dup 4) (pc)) (const_int 256)))
7427 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7428 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Fused add-and-branch (EQ/NE/GE/LT only): compute op2 + op3 into operand 0
;; (directly for low regs; via the low scratch for hi-reg or memory
;; destinations, followed by mov or str), then branch on the add's flags.
;; A negative constant addend is emitted as sub of its negation.
7433 (define_insn "*addsi3_cbranch"
7436 (match_operator 4 "arm_comparison_operator"
7438 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7439 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7441 (label_ref (match_operand 5 "" ""))
7444 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7445 (plus:SI (match_dup 2) (match_dup 3)))
7446 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7448 && (GET_CODE (operands[4]) == EQ
7449 || GET_CODE (operands[4]) == NE
7450 || GET_CODE (operands[4]) == GE
7451 || GET_CODE (operands[4]) == LT)"
7456 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7457 cond[1] = operands[2];
7458 cond[2] = operands[3];
7460 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7461 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7463 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7465 if (which_alternative >= 2
7466 && which_alternative < 4)
7467 output_asm_insn (\"mov\\t%0, %1\", operands);
7468 else if (which_alternative >= 4)
7469 output_asm_insn (\"str\\t%1, %0\", operands);
7471 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7474 return \"b%d4\\t%l5\";
7476 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7478 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7482 [(set (attr "far_jump")
7484 (ior (and (lt (symbol_ref ("which_alternative"))
7486 (eq_attr "length" "8"))
7487 (eq_attr "length" "10"))
7488 (const_string "yes")
7489 (const_string "no")))
7490 (set (attr "length")
7492 (lt (symbol_ref ("which_alternative"))
7495 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7496 (le (minus (match_dup 5) (pc)) (const_int 256)))
7499 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7500 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7504 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7505 (le (minus (match_dup 5) (pc)) (const_int 256)))
7508 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7509 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; As *addsi3_cbranch but the sum itself is dead: only the flags matter.
;; Per-alternative the compare is done with cmp against the negated
;; constant, cmn against a register, or an add/sub into the scratch (or the
;; input register itself); the branch selection by length follows.
7514 (define_insn "*addsi3_cbranch_scratch"
7517 (match_operator 3 "arm_comparison_operator"
7519 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7520 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7522 (label_ref (match_operand 4 "" ""))
7524 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7526 && (GET_CODE (operands[3]) == EQ
7527 || GET_CODE (operands[3]) == NE
7528 || GET_CODE (operands[3]) == GE
7529 || GET_CODE (operands[3]) == LT)"
7532 switch (which_alternative)
7535 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7538 output_asm_insn (\"cmn\t%1, %2\", operands);
7541 if (INTVAL (operands[2]) < 0)
7542 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7544 output_asm_insn (\"add\t%0, %1, %2\", operands);
7547 if (INTVAL (operands[2]) < 0)
7548 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7550 output_asm_insn (\"add\t%0, %0, %2\", operands);
7554 switch (get_attr_length (insn))
7557 return \"b%d3\\t%l4\";
7559 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7561 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7565 [(set (attr "far_jump")
7567 (eq_attr "length" "8")
7568 (const_string "yes")
7569 (const_string "no")))
7570 (set (attr "length")
7572 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7573 (le (minus (match_dup 4) (pc)) (const_int 256)))
7576 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7577 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7583 ;; Comparison and test insns
;; SImode compare setting the CC register (output templates partially
;; elided in this view).  Alternatives cover the 16-bit Thumb-2 encodings
;; and the general 32-bit cmp/cmn forms.
7585 (define_insn "*arm_cmpsi_insn"
7586 [(set (reg:CC CC_REGNUM)
7587 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7588 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7595 [(set_attr "conds" "set")
7596 (set_attr "arch" "t2,t2,any,any")
7597 (set_attr "length" "2,2,4,4")
7598 (set_attr "predicable" "yes")]
;; Compare a register against a shifted register (shift by immediate or,
;; on ARM state only, by register).
7601 (define_insn "*cmpsi_shiftsi"
7602 [(set (reg:CC CC_REGNUM)
7603 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7604 (match_operator:SI 3 "shift_operator"
7605 [(match_operand:SI 1 "s_register_operand" "r,r")
7606 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7609 [(set_attr "conds" "set")
7610 (set_attr "shift" "1")
7611 (set_attr "arch" "32,a")
7612 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Same comparison with the operands swapped; CC_SWP records that the flags
;; are for the reversed operand order.
7614 (define_insn "*cmpsi_shiftsi_swp"
7615 [(set (reg:CC_SWP CC_REGNUM)
7616 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7617 [(match_operand:SI 1 "s_register_operand" "r,r")
7618 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7619 (match_operand:SI 0 "s_register_operand" "r,r")))]
7622 [(set_attr "conds" "set")
7623 (set_attr "shift" "1")
7624 (set_attr "arch" "32,a")
7625 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Compare against the negation of a shifted register (zero-flag only);
;; insn type depends on whether the shift amount is a constant.
7627 (define_insn "*arm_cmpsi_negshiftsi_si"
7628 [(set (reg:CC_Z CC_REGNUM)
7630 (neg:SI (match_operator:SI 1 "shift_operator"
7631 [(match_operand:SI 2 "s_register_operand" "r")
7632 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7633 (match_operand:SI 0 "s_register_operand" "r")))]
7636 [(set_attr "conds" "set")
7637 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7638 (const_string "alu_shift")
7639 (const_string "alu_shift_reg")))
7640 (set_attr "predicable" "yes")]
7643 ;; DImode comparisons. The generic code generates branches that
7644 ;; if-conversion cannot reduce to a conditional compare, so we do
;; Full DImode compare: cmp on the low words then sbcs on the high words
;; into a scratch, leaving N/C/V-style flags (CC_NCV).
7647 (define_insn "*arm_cmpdi_insn"
7648 [(set (reg:CC_NCV CC_REGNUM)
7649 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7650 (match_operand:DI 1 "arm_di_operand" "rDi")))
7651 (clobber (match_scratch:SI 2 "=r"))]
7652 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7653 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7654 [(set_attr "conds" "set")
7655 (set_attr "length" "8")]
;; Unsigned DImode compare (C and Z flags): compare high words first, then
;; conditionally (it eq / cmpeq) the low words.
7658 (define_insn "*arm_cmpdi_unsigned"
7659 [(set (reg:CC_CZ CC_REGNUM)
7660 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7661 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7663 "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
7664 [(set_attr "conds" "set")
7665 (set_attr "length" "8")]
;; DImode compare against zero: OR the two halves into a scratch with the
;; flag-setting orr, so Z is set iff the whole value is zero.
7668 (define_insn "*arm_cmpdi_zero"
7669 [(set (reg:CC_Z CC_REGNUM)
7670 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7672 (clobber (match_scratch:SI 1 "=r"))]
7674 "orr%.\\t%1, %Q0, %R0"
7675 [(set_attr "conds" "set")]
;; Thumb variant of the zero compare; restricted to low registers.
7678 (define_insn "*thumb_cmpdi_zero"
7679 [(set (reg:CC_Z CC_REGNUM)
7680 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7682 (clobber (match_scratch:SI 1 "=l"))]
7684 "orr\\t%1, %Q0, %R0"
7685 [(set_attr "conds" "set")
7686 (set_attr "length" "2")]
7689 ;; Cirrus SF compare instruction
;; Cirrus/MaverickCrunch coprocessor compares; results are written to the
;; ARM flags via the r15 destination of cfcmps/cfcmpd/cfcmp64.
7690 (define_insn "*cirrus_cmpsf"
7691 [(set (reg:CCFP CC_REGNUM)
7692 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7693 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7694 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7695 "cfcmps%?\\tr15, %V0, %V1"
7696 [(set_attr "type" "mav_farith")
7697 (set_attr "cirrus" "compare")]
7700 ;; Cirrus DF compare instruction
7701 (define_insn "*cirrus_cmpdf"
7702 [(set (reg:CCFP CC_REGNUM)
7703 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7704 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7705 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7706 "cfcmpd%?\\tr15, %V0, %V1"
7707 [(set_attr "type" "mav_farith")
7708 (set_attr "cirrus" "compare")]
;; 64-bit integer compare in Cirrus registers.
7711 (define_insn "*cirrus_cmpdi"
7712 [(set (reg:CC CC_REGNUM)
7713 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7714 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7715 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7716 "cfcmp64%?\\tr15, %V0, %V1"
7717 [(set_attr "type" "mav_farith")
7718 (set_attr "cirrus" "compare")]
7721 ; This insn allows redundant compares to be removed by cse, nothing should
7722 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7723 ; is deleted later on. The match_dup will match the mode here, so that
7724 ; mode changes of the condition codes aren't lost by this even though we don't
7725 ; specify what they are.
7727 (define_insn "*deleted_compare"
7728 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7730 "\\t%@ deleted compare"
7731 [(set_attr "conds" "set")
7732 (set_attr "length" "0")]
7736 ;; Conditional branch insns
7738 (define_expand "cbranch_cc"
7740 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7741 (match_operand 2 "" "")])
7742 (label_ref (match_operand 3 "" ""))
7745 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7746 operands[1], operands[2], NULL_RTX);
7747 operands[2] = const0_rtx;"
7751 ;; Patterns to match conditional branch insns.
7754 (define_insn "*arm_cond_branch"
7756 (if_then_else (match_operator 1 "arm_comparison_operator"
7757 [(match_operand 2 "cc_register" "") (const_int 0)])
7758 (label_ref (match_operand 0 "" ""))
7762 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7764 arm_ccfsm_state += 2;
7767 return \"b%d1\\t%l0\";
7769 [(set_attr "conds" "use")
7770 (set_attr "type" "branch")
7771 (set (attr "length")
7773 (and (match_test "TARGET_THUMB2")
7774 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7775 (le (minus (match_dup 0) (pc)) (const_int 256))))
7780 (define_insn "*arm_cond_branch_reversed"
7782 (if_then_else (match_operator 1 "arm_comparison_operator"
7783 [(match_operand 2 "cc_register" "") (const_int 0)])
7785 (label_ref (match_operand 0 "" ""))))]
7788 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7790 arm_ccfsm_state += 2;
7793 return \"b%D1\\t%l0\";
7795 [(set_attr "conds" "use")
7796 (set_attr "type" "branch")
7797 (set (attr "length")
7799 (and (match_test "TARGET_THUMB2")
7800 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7801 (le (minus (match_dup 0) (pc)) (const_int 256))))
7810 (define_expand "cstore_cc"
7811 [(set (match_operand:SI 0 "s_register_operand" "")
7812 (match_operator:SI 1 "" [(match_operand 2 "" "")
7813 (match_operand 3 "" "")]))]
7815 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7816 operands[2], operands[3], NULL_RTX);
7817 operands[3] = const0_rtx;"
7820 (define_insn "*mov_scc"
7821 [(set (match_operand:SI 0 "s_register_operand" "=r")
7822 (match_operator:SI 1 "arm_comparison_operator"
7823 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7825 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7826 [(set_attr "conds" "use")
7827 (set_attr "insn" "mov")
7828 (set_attr "length" "8")]
7831 (define_insn "*mov_negscc"
7832 [(set (match_operand:SI 0 "s_register_operand" "=r")
7833 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7834 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7836 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7837 [(set_attr "conds" "use")
7838 (set_attr "insn" "mov")
7839 (set_attr "length" "8")]
7842 (define_insn "*mov_notscc"
7843 [(set (match_operand:SI 0 "s_register_operand" "=r")
7844 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7845 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7847 "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7848 [(set_attr "conds" "use")
7849 (set_attr "insn" "mov")
7850 (set_attr "length" "8")]
7853 (define_expand "cstoresi4"
7854 [(set (match_operand:SI 0 "s_register_operand" "")
7855 (match_operator:SI 1 "expandable_comparison_operator"
7856 [(match_operand:SI 2 "s_register_operand" "")
7857 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7858 "TARGET_32BIT || TARGET_THUMB1"
7860 rtx op3, scratch, scratch2;
7864 if (!arm_add_operand (operands[3], SImode))
7865 operands[3] = force_reg (SImode, operands[3]);
7866 emit_insn (gen_cstore_cc (operands[0], operands[1],
7867 operands[2], operands[3]));
7871 if (operands[3] == const0_rtx)
7873 switch (GET_CODE (operands[1]))
7876 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7880 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7884 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7885 NULL_RTX, 0, OPTAB_WIDEN);
7886 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7887 NULL_RTX, 0, OPTAB_WIDEN);
7888 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7889 operands[0], 1, OPTAB_WIDEN);
7893 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7895 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7896 NULL_RTX, 1, OPTAB_WIDEN);
7900 scratch = expand_binop (SImode, ashr_optab, operands[2],
7901 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7902 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7903 NULL_RTX, 0, OPTAB_WIDEN);
7904 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7908 /* LT is handled by generic code. No need for unsigned with 0. */
7915 switch (GET_CODE (operands[1]))
7918 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7919 NULL_RTX, 0, OPTAB_WIDEN);
7920 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7924 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7925 NULL_RTX, 0, OPTAB_WIDEN);
7926 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7930 op3 = force_reg (SImode, operands[3]);
7932 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7933 NULL_RTX, 1, OPTAB_WIDEN);
7934 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7935 NULL_RTX, 0, OPTAB_WIDEN);
7936 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7942 if (!thumb1_cmp_operand (op3, SImode))
7943 op3 = force_reg (SImode, op3);
7944 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7945 NULL_RTX, 0, OPTAB_WIDEN);
7946 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7947 NULL_RTX, 1, OPTAB_WIDEN);
7948 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7953 op3 = force_reg (SImode, operands[3]);
7954 scratch = force_reg (SImode, const0_rtx);
7955 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7961 if (!thumb1_cmp_operand (op3, SImode))
7962 op3 = force_reg (SImode, op3);
7963 scratch = force_reg (SImode, const0_rtx);
7964 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7970 if (!thumb1_cmp_operand (op3, SImode))
7971 op3 = force_reg (SImode, op3);
7972 scratch = gen_reg_rtx (SImode);
7973 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7977 op3 = force_reg (SImode, operands[3]);
7978 scratch = gen_reg_rtx (SImode);
7979 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7982 /* No good sequences for GT, LT. */
7989 (define_expand "cstoresf4"
7990 [(set (match_operand:SI 0 "s_register_operand" "")
7991 (match_operator:SI 1 "expandable_comparison_operator"
7992 [(match_operand:SF 2 "s_register_operand" "")
7993 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7994 "TARGET_32BIT && TARGET_HARD_FLOAT"
7995 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7996 operands[2], operands[3])); DONE;"
7999 (define_expand "cstoredf4"
8000 [(set (match_operand:SI 0 "s_register_operand" "")
8001 (match_operator:SI 1 "expandable_comparison_operator"
8002 [(match_operand:DF 2 "s_register_operand" "")
8003 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8004 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8005 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8006 operands[2], operands[3])); DONE;"
8009 (define_expand "cstoredi4"
8010 [(set (match_operand:SI 0 "s_register_operand" "")
8011 (match_operator:SI 1 "expandable_comparison_operator"
8012 [(match_operand:DI 2 "cmpdi_operand" "")
8013 (match_operand:DI 3 "cmpdi_operand" "")]))]
8016 rtx swap = NULL_RTX;
8017 enum rtx_code code = GET_CODE (operands[1]);
8019 /* We should not have two constants. */
8020 gcc_assert (GET_MODE (operands[2]) == DImode
8021 || GET_MODE (operands[3]) == DImode);
8023 /* Flip unimplemented DImode comparisons to a form that
8024 arm_gen_compare_reg can handle. */
8028 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
8030 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
8032 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
8034 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
8039 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
8042 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8048 (define_expand "cstoresi_eq0_thumb1"
8050 [(set (match_operand:SI 0 "s_register_operand" "")
8051 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8053 (clobber (match_dup:SI 2))])]
8055 "operands[2] = gen_reg_rtx (SImode);"
8058 (define_expand "cstoresi_ne0_thumb1"
8060 [(set (match_operand:SI 0 "s_register_operand" "")
8061 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8063 (clobber (match_dup:SI 2))])]
8065 "operands[2] = gen_reg_rtx (SImode);"
8068 (define_insn "*cstoresi_eq0_thumb1_insn"
8069 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8070 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8072 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8075 neg\\t%0, %1\;adc\\t%0, %0, %1
8076 neg\\t%2, %1\;adc\\t%0, %1, %2"
8077 [(set_attr "length" "4")]
8080 (define_insn "*cstoresi_ne0_thumb1_insn"
8081 [(set (match_operand:SI 0 "s_register_operand" "=l")
8082 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8084 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8086 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8087 [(set_attr "length" "4")]
8090 ;; Used as part of the expansion of thumb ltu and gtu sequences
8091 (define_insn "cstoresi_nltu_thumb1"
8092 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8093 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8094 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8096 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8097 [(set_attr "length" "4")]
8100 (define_insn_and_split "cstoresi_ltu_thumb1"
8101 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8102 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8103 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8108 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8109 (set (match_dup 0) (neg:SI (match_dup 3)))]
8110 "operands[3] = gen_reg_rtx (SImode);"
8111 [(set_attr "length" "4")]
8114 ;; Used as part of the expansion of thumb les sequence.
8115 (define_insn "thumb1_addsi3_addgeu"
8116 [(set (match_operand:SI 0 "s_register_operand" "=l")
8117 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8118 (match_operand:SI 2 "s_register_operand" "l"))
8119 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8120 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8122 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8123 [(set_attr "length" "4")]
8127 ;; Conditional move insns
8129 (define_expand "movsicc"
8130 [(set (match_operand:SI 0 "s_register_operand" "")
8131 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8132 (match_operand:SI 2 "arm_not_operand" "")
8133 (match_operand:SI 3 "arm_not_operand" "")))]
8137 enum rtx_code code = GET_CODE (operands[1]);
8140 if (code == UNEQ || code == LTGT)
8143 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8144 XEXP (operands[1], 1), NULL_RTX);
8145 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8149 (define_expand "movsfcc"
8150 [(set (match_operand:SF 0 "s_register_operand" "")
8151 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8152 (match_operand:SF 2 "s_register_operand" "")
8153 (match_operand:SF 3 "nonmemory_operand" "")))]
8154 "TARGET_32BIT && TARGET_HARD_FLOAT"
8157 enum rtx_code code = GET_CODE (operands[1]);
8160 if (code == UNEQ || code == LTGT)
8163 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8164 Otherwise, ensure it is a valid FP add operand */
8165 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8166 || (!arm_float_add_operand (operands[3], SFmode)))
8167 operands[3] = force_reg (SFmode, operands[3]);
8169 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8170 XEXP (operands[1], 1), NULL_RTX);
8171 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8175 (define_expand "movdfcc"
8176 [(set (match_operand:DF 0 "s_register_operand" "")
8177 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8178 (match_operand:DF 2 "s_register_operand" "")
8179 (match_operand:DF 3 "arm_float_add_operand" "")))]
8180 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8183 enum rtx_code code = GET_CODE (operands[1]);
8186 if (code == UNEQ || code == LTGT)
8189 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8190 XEXP (operands[1], 1), NULL_RTX);
8191 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8195 (define_insn "*movsicc_insn"
8196 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8198 (match_operator 3 "arm_comparison_operator"
8199 [(match_operand 4 "cc_register" "") (const_int 0)])
8200 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8201 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8208 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8209 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8210 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8211 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8212 [(set_attr "length" "4,4,4,4,8,8,8,8")
8213 (set_attr "conds" "use")
8214 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
8217 (define_insn "*movsfcc_soft_insn"
8218 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8219 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8220 [(match_operand 4 "cc_register" "") (const_int 0)])
8221 (match_operand:SF 1 "s_register_operand" "0,r")
8222 (match_operand:SF 2 "s_register_operand" "r,0")))]
8223 "TARGET_ARM && TARGET_SOFT_FLOAT"
8227 [(set_attr "conds" "use")
8228 (set_attr "insn" "mov")]
8232 ;; Jump and linkage insns
8234 (define_expand "jump"
8236 (label_ref (match_operand 0 "" "")))]
8241 (define_insn "*arm_jump"
8243 (label_ref (match_operand 0 "" "")))]
8247 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8249 arm_ccfsm_state += 2;
8252 return \"b%?\\t%l0\";
8255 [(set_attr "predicable" "yes")
8256 (set (attr "length")
8258 (and (match_test "TARGET_THUMB2")
8259 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8260 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8265 (define_insn "*thumb_jump"
8267 (label_ref (match_operand 0 "" "")))]
8270 if (get_attr_length (insn) == 2)
8272 return \"bl\\t%l0\\t%@ far jump\";
8274 [(set (attr "far_jump")
8276 (eq_attr "length" "4")
8277 (const_string "yes")
8278 (const_string "no")))
8279 (set (attr "length")
8281 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8282 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8287 (define_expand "call"
8288 [(parallel [(call (match_operand 0 "memory_operand" "")
8289 (match_operand 1 "general_operand" ""))
8290 (use (match_operand 2 "" ""))
8291 (clobber (reg:SI LR_REGNUM))])]
8297 /* In an untyped call, we can get NULL for operand 2. */
8298 if (operands[2] == NULL_RTX)
8299 operands[2] = const0_rtx;
8301 /* Decide if we should generate indirect calls by loading the
8302 32-bit address of the callee into a register before performing the
8304 callee = XEXP (operands[0], 0);
8305 if (GET_CODE (callee) == SYMBOL_REF
8306 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8308 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8310 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8311 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8316 (define_expand "call_internal"
8317 [(parallel [(call (match_operand 0 "memory_operand" "")
8318 (match_operand 1 "general_operand" ""))
8319 (use (match_operand 2 "" ""))
8320 (clobber (reg:SI LR_REGNUM))])])
8322 (define_insn "*call_reg_armv5"
8323 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8324 (match_operand 1 "" ""))
8325 (use (match_operand 2 "" ""))
8326 (clobber (reg:SI LR_REGNUM))]
8327 "TARGET_ARM && arm_arch5"
8329 [(set_attr "type" "call")]
8332 (define_insn "*call_reg_arm"
8333 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8334 (match_operand 1 "" ""))
8335 (use (match_operand 2 "" ""))
8336 (clobber (reg:SI LR_REGNUM))]
8337 "TARGET_ARM && !arm_arch5"
8339 return output_call (operands);
8341 ;; length is worst case, normally it is only two
8342 [(set_attr "length" "12")
8343 (set_attr "type" "call")]
8347 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8348 ;; considered a function call by the branch predictor of some cores (PR40887).
8349 ;; Falls back to blx rN (*call_reg_armv5).
8351 (define_insn "*call_mem"
8352 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8353 (match_operand 1 "" ""))
8354 (use (match_operand 2 "" ""))
8355 (clobber (reg:SI LR_REGNUM))]
8356 "TARGET_ARM && !arm_arch5"
8358 return output_call_mem (operands);
8360 [(set_attr "length" "12")
8361 (set_attr "type" "call")]
8364 (define_insn "*call_reg_thumb1_v5"
8365 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8366 (match_operand 1 "" ""))
8367 (use (match_operand 2 "" ""))
8368 (clobber (reg:SI LR_REGNUM))]
8369 "TARGET_THUMB1 && arm_arch5"
8371 [(set_attr "length" "2")
8372 (set_attr "type" "call")]
8375 (define_insn "*call_reg_thumb1"
8376 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8377 (match_operand 1 "" ""))
8378 (use (match_operand 2 "" ""))
8379 (clobber (reg:SI LR_REGNUM))]
8380 "TARGET_THUMB1 && !arm_arch5"
8383 if (!TARGET_CALLER_INTERWORKING)
8384 return thumb_call_via_reg (operands[0]);
8385 else if (operands[1] == const0_rtx)
8386 return \"bl\\t%__interwork_call_via_%0\";
8387 else if (frame_pointer_needed)
8388 return \"bl\\t%__interwork_r7_call_via_%0\";
8390 return \"bl\\t%__interwork_r11_call_via_%0\";
8392 [(set_attr "type" "call")]
8395 (define_expand "call_value"
8396 [(parallel [(set (match_operand 0 "" "")
8397 (call (match_operand 1 "memory_operand" "")
8398 (match_operand 2 "general_operand" "")))
8399 (use (match_operand 3 "" ""))
8400 (clobber (reg:SI LR_REGNUM))])]
8406 /* In an untyped call, we can get NULL for operand 2. */
8407 if (operands[3] == 0)
8408 operands[3] = const0_rtx;
8410 /* Decide if we should generate indirect calls by loading the
8411 32-bit address of the callee into a register before performing the
8413 callee = XEXP (operands[1], 0);
8414 if (GET_CODE (callee) == SYMBOL_REF
8415 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8417 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8419 pat = gen_call_value_internal (operands[0], operands[1],
8420 operands[2], operands[3]);
8421 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8426 (define_expand "call_value_internal"
8427 [(parallel [(set (match_operand 0 "" "")
8428 (call (match_operand 1 "memory_operand" "")
8429 (match_operand 2 "general_operand" "")))
8430 (use (match_operand 3 "" ""))
8431 (clobber (reg:SI LR_REGNUM))])])
8433 (define_insn "*call_value_reg_armv5"
8434 [(set (match_operand 0 "" "")
8435 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8436 (match_operand 2 "" "")))
8437 (use (match_operand 3 "" ""))
8438 (clobber (reg:SI LR_REGNUM))]
8439 "TARGET_ARM && arm_arch5"
8441 [(set_attr "type" "call")]
8444 (define_insn "*call_value_reg_arm"
8445 [(set (match_operand 0 "" "")
8446 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8447 (match_operand 2 "" "")))
8448 (use (match_operand 3 "" ""))
8449 (clobber (reg:SI LR_REGNUM))]
8450 "TARGET_ARM && !arm_arch5"
8452 return output_call (&operands[1]);
8454 [(set_attr "length" "12")
8455 (set_attr "type" "call")]
8458 ;; Note: see *call_mem
8460 (define_insn "*call_value_mem"
8461 [(set (match_operand 0 "" "")
8462 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8463 (match_operand 2 "" "")))
8464 (use (match_operand 3 "" ""))
8465 (clobber (reg:SI LR_REGNUM))]
8466 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8468 return output_call_mem (&operands[1]);
8470 [(set_attr "length" "12")
8471 (set_attr "type" "call")]
8474 (define_insn "*call_value_reg_thumb1_v5"
8475 [(set (match_operand 0 "" "")
8476 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8477 (match_operand 2 "" "")))
8478 (use (match_operand 3 "" ""))
8479 (clobber (reg:SI LR_REGNUM))]
8480 "TARGET_THUMB1 && arm_arch5"
8482 [(set_attr "length" "2")
8483 (set_attr "type" "call")]
8486 (define_insn "*call_value_reg_thumb1"
8487 [(set (match_operand 0 "" "")
8488 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8489 (match_operand 2 "" "")))
8490 (use (match_operand 3 "" ""))
8491 (clobber (reg:SI LR_REGNUM))]
8492 "TARGET_THUMB1 && !arm_arch5"
8495 if (!TARGET_CALLER_INTERWORKING)
8496 return thumb_call_via_reg (operands[1]);
8497 else if (operands[2] == const0_rtx)
8498 return \"bl\\t%__interwork_call_via_%1\";
8499 else if (frame_pointer_needed)
8500 return \"bl\\t%__interwork_r7_call_via_%1\";
8502 return \"bl\\t%__interwork_r11_call_via_%1\";
8504 [(set_attr "type" "call")]
8507 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8508 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8510 (define_insn "*call_symbol"
8511 [(call (mem:SI (match_operand:SI 0 "" ""))
8512 (match_operand 1 "" ""))
8513 (use (match_operand 2 "" ""))
8514 (clobber (reg:SI LR_REGNUM))]
8516 && (GET_CODE (operands[0]) == SYMBOL_REF)
8517 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8520 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8522 [(set_attr "type" "call")]
8525 (define_insn "*call_value_symbol"
8526 [(set (match_operand 0 "" "")
8527 (call (mem:SI (match_operand:SI 1 "" ""))
8528 (match_operand:SI 2 "" "")))
8529 (use (match_operand 3 "" ""))
8530 (clobber (reg:SI LR_REGNUM))]
8532 && (GET_CODE (operands[1]) == SYMBOL_REF)
8533 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8536 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8538 [(set_attr "type" "call")]
8541 (define_insn "*call_insn"
8542 [(call (mem:SI (match_operand:SI 0 "" ""))
8543 (match_operand:SI 1 "" ""))
8544 (use (match_operand 2 "" ""))
8545 (clobber (reg:SI LR_REGNUM))]
8547 && GET_CODE (operands[0]) == SYMBOL_REF
8548 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8550 [(set_attr "length" "4")
8551 (set_attr "type" "call")]
8554 (define_insn "*call_value_insn"
8555 [(set (match_operand 0 "" "")
8556 (call (mem:SI (match_operand 1 "" ""))
8557 (match_operand 2 "" "")))
8558 (use (match_operand 3 "" ""))
8559 (clobber (reg:SI LR_REGNUM))]
8561 && GET_CODE (operands[1]) == SYMBOL_REF
8562 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8564 [(set_attr "length" "4")
8565 (set_attr "type" "call")]
8568 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8569 (define_expand "sibcall"
8570 [(parallel [(call (match_operand 0 "memory_operand" "")
8571 (match_operand 1 "general_operand" ""))
8573 (use (match_operand 2 "" ""))])]
8577 if (operands[2] == NULL_RTX)
8578 operands[2] = const0_rtx;
8582 (define_expand "sibcall_value"
8583 [(parallel [(set (match_operand 0 "" "")
8584 (call (match_operand 1 "memory_operand" "")
8585 (match_operand 2 "general_operand" "")))
8587 (use (match_operand 3 "" ""))])]
8591 if (operands[3] == NULL_RTX)
8592 operands[3] = const0_rtx;
8596 (define_insn "*sibcall_insn"
8597 [(call (mem:SI (match_operand:SI 0 "" "X"))
8598 (match_operand 1 "" ""))
8600 (use (match_operand 2 "" ""))]
8601 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8603 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8605 [(set_attr "type" "call")]
8608 (define_insn "*sibcall_value_insn"
8609 [(set (match_operand 0 "" "")
8610 (call (mem:SI (match_operand:SI 1 "" "X"))
8611 (match_operand 2 "" "")))
8613 (use (match_operand 3 "" ""))]
8614 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8616 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8618 [(set_attr "type" "call")]
8621 (define_expand "return"
8623 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8626 ;; Often the return insn will be the same as loading from memory, so set attr
8627 (define_insn "*arm_return"
8629 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8632 if (arm_ccfsm_state == 2)
8634 arm_ccfsm_state += 2;
8637 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8639 [(set_attr "type" "load1")
8640 (set_attr "length" "12")
8641 (set_attr "predicable" "yes")]
8644 (define_insn "*cond_return"
8646 (if_then_else (match_operator 0 "arm_comparison_operator"
8647 [(match_operand 1 "cc_register" "") (const_int 0)])
8650 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8653 if (arm_ccfsm_state == 2)
8655 arm_ccfsm_state += 2;
8658 return output_return_instruction (operands[0], TRUE, FALSE);
8660 [(set_attr "conds" "use")
8661 (set_attr "length" "12")
8662 (set_attr "type" "load1")]
8665 (define_insn "*cond_return_inverted"
8667 (if_then_else (match_operator 0 "arm_comparison_operator"
8668 [(match_operand 1 "cc_register" "") (const_int 0)])
8671 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8674 if (arm_ccfsm_state == 2)
8676 arm_ccfsm_state += 2;
8679 return output_return_instruction (operands[0], TRUE, TRUE);
8681 [(set_attr "conds" "use")
8682 (set_attr "length" "12")
8683 (set_attr "type" "load1")]
8686 ;; Generate a sequence of instructions to determine if the processor is
8687 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8690 (define_expand "return_addr_mask"
8692 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8694 (set (match_operand:SI 0 "s_register_operand" "")
8695 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8697 (const_int 67108860)))] ; 0x03fffffc
8700 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8703 (define_insn "*check_arch2"
8704 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8705 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8708 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8709 [(set_attr "length" "8")
8710 (set_attr "conds" "set")]
8713 ;; Call subroutine returning any type.
8715 (define_expand "untyped_call"
8716 [(parallel [(call (match_operand 0 "" "")
8718 (match_operand 1 "" "")
8719 (match_operand 2 "" "")])]
8724 rtx par = gen_rtx_PARALLEL (VOIDmode,
8725 rtvec_alloc (XVECLEN (operands[2], 0)));
8726 rtx addr = gen_reg_rtx (Pmode);
8730 emit_move_insn (addr, XEXP (operands[1], 0));
8731 mem = change_address (operands[1], BLKmode, addr);
8733 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8735 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8737 /* Default code only uses r0 as a return value, but we could
8738 be using anything up to 4 registers. */
8739 if (REGNO (src) == R0_REGNUM)
8740 src = gen_rtx_REG (TImode, R0_REGNUM);
8742 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8744 size += GET_MODE_SIZE (GET_MODE (src));
8747 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8752 for (i = 0; i < XVECLEN (par, 0); i++)
8754 HOST_WIDE_INT offset = 0;
8755 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8758 emit_move_insn (addr, plus_constant (addr, size));
8760 mem = change_address (mem, GET_MODE (reg), NULL);
8761 if (REGNO (reg) == R0_REGNUM)
8763 /* On thumb we have to use a write-back instruction. */
8764 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8765 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8766 size = TARGET_ARM ? 16 : 0;
8770 emit_move_insn (mem, reg);
8771 size = GET_MODE_SIZE (GET_MODE (reg));
8775 /* The optimizer does not know that the call sets the function value
8776 registers we stored in the result block. We avoid problems by
8777 claiming that all hard registers are used and clobbered at this
8779 emit_insn (gen_blockage ());
8785 (define_expand "untyped_return"
8786 [(match_operand:BLK 0 "memory_operand" "")
8787 (match_operand 1 "" "")]
8792 rtx addr = gen_reg_rtx (Pmode);
8796 emit_move_insn (addr, XEXP (operands[0], 0));
8797 mem = change_address (operands[0], BLKmode, addr);
8799 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8801 HOST_WIDE_INT offset = 0;
8802 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8805 emit_move_insn (addr, plus_constant (addr, size));
8807 mem = change_address (mem, GET_MODE (reg), NULL);
8808 if (REGNO (reg) == R0_REGNUM)
8810 /* On thumb we have to use a write-back instruction. */
8811 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8812 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8813 size = TARGET_ARM ? 16 : 0;
8817 emit_move_insn (reg, mem);
8818 size = GET_MODE_SIZE (GET_MODE (reg));
8822 /* Emit USE insns before the return. */
8823 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8824 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8826 /* Construct the return. */
8827 expand_naked_return ();
8833 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8834 ;; all of memory. This blocks insns from being moved across this point.
8836 (define_insn "blockage"
8837 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8840 [(set_attr "length" "0")
8841 (set_attr "type" "block")]
8844 (define_expand "casesi"
8845 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8846 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8847 (match_operand:SI 2 "const_int_operand" "") ; total range
8848 (match_operand:SI 3 "" "") ; table label
8849 (match_operand:SI 4 "" "")] ; Out of range label
8850 "TARGET_32BIT || optimize_size || flag_pic"
8853 enum insn_code code;
8854 if (operands[1] != const0_rtx)
8856 rtx reg = gen_reg_rtx (SImode);
8858 emit_insn (gen_addsi3 (reg, operands[0],
8859 gen_int_mode (-INTVAL (operands[1]),
8865 code = CODE_FOR_arm_casesi_internal;
8866 else if (TARGET_THUMB1)
8867 code = CODE_FOR_thumb1_casesi_internal_pic;
8869 code = CODE_FOR_thumb2_casesi_internal_pic;
8871 code = CODE_FOR_thumb2_casesi_internal;
8873 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8874 operands[2] = force_reg (SImode, operands[2]);
8876 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8877 operands[3], operands[4]));
8882 ;; The USE in this pattern is needed to tell flow analysis that this is
8883 ;; a CASESI insn. It has no other purpose.
8884 (define_insn "arm_casesi_internal"
8885 [(parallel [(set (pc)
8887 (leu (match_operand:SI 0 "s_register_operand" "r")
8888 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8889 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8890 (label_ref (match_operand 2 "" ""))))
8891 (label_ref (match_operand 3 "" ""))))
8892 (clobber (reg:CC CC_REGNUM))
8893 (use (label_ref (match_dup 2)))])]
8897 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8898 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8900 [(set_attr "conds" "clob")
8901 (set_attr "length" "12")]
8904 (define_expand "thumb1_casesi_internal_pic"
8905 [(match_operand:SI 0 "s_register_operand" "")
8906 (match_operand:SI 1 "thumb1_cmp_operand" "")
8907 (match_operand 2 "" "")
8908 (match_operand 3 "" "")]
8912 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8913 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8915 reg0 = gen_rtx_REG (SImode, 0);
8916 emit_move_insn (reg0, operands[0]);
8917 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8922 (define_insn "thumb1_casesi_dispatch"
8923 [(parallel [(set (pc) (unspec [(reg:SI 0)
8924 (label_ref (match_operand 0 "" ""))
8925 ;; (label_ref (match_operand 1 "" ""))
8927 UNSPEC_THUMB1_CASESI))
8928 (clobber (reg:SI IP_REGNUM))
8929 (clobber (reg:SI LR_REGNUM))])]
8931 "* return thumb1_output_casesi(operands);"
8932 [(set_attr "length" "4")]
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
;;
;; Indirect jumps.  The expander fixes up the target for Thumb-2 (set
;; the low bit and use bx); the insn patterns below never use BX so
;; they stay valid on pre-v4T cores.
8935 (define_expand "indirect_jump"
8937 (match_operand:SI 0 "s_register_operand" ""))]
8940 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8941 address and use bx. */
8945 tmp = gen_reg_rtx (SImode);
;; OR in bit 0 so bx stays in Thumb state.
8946 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
;;
;; ARM-state indirect jump through a register: mov pc, reg.
8952 ;; NB Never uses BX.
8953 (define_insn "*arm_indirect_jump"
8955 (match_operand:SI 0 "s_register_operand" "r"))]
8957 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8958 [(set_attr "predicable" "yes")]
;;
;; Indirect jump loading pc straight from memory (ldr pc, [..]).
;; pool_range/neg_pool_range bound the literal-pool displacement.
8961 (define_insn "*load_indirect_jump"
8963 (match_operand:SI 0 "memory_operand" "m"))]
8965 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8966 [(set_attr "type" "load1")
8967 (set_attr "pool_range" "4096")
8968 (set_attr "neg_pool_range" "4084")
8969 (set_attr "predicable" "yes")]
;;
;; Thumb-1 indirect jump; "l*r" prefers the low registers.
8972 ;; NB Never uses BX.
8973 (define_insn "*thumb1_indirect_jump"
8975 (match_operand:SI 0 "register_operand" "l*r"))]
8978 [(set_attr "conds" "clob")
8979 (set_attr "length" "2")]
;;
;; Fragment of the nop pattern (opening elided here): unified-syntax
;; ARM/Thumb-2 emits "mov r0, r0"; classic Thumb uses "mov r8, r8".
;; Length depends on whether we are compiling for Thumb.
8989 if (TARGET_UNIFIED_ASM)
8992 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8993 return \"mov\\tr8, r8\";
8995 [(set (attr "length")
8996 (if_then_else (eq_attr "is_thumb" "yes")
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
9002 ;; Patterns to allow combination of arithmetic, cond code and shifts
;;
;; op0 = op2 <shiftable_op 1> (op4 <shift_op 3> op5), e.g.
;; "add r0, r2, r4, lsl #5".  Alternatives cover ARM vs Thumb-2 and
;; immediate vs register shift amounts.
9004 (define_insn "*arith_shiftsi"
9005 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9006 (match_operator:SI 1 "shiftable_operator"
9007 [(match_operator:SI 3 "shift_operator"
9008 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
9009 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
;; "rk" allows the stack pointer for operand 2 in some alternatives.
9010 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
9012 "%i1%?\\t%0, %2, %4%S3"
9013 [(set_attr "predicable" "yes")
9014 (set_attr "shift" "4")
9015 (set_attr "arch" "a,t2,t2,a")
9016 ;; Thumb2 doesn't allow the stack pointer to be used for
9017 ;; operand1 for all operations other than add and sub. In this case
9018 ;; the minus operation is a candidate for an rsub and hence needs
9020 ;; We have to make sure to disable the fourth alternative if
9021 ;; the shift_operator is MULT, since otherwise the insn will
9022 ;; also match a multiply_accumulate pattern and validate_change
9023 ;; will allow a replacement of the constant with a register
9024 ;; despite the checks done in shift_operator.
9025 (set_attr_alternative "insn_enabled"
9026 [(const_string "yes")
9028 (match_operand:SI 1 "add_operator" "")
9029 (const_string "yes") (const_string "no"))
9030 (const_string "yes")
9032 (match_operand:SI 3 "mult_operator" "")
9033 (const_string "no") (const_string "yes"))])
9034 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
;;
;; Splitter (header elided here): break a doubly-nested
;; shiftable-op/shift combination into two insns via scratch op8.
9037 [(set (match_operand:SI 0 "s_register_operand" "")
9038 (match_operator:SI 1 "shiftable_operator"
9039 [(match_operator:SI 2 "shiftable_operator"
9040 [(match_operator:SI 3 "shift_operator"
9041 [(match_operand:SI 4 "s_register_operand" "")
9042 (match_operand:SI 5 "reg_or_int_operand" "")])
9043 (match_operand:SI 6 "s_register_operand" "")])
9044 (match_operand:SI 7 "arm_rhs_operand" "")]))
9045 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9048 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9051 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;;
;; As *arith_shiftsi but also setting the condition codes (CC_NOOV:
;; only N and Z are meaningful), keeping the arithmetic result.
9054 (define_insn "*arith_shiftsi_compare0"
9055 [(set (reg:CC_NOOV CC_REGNUM)
9057 (match_operator:SI 1 "shiftable_operator"
9058 [(match_operator:SI 3 "shift_operator"
9059 [(match_operand:SI 4 "s_register_operand" "r,r")
9060 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9061 (match_operand:SI 2 "s_register_operand" "r,r")])
9063 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9064 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9067 "%i1%.\\t%0, %2, %4%S3"
9068 [(set_attr "conds" "set")
9069 (set_attr "shift" "4")
9070 (set_attr "arch" "32,a")
9071 (set_attr "type" "alu_shift,alu_shift_reg")])
;;
;; Same comparison but the arithmetic result is discarded (scratch).
9073 (define_insn "*arith_shiftsi_compare0_scratch"
9074 [(set (reg:CC_NOOV CC_REGNUM)
9076 (match_operator:SI 1 "shiftable_operator"
9077 [(match_operator:SI 3 "shift_operator"
9078 [(match_operand:SI 4 "s_register_operand" "r,r")
9079 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9080 (match_operand:SI 2 "s_register_operand" "r,r")])
9082 (clobber (match_scratch:SI 0 "=r,r"))]
9084 "%i1%.\\t%0, %2, %4%S3"
9085 [(set_attr "conds" "set")
9086 (set_attr "shift" "4")
9087 (set_attr "arch" "32,a")
9088 (set_attr "type" "alu_shift,alu_shift_reg")])
;;
;; op0 = op1 - (op3 shifted by op4): "sub r0, r1, r3, <shift>".
9090 (define_insn "*sub_shiftsi"
9091 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9092 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9093 (match_operator:SI 2 "shift_operator"
9094 [(match_operand:SI 3 "s_register_operand" "r,r")
9095 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9097 "sub%?\\t%0, %1, %3%S2"
9098 [(set_attr "predicable" "yes")
9099 (set_attr "shift" "3")
9100 (set_attr "arch" "32,a")
9101 (set_attr "type" "alu_shift,alu_shift_reg")])
;;
;; subs variant: subtract-with-shift that also sets the flags and
;; keeps the result.
9103 (define_insn "*sub_shiftsi_compare0"
9104 [(set (reg:CC_NOOV CC_REGNUM)
9106 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9107 (match_operator:SI 2 "shift_operator"
9108 [(match_operand:SI 3 "s_register_operand" "r,r")
9109 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9111 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9112 (minus:SI (match_dup 1)
9113 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9115 "sub%.\\t%0, %1, %3%S2"
9116 [(set_attr "conds" "set")
9117 (set_attr "shift" "3")
9118 (set_attr "arch" "32,a")
9119 (set_attr "type" "alu_shift,alu_shift_reg")])
;;
;; subs variant discarding the arithmetic result (compare only).
9121 (define_insn "*sub_shiftsi_compare0_scratch"
9122 [(set (reg:CC_NOOV CC_REGNUM)
9124 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9125 (match_operator:SI 2 "shift_operator"
9126 [(match_operand:SI 3 "s_register_operand" "r,r")
9127 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9129 (clobber (match_scratch:SI 0 "=r,r"))]
9131 "sub%.\\t%0, %1, %3%S2"
9132 [(set_attr "conds" "set")
9133 (set_attr "shift" "3")
9134 (set_attr "arch" "32,a")
9135 (set_attr "type" "alu_shift,alu_shift_reg")])
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
;;
;; op0 = (condition op1 on existing CC) AND op2 — emitted as a
;; conditional "mov #0" plus a conditional "and #1".
9138 (define_insn "*and_scc"
9139 [(set (match_operand:SI 0 "s_register_operand" "=r")
9140 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9141 [(match_operand 3 "cc_register" "") (const_int 0)])
9142 (match_operand:SI 2 "s_register_operand" "r")))]
9144 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9145 [(set_attr "conds" "use")
9146 (set_attr "insn" "mov")
9147 (set_attr "length" "8")]
;;
;; op0 = (condition on existing CC) OR op1.  First alternative ties
;; op0 to op1 so only the conditional orr is needed (length 4).
9150 (define_insn "*ior_scc"
9151 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9152 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9153 [(match_operand 3 "cc_register" "") (const_int 0)])
9154 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9158 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9159 [(set_attr "conds" "use")
9160 (set_attr "length" "4,8")]
;;
9163 ; A series of splitters for the compare_scc pattern below. Note that
9164 ; order is important.
;; (x < 0) as a value: logical shift right by 31 extracts the sign bit.
9166 [(set (match_operand:SI 0 "s_register_operand" "")
9167 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9169 (clobber (reg:CC CC_REGNUM))]
9170 "TARGET_32BIT && reload_completed"
9171 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;;
;; (x >= 0) as a value: invert, then extract the sign bit.
9174 [(set (match_operand:SI 0 "s_register_operand" "")
9175 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9177 (clobber (reg:CC CC_REGNUM))]
9178 "TARGET_32BIT && reload_completed"
9179 [(set (match_dup 0) (not:SI (match_dup 1)))
9180 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;;
;; (x == const) splitter: subtract-with-flags then conditionally zero.
9183 [(set (match_operand:SI 0 "s_register_operand" "")
9184 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9186 (clobber (reg:CC CC_REGNUM))]
9187 "TARGET_32BIT && reload_completed"
9189 [(set (reg:CC CC_REGNUM)
9190 (compare:CC (const_int 1) (match_dup 1)))
9192 (minus:SI (const_int 1) (match_dup 1)))])
9193 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9194 (set (match_dup 0) (const_int 0)))])
;;
;; (x != const) splitter: add the negated constant, then force the
;; result to 1 when the flags say "not equal".
9197 [(set (match_operand:SI 0 "s_register_operand" "")
9198 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9199 (match_operand:SI 2 "const_int_operand" "")))
9200 (clobber (reg:CC CC_REGNUM))]
9201 "TARGET_32BIT && reload_completed"
9203 [(set (reg:CC CC_REGNUM)
9204 (compare:CC (match_dup 1) (match_dup 2)))
9205 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9206 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9207 (set (match_dup 0) (const_int 1)))]
;; operands[3] is the negated comparison constant for the plus above.
9209 operands[3] = GEN_INT (-INTVAL (operands[2]));
;;
;; (x != y) splitter for the general add-operand case: subs, then
;; conditionally set 1.
9213 [(set (match_operand:SI 0 "s_register_operand" "")
9214 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9215 (match_operand:SI 2 "arm_add_operand" "")))
9216 (clobber (reg:CC CC_REGNUM))]
9217 "TARGET_32BIT && reload_completed"
9219 [(set (reg:CC_NOOV CC_REGNUM)
9220 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9222 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9223 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9224 (set (match_dup 0) (const_int 1)))])
;;
;; Generic scc: op0 = (op2 <cond 1> op3), split after reload into a
;; compare plus two conditional moves of 0 and 1.
9226 (define_insn_and_split "*compare_scc"
9227 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9228 (match_operator:SI 1 "arm_comparison_operator"
9229 [(match_operand:SI 2 "s_register_operand" "r,r")
9230 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9231 (clobber (reg:CC CC_REGNUM))]
9234 "&& reload_completed"
9235 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9236 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9237 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9240 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9241 operands[2], operands[3]);
9242 enum rtx_code rc = GET_CODE (operands[1]);
9244 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
;; op5 = the condition itself; op4 = its reversal (for the 0-move).
9246 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; FP compares must use the unordered-aware reversal.
9247 if (mode == CCFPmode || mode == CCFPEmode)
9248 rc = reverse_condition_maybe_unordered (rc);
9250 rc = reverse_condition (rc);
9251 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;;
9254 ;; Attempt to improve the sequence generated by the compare_scc splitters
9255 ;; not to use conditional execution.
;; Peephole-style split: rewrite cmp + two cond_exec moves into
;; subs / rsbs / adc, producing 0/1 without conditional execution.
9257 [(set (reg:CC CC_REGNUM)
9258 (compare:CC (match_operand:SI 1 "register_operand" "")
9259 (match_operand:SI 2 "arm_rhs_operand" "")))
9260 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9261 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9262 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9263 (set (match_dup 0) (const_int 1)))
9264 (match_scratch:SI 3 "r")]
9267 [(set (reg:CC CC_REGNUM)
9268 (compare:CC (match_dup 1) (match_dup 2)))
9269 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9271 [(set (reg:CC CC_REGNUM)
9272 (compare:CC (const_int 0) (match_dup 3)))
9273 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9276 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9277 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9278 (clobber (reg:CC CC_REGNUM))])])
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
;;
;; Conditional move on an already-set CC register.  The equality
;; operator (op3, EQ or NE) selects whether the sense of op4 is used
;; directly or inverted; each alternative ties one source to the
;; destination so at most one mov per arm is emitted.
9280 (define_insn "*cond_move"
9281 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9282 (if_then_else:SI (match_operator 3 "equality_operator"
9283 [(match_operator 4 "arm_comparison_operator"
9284 [(match_operand 5 "cc_register" "") (const_int 0)])
9286 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9287 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9290 if (GET_CODE (operands[3]) == NE)
9292 if (which_alternative != 1)
9293 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9294 if (which_alternative != 0)
9295 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9298 if (which_alternative != 0)
9299 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9300 if (which_alternative != 1)
9301 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9304 [(set_attr "conds" "use")
9305 (set_attr "insn" "mov")
9306 (set_attr "length" "4,4,8")]
;;
;; op0 = op1 <shiftable_op 5> (op2 <cond 4> op3): does its own compare
;; (clobbers CC).  Special-cases LT-against-zero via "lsr #31".
9309 (define_insn "*cond_arith"
9310 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9311 (match_operator:SI 5 "shiftable_operator"
9312 [(match_operator:SI 4 "arm_comparison_operator"
9313 [(match_operand:SI 2 "s_register_operand" "r,r")
9314 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9315 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9316 (clobber (reg:CC CC_REGNUM))]
;; x < 0 contributes exactly the sign bit, so fold it into the shift.
9319 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9320 return \"%i5\\t%0, %1, %2, lsr #31\";
9322 output_asm_insn (\"cmp\\t%2, %3\", operands);
;; AND/MINUS need an explicit false-case fixup before the final op.
9323 if (GET_CODE (operands[5]) == AND)
9324 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9325 else if (GET_CODE (operands[5]) == MINUS)
9326 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9327 else if (which_alternative != 0)
9328 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9329 return \"%i5%d4\\t%0, %1, #1\";
9331 [(set_attr "conds" "clob")
9332 (set_attr "length" "12")]
;;
;; op0 = op1 - (op2 <cond 4> op3): compare, optional copy, then a
;; conditional "sub #1".
9335 (define_insn "*cond_sub"
9336 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9337 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9338 (match_operator:SI 4 "arm_comparison_operator"
9339 [(match_operand:SI 2 "s_register_operand" "r,r")
9340 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9341 (clobber (reg:CC CC_REGNUM))]
9344 output_asm_insn (\"cmp\\t%2, %3\", operands);
9345 if (which_alternative != 0)
9346 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9347 return \"sub%d4\\t%0, %1, #1\";
9349 [(set_attr "conds" "clob")
9350 (set_attr "length" "8,12")]
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
;;
;; Dominance compare patterns: combine two comparisons into one CC
;; result using a conditional second compare (cmp/cmn chosen per the
;; sign of each constant; cmp_idx maps each alternative to the right
;; CMP/CMN pairing).  On Thumb-2 an IT block is emitted between the
;; two compares.  "swap" picks which comparison dominates (see
;; comparison_dominates_p).
;;
;; *cmp_ite0: CC for an if-then-else of the two comparisons, false arm.
9353 (define_insn "*cmp_ite0"
9354 [(set (match_operand 6 "dominant_cc_register" "")
9357 (match_operator 4 "arm_comparison_operator"
9358 [(match_operand:SI 0 "s_register_operand"
9359 "l,l,l,r,r,r,r,r,r")
9360 (match_operand:SI 1 "arm_add_operand"
9361 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9362 (match_operator:SI 5 "arm_comparison_operator"
9363 [(match_operand:SI 2 "s_register_operand"
9364 "l,r,r,l,l,r,r,r,r")
9365 (match_operand:SI 3 "arm_add_operand"
9366 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; cmp1: conditional second compare (predicated on %d4/%d5).
9372 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9374 {\"cmp%d5\\t%0, %1\",
9375 \"cmp%d4\\t%2, %3\"},
9376 {\"cmn%d5\\t%0, #%n1\",
9377 \"cmp%d4\\t%2, %3\"},
9378 {\"cmp%d5\\t%0, %1\",
9379 \"cmn%d4\\t%2, #%n3\"},
9380 {\"cmn%d5\\t%0, #%n1\",
9381 \"cmn%d4\\t%2, #%n3\"}
;; cmp2: unconditional first compare.
9383 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9388 \"cmn\\t%0, #%n1\"},
9389 {\"cmn\\t%2, #%n3\",
9391 {\"cmn\\t%2, #%n3\",
9394 static const char * const ite[2] =
9399 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9400 CMP_CMP, CMN_CMP, CMP_CMP,
9401 CMN_CMP, CMP_CMN, CMN_CMN};
9403 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9405 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
;; Thumb-2 requires an IT instruction before the predicated compare.
9406 if (TARGET_THUMB2) {
9407 output_asm_insn (ite[swap], operands);
9409 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9412 [(set_attr "conds" "set")
9413 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
;; Length varies: Thumb-2 alternatives include the IT insn.
9414 (set_attr_alternative "length"
9420 (if_then_else (eq_attr "is_thumb" "no")
9423 (if_then_else (eq_attr "is_thumb" "no")
9426 (if_then_else (eq_attr "is_thumb" "no")
9429 (if_then_else (eq_attr "is_thumb" "no")
;;
;; *cmp_ite1: as *cmp_ite0 but with the reversed dominance direction
;; (note reverse_condition on operands[4] and %D5 in cmp2).
9434 (define_insn "*cmp_ite1"
9435 [(set (match_operand 6 "dominant_cc_register" "")
9438 (match_operator 4 "arm_comparison_operator"
9439 [(match_operand:SI 0 "s_register_operand"
9440 "l,l,l,r,r,r,r,r,r")
9441 (match_operand:SI 1 "arm_add_operand"
9442 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9443 (match_operator:SI 5 "arm_comparison_operator"
9444 [(match_operand:SI 2 "s_register_operand"
9445 "l,r,r,l,l,r,r,r,r")
9446 (match_operand:SI 3 "arm_add_operand"
9447 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9453 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9457 {\"cmn\\t%0, #%n1\",
9460 \"cmn\\t%2, #%n3\"},
9461 {\"cmn\\t%0, #%n1\",
9464 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9466 {\"cmp%d4\\t%2, %3\",
9467 \"cmp%D5\\t%0, %1\"},
9468 {\"cmp%d4\\t%2, %3\",
9469 \"cmn%D5\\t%0, #%n1\"},
9470 {\"cmn%d4\\t%2, #%n3\",
9471 \"cmp%D5\\t%0, %1\"},
9472 {\"cmn%d4\\t%2, #%n3\",
9473 \"cmn%D5\\t%0, #%n1\"}
9475 static const char * const ite[2] =
9480 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9481 CMP_CMP, CMN_CMP, CMP_CMP,
9482 CMN_CMP, CMP_CMN, CMN_CMN};
9484 comparison_dominates_p (GET_CODE (operands[5]),
9485 reverse_condition (GET_CODE (operands[4])));
9487 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9488 if (TARGET_THUMB2) {
9489 output_asm_insn (ite[swap], operands);
9491 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9494 [(set_attr "conds" "set")
9495 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9496 (set_attr_alternative "length"
9502 (if_then_else (eq_attr "is_thumb" "no")
9505 (if_then_else (eq_attr "is_thumb" "no")
9508 (if_then_else (eq_attr "is_thumb" "no")
9511 (if_then_else (eq_attr "is_thumb" "no")
;;
;; *cmp_and: CC for the AND of two comparisons (both must hold).
9516 (define_insn "*cmp_and"
9517 [(set (match_operand 6 "dominant_cc_register" "")
9520 (match_operator 4 "arm_comparison_operator"
9521 [(match_operand:SI 0 "s_register_operand"
9522 "l,l,l,r,r,r,r,r,r")
9523 (match_operand:SI 1 "arm_add_operand"
9524 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9525 (match_operator:SI 5 "arm_comparison_operator"
9526 [(match_operand:SI 2 "s_register_operand"
9527 "l,r,r,l,l,r,r,r,r")
9528 (match_operand:SI 3 "arm_add_operand"
9529 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9534 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9536 {\"cmp%d5\\t%0, %1\",
9537 \"cmp%d4\\t%2, %3\"},
9538 {\"cmn%d5\\t%0, #%n1\",
9539 \"cmp%d4\\t%2, %3\"},
9540 {\"cmp%d5\\t%0, %1\",
9541 \"cmn%d4\\t%2, #%n3\"},
9542 {\"cmn%d5\\t%0, #%n1\",
9543 \"cmn%d4\\t%2, #%n3\"}
9545 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9550 \"cmn\\t%0, #%n1\"},
9551 {\"cmn\\t%2, #%n3\",
9553 {\"cmn\\t%2, #%n3\",
9556 static const char *const ite[2] =
9561 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9562 CMP_CMP, CMN_CMP, CMP_CMP,
9563 CMN_CMP, CMP_CMN, CMN_CMN};
9565 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9567 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9568 if (TARGET_THUMB2) {
9569 output_asm_insn (ite[swap], operands);
9571 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9574 [(set_attr "conds" "set")
9575 (set_attr "predicable" "no")
9576 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9577 (set_attr_alternative "length"
9583 (if_then_else (eq_attr "is_thumb" "no")
9586 (if_then_else (eq_attr "is_thumb" "no")
9589 (if_then_else (eq_attr "is_thumb" "no")
9592 (if_then_else (eq_attr "is_thumb" "no")
;;
;; *cmp_ior: CC for the OR of two comparisons; the second compare is
;; predicated on the FIRST failing (%D4/%D5 forms in cmp2).
9597 (define_insn "*cmp_ior"
9598 [(set (match_operand 6 "dominant_cc_register" "")
9601 (match_operator 4 "arm_comparison_operator"
9602 [(match_operand:SI 0 "s_register_operand"
9603 "l,l,l,r,r,r,r,r,r")
9604 (match_operand:SI 1 "arm_add_operand"
9605 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9606 (match_operator:SI 5 "arm_comparison_operator"
9607 [(match_operand:SI 2 "s_register_operand"
9608 "l,r,r,l,l,r,r,r,r")
9609 (match_operand:SI 3 "arm_add_operand"
9610 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9615 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9619 {\"cmn\\t%0, #%n1\",
9622 \"cmn\\t%2, #%n3\"},
9623 {\"cmn\\t%0, #%n1\",
9626 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9628 {\"cmp%D4\\t%2, %3\",
9629 \"cmp%D5\\t%0, %1\"},
9630 {\"cmp%D4\\t%2, %3\",
9631 \"cmn%D5\\t%0, #%n1\"},
9632 {\"cmn%D4\\t%2, #%n3\",
9633 \"cmp%D5\\t%0, %1\"},
9634 {\"cmn%D4\\t%2, #%n3\",
9635 \"cmn%D5\\t%0, #%n1\"}
9637 static const char *const ite[2] =
9642 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9643 CMP_CMP, CMN_CMP, CMP_CMP,
9644 CMN_CMP, CMP_CMN, CMN_CMN};
9646 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9648 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9649 if (TARGET_THUMB2) {
9650 output_asm_insn (ite[swap], operands);
9652 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9656 [(set_attr "conds" "set")
9657 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9658 (set_attr_alternative "length"
9664 (if_then_else (eq_attr "is_thumb" "no")
9667 (if_then_else (eq_attr "is_thumb" "no")
9670 (if_then_else (eq_attr "is_thumb" "no")
9673 (if_then_else (eq_attr "is_thumb" "no")
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
;;
;; op0 = (op1 cmp3 op2) | (op4 cmp6 op5): split after reload into a
;; dominance-CC compare (matched by *cmp_ior above) plus an scc of the
;; combined flags.  operands[7] is the dominance CC register.
9678 (define_insn_and_split "*ior_scc_scc"
9679 [(set (match_operand:SI 0 "s_register_operand" "=r")
9680 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9681 [(match_operand:SI 1 "s_register_operand" "r")
9682 (match_operand:SI 2 "arm_add_operand" "rIL")])
9683 (match_operator:SI 6 "arm_comparison_operator"
9684 [(match_operand:SI 4 "s_register_operand" "r")
9685 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9686 (clobber (reg:CC CC_REGNUM))]
9688 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9691 "TARGET_32BIT && reload_completed"
9695 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9696 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9698 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9700 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9703 [(set_attr "conds" "clob")
9704 (set_attr "length" "16")])
;;
9706 ; If the above pattern is followed by a CMP insn, then the compare is
9707 ; redundant, since we can rework the conditional instruction that follows.
9708 (define_insn_and_split "*ior_scc_scc_cmp"
9709 [(set (match_operand 0 "dominant_cc_register" "")
9710 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9711 [(match_operand:SI 1 "s_register_operand" "r")
9712 (match_operand:SI 2 "arm_add_operand" "rIL")])
9713 (match_operator:SI 6 "arm_comparison_operator"
9714 [(match_operand:SI 4 "s_register_operand" "r")
9715 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9717 (set (match_operand:SI 7 "s_register_operand" "=r")
9718 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9719 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9722 "TARGET_32BIT && reload_completed"
9726 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9727 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9729 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9731 [(set_attr "conds" "set")
9732 (set_attr "length" "16")])
;;
;; AND analogue of *ior_scc_scc: needs a dominance CC mode for
;; DOM_CC_X_AND_Y; split matches *cmp_and above.
9734 (define_insn_and_split "*and_scc_scc"
9735 [(set (match_operand:SI 0 "s_register_operand" "=r")
9736 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9737 [(match_operand:SI 1 "s_register_operand" "r")
9738 (match_operand:SI 2 "arm_add_operand" "rIL")])
9739 (match_operator:SI 6 "arm_comparison_operator"
9740 [(match_operand:SI 4 "s_register_operand" "r")
9741 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9742 (clobber (reg:CC CC_REGNUM))]
9744 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9747 "TARGET_32BIT && reload_completed
9748 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9753 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9754 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9756 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9758 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9761 [(set_attr "conds" "clob")
9762 (set_attr "length" "16")])
;;
9764 ; If the above pattern is followed by a CMP insn, then the compare is
9765 ; redundant, since we can rework the conditional instruction that follows.
9766 (define_insn_and_split "*and_scc_scc_cmp"
9767 [(set (match_operand 0 "dominant_cc_register" "")
9768 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9769 [(match_operand:SI 1 "s_register_operand" "r")
9770 (match_operand:SI 2 "arm_add_operand" "rIL")])
9771 (match_operator:SI 6 "arm_comparison_operator"
9772 [(match_operand:SI 4 "s_register_operand" "r")
9773 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9775 (set (match_operand:SI 7 "s_register_operand" "=r")
9776 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9777 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9780 "TARGET_32BIT && reload_completed"
9784 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9785 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9787 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9789 [(set_attr "conds" "set")
9790 (set_attr "length" "16")])
;;
9792 ;; If there is no dominance in the comparison, then we can still save an
9793 ;; instruction in the AND case, since we can know that the second compare
9794 ;; need only zero the value if false (if true, then the value is already
;; Non-dominant AND case: compute the first scc into op0, then compare
;; for the second condition and conditionally keep/zero op0.
;; operands[7]/[8] (built in the C fragment) are the CC reg and
;; compare for the second comparison.
9796 (define_insn_and_split "*and_scc_scc_nodom"
9797 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9798 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9799 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9800 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9801 (match_operator:SI 6 "arm_comparison_operator"
9802 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9803 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9804 (clobber (reg:CC CC_REGNUM))]
9806 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9809 "TARGET_32BIT && reload_completed"
9810 [(parallel [(set (match_dup 0)
9811 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9812 (clobber (reg:CC CC_REGNUM))])
9813 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9815 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9818 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9819 operands[4], operands[5]),
9821 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9823 [(set_attr "conds" "clob")
9824 (set_attr "length" "20")])
;; NOTE(review): the define_split headers for both splits below are
;; elided in this extract; code is left byte-identical, comments only.
;;
;; Split (header elided): comparing (op0 & ...) | (op2 cmp1 op3)
;; against zero — compute the IOR into scratch op4, then test its low
;; bit.  The mirrored split below handles the commuted operand order.
9827 [(set (reg:CC_NOOV CC_REGNUM)
9828 (compare:CC_NOOV (ior:SI
9829 (and:SI (match_operand:SI 0 "s_register_operand" "")
9831 (match_operator:SI 1 "arm_comparison_operator"
9832 [(match_operand:SI 2 "s_register_operand" "")
9833 (match_operand:SI 3 "arm_add_operand" "")]))
9835 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9838 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9840 (set (reg:CC_NOOV CC_REGNUM)
9841 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;;
;; Commuted form: (op2 cmp1 op3) | (op0 & ...) against zero.
9846 [(set (reg:CC_NOOV CC_REGNUM)
9847 (compare:CC_NOOV (ior:SI
9848 (match_operator:SI 1 "arm_comparison_operator"
9849 [(match_operand:SI 2 "s_register_operand" "")
9850 (match_operand:SI 3 "arm_add_operand" "")])
9851 (and:SI (match_operand:SI 0 "s_register_operand" "")
9854 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9857 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9859 (set (reg:CC_NOOV CC_REGNUM)
9860 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
9863 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;;
;; op0 = -(op1 cmp3 op2): 0 or -1 (all-ones).  Special cases:
;;   x < 0   -> asr #31 (sign-bit broadcast, one insn)
;;   x != y  -> subs + mvnne (two insns)
;; otherwise cmp + conditional movs (three insns).
9865 (define_insn "*negscc"
9866 [(set (match_operand:SI 0 "s_register_operand" "=r")
9867 (neg:SI (match_operator 3 "arm_comparison_operator"
9868 [(match_operand:SI 1 "s_register_operand" "r")
9869 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9870 (clobber (reg:CC CC_REGNUM))]
9873 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9874 return \"mov\\t%0, %1, asr #31\";
9876 if (GET_CODE (operands[3]) == NE)
9877 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9879 output_asm_insn (\"cmp\\t%1, %2\", operands);
9880 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9881 return \"mvn%d3\\t%0, #0\";
9883 [(set_attr "conds" "clob")
9884 (set_attr "length" "12")]
;;
;; Conditional move doing its own compare (clobbers CC):
;; op0 = (op3 cmp5 op4) ? op1 : op2.  LT/GE against zero with one
;; register source are strength-reduced to and/bic with "asr #31"
;; masks (asr #32 in the flag-setting forms); otherwise cmp/cmn plus
;; up to two conditional movs.
9887 (define_insn "movcond"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9890 (match_operator 5 "arm_comparison_operator"
9891 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9892 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9893 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9894 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9895 (clobber (reg:CC CC_REGNUM))]
9898 if (GET_CODE (operands[5]) == LT
9899 && (operands[4] == const0_rtx))
9901 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9903 if (operands[2] == const0_rtx)
9904 return \"and\\t%0, %1, %3, asr #31\";
9905 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9907 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9909 if (operands[1] == const0_rtx)
9910 return \"bic\\t%0, %2, %3, asr #31\";
9911 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9913 /* The only case that falls through to here is when both ops 1 & 2
9917 if (GET_CODE (operands[5]) == GE
9918 && (operands[4] == const0_rtx))
9920 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9922 if (operands[2] == const0_rtx)
9923 return \"bic\\t%0, %1, %3, asr #31\";
9924 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9926 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9928 if (operands[1] == const0_rtx)
9929 return \"and\\t%0, %2, %3, asr #31\";
9930 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9932 /* The only case that falls through to here is when both ops 1 & 2
;; Negative constants that can't be encoded as cmp immediates use cmn.
9935 if (GET_CODE (operands[4]) == CONST_INT
9936 && !const_ok_for_arm (INTVAL (operands[4])))
9937 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9939 output_asm_insn (\"cmp\\t%3, %4\", operands);
9940 if (which_alternative != 0)
9941 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9942 if (which_alternative != 1)
9943 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9946 [(set_attr "conds" "clob")
9947 (set_attr "length" "8,8,12")]
;; NOTE(review): interior lines appear elided in this extract; code is
;; left byte-identical, comments only.
9950 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;;
;; "ifcompare" patterns do their own compare (clobber CC); the "if"
;; variants use an already-set CC register.
;;
;; op0 = (op4 cmp6 op5) ? (op2 + op3) : op1
9952 (define_insn "*ifcompare_plus_move"
9953 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9954 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9955 [(match_operand:SI 4 "s_register_operand" "r,r")
9956 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9958 (match_operand:SI 2 "s_register_operand" "r,r")
9959 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9960 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9961 (clobber (reg:CC CC_REGNUM))]
9964 [(set_attr "conds" "clob")
9965 (set_attr "length" "8,12")]
;;
;; op0 = (cond on CC) ? (op2 + op3) : op1 — conditional add/sub, with
;; sub used for negative constants ("L" alternatives, #%n3).
9968 (define_insn "*if_plus_move"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9971 (match_operator 4 "arm_comparison_operator"
9972 [(match_operand 5 "cc_register" "") (const_int 0)])
9974 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9975 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9976 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9980 sub%d4\\t%0, %2, #%n3
9981 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9982 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9983 [(set_attr "conds" "use")
9984 (set_attr "length" "4,4,8,8")
9985 (set_attr "type" "*,*,*,*")]
;;
;; op0 = (op4 cmp6 op5) ? op1 : (op2 + op3)
9988 (define_insn "*ifcompare_move_plus"
9989 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9990 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9991 [(match_operand:SI 4 "s_register_operand" "r,r")
9992 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9993 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9995 (match_operand:SI 2 "s_register_operand" "r,r")
9996 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9997 (clobber (reg:CC CC_REGNUM))]
10000 [(set_attr "conds" "clob")
10001 (set_attr "length" "8,12")]
;;
;; op0 = (cond on CC) ? op1 : (op2 + op3) — mirror of *if_plus_move
;; (note %D4 on the add/sub, %d4 on the mov).
10004 (define_insn "*if_move_plus"
10005 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10007 (match_operator 4 "arm_comparison_operator"
10008 [(match_operand 5 "cc_register" "") (const_int 0)])
10009 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10011 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10012 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10015 add%D4\\t%0, %2, %3
10016 sub%D4\\t%0, %2, #%n3
10017 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10018 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10019 [(set_attr "conds" "use")
10020 (set_attr "length" "4,4,8,8")
10021 (set_attr "type" "*,*,*,*")]
;;
;; op0 = (op5 cmp9 op6) ? (op1 op8 op2) : (op3 op7 op4) — both arms
;; are shiftable operations; own compare, clobbers CC.
10024 (define_insn "*ifcompare_arith_arith"
10025 [(set (match_operand:SI 0 "s_register_operand" "=r")
10026 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10027 [(match_operand:SI 5 "s_register_operand" "r")
10028 (match_operand:SI 6 "arm_add_operand" "rIL")])
10029 (match_operator:SI 8 "shiftable_operator"
10030 [(match_operand:SI 1 "s_register_operand" "r")
10031 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10032 (match_operator:SI 7 "shiftable_operator"
10033 [(match_operand:SI 3 "s_register_operand" "r")
10034 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10035 (clobber (reg:CC CC_REGNUM))]
10038 [(set_attr "conds" "clob")
10039 (set_attr "length" "12")]
;;
;; Same as above but on an already-set CC: two predicated ALU ops.
10042 (define_insn "*if_arith_arith"
10043 [(set (match_operand:SI 0 "s_register_operand" "=r")
10044 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10045 [(match_operand 8 "cc_register" "") (const_int 0)])
10046 (match_operator:SI 6 "shiftable_operator"
10047 [(match_operand:SI 1 "s_register_operand" "r")
10048 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10049 (match_operator:SI 7 "shiftable_operator"
10050 [(match_operand:SI 3 "s_register_operand" "r")
10051 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10053 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10054 [(set_attr "conds" "use")
10055 (set_attr "length" "8")]
10058 (define_insn "*ifcompare_arith_move"
10059 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10060 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10061 [(match_operand:SI 2 "s_register_operand" "r,r")
10062 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10063 (match_operator:SI 7 "shiftable_operator"
10064 [(match_operand:SI 4 "s_register_operand" "r,r")
10065 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10066 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10067 (clobber (reg:CC CC_REGNUM))]
10070 /* If we have an operation where (op x 0) is the identity operation and
10071 the conditional operator is LT or GE and we are comparing against zero and
10072 everything is in registers then we can do this in two instructions. */
10073 if (operands[3] == const0_rtx
10074 && GET_CODE (operands[7]) != AND
10075 && GET_CODE (operands[5]) == REG
10076 && GET_CODE (operands[1]) == REG
10077 && REGNO (operands[1]) == REGNO (operands[4])
10078 && REGNO (operands[4]) != REGNO (operands[0]))
10080 if (GET_CODE (operands[6]) == LT)
10081 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10082 else if (GET_CODE (operands[6]) == GE)
10083 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10085 if (GET_CODE (operands[3]) == CONST_INT
10086 && !const_ok_for_arm (INTVAL (operands[3])))
10087 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10089 output_asm_insn (\"cmp\\t%2, %3\", operands);
10090 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10091 if (which_alternative != 0)
10092 return \"mov%D6\\t%0, %1\";
10095 [(set_attr "conds" "clob")
10096 (set_attr "length" "8,12")]
10099 (define_insn "*if_arith_move"
10100 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10101 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10102 [(match_operand 6 "cc_register" "") (const_int 0)])
10103 (match_operator:SI 5 "shiftable_operator"
10104 [(match_operand:SI 2 "s_register_operand" "r,r")
10105 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10106 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10109 %I5%d4\\t%0, %2, %3
10110 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10111 [(set_attr "conds" "use")
10112 (set_attr "length" "4,8")
10113 (set_attr "type" "*,*")]
10116 (define_insn "*ifcompare_move_arith"
10117 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10118 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10119 [(match_operand:SI 4 "s_register_operand" "r,r")
10120 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10121 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10122 (match_operator:SI 7 "shiftable_operator"
10123 [(match_operand:SI 2 "s_register_operand" "r,r")
10124 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10125 (clobber (reg:CC CC_REGNUM))]
10128 /* If we have an operation where (op x 0) is the identity operation and
10129 the conditional operator is LT or GE and we are comparing against zero and
10130 everything is in registers then we can do this in two instructions */
10131 if (operands[5] == const0_rtx
10132 && GET_CODE (operands[7]) != AND
10133 && GET_CODE (operands[3]) == REG
10134 && GET_CODE (operands[1]) == REG
10135 && REGNO (operands[1]) == REGNO (operands[2])
10136 && REGNO (operands[2]) != REGNO (operands[0]))
10138 if (GET_CODE (operands[6]) == GE)
10139 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10140 else if (GET_CODE (operands[6]) == LT)
10141 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10144 if (GET_CODE (operands[5]) == CONST_INT
10145 && !const_ok_for_arm (INTVAL (operands[5])))
10146 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10148 output_asm_insn (\"cmp\\t%4, %5\", operands);
10150 if (which_alternative != 0)
10151 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10152 return \"%I7%D6\\t%0, %2, %3\";
10154 [(set_attr "conds" "clob")
10155 (set_attr "length" "8,12")]
10158 (define_insn "*if_move_arith"
10159 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10161 (match_operator 4 "arm_comparison_operator"
10162 [(match_operand 6 "cc_register" "") (const_int 0)])
10163 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10164 (match_operator:SI 5 "shiftable_operator"
10165 [(match_operand:SI 2 "s_register_operand" "r,r")
10166 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10169 %I5%D4\\t%0, %2, %3
10170 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10171 [(set_attr "conds" "use")
10172 (set_attr "length" "4,8")
10173 (set_attr "type" "*,*")]
10176 (define_insn "*ifcompare_move_not"
10177 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10179 (match_operator 5 "arm_comparison_operator"
10180 [(match_operand:SI 3 "s_register_operand" "r,r")
10181 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10182 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10184 (match_operand:SI 2 "s_register_operand" "r,r"))))
10185 (clobber (reg:CC CC_REGNUM))]
10188 [(set_attr "conds" "clob")
10189 (set_attr "length" "8,12")]
10192 (define_insn "*if_move_not"
10193 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10195 (match_operator 4 "arm_comparison_operator"
10196 [(match_operand 3 "cc_register" "") (const_int 0)])
10197 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10198 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10202 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10203 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10204 [(set_attr "conds" "use")
10205 (set_attr "insn" "mvn")
10206 (set_attr "length" "4,8,8")]
10209 (define_insn "*ifcompare_not_move"
10210 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10212 (match_operator 5 "arm_comparison_operator"
10213 [(match_operand:SI 3 "s_register_operand" "r,r")
10214 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10216 (match_operand:SI 2 "s_register_operand" "r,r"))
10217 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10218 (clobber (reg:CC CC_REGNUM))]
10221 [(set_attr "conds" "clob")
10222 (set_attr "length" "8,12")]
10225 (define_insn "*if_not_move"
10226 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10228 (match_operator 4 "arm_comparison_operator"
10229 [(match_operand 3 "cc_register" "") (const_int 0)])
10230 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10231 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10235 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10236 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10237 [(set_attr "conds" "use")
10238 (set_attr "insn" "mvn")
10239 (set_attr "length" "4,8,8")]
10242 (define_insn "*ifcompare_shift_move"
10243 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10245 (match_operator 6 "arm_comparison_operator"
10246 [(match_operand:SI 4 "s_register_operand" "r,r")
10247 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10248 (match_operator:SI 7 "shift_operator"
10249 [(match_operand:SI 2 "s_register_operand" "r,r")
10250 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10251 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10252 (clobber (reg:CC CC_REGNUM))]
10255 [(set_attr "conds" "clob")
10256 (set_attr "length" "8,12")]
10259 (define_insn "*if_shift_move"
10260 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10262 (match_operator 5 "arm_comparison_operator"
10263 [(match_operand 6 "cc_register" "") (const_int 0)])
10264 (match_operator:SI 4 "shift_operator"
10265 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10266 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10267 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10271 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10272 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10273 [(set_attr "conds" "use")
10274 (set_attr "shift" "2")
10275 (set_attr "length" "4,8,8")
10276 (set_attr "insn" "mov")
10277 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10278 (const_string "alu_shift")
10279 (const_string "alu_shift_reg")))]
10282 (define_insn "*ifcompare_move_shift"
10283 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10285 (match_operator 6 "arm_comparison_operator"
10286 [(match_operand:SI 4 "s_register_operand" "r,r")
10287 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10288 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10289 (match_operator:SI 7 "shift_operator"
10290 [(match_operand:SI 2 "s_register_operand" "r,r")
10291 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10292 (clobber (reg:CC CC_REGNUM))]
10295 [(set_attr "conds" "clob")
10296 (set_attr "length" "8,12")]
10299 (define_insn "*if_move_shift"
10300 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10302 (match_operator 5 "arm_comparison_operator"
10303 [(match_operand 6 "cc_register" "") (const_int 0)])
10304 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10305 (match_operator:SI 4 "shift_operator"
10306 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10307 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10311 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10312 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10313 [(set_attr "conds" "use")
10314 (set_attr "shift" "2")
10315 (set_attr "length" "4,8,8")
10316 (set_attr "insn" "mov")
10317 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10318 (const_string "alu_shift")
10319 (const_string "alu_shift_reg")))]
10322 (define_insn "*ifcompare_shift_shift"
10323 [(set (match_operand:SI 0 "s_register_operand" "=r")
10325 (match_operator 7 "arm_comparison_operator"
10326 [(match_operand:SI 5 "s_register_operand" "r")
10327 (match_operand:SI 6 "arm_add_operand" "rIL")])
10328 (match_operator:SI 8 "shift_operator"
10329 [(match_operand:SI 1 "s_register_operand" "r")
10330 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10331 (match_operator:SI 9 "shift_operator"
10332 [(match_operand:SI 3 "s_register_operand" "r")
10333 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10334 (clobber (reg:CC CC_REGNUM))]
10337 [(set_attr "conds" "clob")
10338 (set_attr "length" "12")]
10341 (define_insn "*if_shift_shift"
10342 [(set (match_operand:SI 0 "s_register_operand" "=r")
10344 (match_operator 5 "arm_comparison_operator"
10345 [(match_operand 8 "cc_register" "") (const_int 0)])
10346 (match_operator:SI 6 "shift_operator"
10347 [(match_operand:SI 1 "s_register_operand" "r")
10348 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10349 (match_operator:SI 7 "shift_operator"
10350 [(match_operand:SI 3 "s_register_operand" "r")
10351 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10353 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10354 [(set_attr "conds" "use")
10355 (set_attr "shift" "1")
10356 (set_attr "length" "8")
10357 (set_attr "insn" "mov")
10358 (set (attr "type") (if_then_else
10359 (and (match_operand 2 "const_int_operand" "")
10360 (match_operand 4 "const_int_operand" ""))
10361 (const_string "alu_shift")
10362 (const_string "alu_shift_reg")))]
10365 (define_insn "*ifcompare_not_arith"
10366 [(set (match_operand:SI 0 "s_register_operand" "=r")
10368 (match_operator 6 "arm_comparison_operator"
10369 [(match_operand:SI 4 "s_register_operand" "r")
10370 (match_operand:SI 5 "arm_add_operand" "rIL")])
10371 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10372 (match_operator:SI 7 "shiftable_operator"
10373 [(match_operand:SI 2 "s_register_operand" "r")
10374 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10375 (clobber (reg:CC CC_REGNUM))]
10378 [(set_attr "conds" "clob")
10379 (set_attr "length" "12")]
10382 (define_insn "*if_not_arith"
10383 [(set (match_operand:SI 0 "s_register_operand" "=r")
10385 (match_operator 5 "arm_comparison_operator"
10386 [(match_operand 4 "cc_register" "") (const_int 0)])
10387 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10388 (match_operator:SI 6 "shiftable_operator"
10389 [(match_operand:SI 2 "s_register_operand" "r")
10390 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10392 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10393 [(set_attr "conds" "use")
10394 (set_attr "insn" "mvn")
10395 (set_attr "length" "8")]
10398 (define_insn "*ifcompare_arith_not"
10399 [(set (match_operand:SI 0 "s_register_operand" "=r")
10401 (match_operator 6 "arm_comparison_operator"
10402 [(match_operand:SI 4 "s_register_operand" "r")
10403 (match_operand:SI 5 "arm_add_operand" "rIL")])
10404 (match_operator:SI 7 "shiftable_operator"
10405 [(match_operand:SI 2 "s_register_operand" "r")
10406 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10407 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10408 (clobber (reg:CC CC_REGNUM))]
10411 [(set_attr "conds" "clob")
10412 (set_attr "length" "12")]
10415 (define_insn "*if_arith_not"
10416 [(set (match_operand:SI 0 "s_register_operand" "=r")
10418 (match_operator 5 "arm_comparison_operator"
10419 [(match_operand 4 "cc_register" "") (const_int 0)])
10420 (match_operator:SI 6 "shiftable_operator"
10421 [(match_operand:SI 2 "s_register_operand" "r")
10422 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10423 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10425 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10426 [(set_attr "conds" "use")
10427 (set_attr "insn" "mvn")
10428 (set_attr "length" "8")]
10431 (define_insn "*ifcompare_neg_move"
10432 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10434 (match_operator 5 "arm_comparison_operator"
10435 [(match_operand:SI 3 "s_register_operand" "r,r")
10436 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10437 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10438 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10439 (clobber (reg:CC CC_REGNUM))]
10442 [(set_attr "conds" "clob")
10443 (set_attr "length" "8,12")]
10446 (define_insn "*if_neg_move"
10447 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10449 (match_operator 4 "arm_comparison_operator"
10450 [(match_operand 3 "cc_register" "") (const_int 0)])
10451 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10452 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10455 rsb%d4\\t%0, %2, #0
10456 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10457 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10458 [(set_attr "conds" "use")
10459 (set_attr "length" "4,8,8")]
10462 (define_insn "*ifcompare_move_neg"
10463 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10465 (match_operator 5 "arm_comparison_operator"
10466 [(match_operand:SI 3 "s_register_operand" "r,r")
10467 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10468 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10469 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10470 (clobber (reg:CC CC_REGNUM))]
10473 [(set_attr "conds" "clob")
10474 (set_attr "length" "8,12")]
10477 (define_insn "*if_move_neg"
10478 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10480 (match_operator 4 "arm_comparison_operator"
10481 [(match_operand 3 "cc_register" "") (const_int 0)])
10482 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10483 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10486 rsb%D4\\t%0, %2, #0
10487 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10488 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10489 [(set_attr "conds" "use")
10490 (set_attr "length" "4,8,8")]
10493 (define_insn "*arith_adjacentmem"
10494 [(set (match_operand:SI 0 "s_register_operand" "=r")
10495 (match_operator:SI 1 "shiftable_operator"
10496 [(match_operand:SI 2 "memory_operand" "m")
10497 (match_operand:SI 3 "memory_operand" "m")]))
10498 (clobber (match_scratch:SI 4 "=r"))]
10499 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10505 HOST_WIDE_INT val1 = 0, val2 = 0;
10507 if (REGNO (operands[0]) > REGNO (operands[4]))
10509 ldm[1] = operands[4];
10510 ldm[2] = operands[0];
10514 ldm[1] = operands[0];
10515 ldm[2] = operands[4];
10518 base_reg = XEXP (operands[2], 0);
10520 if (!REG_P (base_reg))
10522 val1 = INTVAL (XEXP (base_reg, 1));
10523 base_reg = XEXP (base_reg, 0);
10526 if (!REG_P (XEXP (operands[3], 0)))
10527 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10529 arith[0] = operands[0];
10530 arith[3] = operands[1];
10544 if (val1 !=0 && val2 != 0)
10548 if (val1 == 4 || val2 == 4)
10549 /* Other val must be 8, since we know they are adjacent and neither
10551 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10552 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10554 ldm[0] = ops[0] = operands[4];
10556 ops[2] = GEN_INT (val1);
10557 output_add_immediate (ops);
10559 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10561 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10565 /* Offset is out of range for a single add, so use two ldr. */
10568 ops[2] = GEN_INT (val1);
10569 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10571 ops[2] = GEN_INT (val2);
10572 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10575 else if (val1 != 0)
10578 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10580 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10585 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10587 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10589 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10592 [(set_attr "length" "12")
10593 (set_attr "predicable" "yes")
10594 (set_attr "type" "load1")]
10597 ; This pattern is never tried by combine, so do it as a peephole
10600 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10601 (match_operand:SI 1 "arm_general_register_operand" ""))
10602 (set (reg:CC CC_REGNUM)
10603 (compare:CC (match_dup 1) (const_int 0)))]
10605 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10606 (set (match_dup 0) (match_dup 1))])]
10611 [(set (match_operand:SI 0 "s_register_operand" "")
10612 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10614 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10615 [(match_operand:SI 3 "s_register_operand" "")
10616 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10617 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10619 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10620 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10625 ;; This split can be used because CC_Z mode implies that the following
10626 ;; branch will be an equality, or an unsigned inequality, so the sign
10627 ;; extension is not needed.
10630 [(set (reg:CC_Z CC_REGNUM)
10632 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10634 (match_operand 1 "const_int_operand" "")))
10635 (clobber (match_scratch:SI 2 ""))]
10637 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10638 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10639 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10640 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10642 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10645 ;; ??? Check the patterns above for Thumb-2 usefulness
10647 (define_expand "prologue"
10648 [(clobber (const_int 0))]
10651 arm_expand_prologue ();
10653 thumb1_expand_prologue ();
10658 (define_expand "epilogue"
10659 [(clobber (const_int 0))]
10662 if (crtl->calls_eh_return)
10663 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10665 thumb1_expand_epilogue ();
10666 else if (USE_RETURN_INSN (FALSE))
10668 emit_jump_insn (gen_return ());
10671 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10672 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10677 (define_insn "prologue_thumb1_interwork"
10678 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10680 "* return thumb1_output_interwork ();"
10681 [(set_attr "length" "8")]
10684 ;; Note - although unspec_volatile's USE all hard registers,
10685 ;; USEs are ignored after reload has completed.  Thus we need
10686 ;; to add an unspec of the link register to ensure that flow
10687 ;; does not think that it is unused by the sibcall branch that
10688 ;; will replace the standard function epilogue.
10689 (define_insn "sibcall_epilogue"
10690 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10691 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10694 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10695 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10696 return arm_output_epilogue (next_nonnote_insn (insn));
10698 ;; Length is absolute worst case
10699 [(set_attr "length" "44")
10700 (set_attr "type" "block")
10701 ;; We don't clobber the conditions, but the potential length of this
10702 ;; operation is sufficient to make conditionalizing the sequence
10703 ;; unlikely to be profitable.
10704 (set_attr "conds" "clob")]
10707 (define_insn "*epilogue_insns"
10708 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10712 return arm_output_epilogue (NULL);
10713 else /* TARGET_THUMB1 */
10714 return thumb_unexpanded_epilogue ();
10716 ; Length is absolute worst case
10717 [(set_attr "length" "44")
10718 (set_attr "type" "block")
10719 ;; We don't clobber the conditions, but the potential length of this
10720 ;; operation is sufficient to make conditionalizing the sequence
10721 ;; unlikely to be profitable.
10722 (set_attr "conds" "clob")]
10725 (define_expand "eh_epilogue"
10726 [(use (match_operand:SI 0 "register_operand" ""))
10727 (use (match_operand:SI 1 "register_operand" ""))
10728 (use (match_operand:SI 2 "register_operand" ""))]
10732 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10733 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10735 rtx ra = gen_rtx_REG (Pmode, 2);
10737 emit_move_insn (ra, operands[2]);
10740 /* This is a hack -- we may have crystalized the function type too
10742 cfun->machine->func_type = 0;
10746 ;; This split is only used during output to reduce the number of patterns
10747 ;; that need assembler instructions adding to them. We allowed the setting
10748 ;; of the conditions to be implicit during rtl generation so that
10749 ;; the conditional compare patterns would work. However this conflicts to
10750 ;; some extent with the conditional data operations, so we have to split them
10753 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10754 ;; conditional execution sufficient?
10757 [(set (match_operand:SI 0 "s_register_operand" "")
10758 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10759 [(match_operand 2 "" "") (match_operand 3 "" "")])
10761 (match_operand 4 "" "")))
10762 (clobber (reg:CC CC_REGNUM))]
10763 "TARGET_ARM && reload_completed"
10764 [(set (match_dup 5) (match_dup 6))
10765 (cond_exec (match_dup 7)
10766 (set (match_dup 0) (match_dup 4)))]
10769 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10770 operands[2], operands[3]);
10771 enum rtx_code rc = GET_CODE (operands[1]);
10773 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10774 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10775 if (mode == CCFPmode || mode == CCFPEmode)
10776 rc = reverse_condition_maybe_unordered (rc);
10778 rc = reverse_condition (rc);
10780 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10785 [(set (match_operand:SI 0 "s_register_operand" "")
10786 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10787 [(match_operand 2 "" "") (match_operand 3 "" "")])
10788 (match_operand 4 "" "")
10790 (clobber (reg:CC CC_REGNUM))]
10791 "TARGET_ARM && reload_completed"
10792 [(set (match_dup 5) (match_dup 6))
10793 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10794 (set (match_dup 0) (match_dup 4)))]
10797 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10798 operands[2], operands[3]);
10800 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10801 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10806 [(set (match_operand:SI 0 "s_register_operand" "")
10807 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10808 [(match_operand 2 "" "") (match_operand 3 "" "")])
10809 (match_operand 4 "" "")
10810 (match_operand 5 "" "")))
10811 (clobber (reg:CC CC_REGNUM))]
10812 "TARGET_ARM && reload_completed"
10813 [(set (match_dup 6) (match_dup 7))
10814 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10815 (set (match_dup 0) (match_dup 4)))
10816 (cond_exec (match_dup 8)
10817 (set (match_dup 0) (match_dup 5)))]
10820 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10821 operands[2], operands[3]);
10822 enum rtx_code rc = GET_CODE (operands[1]);
10824 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10825 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10826 if (mode == CCFPmode || mode == CCFPEmode)
10827 rc = reverse_condition_maybe_unordered (rc);
10829 rc = reverse_condition (rc);
10831 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10836 [(set (match_operand:SI 0 "s_register_operand" "")
10837 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10838 [(match_operand:SI 2 "s_register_operand" "")
10839 (match_operand:SI 3 "arm_add_operand" "")])
10840 (match_operand:SI 4 "arm_rhs_operand" "")
10842 (match_operand:SI 5 "s_register_operand" ""))))
10843 (clobber (reg:CC CC_REGNUM))]
10844 "TARGET_ARM && reload_completed"
10845 [(set (match_dup 6) (match_dup 7))
10846 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10847 (set (match_dup 0) (match_dup 4)))
10848 (cond_exec (match_dup 8)
10849 (set (match_dup 0) (not:SI (match_dup 5))))]
10852 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10853 operands[2], operands[3]);
10854 enum rtx_code rc = GET_CODE (operands[1]);
10856 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10857 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10858 if (mode == CCFPmode || mode == CCFPEmode)
10859 rc = reverse_condition_maybe_unordered (rc);
10861 rc = reverse_condition (rc);
10863 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10867 (define_insn "*cond_move_not"
10868 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10869 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10870 [(match_operand 3 "cc_register" "") (const_int 0)])
10871 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10873 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10877 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10878 [(set_attr "conds" "use")
10879 (set_attr "insn" "mvn")
10880 (set_attr "length" "4,8")]
10883 ;; The next two patterns occur when an AND operation is followed by a
10884 ;; scc insn sequence
10886 (define_insn "*sign_extract_onebit"
10887 [(set (match_operand:SI 0 "s_register_operand" "=r")
10888 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10890 (match_operand:SI 2 "const_int_operand" "n")))
10891 (clobber (reg:CC CC_REGNUM))]
10894 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10895 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10896 return \"mvnne\\t%0, #0\";
10898 [(set_attr "conds" "clob")
10899 (set_attr "length" "8")]
10902 (define_insn "*not_signextract_onebit"
10903 [(set (match_operand:SI 0 "s_register_operand" "=r")
10905 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10907 (match_operand:SI 2 "const_int_operand" "n"))))
10908 (clobber (reg:CC CC_REGNUM))]
10911 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10912 output_asm_insn (\"tst\\t%1, %2\", operands);
10913 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10914 return \"movne\\t%0, #0\";
10916 [(set_attr "conds" "clob")
10917 (set_attr "length" "12")]
10919 ;; ??? The above patterns need auditing for Thumb-2
10921 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10922 ;; expressions. For simplicity, the first register is also in the unspec
10924 ;; To avoid the usage of a GNU extension, the length attribute is computed
10925 ;; in a C function arm_attr_length_push_multi.
10926 (define_insn "*push_multi"
10927 [(match_parallel 2 "multi_register_push"
10928 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10929 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10930 UNSPEC_PUSH_MULT))])]
10934 int num_saves = XVECLEN (operands[2], 0);
10936 /* For the StrongARM at least it is faster to
10937 use STR to store only a single register.
10938 In Thumb mode always use push, and the assembler will pick
10939 something appropriate. */
10940 if (num_saves == 1 && TARGET_ARM)
10941 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10948 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10949 else if (TARGET_THUMB2)
10950 strcpy (pattern, \"push%?\\t{%1\");
10952 strcpy (pattern, \"push\\t{%1\");
10954 for (i = 1; i < num_saves; i++)
10956 strcat (pattern, \", %|\");
10958 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10961 strcat (pattern, \"}\");
10962 output_asm_insn (pattern, operands);
10967 [(set_attr "type" "store4")
10968 (set (attr "length")
10969 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10972 (define_insn "stack_tie"
10973 [(set (mem:BLK (scratch))
10974 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10975 (match_operand:SI 1 "s_register_operand" "rk")]
10979 [(set_attr "length" "0")]
10982 ;; Similarly for the floating point registers
10983 (define_insn "*push_fp_multi"
10984 [(match_parallel 2 "multi_register_push"
10985 [(set (match_operand:BLK 0 "memory_operand" "=m")
10986 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10987 UNSPEC_PUSH_MULT))])]
10988 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10993 sprintf (pattern, \"sfm%%(fd%%)\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10994 output_asm_insn (pattern, operands);
10997 [(set_attr "type" "f_fpa_store")]
11000 ;; Special patterns for dealing with the constant pool
11002 (define_insn "align_4"
11003 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11006 assemble_align (32);
11011 (define_insn "align_8"
11012 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11015 assemble_align (64);
11020 (define_insn "consttable_end"
11021 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11024 making_const_table = FALSE;
11029 (define_insn "consttable_1"
11030 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11033 making_const_table = TRUE;
11034 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11035 assemble_zeros (3);
11038 [(set_attr "length" "4")]
11041 (define_insn "consttable_2"
11042 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11045 making_const_table = TRUE;
11046 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
11047 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11048 assemble_zeros (2);
11051 [(set_attr "length" "4")]
11054 (define_insn "consttable_4"
11055 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11059 rtx x = operands[0];
11060 making_const_table = TRUE;
11061 switch (GET_MODE_CLASS (GET_MODE (x)))
11064 if (GET_MODE (x) == HFmode)
11065 arm_emit_fp16_const (x);
11069 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
11070 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
11074 /* XXX: Sometimes gcc does something really dumb and ends up with
11075 a HIGH in a constant pool entry, usually because it's trying to
11076 load into a VFP register. We know this will always be used in
11077 combination with a LO_SUM which ignores the high bits, so just
11078 strip off the HIGH. */
11079 if (GET_CODE (x) == HIGH)
11081 assemble_integer (x, 4, BITS_PER_WORD, 1);
11082 mark_symbol_refs_as_used (x);
11087 [(set_attr "length" "4")]
11090 (define_insn "consttable_8"
11091 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11095 making_const_table = TRUE;
11096 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11101 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11102 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11106 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11111 [(set_attr "length" "8")]
11114 (define_insn "consttable_16"
11115 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11119 making_const_table = TRUE;
11120 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11125 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11126 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11130 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11135 [(set_attr "length" "16")]
11138 ;; Miscellaneous Thumb patterns
11140 (define_expand "tablejump"
11141 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11142 (use (label_ref (match_operand 1 "" "")))])]
11147 /* Hopefully, CSE will eliminate this copy. */
11148 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11149 rtx reg2 = gen_reg_rtx (SImode);
11151 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11152 operands[0] = reg2;
11157 ;; NB never uses BX.
11158 (define_insn "*thumb1_tablejump"
11159 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11160 (use (label_ref (match_operand 1 "" "")))]
11163 [(set_attr "length" "2")]
11166 ;; V5 Instructions,
11168 (define_insn "clzsi2"
11169 [(set (match_operand:SI 0 "s_register_operand" "=r")
11170 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11171 "TARGET_32BIT && arm_arch5"
11173 [(set_attr "predicable" "yes")
11174 (set_attr "insn" "clz")])
11176 (define_insn "rbitsi2"
11177 [(set (match_operand:SI 0 "s_register_operand" "=r")
11178 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11179 "TARGET_32BIT && arm_arch_thumb2"
11181 [(set_attr "predicable" "yes")
11182 (set_attr "insn" "clz")])
11184 (define_expand "ctzsi2"
11185 [(set (match_operand:SI 0 "s_register_operand" "")
11186 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11187 "TARGET_32BIT && arm_arch_thumb2"
11190 rtx tmp = gen_reg_rtx (SImode);
11191 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11192 emit_insn (gen_clzsi2 (operands[0], tmp));
11198 ;; V5E instructions.
11200 (define_insn "prefetch"
11201 [(prefetch (match_operand:SI 0 "address_operand" "p")
11202 (match_operand:SI 1 "" "")
11203 (match_operand:SI 2 "" ""))]
11204 "TARGET_32BIT && arm_arch5e"
11207 ;; General predication pattern
11210 [(match_operator 0 "arm_comparison_operator"
11211 [(match_operand 1 "cc_register" "")
11217 (define_insn "prologue_use"
11218 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11220 "%@ %0 needed for prologue"
11221 [(set_attr "length" "0")]
11225 ;; Patterns for exception handling
11227 (define_expand "eh_return"
11228 [(use (match_operand 0 "general_operand" ""))]
11233 emit_insn (gen_arm_eh_return (operands[0]));
11235 emit_insn (gen_thumb_eh_return (operands[0]));
11240 ;; We can't expand this before we know where the link register is stored.
11241 (define_insn_and_split "arm_eh_return"
11242 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11244 (clobber (match_scratch:SI 1 "=&r"))]
11247 "&& reload_completed"
11251 arm_set_return_address (operands[0], operands[1]);
11256 (define_insn_and_split "thumb_eh_return"
11257 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11259 (clobber (match_scratch:SI 1 "=&l"))]
11262 "&& reload_completed"
11266 thumb_set_return_address (operands[0], operands[1]);
11274 (define_insn "load_tp_hard"
11275 [(set (match_operand:SI 0 "register_operand" "=r")
11276 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11278 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11279 [(set_attr "predicable" "yes")]
11282 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11283 (define_insn "load_tp_soft"
11284 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11285 (clobber (reg:SI LR_REGNUM))
11286 (clobber (reg:SI IP_REGNUM))
11287 (clobber (reg:CC CC_REGNUM))]
11289 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11290 [(set_attr "conds" "clob")]
11293 ;; tls descriptor call
11294 (define_insn "tlscall"
11295 [(set (reg:SI R0_REGNUM)
11296 (unspec:SI [(reg:SI R0_REGNUM)
11297 (match_operand:SI 0 "" "X")
11298 (match_operand 1 "" "")] UNSPEC_TLS))
11299 (clobber (reg:SI R1_REGNUM))
11300 (clobber (reg:SI LR_REGNUM))
11301 (clobber (reg:SI CC_REGNUM))]
11304 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11305 INTVAL (operands[1]));
11306 return "bl\\t%c0(tlscall)";
11308 [(set_attr "conds" "clob")
11309 (set_attr "length" "4")]
11314 ;; We only care about the lower 16 bits of the constant
11315 ;; being inserted into the upper 16 bits of the register.
11316 (define_insn "*arm_movtas_ze"
11317 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11320 (match_operand:SI 1 "const_int_operand" ""))]
11323 [(set_attr "predicable" "yes")
11324 (set_attr "length" "4")]
11327 (define_insn "*arm_rev"
11328 [(set (match_operand:SI 0 "s_register_operand" "=r")
11329 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11330 "TARGET_32BIT && arm_arch6"
11332 [(set_attr "predicable" "yes")
11333 (set_attr "length" "4")]
11336 (define_insn "*thumb1_rev"
11337 [(set (match_operand:SI 0 "s_register_operand" "=l")
11338 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
11339 "TARGET_THUMB1 && arm_arch6"
11341 [(set_attr "length" "2")]
11344 (define_expand "arm_legacy_rev"
11345 [(set (match_operand:SI 2 "s_register_operand" "")
11346 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11350 (lshiftrt:SI (match_dup 2)
11352 (set (match_operand:SI 3 "s_register_operand" "")
11353 (rotatert:SI (match_dup 1)
11356 (and:SI (match_dup 2)
11357 (const_int -65281)))
11358 (set (match_operand:SI 0 "s_register_operand" "")
11359 (xor:SI (match_dup 3)
11365 ;; Reuse temporaries to keep register pressure down.
11366 (define_expand "thumb_legacy_rev"
11367 [(set (match_operand:SI 2 "s_register_operand" "")
11368 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11370 (set (match_operand:SI 3 "s_register_operand" "")
11371 (lshiftrt:SI (match_dup 1)
11374 (ior:SI (match_dup 3)
11376 (set (match_operand:SI 4 "s_register_operand" "")
11378 (set (match_operand:SI 5 "s_register_operand" "")
11379 (rotatert:SI (match_dup 1)
11382 (ashift:SI (match_dup 5)
11385 (lshiftrt:SI (match_dup 5)
11388 (ior:SI (match_dup 5)
11391 (rotatert:SI (match_dup 5)
11393 (set (match_operand:SI 0 "s_register_operand" "")
11394 (ior:SI (match_dup 5)
11400 (define_expand "bswapsi2"
11401 [(set (match_operand:SI 0 "s_register_operand" "=r")
11402 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11403 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11407 rtx op2 = gen_reg_rtx (SImode);
11408 rtx op3 = gen_reg_rtx (SImode);
11412 rtx op4 = gen_reg_rtx (SImode);
11413 rtx op5 = gen_reg_rtx (SImode);
11415 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11416 op2, op3, op4, op5));
11420 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11429 ;; Load the load/store multiple patterns
11430 (include "ldmstm.md")
11431 ;; Load the FPA co-processor patterns
11433 ;; Load the Maverick co-processor patterns
11434 (include "cirrus.md")
11435 ;; Vector bits common to IWMMXT and Neon
11436 (include "vec-common.md")
11437 ;; Load the Intel Wireless Multimedia Extension patterns
11438 (include "iwmmxt.md")
11439 ;; Load the VFP co-processor patterns
11441 ;; Thumb-2 patterns
11442 (include "thumb2.md")
11444 (include "neon.md")
11445 ;; Synchronization Primitives
11446 (include "sync.md")
11447 ;; Fixed-point patterns
11448 (include "arm-fixed.md")