1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2024 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 (VPR_REGNUM 106) ; Vector Predication Register - MVE register.
45 (RA_AUTH_CODE 107) ; Pseudo register to save PAC.
48 ;; 3rd operand to select_dominance_cc_mode
55 ;; conditional compare combination
66 ;;---------------------------------------------------------------------------
69 ;; Processor type. This is created automatically from arm-cores.def.
70 (include "arm-tune.md")
72 ;; Instruction classification types
75 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
76 ; generating ARM code. This is used to control the length of some insn
77 ; patterns that share the same RTL in both ARM and Thumb code.
78 (define_attr "is_thumb" "yes,no"
79 (const (if_then_else (symbol_ref "TARGET_THUMB")
80 (const_string "yes") (const_string "no"))))
82 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
83 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
85 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
86 (define_attr "is_thumb1" "yes,no"
87 (const (if_then_else (symbol_ref "TARGET_THUMB1")
88 (const_string "yes") (const_string "no"))))
90 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
91 ; The arm_restrict_it flag enables the "short IT" feature which
92 ; restricts IT blocks to a single 16-bit instruction.
93 ; This attribute should only be used on 16-bit Thumb-2 instructions
94 ; which may be predicated (the "predicable" attribute must be set).
95 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
97 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
98 ; This attribute should only be used on instructions which may emit
99 ; an IT block in their expansion which is not a short IT.
100 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
102 ; Mark an instruction sequence as the required way of loading a
103 ; constant when -mpure-code is enabled (which implies
104 ; arm_disable_literal_pool)
105 (define_attr "required_for_purecode" "no,yes" (const_string "no"))
107 ;; Operand number of an input operand that is shifted. Zero if the
108 ;; given instruction does not shift one of its input operands.
109 (define_attr "shift" "" (const_int 0))
111 ;; [For compatibility with AArch64 in pipeline models]
112 ;; Attribute that specifies whether or not the instruction touches fp
114 (define_attr "fp" "no,yes" (const_string "no"))
116 ; Floating Point Unit. If we only have floating point emulation, then there
117 ; is no point in scheduling the floating point insns. (Well, for best
118 ; performance we should try and group them together).
119 (define_attr "fpu" "none,vfp"
120 (const (symbol_ref "arm_fpu_attr")))
122 ; Predicated means that the insn form is conditionally executed based on a
123 ; predicate. We default to 'no' because no Thumb patterns match this rule
124 ; and not all ARM insns do.
125 (define_attr "predicated" "yes,no" (const_string "no"))
127 ; An attribute that encodes the CODE_FOR_<insn> of the MVE VPT unpredicated
128 ; version of a VPT-predicated instruction. For unpredicated instructions
129 ; that are predicable, encode the same pattern's CODE_FOR_<insn> as a way to
130 ; encode that it is a predicable instruction.
131 (define_attr "mve_unpredicated_insn" "" (symbol_ref "CODE_FOR_nothing"))
133 ; An attribute used by the loop-doloop pass when determining whether it is
134 ; safe to predicate a MVE instruction, that operates across lanes, and was
135 ; previously not predicated. The pass will still check whether all inputs
136 ; are predicated by the VCTP predication mask.
137 (define_attr "mve_safe_imp_xlane_pred" "yes,no" (const_string "no"))
139 ; LENGTH of an instruction (in bytes)
140 (define_attr "length" ""
143 ; The architecture which supports the instruction (or alternative).
144 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
145 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
146 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
147 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
148 ; Baseline. "fix_vlldm" is for fixing the v8-m/v8.1-m VLLDM erratum.
149 ; This attribute is used to compute attribute "enabled",
150 ; use type "any" to enable an alternative in all cases.
151 (define_attr "arch" "any, a, t, 32, t1, t2, v6,nov6, v6t2, \
152 v8mb, fix_vlldm, iwmmxt, iwmmxt2, armv6_or_vfpv3, \
154 (const_string "any"))
156 (define_attr "arch_enabled" "no,yes"
157 (cond [(eq_attr "arch" "any")
160 (and (eq_attr "arch" "a")
161 (match_test "TARGET_ARM"))
164 (and (eq_attr "arch" "t")
165 (match_test "TARGET_THUMB"))
168 (and (eq_attr "arch" "t1")
169 (match_test "TARGET_THUMB1"))
172 (and (eq_attr "arch" "t2")
173 (match_test "TARGET_THUMB2"))
176 (and (eq_attr "arch" "32")
177 (match_test "TARGET_32BIT"))
180 (and (eq_attr "arch" "v6")
181 (match_test "TARGET_32BIT && arm_arch6"))
184 (and (eq_attr "arch" "nov6")
185 (match_test "TARGET_32BIT && !arm_arch6"))
188 (and (eq_attr "arch" "v6t2")
189 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
192 (and (eq_attr "arch" "v8mb")
193 (match_test "TARGET_THUMB1 && arm_arch8"))
196 (and (eq_attr "arch" "fix_vlldm")
197 (match_test "fix_vlldm"))
200 (and (eq_attr "arch" "iwmmxt2")
201 (match_test "TARGET_REALLY_IWMMXT2"))
204 (and (eq_attr "arch" "armv6_or_vfpv3")
205 (match_test "arm_arch6 || TARGET_VFP3"))
208 (and (eq_attr "arch" "neon")
209 (match_test "TARGET_NEON"))
212 (and (eq_attr "arch" "mve")
213 (match_test "TARGET_HAVE_MVE"))
217 (const_string "no")))
219 (define_attr "opt" "any,speed,size"
220 (const_string "any"))
222 (define_attr "opt_enabled" "no,yes"
223 (cond [(eq_attr "opt" "any")
226 (and (eq_attr "opt" "speed")
227 (match_test "optimize_function_for_speed_p (cfun)"))
230 (and (eq_attr "opt" "size")
231 (match_test "optimize_function_for_size_p (cfun)"))
232 (const_string "yes")]
233 (const_string "no")))
235 (define_attr "use_literal_pool" "no,yes"
236 (cond [(and (eq_attr "type" "f_loads,f_loadd")
237 (match_test "CONSTANT_P (operands[1])"))
238 (const_string "yes")]
239 (const_string "no")))
241 ; Enable all alternatives that are both arch_enabled and insn_enabled.
242 ; FIXME:: opt_enabled has been temporarily removed till the time we have
243 ; an attribute that allows the use of such alternatives.
244 ; This depends on caching of speed_p, size_p on a per
245 ; alternative basis. The problem is that the enabled attribute
246 ; cannot depend on any state that is not cached or is not constant
247 ; for a compilation unit. We probably need a generic "hot/cold"
248 ; alternative which if implemented can help with this. We disable this
249 ; until such a time as this is implemented and / or the improvements or
250 ; regressions with removing this attribute are double checked.
251 ; See ashldi3_neon and <shift>di3_neon in neon.md.
253 (define_attr "enabled" "no,yes"
254 (cond [(and (eq_attr "predicable_short_it" "no")
255 (and (eq_attr "predicated" "yes")
256 (match_test "arm_restrict_it")))
259 (and (eq_attr "enabled_for_short_it" "no")
260 (match_test "arm_restrict_it"))
263 (and (eq_attr "required_for_purecode" "yes")
264 (not (match_test "arm_disable_literal_pool")))
267 (eq_attr "arch_enabled" "no")
269 (const_string "yes")))
271 ; POOL_RANGE is how far away from a constant pool entry that this insn
272 ; can be placed. If the distance is zero, then this insn will never
273 ; reference the pool.
274 ; Note that for Thumb constant pools the PC value is rounded down to the
275 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
276 ; Thumb insns) should be set to <max_range> - 2.
277 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
278 ; before its address. It is set to <max_range> - (8 + <data_size>).
279 (define_attr "arm_pool_range" "" (const_int 0))
280 (define_attr "thumb2_pool_range" "" (const_int 0))
281 (define_attr "arm_neg_pool_range" "" (const_int 0))
282 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
284 (define_attr "pool_range" ""
285 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
286 (attr "arm_pool_range")))
287 (define_attr "neg_pool_range" ""
288 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
289 (attr "arm_neg_pool_range")))
291 ; An assembler sequence may clobber the condition codes without us knowing.
292 ; If such an insn references the pool, then we have no way of knowing how,
293 ; so use the most conservative value for pool_range.
294 (define_asm_attributes
295 [(set_attr "conds" "clob")
296 (set_attr "length" "4")
297 (set_attr "pool_range" "250")])
299 ; Load scheduling, set from the arm_ld_sched variable
300 ; initialized by arm_option_override()
301 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
303 ; condition codes: this one is used by final_prescan_insn to speed up
304 ; conditionalizing instructions. It saves having to scan the rtl to see if
305 ; it uses or alters the condition codes.
307 ; USE means that the condition codes are used by the insn in the process of
308 ; outputting code, this means (at present) that we can't use the insn in
311 ; SET means that the purpose of the insn is to set the condition codes in a
312 ; well defined manner.
314 ; CLOB means that the condition codes are altered in an undefined manner, if
315 ; they are altered at all
317 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
318 ; that the instruction does not use or alter the condition codes.
320 ; NOCOND means that the instruction does not use or alter the condition
321 ; codes but can be converted into a conditionally exectuted instruction.
322 ; Given that NOCOND is the default for most instructions if omitted,
323 ; the attribute predicable must be set to yes as well.
325 (define_attr "conds" "use,set,clob,unconditional,nocond"
327 (ior (eq_attr "is_thumb1" "yes")
328 (eq_attr "type" "call"))
329 (const_string "clob")
331 (ior (eq_attr "is_neon_type" "yes")
332 (eq_attr "is_mve_type" "yes"))
333 (const_string "unconditional")
334 (const_string "nocond"))))
336 ; Predicable means that the insn can be conditionally executed based on
337 ; an automatically added predicate (additional patterns are generated by
338 ; gen...). We default to 'no' because no Thumb patterns match this rule
339 ; and not all ARM patterns do.
340 (define_attr "predicable" "no,yes" (const_string "no"))
342 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
343 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
344 ; suffer blockages enough to warrant modelling this (and it can adversely
345 ; affect the schedule).
346 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
348 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
349 ; to stall the processor. Used with model_wbuf above.
350 (define_attr "write_conflict" "no,yes"
351 (if_then_else (eq_attr "type"
354 (const_string "no")))
356 ; Classify the insns into those that take one cycle and those that take more
357 ; than one on the main cpu execution unit.
358 (define_attr "core_cycles" "single,multi"
359 (if_then_else (eq_attr "type"
360 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
361 alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
362 alus_ext, alus_imm, alus_sreg,\
363 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
364 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
365 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
366 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
367 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
368 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
369 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
370 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
371 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
372 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
373 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
374 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
375 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
376 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
377 (const_string "single")
378 (const_string "multi")))
380 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
381 ;; distant label. Only applicable to Thumb code.
382 (define_attr "far_jump" "yes,no" (const_string "no"))
385 ;; The number of machine instructions this pattern expands to.
386 ;; Used for Thumb-2 conditional execution.
387 (define_attr "ce_count" "" (const_int 1))
389 ;;---------------------------------------------------------------------------
392 (include "unspecs.md")
394 ;;---------------------------------------------------------------------------
397 (include "iterators.md")
399 ;;---------------------------------------------------------------------------
402 (include "predicates.md")
403 (include "constraints.md")
405 ;;---------------------------------------------------------------------------
406 ;; Pipeline descriptions
408 (define_attr "tune_cortexr4" "yes,no"
410 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
412 (const_string "no"))))
414 ;; True if the generic scheduling description should be used.
416 (define_attr "generic_sched" "yes,no"
418 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
419 arm926ejs,arm10e,arm1026ejs,arm1136js,\
420 arm1136jfs,cortexa5,cortexa7,cortexa8,\
421 cortexa9,cortexa12,cortexa15,cortexa17,\
422 cortexa53,cortexa57,cortexm4,cortexm7,\
423 exynosm1,marvell_pj4,xgene1")
424 (eq_attr "tune_cortexr4" "yes"))
426 (const_string "yes"))))
428 (define_attr "generic_vfp" "yes,no"
430 (and (eq_attr "fpu" "vfp")
431 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
432 cortexa8,cortexa9,cortexa53,cortexm4,\
433 cortexm7,marvell_pj4,xgene1")
434 (eq_attr "tune_cortexr4" "no"))
436 (const_string "no"))))
438 (include "marvell-f-iwmmxt.md")
439 (include "arm-generic.md")
440 (include "arm926ejs.md")
441 (include "arm1020e.md")
442 (include "arm1026ejs.md")
443 (include "arm1136jfs.md")
445 (include "fa606te.md")
446 (include "fa626te.md")
447 (include "fmp626.md")
448 (include "fa726te.md")
449 (include "cortex-a5.md")
450 (include "cortex-a7.md")
451 (include "cortex-a8.md")
452 (include "cortex-a9.md")
453 (include "cortex-a15.md")
454 (include "cortex-a17.md")
455 (include "cortex-a53.md")
456 (include "cortex-a57.md")
457 (include "cortex-r4.md")
458 (include "cortex-r4f.md")
459 (include "cortex-m7.md")
460 (include "cortex-m4.md")
461 (include "cortex-m4-fpu.md")
462 (include "exynos-m1.md")
464 (include "marvell-pj4.md")
465 (include "xgene1.md")
467 ;; define_subst and associated attributes
469 (define_subst "add_setq"
470 [(set (match_operand:SI 0 "" "")
471 (match_operand:SI 1 "" ""))]
475 (set (reg:CC APSRQ_REGNUM)
476 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
478 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
479 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
482 ;;---------------------------------------------------------------------------
487 ;; Note: For DImode insns, there is normally no reason why operands should
488 ;; not be in the same register, what we don't want is for something being
489 ;; written to partially overlap something that is an input.
491 (define_expand "adddi3"
493 [(set (match_operand:DI 0 "s_register_operand")
494 (plus:DI (match_operand:DI 1 "s_register_operand")
495 (match_operand:DI 2 "reg_or_int_operand")))
496 (clobber (reg:CC CC_REGNUM))])]
501 if (!REG_P (operands[2]))
502 operands[2] = force_reg (DImode, operands[2]);
506 rtx lo_result, hi_result, lo_dest, hi_dest;
507 rtx lo_op1, hi_op1, lo_op2, hi_op2;
508 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
510 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
511 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
513 if (lo_op2 == const0_rtx)
516 if (!arm_add_operand (hi_op2, SImode))
517 hi_op2 = force_reg (SImode, hi_op2);
518 /* Assume hi_op2 won't also be zero. */
519 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
523 if (!arm_add_operand (lo_op2, SImode))
524 lo_op2 = force_reg (SImode, lo_op2);
525 if (!arm_not_operand (hi_op2, SImode))
526 hi_op2 = force_reg (SImode, hi_op2);
528 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
529 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
531 if (hi_op2 == const0_rtx)
532 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
534 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
537 if (lo_result != lo_dest)
538 emit_move_insn (lo_result, lo_dest);
539 if (hi_result != hi_dest)
540 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
546 (define_expand "addvsi4"
547 [(match_operand:SI 0 "s_register_operand")
548 (match_operand:SI 1 "s_register_operand")
549 (match_operand:SI 2 "arm_add_operand")
550 (match_operand 3 "")]
553 if (CONST_INT_P (operands[2]))
554 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
556 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
557 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
562 (define_expand "addvdi4"
563 [(match_operand:DI 0 "s_register_operand")
564 (match_operand:DI 1 "s_register_operand")
565 (match_operand:DI 2 "reg_or_int_operand")
566 (match_operand 3 "")]
569 rtx lo_result, hi_result;
570 rtx lo_op1, hi_op1, lo_op2, hi_op2;
571 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
573 lo_result = gen_lowpart (SImode, operands[0]);
574 hi_result = gen_highpart (SImode, operands[0]);
576 if (lo_op2 == const0_rtx)
578 emit_move_insn (lo_result, lo_op1);
579 if (!arm_add_operand (hi_op2, SImode))
580 hi_op2 = force_reg (SImode, hi_op2);
582 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
586 if (!arm_add_operand (lo_op2, SImode))
587 lo_op2 = force_reg (SImode, lo_op2);
588 if (!arm_not_operand (hi_op2, SImode))
589 hi_op2 = force_reg (SImode, hi_op2);
591 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
593 if (hi_op2 == const0_rtx)
594 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
595 else if (CONST_INT_P (hi_op2))
596 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
598 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
600 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
606 (define_expand "addsi3_cin_vout_reg"
611 (plus:DI (match_dup 4)
612 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
613 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
614 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
616 (set (match_operand:SI 0 "s_register_operand")
617 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
621 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
622 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
623 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
624 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
628 (define_insn "*addsi3_cin_vout_reg_insn"
629 [(set (reg:CC_V CC_REGNUM)
633 (match_operand:DI 3 "arm_carry_operation" "")
634 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
635 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
637 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
640 (set (match_operand:SI 0 "s_register_operand" "=l,r")
641 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
647 [(set_attr "type" "alus_sreg")
648 (set_attr "arch" "t2,*")
649 (set_attr "length" "2,4")]
652 (define_expand "addsi3_cin_vout_imm"
657 (plus:DI (match_dup 4)
658 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
660 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
662 (set (match_operand:SI 0 "s_register_operand")
663 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
664 (match_operand 2 "arm_adcimm_operand")))])]
667 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
668 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
669 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
670 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
674 (define_insn "*addsi3_cin_vout_imm_insn"
675 [(set (reg:CC_V CC_REGNUM)
679 (match_operand:DI 3 "arm_carry_operation" "")
680 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
681 (match_operand 2 "arm_adcimm_operand" "I,K"))
683 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
686 (set (match_operand:SI 0 "s_register_operand" "=r,r")
687 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
692 sbcs%?\\t%0, %1, #%B2"
693 [(set_attr "type" "alus_imm")]
696 (define_expand "addsi3_cin_vout_0"
700 (plus:DI (match_dup 3)
701 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
702 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
703 (set (match_operand:SI 0 "s_register_operand")
704 (plus:SI (match_dup 4) (match_dup 1)))])]
707 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
708 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
709 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
710 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
714 (define_insn "*addsi3_cin_vout_0_insn"
715 [(set (reg:CC_V CC_REGNUM)
718 (match_operand:DI 2 "arm_carry_operation" "")
719 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
720 (sign_extend:DI (plus:SI
721 (match_operand:SI 3 "arm_carry_operation" "")
723 (set (match_operand:SI 0 "s_register_operand" "=r")
724 (plus:SI (match_dup 3) (match_dup 1)))]
726 "adcs%?\\t%0, %1, #0"
727 [(set_attr "type" "alus_imm")]
730 (define_expand "uaddvsi4"
731 [(match_operand:SI 0 "s_register_operand")
732 (match_operand:SI 1 "s_register_operand")
733 (match_operand:SI 2 "arm_add_operand")
734 (match_operand 3 "")]
737 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
738 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
743 (define_expand "uaddvdi4"
744 [(match_operand:DI 0 "s_register_operand")
745 (match_operand:DI 1 "s_register_operand")
746 (match_operand:DI 2 "reg_or_int_operand")
747 (match_operand 3 "")]
750 rtx lo_result, hi_result;
751 rtx lo_op1, hi_op1, lo_op2, hi_op2;
752 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
754 lo_result = gen_lowpart (SImode, operands[0]);
755 hi_result = gen_highpart (SImode, operands[0]);
757 if (lo_op2 == const0_rtx)
759 emit_move_insn (lo_result, lo_op1);
760 if (!arm_add_operand (hi_op2, SImode))
761 hi_op2 = force_reg (SImode, hi_op2);
763 emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
767 if (!arm_add_operand (lo_op2, SImode))
768 lo_op2 = force_reg (SImode, lo_op2);
769 if (!arm_not_operand (hi_op2, SImode))
770 hi_op2 = force_reg (SImode, hi_op2);
772 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
774 if (hi_op2 == const0_rtx)
775 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
776 else if (CONST_INT_P (hi_op2))
777 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
779 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
781 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
787 (define_expand "addsi3_cin_cout_reg"
792 (plus:DI (match_dup 4)
793 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
794 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
795 (const_int 4294967296)))
796 (set (match_operand:SI 0 "s_register_operand")
797 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
801 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
802 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
803 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
804 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
808 (define_insn "*addsi3_cin_cout_reg_insn"
809 [(set (reg:CC_ADC CC_REGNUM)
813 (match_operand:DI 3 "arm_carry_operation" "")
814 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
815 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
816 (const_int 4294967296)))
817 (set (match_operand:SI 0 "s_register_operand" "=l,r")
818 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
825 [(set_attr "type" "alus_sreg")
826 (set_attr "arch" "t2,*")
827 (set_attr "length" "2,4")]
830 (define_expand "addsi3_cin_cout_imm"
835 (plus:DI (match_dup 4)
836 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
838 (const_int 4294967296)))
839 (set (match_operand:SI 0 "s_register_operand")
840 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
841 (match_operand:SI 2 "arm_adcimm_operand")))])]
844 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
845 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
846 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
847 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
848 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
852 (define_insn "*addsi3_cin_cout_imm_insn"
853 [(set (reg:CC_ADC CC_REGNUM)
857 (match_operand:DI 3 "arm_carry_operation" "")
858 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
859 (match_operand:DI 5 "const_int_operand" "n,n"))
860 (const_int 4294967296)))
861 (set (match_operand:SI 0 "s_register_operand" "=r,r")
862 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
864 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
866 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
869 sbcs%?\\t%0, %1, #%B2"
870 [(set_attr "type" "alus_imm")]
873 (define_expand "addsi3_cin_cout_0"
877 (plus:DI (match_dup 3)
878 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
879 (const_int 4294967296)))
880 (set (match_operand:SI 0 "s_register_operand")
881 (plus:SI (match_dup 4) (match_dup 1)))])]
884 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
885 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
886 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
887 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
891 (define_insn "*addsi3_cin_cout_0_insn"
892 [(set (reg:CC_ADC CC_REGNUM)
895 (match_operand:DI 2 "arm_carry_operation" "")
896 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
897 (const_int 4294967296)))
898 (set (match_operand:SI 0 "s_register_operand" "=r")
899 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
901 "adcs%?\\t%0, %1, #0"
902 [(set_attr "type" "alus_imm")]
905 (define_expand "addsi3"
906 [(set (match_operand:SI 0 "s_register_operand")
907 (plus:SI (match_operand:SI 1 "s_register_operand")
908 (match_operand:SI 2 "reg_or_int_operand")))]
911 if (TARGET_32BIT && CONST_INT_P (operands[2]))
913 arm_split_constant (PLUS, SImode, NULL_RTX,
914 INTVAL (operands[2]), operands[0], operands[1],
915 optimize && can_create_pseudo_p ());
921 ; If there is a scratch available, this will be faster than synthesizing the
924 [(match_scratch:SI 3 "r")
925 (set (match_operand:SI 0 "arm_general_register_operand" "")
926 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
927 (match_operand:SI 2 "const_int_operand" "")))]
929 !(const_ok_for_arm (INTVAL (operands[2]))
930 || const_ok_for_arm (-INTVAL (operands[2])))
931 && const_ok_for_arm (~INTVAL (operands[2]))"
932 [(set (match_dup 3) (match_dup 2))
933 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
937 ;; The r/r/k alternative is required when reloading the address
938 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
939 ;; put the duplicated register first, and not try the commutative version.
940 (define_insn_and_split "*arm_addsi3"
941 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
942 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
943 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
959 subw%?\\t%0, %1, #%n2
960 subw%?\\t%0, %1, #%n2
963 && CONST_INT_P (operands[2])
964 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
965 && (reload_completed || !arm_eliminable_register (operands[1]))"
966 [(clobber (const_int 0))]
968 arm_split_constant (PLUS, SImode, curr_insn,
969 INTVAL (operands[2]), operands[0],
973 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
974 (set_attr "predicable" "yes")
975 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
976 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
977 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
978 (const_string "alu_imm")
979 (const_string "alu_sreg")))
983 (define_insn "addsi3_compareV_reg"
984 [(set (reg:CC_V CC_REGNUM)
987 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
988 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
989 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
990 (set (match_operand:SI 0 "register_operand" "=l,r,r")
991 (plus:SI (match_dup 1) (match_dup 2)))]
993 "adds%?\\t%0, %1, %2"
994 [(set_attr "conds" "set")
995 (set_attr "arch" "t2,t2,*")
996 (set_attr "length" "2,2,4")
997 (set_attr "type" "alus_sreg")]
1000 (define_insn "*addsi3_compareV_reg_nosum"
1001 [(set (reg:CC_V CC_REGNUM)
1004 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
1005 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
1006 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1009 [(set_attr "conds" "set")
1010 (set_attr "arch" "t2,*")
1011 (set_attr "length" "2,4")
1012 (set_attr "type" "alus_sreg")]
1015 (define_insn "subvsi3_intmin"
1016 [(set (reg:CC_V CC_REGNUM)
1020 (match_operand:SI 1 "register_operand" "r"))
1021 (const_int 2147483648))
1022 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1023 (set (match_operand:SI 0 "register_operand" "=r")
1024 (plus:SI (match_dup 1) (const_int -2147483648)))]
1026 "subs%?\\t%0, %1, #-2147483648"
1027 [(set_attr "conds" "set")
1028 (set_attr "type" "alus_imm")]
1031 (define_insn "addsi3_compareV_imm"
1032 [(set (reg:CC_V CC_REGNUM)
1036 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1037 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1038 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1039 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1040 (plus:SI (match_dup 1) (match_dup 2)))]
1042 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1046 subs%?\\t%0, %1, #%n2
1047 subs%?\\t%0, %0, #%n2
1049 subs%?\\t%0, %1, #%n2"
1050 [(set_attr "conds" "set")
1051 (set_attr "arch" "t2,t2,t2,t2,*,*")
1052 (set_attr "length" "2,2,2,2,4,4")
1053 (set_attr "type" "alus_imm")]
1056 (define_insn "addsi3_compareV_imm_nosum"
1057 [(set (reg:CC_V CC_REGNUM)
1061 (match_operand:SI 0 "register_operand" "l,r,r"))
1062 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1063 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1065 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1070 [(set_attr "conds" "set")
1071 (set_attr "arch" "t2,*,*")
1072 (set_attr "length" "2,4,4")
1073 (set_attr "type" "alus_imm")]
1076 ;; We can handle more constants efficently if we can clobber either a scratch
1077 ;; or the other source operand. We deliberately leave this late as in
1078 ;; high register pressure situations it's not worth forcing any reloads.
1080 [(match_scratch:SI 2 "l")
1081 (set (reg:CC_V CC_REGNUM)
1085 (match_operand:SI 0 "low_register_operand"))
1086 (match_operand 1 "const_int_operand"))
1087 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1089 && satisfies_constraint_Pd (operands[1])"
1091 (set (reg:CC_V CC_REGNUM)
1093 (plus:DI (sign_extend:DI (match_dup 0))
1094 (sign_extend:DI (match_dup 1)))
1095 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1096 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1100 [(set (reg:CC_V CC_REGNUM)
1104 (match_operand:SI 0 "low_register_operand"))
1105 (match_operand 1 "const_int_operand"))
1106 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1108 && dead_or_set_p (peep2_next_insn (0), operands[0])
1109 && satisfies_constraint_Py (operands[1])"
1111 (set (reg:CC_V CC_REGNUM)
1113 (plus:DI (sign_extend:DI (match_dup 0))
1114 (sign_extend:DI (match_dup 1)))
1115 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1116 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1119 (define_insn "addsi3_compare0"
1120 [(set (reg:CC_NZ CC_REGNUM)
1122 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1123 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1125 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1126 (plus:SI (match_dup 1) (match_dup 2)))]
1130 subs%?\\t%0, %1, #%n2
1131 adds%?\\t%0, %1, %2"
1132 [(set_attr "conds" "set")
1133 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1136 (define_insn "*addsi3_compare0_scratch"
1137 [(set (reg:CC_NZ CC_REGNUM)
1139 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1140 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1147 [(set_attr "conds" "set")
1148 (set_attr "predicable" "yes")
1149 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1152 (define_insn "*compare_negsi_si"
1153 [(set (reg:CC_Z CC_REGNUM)
1155 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1156 (match_operand:SI 1 "s_register_operand" "l,r")))]
1159 [(set_attr "conds" "set")
1160 (set_attr "predicable" "yes")
1161 (set_attr "arch" "t2,*")
1162 (set_attr "length" "2,4")
1163 (set_attr "predicable_short_it" "yes,no")
1164 (set_attr "type" "alus_sreg")]
1167 ;; This is the canonicalization of subsi3_compare when the
1168 ;; addend is a constant.
1169 (define_insn "cmpsi2_addneg"
1170 [(set (reg:CC CC_REGNUM)
1172 (match_operand:SI 1 "s_register_operand" "r,r")
1173 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1174 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1175 (plus:SI (match_dup 1)
1176 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1178 && (INTVAL (operands[2])
1179 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1181 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1182 in different condition codes (like cmn rather than like cmp), so that
1183 alternative comes first. Both alternatives can match for any 0x??000000
1184 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1185 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1186 as it is shorter. */
1187 if (which_alternative == 0 && operands[3] != const1_rtx)
1188 return "subs%?\\t%0, %1, #%n3";
1190 return "adds%?\\t%0, %1, %3";
1192 [(set_attr "conds" "set")
1193 (set_attr "type" "alus_sreg")]
1196 ;; Convert the sequence
1198 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1202 ;; bcs dest ((unsigned)rn >= 1)
1203 ;; similarly for the beq variant using bcc.
1204 ;; This is a common looping idiom (while (n--))
1206 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1207 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1209 (set (match_operand 2 "cc_register" "")
1210 (compare (match_dup 0) (const_int -1)))
1212 (if_then_else (match_operator 3 "equality_operator"
1213 [(match_dup 2) (const_int 0)])
1214 (match_operand 4 "" "")
1215 (match_operand 5 "" "")))]
1216 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1220 (match_dup 1) (const_int 1)))
1221 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1223 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1226 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1227 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1230 operands[2], const0_rtx);"
1233 ;; The next four insns work because they compare the result with one of
1234 ;; the operands, and we know that the use of the condition code is
1235 ;; either GEU or LTU, so we can use the carry flag from the addition
1236 ;; instead of doing the compare a second time.
1237 (define_insn "addsi3_compare_op1"
1238 [(set (reg:CC_C CC_REGNUM)
1240 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1241 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1243 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1244 (plus:SI (match_dup 1) (match_dup 2)))]
1249 subs%?\\t%0, %1, #%n2
1250 subs%?\\t%0, %0, #%n2
1252 subs%?\\t%0, %1, #%n2"
1253 [(set_attr "conds" "set")
1254 (set_attr "arch" "t2,t2,t2,t2,*,*")
1255 (set_attr "length" "2,2,2,2,4,4")
1257 (if_then_else (match_operand 2 "const_int_operand")
1258 (const_string "alu_imm")
1259 (const_string "alu_sreg")))]
1262 (define_insn "*addsi3_compare_op2"
1263 [(set (reg:CC_C CC_REGNUM)
1265 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1266 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1268 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1269 (plus:SI (match_dup 1) (match_dup 2)))]
1274 subs%?\\t%0, %1, #%n2
1275 subs%?\\t%0, %0, #%n2
1277 subs%?\\t%0, %1, #%n2"
1278 [(set_attr "conds" "set")
1279 (set_attr "arch" "t2,t2,t2,t2,*,*")
1280 (set_attr "length" "2,2,2,2,4,4")
1282 (if_then_else (match_operand 2 "const_int_operand")
1283 (const_string "alu_imm")
1284 (const_string "alu_sreg")))]
1287 (define_insn "*compare_addsi2_op0"
1288 [(set (reg:CC_C CC_REGNUM)
1290 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1291 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1299 [(set_attr "conds" "set")
1300 (set_attr "predicable" "yes")
1301 (set_attr "arch" "t2,t2,*,*")
1302 (set_attr "predicable_short_it" "yes,yes,no,no")
1303 (set_attr "length" "2,2,4,4")
1305 (if_then_else (match_operand 1 "const_int_operand")
1306 (const_string "alu_imm")
1307 (const_string "alu_sreg")))]
1310 (define_insn "*compare_addsi2_op1"
1311 [(set (reg:CC_C CC_REGNUM)
1313 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1314 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1322 [(set_attr "conds" "set")
1323 (set_attr "predicable" "yes")
1324 (set_attr "arch" "t2,t2,*,*")
1325 (set_attr "predicable_short_it" "yes,yes,no,no")
1326 (set_attr "length" "2,2,4,4")
1328 (if_then_else (match_operand 1 "const_int_operand")
1329 (const_string "alu_imm")
1330 (const_string "alu_sreg")))]
1333 (define_insn "addsi3_carryin"
1334 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1335 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1336 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1337 (match_operand:SI 3 "arm_carry_operation" "")))]
1342 sbc%?\\t%0, %1, #%B2"
1343 [(set_attr "conds" "use")
1344 (set_attr "predicable" "yes")
1345 (set_attr "arch" "t2,*,*")
1346 (set_attr "length" "4")
1347 (set_attr "predicable_short_it" "yes,no,no")
1348 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1351 ;; Canonicalization of the above when the immediate is zero.
1352 (define_insn "add0si3_carryin"
1353 [(set (match_operand:SI 0 "s_register_operand" "=r")
1354 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1355 (match_operand:SI 1 "arm_not_operand" "r")))]
1357 "adc%?\\t%0, %1, #0"
1358 [(set_attr "conds" "use")
1359 (set_attr "predicable" "yes")
1360 (set_attr "length" "4")
1361 (set_attr "type" "adc_imm")]
1364 (define_insn "*addsi3_carryin_alt2"
1365 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1366 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1367 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1368 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1373 sbc%?\\t%0, %1, #%B2"
1374 [(set_attr "conds" "use")
1375 (set_attr "predicable" "yes")
1376 (set_attr "arch" "t2,*,*")
1377 (set_attr "length" "4")
1378 (set_attr "predicable_short_it" "yes,no,no")
1379 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1382 (define_insn "*addsi3_carryin_shift"
1383 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1385 (match_operator:SI 2 "shift_operator"
1386 [(match_operand:SI 3 "s_register_operand" "r,r")
1387 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1388 (match_operand:SI 5 "arm_carry_operation" ""))
1389 (match_operand:SI 1 "s_register_operand" "r,r")))]
1391 "adc%?\\t%0, %1, %3%S2"
1392 [(set_attr "conds" "use")
1393 (set_attr "arch" "32,a")
1394 (set_attr "shift" "3")
1395 (set_attr "predicable" "yes")
1396 (set_attr "autodetect_type" "alu_shift_operator2")]
1399 (define_insn "*addsi3_carryin_clobercc"
1400 [(set (match_operand:SI 0 "s_register_operand" "=r")
1401 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1402 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1403 (match_operand:SI 3 "arm_carry_operation" "")))
1404 (clobber (reg:CC CC_REGNUM))]
1406 "adcs%?\\t%0, %1, %2"
1407 [(set_attr "conds" "set")
1408 (set_attr "type" "adcs_reg")]
1411 (define_expand "subvsi4"
1412 [(match_operand:SI 0 "s_register_operand")
1413 (match_operand:SI 1 "arm_rhs_operand")
1414 (match_operand:SI 2 "arm_add_operand")
1415 (match_operand 3 "")]
1418 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1420 /* If both operands are constants we can decide the result statically. */
1421 wi::overflow_type overflow;
1422 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1423 rtx_mode_t (operands[2], SImode),
1425 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1426 if (overflow != wi::OVF_NONE)
1427 emit_jump_insn (gen_jump (operands[3]));
1430 else if (CONST_INT_P (operands[2]))
1432 operands[2] = GEN_INT (-INTVAL (operands[2]));
1433 /* Special case for INT_MIN. */
1434 if (INTVAL (operands[2]) == 0x80000000)
1435 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1437 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1440 else if (CONST_INT_P (operands[1]))
1441 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1443 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1445 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1449 (define_expand "subvdi4"
1450 [(match_operand:DI 0 "s_register_operand")
1451 (match_operand:DI 1 "reg_or_int_operand")
1452 (match_operand:DI 2 "reg_or_int_operand")
1453 (match_operand 3 "")]
1456 rtx lo_result, hi_result;
1457 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1458 lo_result = gen_lowpart (SImode, operands[0]);
1459 hi_result = gen_highpart (SImode, operands[0]);
1460 machine_mode mode = CCmode;
1462 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1464 /* If both operands are constants we can decide the result statically. */
1465 wi::overflow_type overflow;
1466 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1467 rtx_mode_t (operands[2], DImode),
1469 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1470 if (overflow != wi::OVF_NONE)
1471 emit_jump_insn (gen_jump (operands[3]));
1474 else if (CONST_INT_P (operands[1]))
1476 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1478 if (const_ok_for_arm (INTVAL (lo_op1)))
1480 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1481 GEN_INT (~UINTVAL (lo_op1))));
1482 /* We could potentially use RSC here in Arm state, but not
1483 in Thumb, so it's probably not worth the effort of handling
1485 hi_op1 = force_reg (SImode, hi_op1);
1489 operands[1] = force_reg (DImode, operands[1]);
1492 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1494 if (lo_op2 == const0_rtx)
1496 emit_move_insn (lo_result, lo_op1);
1497 if (!arm_add_operand (hi_op2, SImode))
1498 hi_op2 = force_reg (SImode, hi_op2);
1499 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1503 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1504 lo_op2 = force_reg (SImode, lo_op2);
1505 if (CONST_INT_P (lo_op2))
1506 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1507 gen_int_mode (-INTVAL (lo_op2), SImode)));
1509 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1512 if (!arm_not_operand (hi_op2, SImode))
1513 hi_op2 = force_reg (SImode, hi_op2);
1514 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1515 if (CONST_INT_P (hi_op2))
1516 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1517 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1518 gen_rtx_LTU (DImode, ccreg,
1521 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1522 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1523 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1524 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1529 (define_expand "usubvsi4"
1530 [(match_operand:SI 0 "s_register_operand")
1531 (match_operand:SI 1 "arm_rhs_operand")
1532 (match_operand:SI 2 "arm_add_operand")
1533 (match_operand 3 "")]
1536 machine_mode mode = CCmode;
1537 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1539 /* If both operands are constants we can decide the result statically. */
1540 wi::overflow_type overflow;
1541 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1542 rtx_mode_t (operands[2], SImode),
1543 UNSIGNED, &overflow);
1544 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1545 if (overflow != wi::OVF_NONE)
1546 emit_jump_insn (gen_jump (operands[3]));
1549 else if (CONST_INT_P (operands[2]))
1550 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1551 gen_int_mode (-INTVAL (operands[2]),
1553 else if (CONST_INT_P (operands[1]))
1556 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1557 GEN_INT (~UINTVAL (operands[1]))));
1560 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1561 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1566 (define_expand "usubvdi4"
1567 [(match_operand:DI 0 "s_register_operand")
1568 (match_operand:DI 1 "reg_or_int_operand")
1569 (match_operand:DI 2 "reg_or_int_operand")
1570 (match_operand 3 "")]
1573 rtx lo_result, hi_result;
1574 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1575 lo_result = gen_lowpart (SImode, operands[0]);
1576 hi_result = gen_highpart (SImode, operands[0]);
1577 machine_mode mode = CCmode;
1579 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1581 /* If both operands are constants we can decide the result statically. */
1582 wi::overflow_type overflow;
1583 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1584 rtx_mode_t (operands[2], DImode),
1585 UNSIGNED, &overflow);
1586 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1587 if (overflow != wi::OVF_NONE)
1588 emit_jump_insn (gen_jump (operands[3]));
1591 else if (CONST_INT_P (operands[1]))
1593 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1595 if (const_ok_for_arm (INTVAL (lo_op1)))
1597 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1598 GEN_INT (~UINTVAL (lo_op1))));
1599 /* We could potentially use RSC here in Arm state, but not
1600 in Thumb, so it's probably not worth the effort of handling
1602 hi_op1 = force_reg (SImode, hi_op1);
1606 operands[1] = force_reg (DImode, operands[1]);
1609 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1611 if (lo_op2 == const0_rtx)
1613 emit_move_insn (lo_result, lo_op1);
1614 if (!arm_add_operand (hi_op2, SImode))
1615 hi_op2 = force_reg (SImode, hi_op2);
1616 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1620 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1621 lo_op2 = force_reg (SImode, lo_op2);
1622 if (CONST_INT_P (lo_op2))
1623 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1624 gen_int_mode (-INTVAL (lo_op2), SImode)));
1626 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1629 if (!arm_not_operand (hi_op2, SImode))
1630 hi_op2 = force_reg (SImode, hi_op2);
1631 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1632 if (CONST_INT_P (hi_op2))
1633 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1634 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1635 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1636 gen_rtx_LTU (DImode, ccreg,
1639 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1640 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1641 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1642 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1647 (define_insn "subsi3_compare1"
1648 [(set (reg:CC CC_REGNUM)
1650 (match_operand:SI 1 "register_operand" "r")
1651 (match_operand:SI 2 "register_operand" "r")))
1652 (set (match_operand:SI 0 "register_operand" "=r")
1653 (minus:SI (match_dup 1) (match_dup 2)))]
1655 "subs%?\\t%0, %1, %2"
1656 [(set_attr "conds" "set")
1657 (set_attr "type" "alus_sreg")]
1660 (define_insn "subvsi3"
1661 [(set (reg:CC_V CC_REGNUM)
1664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1665 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1666 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1667 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1668 (minus:SI (match_dup 1) (match_dup 2)))]
1670 "subs%?\\t%0, %1, %2"
1671 [(set_attr "conds" "set")
1672 (set_attr "arch" "t2,*")
1673 (set_attr "length" "2,4")
1674 (set_attr "type" "alus_sreg")]
1677 (define_insn "subvsi3_imm1"
1678 [(set (reg:CC_V CC_REGNUM)
1681 (match_operand 1 "arm_immediate_operand" "I")
1682 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1683 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1684 (set (match_operand:SI 0 "s_register_operand" "=r")
1685 (minus:SI (match_dup 1) (match_dup 2)))]
1687 "rsbs%?\\t%0, %2, %1"
1688 [(set_attr "conds" "set")
1689 (set_attr "type" "alus_imm")]
1692 (define_insn "subsi3_carryin"
1693 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1694 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1695 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1696 (match_operand:SI 3 "arm_borrow_operation" "")))]
1701 sbc%?\\t%0, %2, %2, lsl #1"
1702 [(set_attr "conds" "use")
1703 (set_attr "arch" "*,a,t2")
1704 (set_attr "predicable" "yes")
1705 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1708 ;; Special canonicalization of the above when operand1 == (const_int 1):
1709 ;; in this case the 'borrow' needs to treated like subtracting from the carry.
1710 (define_insn "rsbsi_carryin_reg"
1711 [(set (match_operand:SI 0 "s_register_operand" "=r")
1712 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1713 (match_operand:SI 2 "s_register_operand" "r")))]
1715 "rsc%?\\t%0, %2, #1"
1716 [(set_attr "conds" "use")
1717 (set_attr "predicable" "yes")
1718 (set_attr "type" "adc_imm")]
1721 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1722 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
1723 (define_insn "*add_not_cin"
1724 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1726 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1727 (match_operand:SI 3 "arm_carry_operation" ""))
1728 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1729 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1733 [(set_attr "conds" "use")
1734 (set_attr "predicable" "yes")
1735 (set_attr "arch" "*,a")
1736 (set_attr "type" "adc_reg,adc_imm")]
1739 ;; On Arm we can also use the same trick when the non-inverted operand is
1740 ;; shifted, using RSC.
1741 (define_insn "add_not_shift_cin"
1742 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1744 (plus:SI (match_operator:SI 3 "shift_operator"
1745 [(match_operand:SI 1 "s_register_operand" "r,r")
1746 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1747 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1748 (match_operand:SI 5 "arm_carry_operation" "")))]
1750 "rsc%?\\t%0, %4, %1%S3"
1751 [(set_attr "conds" "use")
1752 (set_attr "predicable" "yes")
1753 (set_attr "autodetect_type" "alu_shift_operator3")]
1756 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1757 [(set (reg:<CC_EXTEND> CC_REGNUM)
1758 (compare:<CC_EXTEND>
1759 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1760 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1761 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1762 (clobber (match_scratch:SI 0 "=l,r"))]
1765 [(set_attr "conds" "set")
1766 (set_attr "arch" "t2,*")
1767 (set_attr "length" "2,4")
1768 (set_attr "type" "adc_reg")]
1771 ;; Similar to the above, but handling a constant which has a different
1772 ;; canonicalization.
1773 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1774 [(set (reg:<CC_EXTEND> CC_REGNUM)
1775 (compare:<CC_EXTEND>
1776 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1777 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1778 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1779 (clobber (match_scratch:SI 0 "=l,r"))]
1783 adcs\\t%0, %1, #%B2"
1784 [(set_attr "conds" "set")
1785 (set_attr "type" "adc_imm")]
1788 ;; Further canonicalization when the constant is zero.
1789 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1790 [(set (reg:<CC_EXTEND> CC_REGNUM)
1791 (compare:<CC_EXTEND>
1792 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1793 (match_operand:DI 2 "arm_borrow_operation" "")))
1794 (clobber (match_scratch:SI 0 "=l,r"))]
1797 [(set_attr "conds" "set")
1798 (set_attr "type" "adc_imm")]
1801 (define_insn "*subsi3_carryin_const"
1802 [(set (match_operand:SI 0 "s_register_operand" "=r")
1804 (match_operand:SI 1 "s_register_operand" "r")
1805 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1806 (match_operand:SI 3 "arm_borrow_operation" "")))]
1808 "sbc\\t%0, %1, #%n2"
1809 [(set_attr "conds" "use")
1810 (set_attr "type" "adc_imm")]
1813 (define_insn "*subsi3_carryin_const0"
1814 [(set (match_operand:SI 0 "s_register_operand" "=r")
1815 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1816 (match_operand:SI 2 "arm_borrow_operation" "")))]
1819 [(set_attr "conds" "use")
1820 (set_attr "type" "adc_imm")]
1823 (define_insn "*subsi3_carryin_shift"
1824 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1826 (match_operand:SI 1 "s_register_operand" "r,r")
1827 (match_operator:SI 2 "shift_operator"
1828 [(match_operand:SI 3 "s_register_operand" "r,r")
1829 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1830 (match_operand:SI 5 "arm_borrow_operation" "")))]
1832 "sbc%?\\t%0, %1, %3%S2"
1833 [(set_attr "conds" "use")
1834 (set_attr "arch" "32,a")
1835 (set_attr "shift" "3")
1836 (set_attr "predicable" "yes")
1837 (set_attr "autodetect_type" "alu_shift_operator2")]
1840 (define_insn "*subsi3_carryin_shift_alt"
1841 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1843 (match_operand:SI 1 "s_register_operand" "r,r")
1844 (match_operand:SI 5 "arm_borrow_operation" ""))
1845 (match_operator:SI 2 "shift_operator"
1846 [(match_operand:SI 3 "s_register_operand" "r,r")
1847 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1849 "sbc%?\\t%0, %1, %3%S2"
1850 [(set_attr "conds" "use")
1851 (set_attr "arch" "32,a")
1852 (set_attr "shift" "3")
1853 (set_attr "predicable" "yes")
1854 (set_attr "autodetect_type" "alu_shift_operator2")]
1858 (define_insn "*rsbsi3_carryin_shift"
1859 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1861 (match_operator:SI 2 "shift_operator"
1862 [(match_operand:SI 3 "s_register_operand" "r,r")
1863 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1864 (match_operand:SI 1 "s_register_operand" "r,r"))
1865 (match_operand:SI 5 "arm_borrow_operation" "")))]
1867 "rsc%?\\t%0, %1, %3%S2"
1868 [(set_attr "conds" "use")
1869 (set_attr "predicable" "yes")
1870 (set_attr "autodetect_type" "alu_shift_operator2")]
1873 (define_insn "*rsbsi3_carryin_shift_alt"
1874 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1876 (match_operator:SI 2 "shift_operator"
1877 [(match_operand:SI 3 "s_register_operand" "r,r")
1878 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1879 (match_operand:SI 5 "arm_borrow_operation" ""))
1880 (match_operand:SI 1 "s_register_operand" "r,r")))]
1882 "rsc%?\\t%0, %1, %3%S2"
1883 [(set_attr "conds" "use")
1884 (set_attr "predicable" "yes")
1885 (set_attr "autodetect_type" "alu_shift_operator2")]
1888 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1890 [(set (match_operand:SI 0 "s_register_operand" "")
1891 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1892 (match_operand:SI 2 "s_register_operand" ""))
1894 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1896 [(set (match_dup 3) (match_dup 1))
1897 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1899 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1902 (define_expand "addsf3"
1903 [(set (match_operand:SF 0 "s_register_operand")
1904 (plus:SF (match_operand:SF 1 "s_register_operand")
1905 (match_operand:SF 2 "s_register_operand")))]
1906 "TARGET_32BIT && TARGET_HARD_FLOAT"
1910 (define_expand "adddf3"
1911 [(set (match_operand:DF 0 "s_register_operand")
1912 (plus:DF (match_operand:DF 1 "s_register_operand")
1913 (match_operand:DF 2 "s_register_operand")))]
1914 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1918 (define_expand "subdi3"
1920 [(set (match_operand:DI 0 "s_register_operand")
1921 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1922 (match_operand:DI 2 "s_register_operand")))
1923 (clobber (reg:CC CC_REGNUM))])]
1928 if (!REG_P (operands[1]))
1929 operands[1] = force_reg (DImode, operands[1]);
1933 rtx lo_result, hi_result, lo_dest, hi_dest;
1934 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1937 /* Since operands[1] may be an integer, pass it second, so that
1938 any necessary simplifications will be done on the decomposed
1940 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1942 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1943 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1945 if (!arm_rhs_operand (lo_op1, SImode))
1946 lo_op1 = force_reg (SImode, lo_op1);
1948 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1949 || !arm_rhs_operand (hi_op1, SImode))
1950 hi_op1 = force_reg (SImode, hi_op1);
1953 if (lo_op1 == const0_rtx)
1955 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1956 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1958 else if (CONST_INT_P (lo_op1))
1960 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1961 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1962 GEN_INT (~UINTVAL (lo_op1))));
1966 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1967 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1970 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1972 if (hi_op1 == const0_rtx)
1973 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1975 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1977 if (lo_result != lo_dest)
1978 emit_move_insn (lo_result, lo_dest);
1980 if (hi_result != hi_dest)
1981 emit_move_insn (hi_result, hi_dest);
1988 (define_expand "subsi3"
1989 [(set (match_operand:SI 0 "s_register_operand")
1990 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1991 (match_operand:SI 2 "s_register_operand")))]
1994 if (CONST_INT_P (operands[1]))
1998 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1999 operands[1] = force_reg (SImode, operands[1]);
2002 arm_split_constant (MINUS, SImode, NULL_RTX,
2003 INTVAL (operands[1]), operands[0],
2005 optimize && can_create_pseudo_p ());
2009 else /* TARGET_THUMB1 */
2010 operands[1] = force_reg (SImode, operands[1]);
2015 ; ??? Check Thumb-2 split length
2016 (define_insn_and_split "*arm_subsi3_insn"
2017 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
2018 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
2019 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2031 "&& (CONST_INT_P (operands[1])
2032 && !const_ok_for_arm (INTVAL (operands[1])))"
2033 [(clobber (const_int 0))]
2035 arm_split_constant (MINUS, SImode, curr_insn,
2036 INTVAL (operands[1]), operands[0], operands[2], 0);
2039 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2040 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2041 (set_attr "predicable" "yes")
2042 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2043 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2047 [(match_scratch:SI 3 "r")
2048 (set (match_operand:SI 0 "arm_general_register_operand" "")
2049 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2050 (match_operand:SI 2 "arm_general_register_operand" "")))]
2052 && !const_ok_for_arm (INTVAL (operands[1]))
2053 && const_ok_for_arm (~INTVAL (operands[1]))"
2054 [(set (match_dup 3) (match_dup 1))
2055 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2059 (define_insn "subsi3_compare0"
2060 [(set (reg:CC_NZ CC_REGNUM)
2062 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2063 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2065 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2066 (minus:SI (match_dup 1) (match_dup 2)))]
2071 rsbs%?\\t%0, %2, %1"
2072 [(set_attr "conds" "set")
2073 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
2076 (define_insn "subsi3_compare"
2077 [(set (reg:CC CC_REGNUM)
2078 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2079 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2080 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2081 (minus:SI (match_dup 1) (match_dup 2)))]
2086 rsbs%?\\t%0, %2, %1"
2087 [(set_attr "conds" "set")
2088 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2091 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2092 ;; rather than (0 cmp reg). This gives the same results for unsigned
2093 ;; and equality compares which is what we mostly need here.
2094 (define_insn "rsb_imm_compare"
2095 [(set (reg:CC_RSB CC_REGNUM)
2096 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2097 (match_operand 3 "const_int_operand" "")))
2098 (set (match_operand:SI 0 "s_register_operand" "=r")
2099 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2101 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2103 [(set_attr "conds" "set")
2104 (set_attr "type" "alus_imm")]
2107 ;; Similarly, but the result is unused.
2108 (define_insn "rsb_imm_compare_scratch"
2109 [(set (reg:CC_RSB CC_REGNUM)
2110 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2111 (match_operand 1 "arm_not_immediate_operand" "K")))
2112 (clobber (match_scratch:SI 0 "=r"))]
2114 "rsbs\\t%0, %2, #%B1"
2115 [(set_attr "conds" "set")
2116 (set_attr "type" "alus_imm")]
2119 ;; Compare the sum of a value plus a carry against a constant. Uses
2120 ;; RSC, so the result is swapped. Only available on Arm
2121 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2122 [(set (reg:CC_SWP CC_REGNUM)
2124 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2125 (match_operand:DI 3 "arm_borrow_operation" ""))
2126 (match_operand 1 "arm_immediate_operand" "I")))
2127 (clobber (match_scratch:SI 0 "=r"))]
2130 [(set_attr "conds" "set")
2131 (set_attr "type" "alus_imm")]
2134 (define_insn "usubvsi3_borrow"
2135 [(set (reg:CC_B CC_REGNUM)
2137 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2138 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2140 (match_operand:SI 2 "s_register_operand" "l,r")))))
2141 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2142 (minus:SI (match_dup 1)
2143 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2146 "sbcs%?\\t%0, %1, %2"
2147 [(set_attr "conds" "set")
2148 (set_attr "arch" "t2,*")
2149 (set_attr "length" "2,4")]
2152 (define_insn "usubvsi3_borrow_imm"
2153 [(set (reg:CC_B CC_REGNUM)
2155 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2156 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2157 (match_operand:DI 3 "const_int_operand" "n,n"))))
2158 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2159 (minus:SI (match_dup 1)
2160 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2161 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2163 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2166 adcs%?\\t%0, %1, #%B2"
2167 [(set_attr "conds" "set")
2168 (set_attr "type" "alus_imm")]
2171 (define_insn "subvsi3_borrow"
2172 [(set (reg:CC_V CC_REGNUM)
2176 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2177 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2178 (match_operand:DI 4 "arm_borrow_operation" ""))
2180 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2181 (match_operand:SI 3 "arm_borrow_operation" "")))))
2182 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2183 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2186 "sbcs%?\\t%0, %1, %2"
2187 [(set_attr "conds" "set")
2188 (set_attr "arch" "t2,*")
2189 (set_attr "length" "2,4")]
2192 (define_insn "subvsi3_borrow_imm"
2193 [(set (reg:CC_V CC_REGNUM)
2197 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2198 (match_operand 2 "arm_adcimm_operand" "I,K"))
2199 (match_operand:DI 4 "arm_borrow_operation" ""))
2201 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2202 (match_operand:SI 3 "arm_borrow_operation" "")))))
2203 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2204 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2207 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2210 adcs%?\\t%0, %1, #%B2"
2211 [(set_attr "conds" "set")
2212 (set_attr "type" "alus_imm")]
2215 (define_expand "subsf3"
2216 [(set (match_operand:SF 0 "s_register_operand")
2217 (minus:SF (match_operand:SF 1 "s_register_operand")
2218 (match_operand:SF 2 "s_register_operand")))]
2219 "TARGET_32BIT && TARGET_HARD_FLOAT"
2223 (define_expand "subdf3"
2224 [(set (match_operand:DF 0 "s_register_operand")
2225 (minus:DF (match_operand:DF 1 "s_register_operand")
2226 (match_operand:DF 2 "s_register_operand")))]
2227 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2232 ;; Multiplication insns
2234 (define_expand "mulhi3"
2235 [(set (match_operand:HI 0 "s_register_operand")
2236 (mult:HI (match_operand:HI 1 "s_register_operand")
2237 (match_operand:HI 2 "s_register_operand")))]
2238 "TARGET_DSP_MULTIPLY"
2241 rtx result = gen_reg_rtx (SImode);
2242 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2243 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2248 (define_expand "mulsi3"
2249 [(set (match_operand:SI 0 "s_register_operand")
2250 (mult:SI (match_operand:SI 2 "s_register_operand")
2251 (match_operand:SI 1 "s_register_operand")))]
2256 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2258 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2259 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2260 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2262 "mul%?\\t%0, %2, %1"
2263 [(set_attr "type" "mul")
2264 (set_attr "predicable" "yes")
2265 (set_attr "arch" "t2,v6,nov6,nov6")
2266 (set_attr "length" "4")
2267 (set_attr "predicable_short_it" "yes,no,*,*")]
2270 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2271 ;; reusing the same register.
2274 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2276 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2277 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2278 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2280 "mla%?\\t%0, %3, %2, %1"
2281 [(set_attr "type" "mla")
2282 (set_attr "predicable" "yes")
2283 (set_attr "arch" "v6,nov6,nov6,nov6")]
2287 [(set (match_operand:SI 0 "s_register_operand" "=r")
2289 (match_operand:SI 1 "s_register_operand" "r")
2290 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2291 (match_operand:SI 2 "s_register_operand" "r"))))]
2292 "TARGET_32BIT && arm_arch_thumb2"
2293 "mls%?\\t%0, %3, %2, %1"
2294 [(set_attr "type" "mla")
2295 (set_attr "predicable" "yes")]
2298 (define_insn "*mulsi3_compare0"
2299 [(set (reg:CC_NZ CC_REGNUM)
2300 (compare:CC_NZ (mult:SI
2301 (match_operand:SI 2 "s_register_operand" "r,r")
2302 (match_operand:SI 1 "s_register_operand" "%0,r"))
2304 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2305 (mult:SI (match_dup 2) (match_dup 1)))]
2306 "TARGET_ARM && !arm_arch6"
2307 "muls%?\\t%0, %2, %1"
2308 [(set_attr "conds" "set")
2309 (set_attr "type" "muls")]
2312 (define_insn "*mulsi3_compare0_v6"
2313 [(set (reg:CC_NZ CC_REGNUM)
2314 (compare:CC_NZ (mult:SI
2315 (match_operand:SI 2 "s_register_operand" "r")
2316 (match_operand:SI 1 "s_register_operand" "r"))
2318 (set (match_operand:SI 0 "s_register_operand" "=r")
2319 (mult:SI (match_dup 2) (match_dup 1)))]
2320 "TARGET_ARM && arm_arch6 && optimize_size"
2321 "muls%?\\t%0, %2, %1"
2322 [(set_attr "conds" "set")
2323 (set_attr "type" "muls")]
2326 (define_insn "*mulsi_compare0_scratch"
2327 [(set (reg:CC_NZ CC_REGNUM)
2328 (compare:CC_NZ (mult:SI
2329 (match_operand:SI 2 "s_register_operand" "r,r")
2330 (match_operand:SI 1 "s_register_operand" "%0,r"))
2332 (clobber (match_scratch:SI 0 "=&r,&r"))]
2333 "TARGET_ARM && !arm_arch6"
2334 "muls%?\\t%0, %2, %1"
2335 [(set_attr "conds" "set")
2336 (set_attr "type" "muls")]
2339 (define_insn "*mulsi_compare0_scratch_v6"
2340 [(set (reg:CC_NZ CC_REGNUM)
2341 (compare:CC_NZ (mult:SI
2342 (match_operand:SI 2 "s_register_operand" "r")
2343 (match_operand:SI 1 "s_register_operand" "r"))
2345 (clobber (match_scratch:SI 0 "=r"))]
2346 "TARGET_ARM && arm_arch6 && optimize_size"
2347 "muls%?\\t%0, %2, %1"
2348 [(set_attr "conds" "set")
2349 (set_attr "type" "muls")]
2352 (define_insn "*mulsi3addsi_compare0"
2353 [(set (reg:CC_NZ CC_REGNUM)
2356 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2357 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2358 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2360 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2361 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2363 "TARGET_ARM && arm_arch6"
2364 "mlas%?\\t%0, %2, %1, %3"
2365 [(set_attr "conds" "set")
2366 (set_attr "type" "mlas")]
2369 (define_insn "*mulsi3addsi_compare0_v6"
2370 [(set (reg:CC_NZ CC_REGNUM)
2373 (match_operand:SI 2 "s_register_operand" "r")
2374 (match_operand:SI 1 "s_register_operand" "r"))
2375 (match_operand:SI 3 "s_register_operand" "r"))
2377 (set (match_operand:SI 0 "s_register_operand" "=r")
2378 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2380 "TARGET_ARM && arm_arch6 && optimize_size"
2381 "mlas%?\\t%0, %2, %1, %3"
2382 [(set_attr "conds" "set")
2383 (set_attr "type" "mlas")]
2386 (define_insn "*mulsi3addsi_compare0_scratch"
2387 [(set (reg:CC_NZ CC_REGNUM)
2390 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2391 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2392 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2394 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2395 "TARGET_ARM && !arm_arch6"
2396 "mlas%?\\t%0, %2, %1, %3"
2397 [(set_attr "conds" "set")
2398 (set_attr "type" "mlas")]
2401 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2402 [(set (reg:CC_NZ CC_REGNUM)
2405 (match_operand:SI 2 "s_register_operand" "r")
2406 (match_operand:SI 1 "s_register_operand" "r"))
2407 (match_operand:SI 3 "s_register_operand" "r"))
2409 (clobber (match_scratch:SI 0 "=r"))]
2410 "TARGET_ARM && arm_arch6 && optimize_size"
2411 "mlas%?\\t%0, %2, %1, %3"
2412 [(set_attr "conds" "set")
2413 (set_attr "type" "mlas")]
2416 ;; 32x32->64 widening multiply.
2417 ;; The only difference between the v3-5 and v6+ versions is the requirement
2418 ;; that the output does not overlap with either input.
2420 (define_expand "<Us>mulsidi3"
2421 [(set (match_operand:DI 0 "s_register_operand")
2423 (SE:DI (match_operand:SI 1 "s_register_operand"))
2424 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2427 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2428 gen_highpart (SImode, operands[0]),
2429 operands[1], operands[2]));
2434 (define_insn "<US>mull"
2435 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2437 (match_operand:SI 2 "s_register_operand" "%r,r")
2438 (match_operand:SI 3 "s_register_operand" "r,r")))
2439 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2442 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2445 "<US>mull%?\\t%0, %1, %2, %3"
2446 [(set_attr "type" "umull")
2447 (set_attr "predicable" "yes")
2448 (set_attr "arch" "v6,nov6")]
2451 (define_expand "<Us>maddsidi4"
2452 [(set (match_operand:DI 0 "s_register_operand")
2455 (SE:DI (match_operand:SI 1 "s_register_operand"))
2456 (SE:DI (match_operand:SI 2 "s_register_operand")))
2457 (match_operand:DI 3 "s_register_operand")))]
2460 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2461 gen_lowpart (SImode, operands[3]),
2462 gen_highpart (SImode, operands[0]),
2463 gen_highpart (SImode, operands[3]),
2464 operands[1], operands[2]));
2469 (define_insn "<US>mlal"
2470 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2473 (match_operand:SI 4 "s_register_operand" "%r,r")
2474 (match_operand:SI 5 "s_register_operand" "r,r"))
2475 (match_operand:SI 1 "s_register_operand" "0,0")))
2476 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2481 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2482 (zero_extend:DI (match_dup 1)))
2484 (match_operand:SI 3 "s_register_operand" "2,2")))]
2486 "<US>mlal%?\\t%0, %2, %4, %5"
2487 [(set_attr "type" "umlal")
2488 (set_attr "predicable" "yes")
2489 (set_attr "arch" "v6,nov6")]
2492 (define_expand "<US>mulsi3_highpart"
2494 [(set (match_operand:SI 0 "s_register_operand")
2498 (SE:DI (match_operand:SI 1 "s_register_operand"))
2499 (SE:DI (match_operand:SI 2 "s_register_operand")))
2501 (clobber (match_scratch:SI 3 ""))])]
2506 (define_insn "*<US>mull_high"
2507 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2511 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2512 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2514 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2516 "<US>mull%?\\t%3, %0, %2, %1"
2517 [(set_attr "type" "umull")
2518 (set_attr "predicable" "yes")
2519 (set_attr "arch" "v6,nov6,nov6")]
2522 (define_insn "mulhisi3"
2523 [(set (match_operand:SI 0 "s_register_operand" "=r")
2524 (mult:SI (sign_extend:SI
2525 (match_operand:HI 1 "s_register_operand" "%r"))
2527 (match_operand:HI 2 "s_register_operand" "r"))))]
2528 "TARGET_DSP_MULTIPLY"
2529 "smulbb%?\\t%0, %1, %2"
2530 [(set_attr "type" "smulxy")
2531 (set_attr "predicable" "yes")]
2534 (define_insn "*mulhisi3tb"
2535 [(set (match_operand:SI 0 "s_register_operand" "=r")
2536 (mult:SI (ashiftrt:SI
2537 (match_operand:SI 1 "s_register_operand" "r")
2540 (match_operand:HI 2 "s_register_operand" "r"))))]
2541 "TARGET_DSP_MULTIPLY"
2542 "smultb%?\\t%0, %1, %2"
2543 [(set_attr "type" "smulxy")
2544 (set_attr "predicable" "yes")]
2547 (define_insn "*mulhisi3bt"
2548 [(set (match_operand:SI 0 "s_register_operand" "=r")
2549 (mult:SI (sign_extend:SI
2550 (match_operand:HI 1 "s_register_operand" "r"))
2552 (match_operand:SI 2 "s_register_operand" "r")
2554 "TARGET_DSP_MULTIPLY"
2555 "smulbt%?\\t%0, %1, %2"
2556 [(set_attr "type" "smulxy")
2557 (set_attr "predicable" "yes")]
2560 (define_insn "*mulhisi3tt"
2561 [(set (match_operand:SI 0 "s_register_operand" "=r")
2562 (mult:SI (ashiftrt:SI
2563 (match_operand:SI 1 "s_register_operand" "r")
2566 (match_operand:SI 2 "s_register_operand" "r")
2568 "TARGET_DSP_MULTIPLY"
2569 "smultt%?\\t%0, %1, %2"
2570 [(set_attr "type" "smulxy")
2571 (set_attr "predicable" "yes")]
2574 (define_expand "maddhisi4"
2575 [(set (match_operand:SI 0 "s_register_operand")
2576 (plus:SI (mult:SI (sign_extend:SI
2577 (match_operand:HI 1 "s_register_operand"))
2579 (match_operand:HI 2 "s_register_operand")))
2580 (match_operand:SI 3 "s_register_operand")))]
2581 "TARGET_DSP_MULTIPLY"
2583 /* If this function reads the Q bit from ACLE intrinsics break up the
2584 multiplication and accumulation as an overflow during accumulation will
2585 clobber the Q flag. */
2588 rtx tmp = gen_reg_rtx (SImode);
2589 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2590 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2596 (define_insn "*arm_maddhisi4"
2597 [(set (match_operand:SI 0 "s_register_operand" "=r")
2598 (plus:SI (mult:SI (sign_extend:SI
2599 (match_operand:HI 1 "s_register_operand" "r"))
2601 (match_operand:HI 2 "s_register_operand" "r")))
2602 (match_operand:SI 3 "s_register_operand" "r")))]
2603 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2604 "smlabb%?\\t%0, %1, %2, %3"
2605 [(set_attr "type" "smlaxy")
2606 (set_attr "predicable" "yes")]
2609 (define_insn "arm_smlabb_setq"
2610 [(set (match_operand:SI 0 "s_register_operand" "=r")
2611 (plus:SI (mult:SI (sign_extend:SI
2612 (match_operand:HI 1 "s_register_operand" "r"))
2614 (match_operand:HI 2 "s_register_operand" "r")))
2615 (match_operand:SI 3 "s_register_operand" "r")))
2616 (set (reg:CC APSRQ_REGNUM)
2617 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2618 "TARGET_DSP_MULTIPLY"
2619 "smlabb%?\\t%0, %1, %2, %3"
2620 [(set_attr "type" "smlaxy")
2621 (set_attr "predicable" "yes")]
2624 (define_expand "arm_smlabb"
2625 [(match_operand:SI 0 "s_register_operand")
2626 (match_operand:SI 1 "s_register_operand")
2627 (match_operand:SI 2 "s_register_operand")
2628 (match_operand:SI 3 "s_register_operand")]
2629 "TARGET_DSP_MULTIPLY"
2631 rtx mult1 = gen_lowpart (HImode, operands[1]);
2632 rtx mult2 = gen_lowpart (HImode, operands[2]);
2634 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2636 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2641 ;; Note: there is no maddhisi4ibt because this one is canonical form
2642 (define_insn "maddhisi4tb"
2643 [(set (match_operand:SI 0 "s_register_operand" "=r")
2644 (plus:SI (mult:SI (ashiftrt:SI
2645 (match_operand:SI 1 "s_register_operand" "r")
2648 (match_operand:HI 2 "s_register_operand" "r")))
2649 (match_operand:SI 3 "s_register_operand" "r")))]
2650 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2651 "smlatb%?\\t%0, %1, %2, %3"
2652 [(set_attr "type" "smlaxy")
2653 (set_attr "predicable" "yes")]
2656 (define_insn "arm_smlatb_setq"
2657 [(set (match_operand:SI 0 "s_register_operand" "=r")
2658 (plus:SI (mult:SI (ashiftrt:SI
2659 (match_operand:SI 1 "s_register_operand" "r")
2662 (match_operand:HI 2 "s_register_operand" "r")))
2663 (match_operand:SI 3 "s_register_operand" "r")))
2664 (set (reg:CC APSRQ_REGNUM)
2665 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2666 "TARGET_DSP_MULTIPLY"
2667 "smlatb%?\\t%0, %1, %2, %3"
2668 [(set_attr "type" "smlaxy")
2669 (set_attr "predicable" "yes")]
2672 (define_expand "arm_smlatb"
2673 [(match_operand:SI 0 "s_register_operand")
2674 (match_operand:SI 1 "s_register_operand")
2675 (match_operand:SI 2 "s_register_operand")
2676 (match_operand:SI 3 "s_register_operand")]
2677 "TARGET_DSP_MULTIPLY"
2679 rtx mult2 = gen_lowpart (HImode, operands[2]);
2681 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2682 mult2, operands[3]));
2684 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2685 mult2, operands[3]));
2690 (define_insn "maddhisi4tt"
2691 [(set (match_operand:SI 0 "s_register_operand" "=r")
2692 (plus:SI (mult:SI (ashiftrt:SI
2693 (match_operand:SI 1 "s_register_operand" "r")
2696 (match_operand:SI 2 "s_register_operand" "r")
2698 (match_operand:SI 3 "s_register_operand" "r")))]
2699 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2700 "smlatt%?\\t%0, %1, %2, %3"
2701 [(set_attr "type" "smlaxy")
2702 (set_attr "predicable" "yes")]
2705 (define_insn "arm_smlatt_setq"
2706 [(set (match_operand:SI 0 "s_register_operand" "=r")
2707 (plus:SI (mult:SI (ashiftrt:SI
2708 (match_operand:SI 1 "s_register_operand" "r")
2711 (match_operand:SI 2 "s_register_operand" "r")
2713 (match_operand:SI 3 "s_register_operand" "r")))
2714 (set (reg:CC APSRQ_REGNUM)
2715 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2716 "TARGET_DSP_MULTIPLY"
2717 "smlatt%?\\t%0, %1, %2, %3"
2718 [(set_attr "type" "smlaxy")
2719 (set_attr "predicable" "yes")]
2722 (define_expand "arm_smlatt"
2723 [(match_operand:SI 0 "s_register_operand")
2724 (match_operand:SI 1 "s_register_operand")
2725 (match_operand:SI 2 "s_register_operand")
2726 (match_operand:SI 3 "s_register_operand")]
2727 "TARGET_DSP_MULTIPLY"
2730 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2731 operands[2], operands[3]));
2733 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2734 operands[2], operands[3]));
2739 (define_insn "maddhidi4"
2740 [(set (match_operand:DI 0 "s_register_operand" "=r")
2742 (mult:DI (sign_extend:DI
2743 (match_operand:HI 1 "s_register_operand" "r"))
2745 (match_operand:HI 2 "s_register_operand" "r")))
2746 (match_operand:DI 3 "s_register_operand" "0")))]
2747 "TARGET_DSP_MULTIPLY"
2748 "smlalbb%?\\t%Q0, %R0, %1, %2"
2749 [(set_attr "type" "smlalxy")
2750 (set_attr "predicable" "yes")])
2752 ;; Note: there is no maddhidi4ibt because this one is canonical form
2753 (define_insn "*maddhidi4tb"
2754 [(set (match_operand:DI 0 "s_register_operand" "=r")
2756 (mult:DI (sign_extend:DI
2758 (match_operand:SI 1 "s_register_operand" "r")
2761 (match_operand:HI 2 "s_register_operand" "r")))
2762 (match_operand:DI 3 "s_register_operand" "0")))]
2763 "TARGET_DSP_MULTIPLY"
2764 "smlaltb%?\\t%Q0, %R0, %1, %2"
2765 [(set_attr "type" "smlalxy")
2766 (set_attr "predicable" "yes")])
2768 (define_insn "*maddhidi4tt"
2769 [(set (match_operand:DI 0 "s_register_operand" "=r")
2771 (mult:DI (sign_extend:DI
2773 (match_operand:SI 1 "s_register_operand" "r")
2777 (match_operand:SI 2 "s_register_operand" "r")
2779 (match_operand:DI 3 "s_register_operand" "0")))]
2780 "TARGET_DSP_MULTIPLY"
2781 "smlaltt%?\\t%Q0, %R0, %1, %2"
2782 [(set_attr "type" "smlalxy")
2783 (set_attr "predicable" "yes")])
2785 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2786 [(set (match_operand:SI 0 "s_register_operand" "=r")
2788 [(match_operand:SI 1 "s_register_operand" "r")
2789 (match_operand:SI 2 "s_register_operand" "r")
2790 (match_operand:SI 3 "s_register_operand" "r")]
2792 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2793 "<smlaw_op>%?\\t%0, %1, %2, %3"
2794 [(set_attr "type" "smlaxy")
2795 (set_attr "predicable" "yes")]
2798 (define_expand "arm_<smlaw_op>"
2799 [(set (match_operand:SI 0 "s_register_operand")
2801 [(match_operand:SI 1 "s_register_operand")
2802 (match_operand:SI 2 "s_register_operand")
2803 (match_operand:SI 3 "s_register_operand")]
2805 "TARGET_DSP_MULTIPLY"
2808 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2809 operands[2], operands[3]));
2811 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2812 operands[2], operands[3]));
2817 (define_expand "mulsf3"
2818 [(set (match_operand:SF 0 "s_register_operand")
2819 (mult:SF (match_operand:SF 1 "s_register_operand")
2820 (match_operand:SF 2 "s_register_operand")))]
2821 "TARGET_32BIT && TARGET_HARD_FLOAT"
2825 (define_expand "muldf3"
2826 [(set (match_operand:DF 0 "s_register_operand")
2827 (mult:DF (match_operand:DF 1 "s_register_operand")
2828 (match_operand:DF 2 "s_register_operand")))]
2829 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2835 (define_expand "divsf3"
2836 [(set (match_operand:SF 0 "s_register_operand")
2837 (div:SF (match_operand:SF 1 "s_register_operand")
2838 (match_operand:SF 2 "s_register_operand")))]
2839 "TARGET_32BIT && TARGET_HARD_FLOAT"
2842 (define_expand "divdf3"
2843 [(set (match_operand:DF 0 "s_register_operand")
2844 (div:DF (match_operand:DF 1 "s_register_operand")
2845 (match_operand:DF 2 "s_register_operand")))]
2846 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2850 ; Expand logical operations. The mid-end expander does not split off memory
2851 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2852 ; So an explicit expander is needed to generate better code.
2854 (define_expand "<LOGICAL:optab>di3"
2855 [(set (match_operand:DI 0 "s_register_operand")
2856 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2857 (match_operand:DI 2 "arm_<optab>di_operand")))]
2860 rtx low = simplify_gen_binary (<CODE>, SImode,
2861 gen_lowpart (SImode, operands[1]),
2862 gen_lowpart (SImode, operands[2]));
2863 rtx high = simplify_gen_binary (<CODE>, SImode,
2864 gen_highpart (SImode, operands[1]),
2865 gen_highpart_mode (SImode, DImode,
2868 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2869 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2874 (define_expand "one_cmpldi2"
2875 [(set (match_operand:DI 0 "s_register_operand")
2876 (not:DI (match_operand:DI 1 "s_register_operand")))]
2879 rtx low = simplify_gen_unary (NOT, SImode,
2880 gen_lowpart (SImode, operands[1]),
2882 rtx high = simplify_gen_unary (NOT, SImode,
2883 gen_highpart_mode (SImode, DImode,
2887 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2888 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2893 ;; Split DImode and, ior, xor operations. Simply perform the logical
2894 ;; operation on the upper and lower halves of the registers.
2895 ;; This is needed for atomic operations in arm_split_atomic_op.
2896 ;; Avoid splitting IWMMXT instructions.
2898 [(set (match_operand:DI 0 "s_register_operand" "")
2899 (match_operator:DI 6 "logical_binary_operator"
2900 [(match_operand:DI 1 "s_register_operand" "")
2901 (match_operand:DI 2 "s_register_operand" "")]))]
2902 "TARGET_32BIT && reload_completed
2903 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2904 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2905 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2908 operands[3] = gen_highpart (SImode, operands[0]);
2909 operands[0] = gen_lowpart (SImode, operands[0]);
2910 operands[4] = gen_highpart (SImode, operands[1]);
2911 operands[1] = gen_lowpart (SImode, operands[1]);
2912 operands[5] = gen_highpart (SImode, operands[2]);
2913 operands[2] = gen_lowpart (SImode, operands[2]);
2917 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2918 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2920 [(set (match_operand:DI 0 "s_register_operand")
2921 (not:DI (match_operand:DI 1 "s_register_operand")))]
2923 [(set (match_dup 0) (not:SI (match_dup 1)))
2924 (set (match_dup 2) (not:SI (match_dup 3)))]
2927 operands[2] = gen_highpart (SImode, operands[0]);
2928 operands[0] = gen_lowpart (SImode, operands[0]);
2929 operands[3] = gen_highpart (SImode, operands[1]);
2930 operands[1] = gen_lowpart (SImode, operands[1]);
2934 (define_expand "andsi3"
2935 [(set (match_operand:SI 0 "s_register_operand")
2936 (and:SI (match_operand:SI 1 "s_register_operand")
2937 (match_operand:SI 2 "reg_or_int_operand")))]
2942 if (CONST_INT_P (operands[2]))
2944 if (INTVAL (operands[2]) == 255 && arm_arch6)
2946 operands[1] = convert_to_mode (QImode, operands[1], 1);
2947 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2951 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2952 operands[2] = force_reg (SImode, operands[2]);
2955 arm_split_constant (AND, SImode, NULL_RTX,
2956 INTVAL (operands[2]), operands[0],
2958 optimize && can_create_pseudo_p ());
2964 else /* TARGET_THUMB1 */
2966 if (!CONST_INT_P (operands[2]))
2968 rtx tmp = force_reg (SImode, operands[2]);
2969 if (rtx_equal_p (operands[0], operands[1]))
2973 operands[2] = operands[1];
2981 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2983 operands[2] = force_reg (SImode,
2984 GEN_INT (~INTVAL (operands[2])));
2986 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2991 for (i = 9; i <= 31; i++)
2993 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2995 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2999 else if ((HOST_WIDE_INT_1 << i) - 1
3000 == ~INTVAL (operands[2]))
3002 rtx shift = GEN_INT (i);
3003 rtx reg = gen_reg_rtx (SImode);
3005 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
3006 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
3012 operands[2] = force_reg (SImode, operands[2]);
3018 ; ??? Check split length for Thumb-2
3019 (define_insn_and_split "*arm_andsi3_insn"
3020 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r,r")
3021 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,0,r")
3022 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,Dj,?n")))]
3027 bic%?\\t%0, %1, #%B2
3032 && CONST_INT_P (operands[2])
3033 && !(const_ok_for_arm (INTVAL (operands[2]))
3034 || const_ok_for_arm (~INTVAL (operands[2]))
3036 && satisfies_constraint_Dj (operands[2])
3037 && (rtx_equal_p (operands[0], operands[1])
3038 || !reload_completed)))"
3039 [(clobber (const_int 0))]
3041 arm_split_constant (AND, SImode, curr_insn,
3042 INTVAL (operands[2]), operands[0], operands[1], 0);
3045 [(set_attr "length" "4,4,4,4,4,16")
3046 (set_attr "predicable" "yes")
3047 (set_attr "predicable_short_it" "no,yes,no,no,no,no")
3048 (set_attr "arch" "*,*,*,*,v6t2,*")
3049 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,bfm,logic_imm")]
3052 (define_insn "*andsi3_compare0"
3053 [(set (reg:CC_NZ CC_REGNUM)
3055 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3056 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3058 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3059 (and:SI (match_dup 1) (match_dup 2)))]
3063 bics%?\\t%0, %1, #%B2
3064 ands%?\\t%0, %1, %2"
3065 [(set_attr "conds" "set")
3066 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3069 (define_insn "*andsi3_compare0_scratch"
3070 [(set (reg:CC_NZ CC_REGNUM)
3072 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3073 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3075 (clobber (match_scratch:SI 2 "=X,r,X"))]
3079 bics%?\\t%2, %0, #%B1
3081 [(set_attr "conds" "set")
3082 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3085 (define_insn "*zeroextractsi_compare0_scratch"
3086 [(set (reg:CC_NZ CC_REGNUM)
3087 (compare:CC_NZ (zero_extract:SI
3088 (match_operand:SI 0 "s_register_operand" "r")
3089 (match_operand 1 "const_int_operand" "n")
3090 (match_operand 2 "const_int_operand" "n"))
3093 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3094 && INTVAL (operands[1]) > 0
3095 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3096 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3098 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3099 << INTVAL (operands[2]));
3100 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3103 [(set_attr "conds" "set")
3104 (set_attr "predicable" "yes")
3105 (set_attr "type" "logics_imm")]
3108 (define_insn_and_split "*ne_zeroextractsi"
3109 [(set (match_operand:SI 0 "s_register_operand" "=r")
3110 (ne:SI (zero_extract:SI
3111 (match_operand:SI 1 "s_register_operand" "r")
3112 (match_operand:SI 2 "const_int_operand" "n")
3113 (match_operand:SI 3 "const_int_operand" "n"))
3115 (clobber (reg:CC CC_REGNUM))]
3117 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3118 && INTVAL (operands[2]) > 0
3119 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3120 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3123 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3124 && INTVAL (operands[2]) > 0
3125 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3126 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3127 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3128 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3130 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3132 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3133 (match_dup 0) (const_int 1)))]
3135 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3136 << INTVAL (operands[3]));
3138 [(set_attr "conds" "clob")
3139 (set (attr "length")
3140 (if_then_else (eq_attr "is_thumb" "yes")
3143 (set_attr "type" "multiple")]
3146 (define_insn_and_split "*ne_zeroextractsi_shifted"
3147 [(set (match_operand:SI 0 "s_register_operand" "=r")
3148 (ne:SI (zero_extract:SI
3149 (match_operand:SI 1 "s_register_operand" "r")
3150 (match_operand:SI 2 "const_int_operand" "n")
3153 (clobber (reg:CC CC_REGNUM))]
3157 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3158 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3160 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3162 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3163 (match_dup 0) (const_int 1)))]
3165 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3167 [(set_attr "conds" "clob")
3168 (set_attr "length" "8")
3169 (set_attr "type" "multiple")]
3172 (define_insn_and_split "*ite_ne_zeroextractsi"
3173 [(set (match_operand:SI 0 "s_register_operand" "=r")
3174 (if_then_else:SI (ne (zero_extract:SI
3175 (match_operand:SI 1 "s_register_operand" "r")
3176 (match_operand:SI 2 "const_int_operand" "n")
3177 (match_operand:SI 3 "const_int_operand" "n"))
3179 (match_operand:SI 4 "arm_not_operand" "rIK")
3181 (clobber (reg:CC CC_REGNUM))]
3183 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3184 && INTVAL (operands[2]) > 0
3185 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3186 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3187 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3190 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3191 && INTVAL (operands[2]) > 0
3192 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3193 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3194 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3195 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3196 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3198 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3200 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3201 (match_dup 0) (match_dup 4)))]
3203 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3204 << INTVAL (operands[3]));
3206 [(set_attr "conds" "clob")
3207 (set_attr "length" "8")
3208 (set_attr "type" "multiple")]
3211 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3212 [(set (match_operand:SI 0 "s_register_operand" "=r")
3213 (if_then_else:SI (ne (zero_extract:SI
3214 (match_operand:SI 1 "s_register_operand" "r")
3215 (match_operand:SI 2 "const_int_operand" "n")
3218 (match_operand:SI 3 "arm_not_operand" "rIK")
3220 (clobber (reg:CC CC_REGNUM))]
3221 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3223 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3224 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3225 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3227 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3229 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3230 (match_dup 0) (match_dup 3)))]
3232 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3234 [(set_attr "conds" "clob")
3235 (set_attr "length" "8")
3236 (set_attr "type" "multiple")]
3239 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
3241 [(set (match_operand:SI 0 "s_register_operand" "")
3242 (match_operator:SI 1 "shiftable_operator"
3243 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3244 (match_operand:SI 3 "const_int_operand" "")
3245 (match_operand:SI 4 "const_int_operand" ""))
3246 (match_operand:SI 5 "s_register_operand" "")]))
3247 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3249 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3252 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3255 HOST_WIDE_INT temp = INTVAL (operands[3]);
3257 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3258 operands[4] = GEN_INT (32 - temp);
3263 [(set (match_operand:SI 0 "s_register_operand" "")
3264 (match_operator:SI 1 "shiftable_operator"
3265 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3266 (match_operand:SI 3 "const_int_operand" "")
3267 (match_operand:SI 4 "const_int_operand" ""))
3268 (match_operand:SI 5 "s_register_operand" "")]))
3269 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3271 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3274 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3277 HOST_WIDE_INT temp = INTVAL (operands[3]);
3279 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3280 operands[4] = GEN_INT (32 - temp);
3284 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3285 ;;; represented by the bitfield, then this will produce incorrect results.
3286 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3287 ;;; which have a real bit-field insert instruction, the truncation happens
3288 ;;; in the bit-field insert instruction itself. Since arm does not have a
3289 ;;; bit-field insert instruction, we would have to emit code here to truncate
3290 ;;; the value before we insert. This loses some of the advantage of having
3291 ;;; this insv pattern, so this pattern needs to be reevalutated.
3293 (define_expand "insv"
3294 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3295 (match_operand 1 "general_operand")
3296 (match_operand 2 "general_operand"))
3297 (match_operand 3 "reg_or_int_operand"))]
3298 "TARGET_ARM || arm_arch_thumb2"
3301 int start_bit = INTVAL (operands[2]);
3302 int width = INTVAL (operands[1]);
3303 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3304 rtx target, subtarget;
3306 if (arm_arch_thumb2)
3308 if (unaligned_access && MEM_P (operands[0])
3309 && s_register_operand (operands[3], GET_MODE (operands[3]))
3310 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3314 if (BYTES_BIG_ENDIAN)
3315 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3320 base_addr = adjust_address (operands[0], SImode,
3321 start_bit / BITS_PER_UNIT);
3322 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3326 rtx tmp = gen_reg_rtx (HImode);
3328 base_addr = adjust_address (operands[0], HImode,
3329 start_bit / BITS_PER_UNIT);
3330 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3331 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3335 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3337 bool use_bfi = TRUE;
3339 if (CONST_INT_P (operands[3]))
3341 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3345 emit_insn (gen_insv_zero (operands[0], operands[1],
3350 /* See if the set can be done with a single orr instruction. */
3351 if (val == mask && const_ok_for_arm (val << start_bit))
3357 if (!REG_P (operands[3]))
3358 operands[3] = force_reg (SImode, operands[3]);
3360 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3369 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3372 target = copy_rtx (operands[0]);
3373 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3374 subreg as the final target. */
3375 if (GET_CODE (target) == SUBREG)
3377 subtarget = gen_reg_rtx (SImode);
3378 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3379 < GET_MODE_SIZE (SImode))
3380 target = SUBREG_REG (target);
3385 if (CONST_INT_P (operands[3]))
3387 /* Since we are inserting a known constant, we may be able to
3388 reduce the number of bits that we have to clear so that
3389 the mask becomes simple. */
3390 /* ??? This code does not check to see if the new mask is actually
3391 simpler. It may not be. */
3392 rtx op1 = gen_reg_rtx (SImode);
3393 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3394 start of this pattern. */
3395 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3396 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3398 emit_insn (gen_andsi3 (op1, operands[0],
3399 gen_int_mode (~mask2, SImode)));
3400 emit_insn (gen_iorsi3 (subtarget, op1,
3401 gen_int_mode (op3_value << start_bit, SImode)));
3403 else if (start_bit == 0
3404 && !(const_ok_for_arm (mask)
3405 || const_ok_for_arm (~mask)))
3407 /* A Trick, since we are setting the bottom bits in the word,
3408 we can shift operand[3] up, operand[0] down, OR them together
3409 and rotate the result back again. This takes 3 insns, and
3410 the third might be mergeable into another op. */
3411 /* The shift up copes with the possibility that operand[3] is
3412 wider than the bitfield. */
3413 rtx op0 = gen_reg_rtx (SImode);
3414 rtx op1 = gen_reg_rtx (SImode);
3416 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3417 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3418 emit_insn (gen_iorsi3 (op1, op1, op0));
3419 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3421 else if ((width + start_bit == 32)
3422 && !(const_ok_for_arm (mask)
3423 || const_ok_for_arm (~mask)))
3425 /* Similar trick, but slightly less efficient. */
3427 rtx op0 = gen_reg_rtx (SImode);
3428 rtx op1 = gen_reg_rtx (SImode);
3430 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3431 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3432 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3433 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3437 rtx op0 = gen_int_mode (mask, SImode);
3438 rtx op1 = gen_reg_rtx (SImode);
3439 rtx op2 = gen_reg_rtx (SImode);
3441 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3443 rtx tmp = gen_reg_rtx (SImode);
3445 emit_insn (gen_movsi (tmp, op0));
3449 /* Mask out any bits in operand[3] that are not needed. */
3450 emit_insn (gen_andsi3 (op1, operands[3], op0));
3452 if (CONST_INT_P (op0)
3453 && (const_ok_for_arm (mask << start_bit)
3454 || const_ok_for_arm (~(mask << start_bit))))
3456 op0 = gen_int_mode (~(mask << start_bit), SImode);
3457 emit_insn (gen_andsi3 (op2, operands[0], op0));
3461 if (CONST_INT_P (op0))
3463 rtx tmp = gen_reg_rtx (SImode);
3465 emit_insn (gen_movsi (tmp, op0));
3470 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3472 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3476 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3478 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3481 if (subtarget != target)
3483 /* If TARGET is still a SUBREG, then it must be wider than a word,
3484 so we must be careful only to set the subword we were asked to. */
3485 if (GET_CODE (target) == SUBREG)
3486 emit_move_insn (target, subtarget);
3488 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3495 (define_insn_and_split "insv_zero"
3496 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3497 (match_operand:SI 1 "const_int_M_operand" "M")
3498 (match_operand:SI 2 "const_int_M_operand" "M"))
3503 [(set (match_dup 0) (and:SI (match_dup 0) (match_dup 1)))]
3505 /* Convert back to a normal AND operation, so that we can take advantage
3506 of BIC and AND when appropriate; we'll still emit BFC if that's the
3507 right thing to do. */
3508 unsigned HOST_WIDE_INT width = UINTVAL (operands[1]);
3509 unsigned HOST_WIDE_INT lsb = UINTVAL (operands[2]);
3510 unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << width) - 1;
3512 operands[1] = gen_int_mode (~(mask << lsb), SImode);
3514 [(set_attr "length" "4")
3515 (set_attr "predicable" "yes")
3516 (set_attr "type" "bfm")]
3519 (define_insn "insv_t2"
3520 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3521 (match_operand:SI 1 "const_int_M_operand" "M")
3522 (match_operand:SI 2 "const_int_M_operand" "M"))
3523 (match_operand:SI 3 "s_register_operand" "r"))]
3525 "bfi%?\t%0, %3, %2, %1"
3526 [(set_attr "length" "4")
3527 (set_attr "predicable" "yes")
3528 (set_attr "type" "bfm")]
3532 [(set (match_operand:SI 0 "s_register_operand" "=r")
3533 (ior:SI (and:SI (match_operand:SI 1 "s_register_operand" "0")
3534 (match_operand 2 "const_int_operand" "Dj"))
3536 (match_operand:SI 3 "s_register_operand" "r")
3537 (match_operand 4 "const_int_operand" "i"))
3538 (match_operand 5 "const_int_operand" "i"))))]
3540 && UINTVAL (operands[4]) < 32
3541 && UINTVAL (operands[2]) == ~UINTVAL (operands[5])
3542 && (exact_log2 (UINTVAL (operands[5])
3543 + (HOST_WIDE_INT_1U << UINTVAL (operands[4])))
3545 "bfi%?\t%0, %3, %V2"
3546 [(set_attr "length" "4")
3547 (set_attr "predicable" "yes")
3548 (set_attr "type" "bfm")]
3551 (define_insn "*bfi_alt1"
3552 [(set (match_operand:SI 0 "s_register_operand" "=r")
3553 (ior:SI (and:SI (ashift:SI
3554 (match_operand:SI 3 "s_register_operand" "r")
3555 (match_operand 4 "const_int_operand" "i"))
3556 (match_operand 5 "const_int_operand" "i"))
3557 (and:SI (match_operand:SI 1 "s_register_operand" "0")
3558 (match_operand 2 "const_int_operand" "Dj"))))]
3560 && UINTVAL (operands[4]) < 32
3561 && UINTVAL (operands[2]) == ~UINTVAL (operands[5])
3562 && (exact_log2 (UINTVAL (operands[5])
3563 + (HOST_WIDE_INT_1U << UINTVAL (operands[4])))
3565 "bfi%?\t%0, %3, %V2"
3566 [(set_attr "length" "4")
3567 (set_attr "predicable" "yes")
3568 (set_attr "type" "bfm")]
3571 (define_insn "*bfi_alt2"
3572 [(set (match_operand:SI 0 "s_register_operand" "=r")
3573 (ior:SI (and:SI (match_operand:SI 1 "s_register_operand" "0")
3574 (match_operand 2 "const_int_operand" "i"))
3575 (and:SI (match_operand:SI 3 "s_register_operand" "r")
3576 (match_operand 4 "const_int_operand" "i"))))]
3578 && UINTVAL (operands[2]) == ~UINTVAL (operands[4])
3579 && exact_log2 (UINTVAL (operands[4]) + 1) >= 0"
3580 "bfi%?\t%0, %3, %V2"
3581 [(set_attr "length" "4")
3582 (set_attr "predicable" "yes")
3583 (set_attr "type" "bfm")]
3586 (define_insn "*bfi_alt3"
3587 [(set (match_operand:SI 0 "s_register_operand" "=r")
3588 (ior:SI (and:SI (match_operand:SI 3 "s_register_operand" "r")
3589 (match_operand 4 "const_int_operand" "i"))
3590 (and:SI (match_operand:SI 1 "s_register_operand" "0")
3591 (match_operand 2 "const_int_operand" "i"))))]
3593 && UINTVAL (operands[2]) == ~UINTVAL (operands[4])
3594 && exact_log2 (UINTVAL (operands[4]) + 1) >= 0"
3595 "bfi%?\t%0, %3, %V2"
3596 [(set_attr "length" "4")
3597 (set_attr "predicable" "yes")
3598 (set_attr "type" "bfm")]
3601 (define_insn "andsi_notsi_si"
3602 [(set (match_operand:SI 0 "s_register_operand" "=r")
3603 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3604 (match_operand:SI 1 "s_register_operand" "r")))]
3606 "bic%?\\t%0, %1, %2"
3607 [(set_attr "predicable" "yes")
3608 (set_attr "type" "logic_reg")]
3611 (define_insn "andsi_not_shiftsi_si"
3612 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3613 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3614 [(match_operand:SI 2 "s_register_operand" "r,r")
3615 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3616 (match_operand:SI 1 "s_register_operand" "r,r")))]
3618 "bic%?\\t%0, %1, %2%S4"
3619 [(set_attr "predicable" "yes")
3620 (set_attr "shift" "2")
3621 (set_attr "arch" "32,a")
3622 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3625 ;; Shifted bics pattern used to set up CC status register and not reusing
3626 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3627 ;; does not support shift by register.
3628 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3629 [(set (reg:CC_NZ CC_REGNUM)
3631 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3632 [(match_operand:SI 1 "s_register_operand" "r,r")
3633 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3634 (match_operand:SI 3 "s_register_operand" "r,r"))
3636 (clobber (match_scratch:SI 4 "=r,r"))]
3638 "bics%?\\t%4, %3, %1%S0"
3639 [(set_attr "predicable" "yes")
3640 (set_attr "arch" "32,a")
3641 (set_attr "conds" "set")
3642 (set_attr "shift" "1")
3643 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3646 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3647 ;; getting reused later.
3648 (define_insn "andsi_not_shiftsi_si_scc"
3649 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3651 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3652 [(match_operand:SI 1 "s_register_operand" "r,r")
3653 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3654 (match_operand:SI 3 "s_register_operand" "r,r"))
3656 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3657 (and:SI (not:SI (match_op_dup 0
3662 "bics%?\\t%4, %3, %1%S0"
3663 [(set_attr "predicable" "yes")
3664 (set_attr "arch" "32,a")
3665 (set_attr "conds" "set")
3666 (set_attr "shift" "1")
3667 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3670 (define_insn "*andsi_notsi_si_compare0"
3671 [(set (reg:CC_NZ CC_REGNUM)
3673 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3674 (match_operand:SI 1 "s_register_operand" "r"))
3676 (set (match_operand:SI 0 "s_register_operand" "=r")
3677 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3680 [(set_attr "conds" "set")
3681 (set_attr "type" "logics_shift_reg")]
3684 (define_insn "*andsi_notsi_si_compare0_scratch"
3685 [(set (reg:CC_NZ CC_REGNUM)
3687 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3688 (match_operand:SI 1 "s_register_operand" "r"))
3690 (clobber (match_scratch:SI 0 "=r"))]
3693 [(set_attr "conds" "set")
3694 (set_attr "type" "logics_shift_reg")]
3697 (define_expand "iorsi3"
3698 [(set (match_operand:SI 0 "s_register_operand")
3699 (ior:SI (match_operand:SI 1 "s_register_operand")
3700 (match_operand:SI 2 "reg_or_int_operand")))]
3703 if (CONST_INT_P (operands[2]))
3707 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3708 operands[2] = force_reg (SImode, operands[2]);
3711 arm_split_constant (IOR, SImode, NULL_RTX,
3712 INTVAL (operands[2]), operands[0],
3714 optimize && can_create_pseudo_p ());
3718 else /* TARGET_THUMB1 */
3720 rtx tmp = force_reg (SImode, operands[2]);
3721 if (rtx_equal_p (operands[0], operands[1]))
3725 operands[2] = operands[1];
3733 (define_insn_and_split "*iorsi3_insn"
3734 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3735 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3736 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3741 orn%?\\t%0, %1, #%B2
3745 && CONST_INT_P (operands[2])
3746 && !(const_ok_for_arm (INTVAL (operands[2]))
3747 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3748 [(clobber (const_int 0))]
3750 arm_split_constant (IOR, SImode, curr_insn,
3751 INTVAL (operands[2]), operands[0], operands[1], 0);
3754 [(set_attr "length" "4,4,4,4,16")
3755 (set_attr "arch" "32,t2,t2,32,32")
3756 (set_attr "predicable" "yes")
3757 (set_attr "predicable_short_it" "no,yes,no,no,no")
3758 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3762 [(match_scratch:SI 3 "r")
3763 (set (match_operand:SI 0 "arm_general_register_operand" "")
3764 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3765 (match_operand:SI 2 "const_int_operand" "")))]
3767 && !const_ok_for_arm (INTVAL (operands[2]))
3768 && const_ok_for_arm (~INTVAL (operands[2]))"
3769 [(set (match_dup 3) (match_dup 2))
3770 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3774 (define_insn "*iorsi3_compare0"
3775 [(set (reg:CC_NZ CC_REGNUM)
3777 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3778 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3780 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3781 (ior:SI (match_dup 1) (match_dup 2)))]
3783 "orrs%?\\t%0, %1, %2"
3784 [(set_attr "conds" "set")
3785 (set_attr "arch" "*,t2,*")
3786 (set_attr "length" "4,2,4")
3787 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3790 (define_insn "*iorsi3_compare0_scratch"
3791 [(set (reg:CC_NZ CC_REGNUM)
3793 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3794 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3796 (clobber (match_scratch:SI 0 "=r,l,r"))]
3798 "orrs%?\\t%0, %1, %2"
3799 [(set_attr "conds" "set")
3800 (set_attr "arch" "*,t2,*")
3801 (set_attr "length" "4,2,4")
3802 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3805 (define_expand "xorsi3"
3806 [(set (match_operand:SI 0 "s_register_operand")
3807 (xor:SI (match_operand:SI 1 "s_register_operand")
3808 (match_operand:SI 2 "reg_or_int_operand")))]
3810 "if (CONST_INT_P (operands[2]))
3814 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3815 operands[2] = force_reg (SImode, operands[2]);
3818 arm_split_constant (XOR, SImode, NULL_RTX,
3819 INTVAL (operands[2]), operands[0],
3821 optimize && can_create_pseudo_p ());
3825 else /* TARGET_THUMB1 */
3827 rtx tmp = force_reg (SImode, operands[2]);
3828 if (rtx_equal_p (operands[0], operands[1]))
3832 operands[2] = operands[1];
3839 (define_insn_and_split "*arm_xorsi3"
3840 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3841 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3842 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3850 && CONST_INT_P (operands[2])
3851 && !const_ok_for_arm (INTVAL (operands[2]))"
3852 [(clobber (const_int 0))]
3854 arm_split_constant (XOR, SImode, curr_insn,
3855 INTVAL (operands[2]), operands[0], operands[1], 0);
3858 [(set_attr "length" "4,4,4,16")
3859 (set_attr "predicable" "yes")
3860 (set_attr "predicable_short_it" "no,yes,no,no")
3861 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3864 (define_insn "*xorsi3_compare0"
3865 [(set (reg:CC_NZ CC_REGNUM)
3866 (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3867 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3869 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3870 (xor:SI (match_dup 1) (match_dup 2)))]
3872 "eors%?\\t%0, %1, %2"
3873 [(set_attr "conds" "set")
3874 (set_attr "type" "logics_imm,logics_reg")]
3877 (define_insn "*xorsi3_compare0_scratch"
3878 [(set (reg:CC_NZ CC_REGNUM)
3879 (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3880 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3884 [(set_attr "conds" "set")
3885 (set_attr "type" "logics_imm,logics_reg")]
3888 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3889 ; (NOT D) we can sometimes merge the final NOT into one of the following
3893 [(set (match_operand:SI 0 "s_register_operand" "")
3894 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3895 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3896 (match_operand:SI 3 "arm_rhs_operand" "")))
3897 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3899 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3900 (not:SI (match_dup 3))))
3901 (set (match_dup 0) (not:SI (match_dup 4)))]
3905 (define_insn_and_split "*andsi_iorsi3_notsi"
3906 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3907 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3908 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3909 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3911 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3912 "&& reload_completed"
3913 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3914 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3916 /* If operands[3] is a constant make sure to fold the NOT into it
3917 to avoid creating a NOT of a CONST_INT. */
3918 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3919 if (CONST_INT_P (not_rtx))
3921 operands[4] = operands[0];
3922 operands[5] = not_rtx;
3926 operands[5] = operands[0];
3927 operands[4] = not_rtx;
3930 [(set_attr "length" "8")
3931 (set_attr "ce_count" "2")
3932 (set_attr "predicable" "yes")
3933 (set_attr "type" "multiple")]
3936 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3937 ; insns are available?
3939 [(set (match_operand:SI 0 "s_register_operand" "")
3940 (match_operator:SI 1 "logical_binary_operator"
3941 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3942 (match_operand:SI 3 "const_int_operand" "")
3943 (match_operand:SI 4 "const_int_operand" ""))
3944 (match_operator:SI 9 "logical_binary_operator"
3945 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3946 (match_operand:SI 6 "const_int_operand" ""))
3947 (match_operand:SI 7 "s_register_operand" "")])]))
3948 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3950 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3951 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3954 [(ashift:SI (match_dup 2) (match_dup 4))
3958 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3961 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3965 [(set (match_operand:SI 0 "s_register_operand" "")
3966 (match_operator:SI 1 "logical_binary_operator"
3967 [(match_operator:SI 9 "logical_binary_operator"
3968 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3969 (match_operand:SI 6 "const_int_operand" ""))
3970 (match_operand:SI 7 "s_register_operand" "")])
3971 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3972 (match_operand:SI 3 "const_int_operand" "")
3973 (match_operand:SI 4 "const_int_operand" ""))]))
3974 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3976 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3977 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3980 [(ashift:SI (match_dup 2) (match_dup 4))
3984 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3987 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3991 [(set (match_operand:SI 0 "s_register_operand" "")
3992 (match_operator:SI 1 "logical_binary_operator"
3993 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3994 (match_operand:SI 3 "const_int_operand" "")
3995 (match_operand:SI 4 "const_int_operand" ""))
3996 (match_operator:SI 9 "logical_binary_operator"
3997 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3998 (match_operand:SI 6 "const_int_operand" ""))
3999 (match_operand:SI 7 "s_register_operand" "")])]))
4000 (clobber (match_operand:SI 8 "s_register_operand" ""))]
4002 && GET_CODE (operands[1]) == GET_CODE (operands[9])
4003 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
4006 [(ashift:SI (match_dup 2) (match_dup 4))
4010 [(ashiftrt:SI (match_dup 8) (match_dup 6))
4013 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
4017 [(set (match_operand:SI 0 "s_register_operand" "")
4018 (match_operator:SI 1 "logical_binary_operator"
4019 [(match_operator:SI 9 "logical_binary_operator"
4020 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
4021 (match_operand:SI 6 "const_int_operand" ""))
4022 (match_operand:SI 7 "s_register_operand" "")])
4023 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
4024 (match_operand:SI 3 "const_int_operand" "")
4025 (match_operand:SI 4 "const_int_operand" ""))]))
4026 (clobber (match_operand:SI 8 "s_register_operand" ""))]
4028 && GET_CODE (operands[1]) == GET_CODE (operands[9])
4029 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
4032 [(ashift:SI (match_dup 2) (match_dup 4))
4036 [(ashiftrt:SI (match_dup 8) (match_dup 6))
4039 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
4043 ;; Minimum and maximum insns
4045 (define_expand "smaxsi3"
4047 (set (match_operand:SI 0 "s_register_operand")
4048 (smax:SI (match_operand:SI 1 "s_register_operand")
4049 (match_operand:SI 2 "arm_rhs_operand")))
4050 (clobber (reg:CC CC_REGNUM))])]
4053 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
4055 /* No need for a clobber of the condition code register here. */
4056 emit_insn (gen_rtx_SET (operands[0],
4057 gen_rtx_SMAX (SImode, operands[1],
4063 (define_insn "*smax_0"
4064 [(set (match_operand:SI 0 "s_register_operand" "=r")
4065 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
4068 "bic%?\\t%0, %1, %1, asr #31"
4069 [(set_attr "predicable" "yes")
4070 (set_attr "type" "logic_shift_reg")]
4073 (define_insn "*smax_m1"
4074 [(set (match_operand:SI 0 "s_register_operand" "=r")
4075 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
4078 "orr%?\\t%0, %1, %1, asr #31"
4079 [(set_attr "predicable" "yes")
4080 (set_attr "type" "logic_shift_reg")]
4083 (define_insn_and_split "*arm_smax_insn"
4084 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4085 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4086 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4087 (clobber (reg:CC CC_REGNUM))]
4090 ; cmp\\t%1, %2\;movlt\\t%0, %2
4091 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
4093 [(set (reg:CC CC_REGNUM)
4094 (compare:CC (match_dup 1) (match_dup 2)))
4096 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
4100 [(set_attr "conds" "clob")
4101 (set_attr "length" "8,12")
4102 (set_attr "type" "multiple")]
4105 (define_expand "sminsi3"
4107 (set (match_operand:SI 0 "s_register_operand")
4108 (smin:SI (match_operand:SI 1 "s_register_operand")
4109 (match_operand:SI 2 "arm_rhs_operand")))
4110 (clobber (reg:CC CC_REGNUM))])]
4113 if (operands[2] == const0_rtx)
4115 /* No need for a clobber of the condition code register here. */
4116 emit_insn (gen_rtx_SET (operands[0],
4117 gen_rtx_SMIN (SImode, operands[1],
4123 (define_insn "*smin_0"
4124 [(set (match_operand:SI 0 "s_register_operand" "=r")
4125 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4128 "and%?\\t%0, %1, %1, asr #31"
4129 [(set_attr "predicable" "yes")
4130 (set_attr "type" "logic_shift_reg")]
4133 (define_insn_and_split "*arm_smin_insn"
4134 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4135 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4136 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4137 (clobber (reg:CC CC_REGNUM))]
4140 ; cmp\\t%1, %2\;movge\\t%0, %2
4141 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4143 [(set (reg:CC CC_REGNUM)
4144 (compare:CC (match_dup 1) (match_dup 2)))
4146 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4150 [(set_attr "conds" "clob")
4151 (set_attr "length" "8,12")
4152 (set_attr "type" "multiple,multiple")]
4155 (define_expand "umaxsi3"
4157 (set (match_operand:SI 0 "s_register_operand")
4158 (umax:SI (match_operand:SI 1 "s_register_operand")
4159 (match_operand:SI 2 "arm_rhs_operand")))
4160 (clobber (reg:CC CC_REGNUM))])]
4165 (define_insn_and_split "*arm_umaxsi3"
4166 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4167 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4168 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4169 (clobber (reg:CC CC_REGNUM))]
4172 ; cmp\\t%1, %2\;movcc\\t%0, %2
4173 ; cmp\\t%1, %2\;movcs\\t%0, %1
4174 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4176 [(set (reg:CC CC_REGNUM)
4177 (compare:CC (match_dup 1) (match_dup 2)))
4179 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4183 [(set_attr "conds" "clob")
4184 (set_attr "length" "8,8,12")
4185 (set_attr "type" "store_4")]
4188 (define_expand "uminsi3"
4190 (set (match_operand:SI 0 "s_register_operand")
4191 (umin:SI (match_operand:SI 1 "s_register_operand")
4192 (match_operand:SI 2 "arm_rhs_operand")))
4193 (clobber (reg:CC CC_REGNUM))])]
4198 (define_insn_and_split "*arm_uminsi3"
4199 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4200 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4201 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4202 (clobber (reg:CC CC_REGNUM))]
4205 ; cmp\\t%1, %2\;movcs\\t%0, %2
4206 ; cmp\\t%1, %2\;movcc\\t%0, %1
4207 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4209 [(set (reg:CC CC_REGNUM)
4210 (compare:CC (match_dup 1) (match_dup 2)))
4212 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4216 [(set_attr "conds" "clob")
4217 (set_attr "length" "8,8,12")
4218 (set_attr "type" "store_4")]
4221 (define_insn "*store_minmaxsi"
4222 [(set (match_operand:SI 0 "memory_operand" "=m")
4223 (match_operator:SI 3 "minmax_operator"
4224 [(match_operand:SI 1 "s_register_operand" "r")
4225 (match_operand:SI 2 "s_register_operand" "r")]))
4226 (clobber (reg:CC CC_REGNUM))]
4227 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4229 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4230 operands[1], operands[2]);
4231 output_asm_insn (\"cmp\\t%1, %2\", operands);
4233 output_asm_insn (\"ite\t%d3\", operands);
4234 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4235 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4238 [(set_attr "conds" "clob")
4239 (set (attr "length")
4240 (if_then_else (eq_attr "is_thumb" "yes")
4243 (set_attr "type" "store_4")]
4246 ; Reject the frame pointer in operand[1], since reloading this after
4247 ; it has been eliminated can cause carnage.
4248 (define_insn "*minmax_arithsi"
4249 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4250 (match_operator:SI 4 "shiftable_operator"
4251 [(match_operator:SI 5 "minmax_operator"
4252 [(match_operand:SI 2 "s_register_operand" "r,r")
4253 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4254 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4255 (clobber (reg:CC CC_REGNUM))]
4256 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4259 enum rtx_code code = GET_CODE (operands[4]);
4262 if (which_alternative != 0 || operands[3] != const0_rtx
4263 || (code != PLUS && code != IOR && code != XOR))
4268 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4269 operands[2], operands[3]);
4270 output_asm_insn (\"cmp\\t%2, %3\", operands);
4274 output_asm_insn (\"ite\\t%d5\", operands);
4276 output_asm_insn (\"it\\t%d5\", operands);
4278 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4280 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4283 [(set_attr "conds" "clob")
4284 (set (attr "length")
4285 (if_then_else (eq_attr "is_thumb" "yes")
4288 (set_attr "type" "multiple")]
4291 ; Reject the frame pointer in operand[1], since reloading this after
4292 ; it has been eliminated can cause carnage.
4293 (define_insn_and_split "*minmax_arithsi_non_canon"
4294 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4296 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4297 (match_operator:SI 4 "minmax_operator"
4298 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4299 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4300 (clobber (reg:CC CC_REGNUM))]
4301 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4302 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4304 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4305 [(set (reg:CC CC_REGNUM)
4306 (compare:CC (match_dup 2) (match_dup 3)))
4308 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4310 (minus:SI (match_dup 1)
4312 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4316 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4317 operands[2], operands[3]);
4318 enum rtx_code rc = minmax_code (operands[4]);
4319 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4320 operands[2], operands[3]);
4322 if (mode == CCFPmode || mode == CCFPEmode)
4323 rc = reverse_condition_maybe_unordered (rc);
4325 rc = reverse_condition (rc);
4326 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4327 if (CONST_INT_P (operands[3]))
4328 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4330 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4332 [(set_attr "conds" "clob")
4333 (set (attr "length")
4334 (if_then_else (eq_attr "is_thumb" "yes")
4337 (set_attr "type" "multiple")]
4341 (define_expand "arm_<ss_op>"
4342 [(set (match_operand:SI 0 "s_register_operand")
4343 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4344 (match_operand:SI 2 "s_register_operand")))]
4345 "TARGET_DSP_MULTIPLY"
4348 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4349 operands[1], operands[2]));
4351 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4356 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4357 [(set (match_operand:SI 0 "s_register_operand" "=r")
4358 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4359 (match_operand:SI 2 "s_register_operand" "r")))]
4360 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4361 "<ss_op>%?\t%0, %1, %2"
4362 [(set_attr "predicable" "yes")
4363 (set_attr "type" "alu_dsp_reg")]
4366 (define_code_iterator SAT [smin smax])
4367 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4368 (define_code_attr SATlo [(smin "1") (smax "2")])
4369 (define_code_attr SAThi [(smin "2") (smax "1")])
4371 (define_expand "arm_ssat"
4372 [(match_operand:SI 0 "s_register_operand")
4373 (match_operand:SI 1 "s_register_operand")
4374 (match_operand:SI 2 "const_int_operand")]
4375 "TARGET_32BIT && arm_arch6"
4377 HOST_WIDE_INT val = INTVAL (operands[2]);
4378 /* The builtin checking code should have ensured the right
4379 range for the immediate. */
4380 gcc_assert (IN_RANGE (val, 1, 32));
4381 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4382 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4383 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4384 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4386 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4387 up_rtx, operands[1]));
4389 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4394 (define_expand "arm_usat"
4395 [(match_operand:SI 0 "s_register_operand")
4396 (match_operand:SI 1 "s_register_operand")
4397 (match_operand:SI 2 "const_int_operand")]
4398 "TARGET_32BIT && arm_arch6"
4400 HOST_WIDE_INT val = INTVAL (operands[2]);
4401 /* The builtin checking code should have ensured the right
4402 range for the immediate. */
4403 gcc_assert (IN_RANGE (val, 0, 31));
4404 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4405 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4406 rtx lo_rtx = CONST0_RTX (SImode);
4408 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4411 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4416 (define_insn "arm_get_apsr"
4417 [(set (match_operand:SI 0 "s_register_operand" "=r")
4418 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4421 [(set_attr "predicable" "yes")
4422 (set_attr "conds" "use")]
4425 (define_insn "arm_set_apsr"
4426 [(set (reg:CC APSRQ_REGNUM)
4428 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4430 "msr%?\tAPSR_nzcvq, %0"
4431 [(set_attr "predicable" "yes")
4432 (set_attr "conds" "set")]
4435 ;; Read the APSR and extract the Q bit (bit 27)
4436 (define_expand "arm_saturation_occurred"
4437 [(match_operand:SI 0 "s_register_operand")]
4440 rtx apsr = gen_reg_rtx (SImode);
4441 emit_insn (gen_arm_get_apsr (apsr));
4442 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4443 gen_int_mode (27, SImode)));
4448 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4449 (define_expand "arm_set_saturation"
4450 [(match_operand:SI 0 "reg_or_int_operand")]
4453 rtx apsr = gen_reg_rtx (SImode);
4454 emit_insn (gen_arm_get_apsr (apsr));
4455 rtx to_insert = gen_reg_rtx (SImode);
4456 if (CONST_INT_P (operands[0]))
4457 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4458 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4461 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4462 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4463 CONST0_RTX (SImode)));
4465 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4466 gen_int_mode (27, SImode), to_insert));
4467 emit_insn (gen_arm_set_apsr (apsr));
4472 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4473 [(set (match_operand:SI 0 "s_register_operand" "=r")
4474 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4475 (match_operand:SI 1 "const_int_operand" "i"))
4476 (match_operand:SI 2 "const_int_operand" "i")))]
4477 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4478 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4482 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4483 &mask, &signed_sat))
4486 operands[1] = GEN_INT (mask);
4488 return "ssat%?\t%0, %1, %3";
4490 return "usat%?\t%0, %1, %3";
4492 [(set_attr "predicable" "yes")
4493 (set_attr "type" "alus_imm")]
4496 (define_insn "*satsi_<SAT:code>_shift"
4497 [(set (match_operand:SI 0 "s_register_operand" "=r")
4498 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4499 [(match_operand:SI 4 "s_register_operand" "r")
4500 (match_operand:SI 5 "const_int_operand" "i")])
4501 (match_operand:SI 1 "const_int_operand" "i"))
4502 (match_operand:SI 2 "const_int_operand" "i")))]
4503 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4504 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4508 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4509 &mask, &signed_sat))
4512 operands[1] = GEN_INT (mask);
4514 return "ssat%?\t%0, %1, %4%S3";
4516 return "usat%?\t%0, %1, %4%S3";
4518 [(set_attr "predicable" "yes")
4519 (set_attr "shift" "3")
4520 (set_attr "type" "logic_shift_reg")])
4522 ;; Custom Datapath Extension insns.
4523 (define_insn "arm_cx1<mode>"
4524 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4525 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4526 (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4529 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4530 [(set_attr "type" "coproc")]
4533 (define_insn "arm_cx1a<mode>"
4534 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4535 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4536 (match_operand:SIDI 2 "s_register_operand" "0")
4537 (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4540 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4541 [(set_attr "type" "coproc")]
4544 (define_insn "arm_cx2<mode>"
4545 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4546 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4547 (match_operand:SI 2 "s_register_operand" "r")
4548 (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4551 "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4552 [(set_attr "type" "coproc")]
4555 (define_insn "arm_cx2a<mode>"
4556 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4557 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4558 (match_operand:SIDI 2 "s_register_operand" "0")
4559 (match_operand:SI 3 "s_register_operand" "r")
4560 (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4563 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4564 [(set_attr "type" "coproc")]
4567 (define_insn "arm_cx3<mode>"
4568 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4569 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4570 (match_operand:SI 2 "s_register_operand" "r")
4571 (match_operand:SI 3 "s_register_operand" "r")
4572 (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4575 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4576 [(set_attr "type" "coproc")]
4579 (define_insn "arm_cx3a<mode>"
4580 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4581 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4582 (match_operand:SIDI 2 "s_register_operand" "0")
4583 (match_operand:SI 3 "s_register_operand" "r")
4584 (match_operand:SI 4 "s_register_operand" "r")
4585 (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4588 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4589 [(set_attr "type" "coproc")]
4592 ;; Shift and rotation insns
4594 (define_expand "ashldi3"
4595 [(set (match_operand:DI 0 "s_register_operand")
4596 (ashift:DI (match_operand:DI 1 "s_register_operand")
4597 (match_operand:SI 2 "reg_or_int_operand")))]
4600 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4602 if (!reg_or_int_operand (operands[2], SImode))
4603 operands[2] = force_reg (SImode, operands[2]);
4605 /* Armv8.1-M Mainline double shifts are not expanded. */
4606 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4607 && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
4609 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4610 emit_insn (gen_movdi (operands[0], operands[1]));
4612 emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4617 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4618 operands[2], gen_reg_rtx (SImode),
4619 gen_reg_rtx (SImode));
4623 (define_expand "ashlsi3"
4624 [(set (match_operand:SI 0 "s_register_operand")
4625 (ashift:SI (match_operand:SI 1 "s_register_operand")
4626 (match_operand:SI 2 "arm_rhs_operand")))]
4629 if (CONST_INT_P (operands[2])
4630 && (UINTVAL (operands[2])) > 31)
4632 emit_insn (gen_movsi (operands[0], const0_rtx));
4638 (define_expand "ashrdi3"
4639 [(set (match_operand:DI 0 "s_register_operand")
4640 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4641 (match_operand:SI 2 "reg_or_int_operand")))]
4644 /* Armv8.1-M Mainline double shifts are not expanded. */
4645 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4646 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4648 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4649 emit_insn (gen_movdi (operands[0], operands[1]));
4651 emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4655 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4656 operands[2], gen_reg_rtx (SImode),
4657 gen_reg_rtx (SImode));
4661 (define_expand "ashrsi3"
4662 [(set (match_operand:SI 0 "s_register_operand")
4663 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4664 (match_operand:SI 2 "arm_rhs_operand")))]
4667 if (CONST_INT_P (operands[2])
4668 && UINTVAL (operands[2]) > 31)
4669 operands[2] = GEN_INT (31);
4673 (define_expand "lshrdi3"
4674 [(set (match_operand:DI 0 "s_register_operand")
4675 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4676 (match_operand:SI 2 "reg_or_int_operand")))]
4679 /* Armv8.1-M Mainline double shifts are not expanded. */
4680 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4681 && long_shift_imm (operands[2], GET_MODE (operands[2])))
4683 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4684 emit_insn (gen_movdi (operands[0], operands[1]));
4686 emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4690 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4691 operands[2], gen_reg_rtx (SImode),
4692 gen_reg_rtx (SImode));
4696 (define_expand "lshrsi3"
4697 [(set (match_operand:SI 0 "s_register_operand")
4698 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4699 (match_operand:SI 2 "arm_rhs_operand")))]
4702 if (CONST_INT_P (operands[2])
4703 && (UINTVAL (operands[2])) > 31)
4705 emit_insn (gen_movsi (operands[0], const0_rtx));
4711 (define_expand "rotlsi3"
4712 [(set (match_operand:SI 0 "s_register_operand")
4713 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4714 (match_operand:SI 2 "reg_or_int_operand")))]
4717 if (CONST_INT_P (operands[2]))
4718 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4721 rtx reg = gen_reg_rtx (SImode);
4722 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4728 (define_expand "rotrsi3"
4729 [(set (match_operand:SI 0 "s_register_operand")
4730 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4731 (match_operand:SI 2 "arm_rhs_operand")))]
4736 if (CONST_INT_P (operands[2])
4737 && UINTVAL (operands[2]) > 31)
4738 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4740 else /* TARGET_THUMB1 */
4742 if (CONST_INT_P (operands [2]))
4743 operands [2] = force_reg (SImode, operands[2]);
4748 (define_insn "*arm_shiftsi3"
4749 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4750 (match_operator:SI 3 "shift_operator"
4751 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4752 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4754 "* return arm_output_shift(operands, 0);"
4755 [(set_attr "predicable" "yes")
4756 (set_attr "arch" "t2,t2,*,*")
4757 (set_attr "predicable_short_it" "yes,yes,no,no")
4758 (set_attr "length" "4")
4759 (set_attr "shift" "1")
4760 (set_attr "autodetect_type" "alu_shift_operator3")]
4763 (define_insn "*shiftsi3_compare0"
4764 [(set (reg:CC_NZ CC_REGNUM)
4765 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4766 [(match_operand:SI 1 "s_register_operand" "r,r")
4767 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4769 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4770 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4772 "* return arm_output_shift(operands, 1);"
4773 [(set_attr "conds" "set")
4774 (set_attr "shift" "1")
4775 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4778 (define_insn "*shiftsi3_compare0_scratch"
4779 [(set (reg:CC_NZ CC_REGNUM)
4780 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4781 [(match_operand:SI 1 "s_register_operand" "r,r")
4782 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4784 (clobber (match_scratch:SI 0 "=r,r"))]
4786 "* return arm_output_shift(operands, 1);"
4787 [(set_attr "conds" "set")
4788 (set_attr "shift" "1")
4789 (set_attr "type" "shift_imm,shift_reg")]
4792 (define_insn "*not_shiftsi"
4793 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4794 (not:SI (match_operator:SI 3 "shift_operator"
4795 [(match_operand:SI 1 "s_register_operand" "r,r")
4796 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4799 [(set_attr "predicable" "yes")
4800 (set_attr "shift" "1")
4801 (set_attr "arch" "32,a")
4802 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4804 (define_insn "*not_shiftsi_compare0"
4805 [(set (reg:CC_NZ CC_REGNUM)
4807 (not:SI (match_operator:SI 3 "shift_operator"
4808 [(match_operand:SI 1 "s_register_operand" "r,r")
4809 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4811 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4812 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4814 "mvns%?\\t%0, %1%S3"
4815 [(set_attr "conds" "set")
4816 (set_attr "shift" "1")
4817 (set_attr "arch" "32,a")
4818 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4820 (define_insn "*not_shiftsi_compare0_scratch"
4821 [(set (reg:CC_NZ CC_REGNUM)
4823 (not:SI (match_operator:SI 3 "shift_operator"
4824 [(match_operand:SI 1 "s_register_operand" "r,r")
4825 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4827 (clobber (match_scratch:SI 0 "=r,r"))]
4829 "mvns%?\\t%0, %1%S3"
4830 [(set_attr "conds" "set")
4831 (set_attr "shift" "1")
4832 (set_attr "arch" "32,a")
4833 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4835 ;; We don't really have extzv, but defining this using shifts helps
4836 ;; to reduce register pressure later on.
4838 (define_expand "extzv"
4839 [(set (match_operand 0 "s_register_operand")
4840 (zero_extract (match_operand 1 "nonimmediate_operand")
4841 (match_operand 2 "const_int_operand")
4842 (match_operand 3 "const_int_operand")))]
4843 "TARGET_THUMB1 || arm_arch_thumb2"
4846 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4847 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4849 if (arm_arch_thumb2)
4851 HOST_WIDE_INT width = INTVAL (operands[2]);
4852 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4854 if (unaligned_access && MEM_P (operands[1])
4855 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4859 if (BYTES_BIG_ENDIAN)
4860 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4865 base_addr = adjust_address (operands[1], SImode,
4866 bitpos / BITS_PER_UNIT);
4867 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4871 rtx dest = operands[0];
4872 rtx tmp = gen_reg_rtx (SImode);
4874 /* We may get a paradoxical subreg here. Strip it off. */
4875 if (GET_CODE (dest) == SUBREG
4876 && GET_MODE (dest) == SImode
4877 && GET_MODE (SUBREG_REG (dest)) == HImode)
4878 dest = SUBREG_REG (dest);
4880 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4883 base_addr = adjust_address (operands[1], HImode,
4884 bitpos / BITS_PER_UNIT);
4885 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4886 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4890 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4892 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4900 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4903 operands[3] = GEN_INT (rshift);
4907 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4911 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4912 operands[3], gen_reg_rtx (SImode)));
4917 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4919 (define_expand "extzv_t1"
4920 [(set (match_operand:SI 4 "s_register_operand")
4921 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4922 (match_operand:SI 2 "const_int_operand")))
4923 (set (match_operand:SI 0 "s_register_operand")
4924 (lshiftrt:SI (match_dup 4)
4925 (match_operand:SI 3 "const_int_operand")))]
4929 (define_expand "extv"
4930 [(set (match_operand 0 "s_register_operand")
4931 (sign_extract (match_operand 1 "nonimmediate_operand")
4932 (match_operand 2 "const_int_operand")
4933 (match_operand 3 "const_int_operand")))]
4936 HOST_WIDE_INT width = INTVAL (operands[2]);
4937 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4939 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4940 && (bitpos % BITS_PER_UNIT) == 0)
4944 if (BYTES_BIG_ENDIAN)
4945 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4949 base_addr = adjust_address (operands[1], SImode,
4950 bitpos / BITS_PER_UNIT);
4951 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4955 rtx dest = operands[0];
4956 rtx tmp = gen_reg_rtx (SImode);
4958 /* We may get a paradoxical subreg here. Strip it off. */
4959 if (GET_CODE (dest) == SUBREG
4960 && GET_MODE (dest) == SImode
4961 && GET_MODE (SUBREG_REG (dest)) == HImode)
4962 dest = SUBREG_REG (dest);
4964 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4967 base_addr = adjust_address (operands[1], HImode,
4968 bitpos / BITS_PER_UNIT);
4969 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4970 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4975 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4977 else if (GET_MODE (operands[0]) == SImode
4978 && GET_MODE (operands[1]) == SImode)
4980 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4988 ; Helper to expand register forms of extv with the proper modes.
4990 (define_expand "extv_regsi"
4991 [(set (match_operand:SI 0 "s_register_operand")
4992 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4993 (match_operand 2 "const_int_operand")
4994 (match_operand 3 "const_int_operand")))]
4999 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
5001 (define_insn "unaligned_loaddi"
5002 [(set (match_operand:DI 0 "s_register_operand" "=r")
5003 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
5004 UNSPEC_UNALIGNED_LOAD))]
5005 "TARGET_32BIT && TARGET_LDRD"
5007 return output_move_double (operands, true, NULL);
5009 [(set_attr "length" "8")
5010 (set_attr "type" "load_8")])
5012 (define_insn "unaligned_loadsi"
5013 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
5014 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
5015 UNSPEC_UNALIGNED_LOAD))]
5018 ldr\t%0, %1\t@ unaligned
5019 ldr%?\t%0, %1\t@ unaligned
5020 ldr%?\t%0, %1\t@ unaligned"
5021 [(set_attr "arch" "t1,t2,32")
5022 (set_attr "length" "2,2,4")
5023 (set_attr "predicable" "no,yes,yes")
5024 (set_attr "predicable_short_it" "no,yes,no")
5025 (set_attr "type" "load_4")])
5027 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
5028 ;; address (there's no immediate format). That's tricky to support
5029 ;; here and we don't really need this pattern for that case, so only
5030 ;; enable for 32-bit ISAs.
5031 (define_insn "unaligned_loadhis"
5032 [(set (match_operand:SI 0 "s_register_operand" "=r")
5034 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
5035 UNSPEC_UNALIGNED_LOAD)))]
5036 "unaligned_access && TARGET_32BIT"
5037 "ldrsh%?\t%0, %1\t@ unaligned"
5038 [(set_attr "predicable" "yes")
5039 (set_attr "type" "load_byte")])
5041 (define_insn "unaligned_loadhiu"
5042 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
5044 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
5045 UNSPEC_UNALIGNED_LOAD)))]
5048 ldrh\t%0, %1\t@ unaligned
5049 ldrh%?\t%0, %1\t@ unaligned
5050 ldrh%?\t%0, %1\t@ unaligned"
5051 [(set_attr "arch" "t1,t2,32")
5052 (set_attr "length" "2,2,4")
5053 (set_attr "predicable" "no,yes,yes")
5054 (set_attr "predicable_short_it" "no,yes,no")
5055 (set_attr "type" "load_byte")])
5057 (define_insn "unaligned_storedi"
5058 [(set (match_operand:DI 0 "memory_operand" "=m")
5059 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
5060 UNSPEC_UNALIGNED_STORE))]
5061 "TARGET_32BIT && TARGET_LDRD"
5063 return output_move_double (operands, true, NULL);
5065 [(set_attr "length" "8")
5066 (set_attr "type" "store_8")])
5068 (define_insn "unaligned_storesi"
5069 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
5070 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
5071 UNSPEC_UNALIGNED_STORE))]
5074 str\t%1, %0\t@ unaligned
5075 str%?\t%1, %0\t@ unaligned
5076 str%?\t%1, %0\t@ unaligned"
5077 [(set_attr "arch" "t1,t2,32")
5078 (set_attr "length" "2,2,4")
5079 (set_attr "predicable" "no,yes,yes")
5080 (set_attr "predicable_short_it" "no,yes,no")
5081 (set_attr "type" "store_4")])
5083 (define_insn "unaligned_storehi"
5084 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
5085 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
5086 UNSPEC_UNALIGNED_STORE))]
5089 strh\t%1, %0\t@ unaligned
5090 strh%?\t%1, %0\t@ unaligned
5091 strh%?\t%1, %0\t@ unaligned"
5092 [(set_attr "arch" "t1,t2,32")
5093 (set_attr "length" "2,2,4")
5094 (set_attr "predicable" "no,yes,yes")
5095 (set_attr "predicable_short_it" "no,yes,no")
5096 (set_attr "type" "store_4")])
5099 (define_insn "*extv_reg"
5100 [(set (match_operand:SI 0 "s_register_operand" "=r")
5101 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5102 (match_operand:SI 2 "const_int_operand" "n")
5103 (match_operand:SI 3 "const_int_operand" "n")))]
5105 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5106 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5107 "sbfx%?\t%0, %1, %3, %2"
5108 [(set_attr "length" "4")
5109 (set_attr "predicable" "yes")
5110 (set_attr "type" "bfm")]
5113 (define_insn "extzv_t2"
5114 [(set (match_operand:SI 0 "s_register_operand" "=r")
5115 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5116 (match_operand:SI 2 "const_int_operand" "n")
5117 (match_operand:SI 3 "const_int_operand" "n")))]
5119 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5120 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5121 "ubfx%?\t%0, %1, %3, %2"
5122 [(set_attr "length" "4")
5123 (set_attr "predicable" "yes")
5124 (set_attr "type" "bfm")]
5128 ;; Division instructions
5129 (define_insn "divsi3"
5130 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5131 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
5132 (match_operand:SI 2 "s_register_operand" "r,r")))]
5137 [(set_attr "arch" "32,v8mb")
5138 (set_attr "predicable" "yes")
5139 (set_attr "type" "sdiv")]
5142 (define_insn "udivsi3"
5143 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5144 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
5145 (match_operand:SI 2 "s_register_operand" "r,r")))]
5150 [(set_attr "arch" "32,v8mb")
5151 (set_attr "predicable" "yes")
5152 (set_attr "type" "udiv")]
5156 ;; Unary arithmetic insns
5158 (define_expand "negv<SIDI:mode>3"
5159 [(match_operand:SIDI 0 "s_register_operand")
5160 (match_operand:SIDI 1 "s_register_operand")
5161 (match_operand 2 "")]
5164 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
5169 (define_expand "negsi2"
5170 [(set (match_operand:SI 0 "s_register_operand")
5171 (neg:SI (match_operand:SI 1 "s_register_operand")))]
5176 (define_insn "*arm_negsi2"
5177 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5178 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5180 "rsb%?\\t%0, %1, #0"
5181 [(set_attr "predicable" "yes")
5182 (set_attr "predicable_short_it" "yes,no")
5183 (set_attr "arch" "t2,*")
5184 (set_attr "length" "4")
5185 (set_attr "type" "alu_imm")]
5188 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5189 ;; rather than (0 cmp reg). This gives the same results for unsigned
5190 ;; and equality compares which is what we mostly need here.
5191 (define_insn "negsi2_0compare"
5192 [(set (reg:CC_RSB CC_REGNUM)
5193 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5195 (set (match_operand:SI 0 "s_register_operand" "=l,r")
5196 (neg:SI (match_dup 1)))]
5201 [(set_attr "conds" "set")
5202 (set_attr "arch" "t2,*")
5203 (set_attr "length" "2,*")
5204 (set_attr "type" "alus_imm")]
5207 (define_insn "negsi2_carryin"
5208 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5209 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5210 (match_operand:SI 2 "arm_borrow_operation" "")))]
5214 sbc\\t%0, %1, %1, lsl #1"
5215 [(set_attr "conds" "use")
5216 (set_attr "arch" "a,t2")
5217 (set_attr "type" "adc_imm,adc_reg")]
5220 (define_expand "negsf2"
5221 [(set (match_operand:SF 0 "s_register_operand")
5222 (neg:SF (match_operand:SF 1 "s_register_operand")))]
5223 "TARGET_32BIT && TARGET_HARD_FLOAT"
5227 (define_expand "negdf2"
5228 [(set (match_operand:DF 0 "s_register_operand")
5229 (neg:DF (match_operand:DF 1 "s_register_operand")))]
5230 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5233 ;; abssi2 doesn't really clobber the condition codes if a different register
5234 ;; is being set. To keep things simple, assume during rtl manipulations that
5235 ;; it does, but tell the final scan operator the truth. Similarly for
5238 (define_expand "abssi2"
5240 [(set (match_operand:SI 0 "s_register_operand")
5241 (abs:SI (match_operand:SI 1 "s_register_operand")))
5242 (clobber (match_dup 2))])]
5246 operands[2] = gen_rtx_SCRATCH (SImode);
5248 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5251 (define_insn_and_split "*arm_abssi2"
5252 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5253 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5254 (clobber (reg:CC CC_REGNUM))]
5257 "&& reload_completed"
5260 /* if (which_alternative == 0) */
5261 if (REGNO(operands[0]) == REGNO(operands[1]))
5263 /* Emit the pattern:
5264 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5265 [(set (reg:CC CC_REGNUM)
5266 (compare:CC (match_dup 0) (const_int 0)))
5267 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5268 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5270 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5271 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5272 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5273 (gen_rtx_LT (SImode,
5274 gen_rtx_REG (CCmode, CC_REGNUM),
5276 (gen_rtx_SET (operands[0],
5277 (gen_rtx_MINUS (SImode,
5284 /* Emit the pattern:
5285 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5287 (xor:SI (match_dup 1)
5288 (ashiftrt:SI (match_dup 1) (const_int 31))))
5290 (minus:SI (match_dup 0)
5291 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5293 emit_insn (gen_rtx_SET (operands[0],
5294 gen_rtx_XOR (SImode,
5295 gen_rtx_ASHIFTRT (SImode,
5299 emit_insn (gen_rtx_SET (operands[0],
5300 gen_rtx_MINUS (SImode,
5302 gen_rtx_ASHIFTRT (SImode,
5308 [(set_attr "conds" "clob,*")
5309 (set_attr "shift" "1")
5310 (set_attr "predicable" "no, yes")
5311 (set_attr "length" "8")
5312 (set_attr "type" "multiple")]
5315 (define_insn_and_split "*arm_neg_abssi2"
5316 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5317 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5318 (clobber (reg:CC CC_REGNUM))]
5321 "&& reload_completed"
5324 /* if (which_alternative == 0) */
5325 if (REGNO (operands[0]) == REGNO (operands[1]))
5327 /* Emit the pattern:
5328 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5330 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5331 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5332 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5334 gen_rtx_REG (CCmode, CC_REGNUM),
5336 gen_rtx_SET (operands[0],
5337 (gen_rtx_MINUS (SImode,
5343 /* Emit the pattern:
5344 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5346 emit_insn (gen_rtx_SET (operands[0],
5347 gen_rtx_XOR (SImode,
5348 gen_rtx_ASHIFTRT (SImode,
5352 emit_insn (gen_rtx_SET (operands[0],
5353 gen_rtx_MINUS (SImode,
5354 gen_rtx_ASHIFTRT (SImode,
5361 [(set_attr "conds" "clob,*")
5362 (set_attr "shift" "1")
5363 (set_attr "predicable" "no, yes")
5364 (set_attr "length" "8")
5365 (set_attr "type" "multiple")]
5368 (define_expand "abssf2"
5369 [(set (match_operand:SF 0 "s_register_operand")
5370 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5371 "TARGET_32BIT && TARGET_HARD_FLOAT"
5374 (define_expand "absdf2"
5375 [(set (match_operand:DF 0 "s_register_operand")
5376 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5377 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5380 (define_expand "sqrtsf2"
5381 [(set (match_operand:SF 0 "s_register_operand")
5382 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5383 "TARGET_32BIT && TARGET_HARD_FLOAT"
5386 (define_expand "sqrtdf2"
5387 [(set (match_operand:DF 0 "s_register_operand")
5388 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5389 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5392 (define_expand "one_cmplsi2"
5393 [(set (match_operand:SI 0 "s_register_operand")
5394 (not:SI (match_operand:SI 1 "s_register_operand")))]
5399 (define_insn "*arm_one_cmplsi2"
5400 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5401 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5404 [(set_attr "predicable" "yes")
5405 (set_attr "predicable_short_it" "yes,no")
5406 (set_attr "arch" "t2,*")
5407 (set_attr "length" "4")
5408 (set_attr "type" "mvn_reg")]
5411 (define_insn "*notsi_compare0"
5412 [(set (reg:CC_NZ CC_REGNUM)
5413 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5415 (set (match_operand:SI 0 "s_register_operand" "=r")
5416 (not:SI (match_dup 1)))]
5419 [(set_attr "conds" "set")
5420 (set_attr "type" "mvn_reg")]
5423 (define_insn "*notsi_compare0_scratch"
5424 [(set (reg:CC_NZ CC_REGNUM)
5425 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5427 (clobber (match_scratch:SI 0 "=r"))]
5430 [(set_attr "conds" "set")
5431 (set_attr "type" "mvn_reg")]
5434 ;; Fixed <--> Floating conversion insns
5436 (define_expand "floatsihf2"
5437 [(set (match_operand:HF 0 "general_operand")
5438 (float:HF (match_operand:SI 1 "general_operand")))]
5442 rtx op1 = gen_reg_rtx (SFmode);
5443 expand_float (op1, operands[1], 0);
5444 op1 = convert_to_mode (HFmode, op1, 0);
5445 emit_move_insn (operands[0], op1);
5450 (define_expand "floatdihf2"
5451 [(set (match_operand:HF 0 "general_operand")
5452 (float:HF (match_operand:DI 1 "general_operand")))]
5456 rtx op1 = gen_reg_rtx (SFmode);
5457 expand_float (op1, operands[1], 0);
5458 op1 = convert_to_mode (HFmode, op1, 0);
5459 emit_move_insn (operands[0], op1);
5464 (define_expand "floatsisf2"
5465 [(set (match_operand:SF 0 "s_register_operand")
5466 (float:SF (match_operand:SI 1 "s_register_operand")))]
5467 "TARGET_32BIT && TARGET_HARD_FLOAT"
5471 (define_expand "floatsidf2"
5472 [(set (match_operand:DF 0 "s_register_operand")
5473 (float:DF (match_operand:SI 1 "s_register_operand")))]
5474 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5478 (define_expand "fix_trunchfsi2"
5479 [(set (match_operand:SI 0 "general_operand")
5480 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5484 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5485 expand_fix (operands[0], op1, 0);
5490 (define_expand "fix_trunchfdi2"
5491 [(set (match_operand:DI 0 "general_operand")
5492 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5496 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5497 expand_fix (operands[0], op1, 0);
5502 (define_expand "fix_truncsfsi2"
5503 [(set (match_operand:SI 0 "s_register_operand")
5504 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5505 "TARGET_32BIT && TARGET_HARD_FLOAT"
5509 (define_expand "fix_truncdfsi2"
5510 [(set (match_operand:SI 0 "s_register_operand")
5511 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5512 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5518 (define_expand "truncdfsf2"
5519 [(set (match_operand:SF 0 "s_register_operand")
5521 (match_operand:DF 1 "s_register_operand")))]
5522 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5526 ;; DFmode to HFmode conversions on targets without a single-step hardware
5527 ;; instruction for it would have to go through SFmode. This is dangerous
5528 ;; as it introduces double rounding.
5530 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5531 ;; a single-step instruction.
5533 (define_expand "truncdfhf2"
5534 [(set (match_operand:HF 0 "s_register_operand")
5536 (match_operand:DF 1 "s_register_operand")))]
5537 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5538 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5540 /* We don't have a direct instruction for this, so we must be in
5541 an unsafe math mode, and going via SFmode. */
5543 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5546 op1 = convert_to_mode (SFmode, operands[1], 0);
5547 op1 = convert_to_mode (HFmode, op1, 0);
5548 emit_move_insn (operands[0], op1);
5551 /* Otherwise, we will pick this up as a single instruction with
5552 no intermediary rounding. */
5556 ;; Zero and sign extension instructions.
5558 (define_expand "zero_extend<mode>di2"
5559 [(set (match_operand:DI 0 "s_register_operand" "")
5560 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5561 "TARGET_32BIT <qhs_zextenddi_cond>"
5563 rtx res_lo, res_hi, op0_lo, op0_hi;
5564 res_lo = gen_lowpart (SImode, operands[0]);
5565 res_hi = gen_highpart (SImode, operands[0]);
5566 if (can_create_pseudo_p ())
5568 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5569 op0_hi = gen_reg_rtx (SImode);
5573 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5576 if (<MODE>mode != SImode)
5577 emit_insn (gen_rtx_SET (op0_lo,
5578 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5579 emit_insn (gen_movsi (op0_hi, const0_rtx));
5580 if (res_lo != op0_lo)
5581 emit_move_insn (res_lo, op0_lo);
5582 if (res_hi != op0_hi)
5583 emit_move_insn (res_hi, op0_hi);
5588 (define_expand "extend<mode>di2"
5589 [(set (match_operand:DI 0 "s_register_operand" "")
5590 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5591 "TARGET_32BIT <qhs_sextenddi_cond>"
5593 rtx res_lo, res_hi, op0_lo, op0_hi;
5594 res_lo = gen_lowpart (SImode, operands[0]);
5595 res_hi = gen_highpart (SImode, operands[0]);
5596 if (can_create_pseudo_p ())
5598 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5599 op0_hi = gen_reg_rtx (SImode);
5603 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5606 if (<MODE>mode != SImode)
5607 emit_insn (gen_rtx_SET (op0_lo,
5608 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5609 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5610 if (res_lo != op0_lo)
5611 emit_move_insn (res_lo, op0_lo);
5612 if (res_hi != op0_hi)
5613 emit_move_insn (res_hi, op0_hi);
5618 ;; Splits for all extensions to DImode
5620 [(set (match_operand:DI 0 "s_register_operand" "")
5621 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5623 [(set (match_dup 0) (match_dup 1))]
5625 rtx lo_part = gen_lowpart (SImode, operands[0]);
5626 machine_mode src_mode = GET_MODE (operands[1]);
5628 if (src_mode == SImode)
5629 emit_move_insn (lo_part, operands[1]);
5631 emit_insn (gen_rtx_SET (lo_part,
5632 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5633 operands[0] = gen_highpart (SImode, operands[0]);
5634 operands[1] = const0_rtx;
5638 [(set (match_operand:DI 0 "s_register_operand" "")
5639 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5641 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5643 rtx lo_part = gen_lowpart (SImode, operands[0]);
5644 machine_mode src_mode = GET_MODE (operands[1]);
5646 if (src_mode == SImode)
5647 emit_move_insn (lo_part, operands[1]);
5649 emit_insn (gen_rtx_SET (lo_part,
5650 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5651 operands[1] = lo_part;
5652 operands[0] = gen_highpart (SImode, operands[0]);
5655 (define_expand "zero_extendhisi2"
5656 [(set (match_operand:SI 0 "s_register_operand")
5657 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5660 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5662 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5665 if (!arm_arch6 && !MEM_P (operands[1]))
5667 rtx t = gen_lowpart (SImode, operands[1]);
5668 rtx tmp = gen_reg_rtx (SImode);
5669 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5670 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5676 [(set (match_operand:SI 0 "s_register_operand" "")
5677 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5678 "!TARGET_THUMB2 && !arm_arch6"
5679 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5680 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5682 operands[2] = gen_lowpart (SImode, operands[1]);
5685 (define_insn "*arm_zero_extendhisi2"
5686 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5687 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5688 "TARGET_ARM && arm_arch4 && !arm_arch6"
5692 [(set_attr "type" "alu_shift_reg,load_byte")
5693 (set_attr "predicable" "yes")]
5696 (define_insn "*arm_zero_extendhisi2_v6"
5697 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5698 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5699 "TARGET_ARM && arm_arch6"
5703 [(set_attr "predicable" "yes")
5704 (set_attr "type" "extend,load_byte")]
5707 (define_insn "*arm_zero_extendhisi2addsi"
5708 [(set (match_operand:SI 0 "s_register_operand" "=r")
5709 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5710 (match_operand:SI 2 "s_register_operand" "r")))]
5712 "uxtah%?\\t%0, %2, %1"
5713 [(set_attr "type" "alu_shift_reg")
5714 (set_attr "predicable" "yes")]
5717 (define_expand "zero_extendqisi2"
5718 [(set (match_operand:SI 0 "s_register_operand")
5719 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5722 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5724 emit_insn (gen_andsi3 (operands[0],
5725 gen_lowpart (SImode, operands[1]),
5729 if (!arm_arch6 && !MEM_P (operands[1]))
5731 rtx t = gen_lowpart (SImode, operands[1]);
5732 rtx tmp = gen_reg_rtx (SImode);
5733 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5734 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5740 [(set (match_operand:SI 0 "s_register_operand" "")
5741 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5743 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5744 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5746 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5749 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5754 (define_insn "*arm_zero_extendqisi2"
5755 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5756 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5757 "TARGET_ARM && !arm_arch6"
5760 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5761 [(set_attr "length" "8,4")
5762 (set_attr "type" "alu_shift_reg,load_byte")
5763 (set_attr "predicable" "yes")]
5766 (define_insn "*arm_zero_extendqisi2_v6"
5767 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5768 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5769 "TARGET_ARM && arm_arch6"
5772 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5773 [(set_attr "type" "extend,load_byte")
5774 (set_attr "predicable" "yes")]
5777 (define_insn "*arm_zero_extendqisi2addsi"
5778 [(set (match_operand:SI 0 "s_register_operand" "=r")
5779 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5780 (match_operand:SI 2 "s_register_operand" "r")))]
5782 "uxtab%?\\t%0, %2, %1"
5783 [(set_attr "predicable" "yes")
5784 (set_attr "type" "alu_shift_reg")]
5788 [(set (match_operand:SI 0 "s_register_operand" "")
5789 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5790 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5791 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5792 [(set (match_dup 2) (match_dup 1))
5793 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5798 [(set (match_operand:SI 0 "s_register_operand" "")
5799 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5800 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5801 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5802 [(set (match_dup 2) (match_dup 1))
5803 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5809 [(set (match_operand:SI 0 "s_register_operand" "")
5810 (IOR_XOR:SI (and:SI (ashift:SI
5811 (match_operand:SI 1 "s_register_operand" "")
5812 (match_operand:SI 2 "const_int_operand" ""))
5813 (match_operand:SI 3 "const_int_operand" ""))
5815 (match_operator 5 "subreg_lowpart_operator"
5816 [(match_operand:SI 4 "s_register_operand" "")]))))]
5818 && (UINTVAL (operands[3])
5819 == (GET_MODE_MASK (GET_MODE (operands[5]))
5820 & (GET_MODE_MASK (GET_MODE (operands[5]))
5821 << (INTVAL (operands[2])))))"
5822 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5824 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5825 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5828 (define_insn "*compareqi_eq0"
5829 [(set (reg:CC_Z CC_REGNUM)
5830 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5834 [(set_attr "conds" "set")
5835 (set_attr "predicable" "yes")
5836 (set_attr "type" "logic_imm")]
5839 (define_expand "extendhisi2"
5840 [(set (match_operand:SI 0 "s_register_operand")
5841 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5846 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5849 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5851 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5855 if (!arm_arch6 && !MEM_P (operands[1]))
5857 rtx t = gen_lowpart (SImode, operands[1]);
5858 rtx tmp = gen_reg_rtx (SImode);
5859 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5860 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5867 [(set (match_operand:SI 0 "register_operand" "")
5868 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5869 (clobber (match_scratch:SI 2 ""))])]
5871 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5872 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5874 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5877 ;; This pattern will only be used when ldsh is not available
5878 (define_expand "extendhisi2_mem"
5879 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5881 (zero_extend:SI (match_dup 7)))
5882 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5883 (set (match_operand:SI 0 "" "")
5884 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5889 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5891 mem1 = change_address (operands[1], QImode, addr);
5892 mem2 = change_address (operands[1], QImode,
5893 plus_constant (Pmode, addr, 1));
5894 operands[0] = gen_lowpart (SImode, operands[0]);
5896 operands[2] = gen_reg_rtx (SImode);
5897 operands[3] = gen_reg_rtx (SImode);
5898 operands[6] = gen_reg_rtx (SImode);
5901 if (BYTES_BIG_ENDIAN)
5903 operands[4] = operands[2];
5904 operands[5] = operands[3];
5908 operands[4] = operands[3];
5909 operands[5] = operands[2];
5915 [(set (match_operand:SI 0 "register_operand" "")
5916 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5918 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5919 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5921 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5924 (define_insn "*arm_extendhisi2"
5925 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5926 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5927 "TARGET_ARM && arm_arch4 && !arm_arch6"
5931 [(set_attr "length" "8,4")
5932 (set_attr "type" "alu_shift_reg,load_byte")
5933 (set_attr "predicable" "yes")]
5936 ;; ??? Check Thumb-2 pool range
5937 (define_insn "*arm_extendhisi2_v6"
5938 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5939 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5940 "TARGET_32BIT && arm_arch6"
5944 [(set_attr "type" "extend,load_byte")
5945 (set_attr "predicable" "yes")]
5948 (define_insn "*arm_extendhisi2addsi"
5949 [(set (match_operand:SI 0 "s_register_operand" "=r")
5950 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5951 (match_operand:SI 2 "s_register_operand" "r")))]
5953 "sxtah%?\\t%0, %2, %1"
5954 [(set_attr "type" "alu_shift_reg")]
5957 (define_expand "extendqihi2"
5959 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5961 (set (match_operand:HI 0 "s_register_operand")
5962 (ashiftrt:SI (match_dup 2)
5967 if (arm_arch4 && MEM_P (operands[1]))
5969 emit_insn (gen_rtx_SET (operands[0],
5970 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5973 if (!s_register_operand (operands[1], QImode))
5974 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5975 operands[0] = gen_lowpart (SImode, operands[0]);
5976 operands[1] = gen_lowpart (SImode, operands[1]);
5977 operands[2] = gen_reg_rtx (SImode);
5981 (define_insn "*arm_extendqihi_insn"
5982 [(set (match_operand:HI 0 "s_register_operand" "=r")
5983 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5984 "TARGET_ARM && arm_arch4"
5986 [(set_attr "type" "load_byte")
5987 (set_attr "predicable" "yes")]
5990 (define_expand "extendqisi2"
5991 [(set (match_operand:SI 0 "s_register_operand")
5992 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5995 if (!arm_arch4 && MEM_P (operands[1]))
5996 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5998 if (!arm_arch6 && !MEM_P (operands[1]))
6000 rtx t = gen_lowpart (SImode, operands[1]);
6001 rtx tmp = gen_reg_rtx (SImode);
6002 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
6003 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
6009 [(set (match_operand:SI 0 "register_operand" "")
6010 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
6012 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
6013 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
6015 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
6018 (define_insn "*arm_extendqisi"
6019 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6020 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6021 "TARGET_ARM && arm_arch4 && !arm_arch6"
6025 [(set_attr "length" "8,4")
6026 (set_attr "type" "alu_shift_reg,load_byte")
6027 (set_attr "predicable" "yes")]
6030 (define_insn "*arm_extendqisi_v6"
6031 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6033 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6034 "TARGET_ARM && arm_arch6"
6038 [(set_attr "type" "extend,load_byte")
6039 (set_attr "predicable" "yes")]
6042 (define_insn "*arm_extendqisi2addsi"
6043 [(set (match_operand:SI 0 "s_register_operand" "=r")
6044 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
6045 (match_operand:SI 2 "s_register_operand" "r")))]
6047 "sxtab%?\\t%0, %2, %1"
6048 [(set_attr "type" "alu_shift_reg")
6049 (set_attr "predicable" "yes")]
6052 (define_insn "arm_<sup>xtb16"
6053 [(set (match_operand:SI 0 "s_register_operand" "=r")
6055 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
6057 "<sup>xtb16%?\\t%0, %1"
6058 [(set_attr "predicable" "yes")
6059 (set_attr "type" "alu_dsp_reg")])
6061 (define_insn "arm_<simd32_op>"
6062 [(set (match_operand:SI 0 "s_register_operand" "=r")
6064 [(match_operand:SI 1 "s_register_operand" "r")
6065 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
6067 "<simd32_op>%?\\t%0, %1, %2"
6068 [(set_attr "predicable" "yes")
6069 (set_attr "type" "alu_dsp_reg")])
6071 (define_insn "arm_usada8"
6072 [(set (match_operand:SI 0 "s_register_operand" "=r")
6074 [(match_operand:SI 1 "s_register_operand" "r")
6075 (match_operand:SI 2 "s_register_operand" "r")
6076 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
6078 "usada8%?\\t%0, %1, %2, %3"
6079 [(set_attr "predicable" "yes")
6080 (set_attr "type" "alu_dsp_reg")])
6082 (define_insn "arm_<simd32_op>"
6083 [(set (match_operand:DI 0 "s_register_operand" "=r")
6085 [(match_operand:SI 1 "s_register_operand" "r")
6086 (match_operand:SI 2 "s_register_operand" "r")
6087 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
6089 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
6090 [(set_attr "predicable" "yes")
6091 (set_attr "type" "smlald")])
6093 (define_insn "arm_<simd32_op>"
6094 [(set (match_operand:SI 0 "s_register_operand" "=r")
6096 [(match_operand:SI 1 "s_register_operand" "r")
6097 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
6098 (set (reg:CC APSRGE_REGNUM)
6099 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
6101 "<simd32_op>%?\\t%0, %1, %2"
6102 [(set_attr "predicable" "yes")
6103 (set_attr "type" "alu_sreg")])
6105 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6106 [(set (match_operand:SI 0 "s_register_operand" "=r")
6108 [(match_operand:SI 1 "s_register_operand" "r")
6109 (match_operand:SI 2 "s_register_operand" "r")
6110 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6111 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6112 "<simd32_op>%?\\t%0, %1, %2, %3"
6113 [(set_attr "predicable" "yes")
6114 (set_attr "type" "alu_sreg")])
6116 (define_expand "arm_<simd32_op>"
6117 [(set (match_operand:SI 0 "s_register_operand")
6119 [(match_operand:SI 1 "s_register_operand")
6120 (match_operand:SI 2 "s_register_operand")
6121 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6125 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6126 operands[2], operands[3]));
6128 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6129 operands[2], operands[3]));
6134 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6135 [(set (match_operand:SI 0 "s_register_operand" "=r")
6137 [(match_operand:SI 1 "s_register_operand" "r")
6138 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6139 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6140 "<simd32_op>%?\\t%0, %1, %2"
6141 [(set_attr "predicable" "yes")
6142 (set_attr "type" "alu_sreg")])
6144 (define_expand "arm_<simd32_op>"
6145 [(set (match_operand:SI 0 "s_register_operand")
6147 [(match_operand:SI 1 "s_register_operand")
6148 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6152 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6155 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6161 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6162 [(set (match_operand:SI 0 "s_register_operand" "=r")
6164 [(match_operand:SI 1 "s_register_operand" "r")
6165 (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6166 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6167 "<simd32_op>%?\\t%0, %2, %1"
6168 [(set_attr "predicable" "yes")
6169 (set_attr "type" "alu_sreg")])
6171 (define_expand "arm_<simd32_op>"
6172 [(set (match_operand:SI 0 "s_register_operand")
6174 [(match_operand:SI 1 "s_register_operand")
6175 (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6179 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6182 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6188 (define_insn "arm_sel"
6189 [(set (match_operand:SI 0 "s_register_operand" "=r")
6191 [(match_operand:SI 1 "s_register_operand" "r")
6192 (match_operand:SI 2 "s_register_operand" "r")
6193 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6195 "sel%?\\t%0, %1, %2"
6196 [(set_attr "predicable" "yes")
6197 (set_attr "type" "alu_sreg")])
6199 (define_expand "extendsfdf2"
6200 [(set (match_operand:DF 0 "s_register_operand")
6201 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6202 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6206 ;; HFmode -> DFmode conversions where we don't have an instruction for it
6207 ;; must go through SFmode.
6209 ;; This is always safe for an extend.
6211 (define_expand "extendhfdf2"
6212 [(set (match_operand:DF 0 "s_register_operand")
6213 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6216 /* We don't have a direct instruction for this, so go via SFmode. */
6217 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6220 op1 = convert_to_mode (SFmode, operands[1], 0);
6221 op1 = convert_to_mode (DFmode, op1, 0);
6222 emit_insn (gen_movdf (operands[0], op1));
6225 /* Otherwise, we're done producing RTL and will pick up the correct
6226 pattern to do this with one rounding-step in a single instruction. */
6230 ;; Move insns (including loads and stores)
6232 ;; XXX Just some ideas about movti.
6233 ;; I don't think these are a good idea on the arm, there just aren't enough
6235 ;;(define_expand "loadti"
6236 ;; [(set (match_operand:TI 0 "s_register_operand")
6237 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
6240 ;;(define_expand "storeti"
6241 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
6242 ;; (match_operand:TI 1 "s_register_operand"))]
6245 ;;(define_expand "movti"
6246 ;; [(set (match_operand:TI 0 "general_operand")
6247 ;; (match_operand:TI 1 "general_operand"))]
6253 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6254 ;; operands[1] = copy_to_reg (operands[1]);
6255 ;; if (MEM_P (operands[0]))
6256 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6257 ;; else if (MEM_P (operands[1]))
6258 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6262 ;; emit_insn (insn);
6266 ;; Recognize garbage generated above.
6269 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6270 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6274 ;; register mem = (which_alternative < 3);
6275 ;; register const char *template;
6277 ;; operands[mem] = XEXP (operands[mem], 0);
6278 ;; switch (which_alternative)
6280 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6281 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6282 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6283 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6284 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6285 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6287 ;; output_asm_insn (template, operands);
6291 (define_expand "movdi"
6292 [(set (match_operand:DI 0 "general_operand")
6293 (match_operand:DI 1 "general_operand"))]
6296 gcc_checking_assert (aligned_operand (operands[0], DImode));
6297 gcc_checking_assert (aligned_operand (operands[1], DImode));
6298 if (can_create_pseudo_p ())
6300 if (!REG_P (operands[0]))
6301 operands[1] = force_reg (DImode, operands[1]);
6303 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6304 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6306 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6307 when expanding function calls. */
6308 gcc_assert (can_create_pseudo_p ());
6309 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6311 /* Perform load into legal reg pair first, then move. */
6312 rtx reg = gen_reg_rtx (DImode);
6313 emit_insn (gen_movdi (reg, operands[1]));
6316 emit_move_insn (gen_lowpart (SImode, operands[0]),
6317 gen_lowpart (SImode, operands[1]));
6318 emit_move_insn (gen_highpart (SImode, operands[0]),
6319 gen_highpart (SImode, operands[1]));
6322 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6323 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6325 /* Avoid STRD's from an odd-numbered register pair in ARM state
6326 when expanding function prologue. */
6327 gcc_assert (can_create_pseudo_p ());
6328 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6329 ? gen_reg_rtx (DImode)
6331 emit_move_insn (gen_lowpart (SImode, split_dest),
6332 gen_lowpart (SImode, operands[1]));
6333 emit_move_insn (gen_highpart (SImode, split_dest),
6334 gen_highpart (SImode, operands[1]));
6335 if (split_dest != operands[0])
6336 emit_insn (gen_movdi (operands[0], split_dest));
6342 (define_insn "*arm_movdi"
6343 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6344 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6346 && !(TARGET_HARD_FLOAT)
6347 && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6349 && ( register_operand (operands[0], DImode)
6350 || register_operand (operands[1], DImode))"
6352 switch (which_alternative)
6359 /* Cannot load it directly, split to load it via MOV / MOVT. */
6360 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6364 return output_move_double (operands, true, NULL);
6367 [(set_attr "length" "8,12,16,8,8")
6368 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6369 (set_attr "arm_pool_range" "*,*,*,1020,*")
6370 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6371 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6372 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6376 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6377 (match_operand:ANY64 1 "immediate_operand" ""))]
6380 && (arm_disable_literal_pool
6381 || (arm_const_double_inline_cost (operands[1])
6382 <= arm_max_const_double_inline_cost ()))"
6385 arm_split_constant (SET, SImode, curr_insn,
6386 INTVAL (gen_lowpart (SImode, operands[1])),
6387 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6388 arm_split_constant (SET, SImode, curr_insn,
6389 INTVAL (gen_highpart_mode (SImode,
6390 GET_MODE (operands[0]),
6392 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6397 ; If optimizing for size, or if we have load delay slots, then
6398 ; we want to split the constant into two separate operations.
6399 ; In both cases this may split a trivial part into a single data op
6400 ; leaving a single complex constant to load. We can also get longer
6401 ; offsets in a LDR which means we get better chances of sharing the pool
6402 ; entries. Finally, we can normally do a better job of scheduling
6403 ; LDR instructions than we can with LDM.
6404 ; This pattern will only match if the one above did not.
6406 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6407 (match_operand:ANY64 1 "const_double_operand" ""))]
6408 "TARGET_ARM && reload_completed
6409 && arm_const_double_by_parts (operands[1])"
6410 [(set (match_dup 0) (match_dup 1))
6411 (set (match_dup 2) (match_dup 3))]
6413 operands[2] = gen_highpart (SImode, operands[0]);
6414 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6416 operands[0] = gen_lowpart (SImode, operands[0]);
6417 operands[1] = gen_lowpart (SImode, operands[1]);
6422 [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6423 (match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6424 "TARGET_EITHER && reload_completed"
6425 [(set (match_dup 0) (match_dup 1))
6426 (set (match_dup 2) (match_dup 3))]
6428 operands[2] = gen_highpart (SImode, operands[0]);
6429 operands[3] = gen_highpart (SImode, operands[1]);
6430 operands[0] = gen_lowpart (SImode, operands[0]);
6431 operands[1] = gen_lowpart (SImode, operands[1]);
6433 /* Handle a partial overlap. */
6434 if (rtx_equal_p (operands[0], operands[3]))
6436 rtx tmp0 = operands[0];
6437 rtx tmp1 = operands[1];
6439 operands[0] = operands[2];
6440 operands[1] = operands[3];
6447 ;; We can't actually do base+index doubleword loads if the index and
6448 ;; destination overlap. Split here so that we at least have chance to
6451 [(set (match_operand:DI 0 "s_register_operand" "")
6452 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6453 (match_operand:SI 2 "s_register_operand" ""))))]
6455 && reg_overlap_mentioned_p (operands[0], operands[1])
6456 && reg_overlap_mentioned_p (operands[0], operands[2])"
6458 (plus:SI (match_dup 1)
6461 (mem:DI (match_dup 4)))]
6463 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6467 (define_expand "movsi"
6468 [(set (match_operand:SI 0 "general_operand")
6469 (match_operand:SI 1 "general_operand"))]
6473 rtx base, offset, tmp;
6475 gcc_checking_assert (aligned_operand (operands[0], SImode));
6476 gcc_checking_assert (aligned_operand (operands[1], SImode));
6477 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6479 /* Everything except mem = const or mem = mem can be done easily. */
6480 if (MEM_P (operands[0]))
6481 operands[1] = force_reg (SImode, operands[1]);
6482 if (arm_general_register_operand (operands[0], SImode)
6483 && CONST_INT_P (operands[1])
6484 && !(const_ok_for_arm (INTVAL (operands[1]))
6485 || const_ok_for_arm (~INTVAL (operands[1]))))
6487 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6489 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6494 arm_split_constant (SET, SImode, NULL_RTX,
6495 INTVAL (operands[1]), operands[0], NULL_RTX,
6496 optimize && can_create_pseudo_p ());
6501 else /* Target doesn't have MOVT... */
6503 if (can_create_pseudo_p ())
6505 if (!REG_P (operands[0]))
6506 operands[1] = force_reg (SImode, operands[1]);
6510 split_const (operands[1], &base, &offset);
6511 if (INTVAL (offset) != 0
6512 && targetm.cannot_force_const_mem (SImode, operands[1]))
6514 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6515 emit_move_insn (tmp, base);
6516 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6520 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6522 /* Recognize the case where operand[1] is a reference to thread-local
6523 data and load its address to a register. Offsets have been split off
6525 if (arm_tls_referenced_p (operands[1]))
6526 operands[1] = legitimize_tls_address (operands[1], tmp);
6528 && (CONSTANT_P (operands[1])
6529 || symbol_mentioned_p (operands[1])
6530 || label_mentioned_p (operands[1])))
6532 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6537 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6538 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6539 ;; so this does not matter.
6540 (define_insn "*arm_movt"
6541 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6542 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6543 (match_operand:SI 2 "general_operand" "i,i")))]
6544 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6546 movt%?\t%0, #:upper16:%c2
6547 movt\t%0, #:upper16:%c2"
6548 [(set_attr "arch" "32,v8mb")
6549 (set_attr "predicable" "yes")
6550 (set_attr "length" "4")
6551 (set_attr "type" "alu_sreg")]
6554 (define_insn "*arm_movsi_insn"
6555 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6556 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6557 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6558 && ( register_operand (operands[0], SImode)
6559 || register_operand (operands[1], SImode))"
6567 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6568 (set_attr "predicable" "yes")
6569 (set_attr "arch" "*,*,*,v6t2,*,*")
6570 (set_attr "pool_range" "*,*,*,*,4096,*")
6571 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6575 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6576 (match_operand:SI 1 "const_int_operand" ""))]
6577 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6578 && (!(const_ok_for_arm (INTVAL (operands[1]))
6579 || const_ok_for_arm (~INTVAL (operands[1]))))"
6580 [(clobber (const_int 0))]
6582 arm_split_constant (SET, SImode, NULL_RTX,
6583 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6588 ;; A normal way to do (symbol + offset) requires three instructions at least
6589 ;; (depends on how big the offset is) as below:
6590 ;; movw r0, #:lower16:g
6591 ;; movw r0, #:upper16:g
6594 ;; A better way would be:
6595 ;; movw r0, #:lower16:g+4
6596 ;; movw r0, #:upper16:g+4
6598 ;; The limitation of this way is that the length of offset should be a 16-bit
6599 ;; signed value, because current assembler only supports REL type relocation for
6600 ;; such case. If the more powerful RELA type is supported in future, we should
6601 ;; update this pattern to go with better way.
6603 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6604 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6605 (match_operand:SI 2 "const_int_operand" ""))))]
6608 && arm_disable_literal_pool
6610 && GET_CODE (operands[1]) == SYMBOL_REF"
6611 [(clobber (const_int 0))]
6613 int offset = INTVAL (operands[2]);
6615 if (offset < -0x8000 || offset > 0x7fff)
6617 arm_emit_movpair (operands[0], operands[1]);
6618 emit_insn (gen_rtx_SET (operands[0],
6619 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6623 rtx op = gen_rtx_CONST (SImode,
6624 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6625 arm_emit_movpair (operands[0], op);
6630 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6631 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6632 ;; and lo_sum would be merged back into memory load at cprop. However,
6633 ;; if the default is to prefer movt/movw rather than a load from the constant
6634 ;; pool, the performance is better.
6636 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6637 (match_operand:SI 1 "general_operand" ""))]
6638 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6639 && !target_word_relocations
6640 && !arm_tls_referenced_p (operands[1])"
6641 [(clobber (const_int 0))]
6643 arm_emit_movpair (operands[0], operands[1]);
6647 ;; When generating pic, we need to load the symbol offset into a register.
6648 ;; So that the optimizer does not confuse this with a normal symbol load
6649 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6650 ;; since that is the only type of relocation we can use.
6652 ;; Wrap calculation of the whole PIC address in a single pattern for the
6653 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6654 ;; a PIC address involves two loads from memory, so we want to CSE it
6655 ;; as often as possible.
6656 ;; This pattern will be split into one of the pic_load_addr_* patterns
6657 ;; and a move after GCSE optimizations.
6659 ;; Note: Update arm.cc: legitimize_pic_address() when changing this pattern.
6660 (define_expand "calculate_pic_address"
6661 [(set (match_operand:SI 0 "register_operand")
6662 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6663 (unspec:SI [(match_operand:SI 2 "" "")]
6668 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6670 [(set (match_operand:SI 0 "register_operand" "")
6671 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6672 (unspec:SI [(match_operand:SI 2 "" "")]
6675 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6676 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6677 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6680 ;; operand1 is the memory address to go into
6681 ;; pic_load_addr_32bit.
6682 ;; operand2 is the PIC label to be emitted
6683 ;; from pic_add_dot_plus_eight.
6684 ;; We do this to allow hoisting of the entire insn.
6685 (define_insn_and_split "pic_load_addr_unified"
6686 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6687 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6688 (match_operand:SI 2 "" "")]
6689 UNSPEC_PIC_UNIFIED))]
6692 "&& reload_completed"
6693 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6694 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6695 (match_dup 2)] UNSPEC_PIC_BASE))]
6696 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6697 [(set_attr "type" "load_4,load_4,load_4")
6698 (set_attr "pool_range" "4096,4094,1022")
6699 (set_attr "neg_pool_range" "4084,0,0")
6700 (set_attr "arch" "a,t2,t1")
6701 (set_attr "length" "8,6,4")]
6704 ;; The rather odd constraints on the following are to force reload to leave
6705 ;; the insn alone, and to force the minipool generation pass to then move
6706 ;; the GOT symbol to memory.
6708 (define_insn "pic_load_addr_32bit"
6709 [(set (match_operand:SI 0 "s_register_operand" "=r")
6710 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6711 "TARGET_32BIT && flag_pic"
6713 [(set_attr "type" "load_4")
6714 (set (attr "pool_range")
6715 (if_then_else (eq_attr "is_thumb" "no")
6718 (set (attr "neg_pool_range")
6719 (if_then_else (eq_attr "is_thumb" "no")
6724 (define_insn "pic_load_addr_thumb1"
6725 [(set (match_operand:SI 0 "s_register_operand" "=l")
6726 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6727 "TARGET_THUMB1 && flag_pic"
6729 [(set_attr "type" "load_4")
6730 (set (attr "pool_range") (const_int 1018))]
6733 (define_insn "pic_add_dot_plus_four"
6734 [(set (match_operand:SI 0 "register_operand" "=r")
6735 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6737 (match_operand 2 "" "")]
6741 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6742 INTVAL (operands[2]));
6743 return \"add\\t%0, %|pc\";
6745 [(set_attr "length" "2")
6746 (set_attr "type" "alu_sreg")]
6749 (define_insn "pic_add_dot_plus_eight"
6750 [(set (match_operand:SI 0 "register_operand" "=r")
6751 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6753 (match_operand 2 "" "")]
6757 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6758 INTVAL (operands[2]));
6759 return \"add%?\\t%0, %|pc, %1\";
6761 [(set_attr "predicable" "yes")
6762 (set_attr "type" "alu_sreg")]
6765 (define_insn "tls_load_dot_plus_eight"
6766 [(set (match_operand:SI 0 "register_operand" "=r")
6767 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6769 (match_operand 2 "" "")]
6773 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6774 INTVAL (operands[2]));
6775 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6777 [(set_attr "predicable" "yes")
6778 (set_attr "type" "load_4")]
6781 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6782 ;; followed by a load. These sequences can be crunched down to
6783 ;; tls_load_dot_plus_eight by a peephole.
6786 [(set (match_operand:SI 0 "register_operand" "")
6787 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6789 (match_operand 1 "" "")]
6791 (set (match_operand:SI 2 "arm_general_register_operand" "")
6792 (mem:SI (match_dup 0)))]
6793 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6795 (mem:SI (unspec:SI [(match_dup 3)
6802 (define_insn "pic_offset_arm"
6803 [(set (match_operand:SI 0 "register_operand" "=r")
6804 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6805 (unspec:SI [(match_operand:SI 2 "" "X")]
6806 UNSPEC_PIC_OFFSET))))]
6807 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6808 "ldr%?\\t%0, [%1,%2]"
6809 [(set_attr "type" "load_4")]
6812 (define_expand "builtin_setjmp_receiver"
6813 [(label_ref (match_operand 0 "" ""))]
6817 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6819 if (arm_pic_register != INVALID_REGNUM)
6820 arm_load_pic_register (1UL << 3, NULL_RTX);
6824 ;; If copying one reg to another we can set the condition codes according to
6825 ;; its value. Such a move is common after a return from subroutine and the
6826 ;; result is being tested against zero.
6828 (define_insn "*movsi_compare0"
6829 [(set (reg:CC CC_REGNUM)
6830 (compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6832 (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6840 subs%?\\t%0, %1, #0"
6841 [(set_attr "conds" "set")
6842 (set_attr "arch" "t2,*,t2,t2,a")
6843 (set_attr "type" "alus_imm")
6844 (set_attr "length" "2,4,2,4,4")]
6847 ;; Subroutine to store a half word from a register into memory.
6848 ;; Operand 0 is the source register (HImode)
6849 ;; Operand 1 is the destination address in a register (SImode)
6851 ;; In both this routine and the next, we must be careful not to spill
6852 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6853 ;; can generate unrecognizable rtl.
6855 (define_expand "storehi"
6856 [;; store the low byte
6857 (set (match_operand 1 "" "") (match_dup 3))
6858 ;; extract the high byte
6860 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6861 ;; store the high byte
6862 (set (match_dup 4) (match_dup 5))]
6866 rtx op1 = operands[1];
6867 rtx addr = XEXP (op1, 0);
6868 enum rtx_code code = GET_CODE (addr);
6870 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6872 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6874 operands[4] = adjust_address (op1, QImode, 1);
6875 operands[1] = adjust_address (operands[1], QImode, 0);
6876 operands[3] = gen_lowpart (QImode, operands[0]);
6877 operands[0] = gen_lowpart (SImode, operands[0]);
6878 operands[2] = gen_reg_rtx (SImode);
6879 operands[5] = gen_lowpart (QImode, operands[2]);
6883 (define_expand "storehi_bigend"
6884 [(set (match_dup 4) (match_dup 3))
6886 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6887 (set (match_operand 1 "" "") (match_dup 5))]
6891 rtx op1 = operands[1];
6892 rtx addr = XEXP (op1, 0);
6893 enum rtx_code code = GET_CODE (addr);
6895 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6897 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6899 operands[4] = adjust_address (op1, QImode, 1);
6900 operands[1] = adjust_address (operands[1], QImode, 0);
6901 operands[3] = gen_lowpart (QImode, operands[0]);
6902 operands[0] = gen_lowpart (SImode, operands[0]);
6903 operands[2] = gen_reg_rtx (SImode);
6904 operands[5] = gen_lowpart (QImode, operands[2]);
6908 ;; Subroutine to store a half word integer constant into memory.
6909 (define_expand "storeinthi"
6910 [(set (match_operand 0 "" "")
6911 (match_operand 1 "" ""))
6912 (set (match_dup 3) (match_dup 2))]
6916 HOST_WIDE_INT value = INTVAL (operands[1]);
6917 rtx addr = XEXP (operands[0], 0);
6918 rtx op0 = operands[0];
6919 enum rtx_code code = GET_CODE (addr);
6921 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6923 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6925 operands[1] = gen_reg_rtx (SImode);
6926 if (BYTES_BIG_ENDIAN)
6928 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6929 if ((value & 255) == ((value >> 8) & 255))
6930 operands[2] = operands[1];
6933 operands[2] = gen_reg_rtx (SImode);
6934 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6939 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6940 if ((value & 255) == ((value >> 8) & 255))
6941 operands[2] = operands[1];
6944 operands[2] = gen_reg_rtx (SImode);
6945 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6949 operands[3] = adjust_address (op0, QImode, 1);
6950 operands[0] = adjust_address (operands[0], QImode, 0);
6951 operands[2] = gen_lowpart (QImode, operands[2]);
6952 operands[1] = gen_lowpart (QImode, operands[1]);
6956 (define_expand "storehi_single_op"
6957 [(set (match_operand:HI 0 "memory_operand")
6958 (match_operand:HI 1 "general_operand"))]
6959 "TARGET_32BIT && arm_arch4"
6961 if (!s_register_operand (operands[1], HImode))
6962 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6966 (define_expand "movhi"
6967 [(set (match_operand:HI 0 "general_operand")
6968 (match_operand:HI 1 "general_operand"))]
6971 gcc_checking_assert (aligned_operand (operands[0], HImode));
6972 gcc_checking_assert (aligned_operand (operands[1], HImode));
6975 if (can_create_pseudo_p ())
6977 if (MEM_P (operands[0]))
6981 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6984 if (CONST_INT_P (operands[1]))
6985 emit_insn (gen_storeinthi (operands[0], operands[1]));
6988 if (MEM_P (operands[1]))
6989 operands[1] = force_reg (HImode, operands[1]);
6990 if (BYTES_BIG_ENDIAN)
6991 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6993 emit_insn (gen_storehi (operands[1], operands[0]));
6997 /* Sign extend a constant, and keep it in an SImode reg. */
6998 else if (CONST_INT_P (operands[1]))
7000 rtx reg = gen_reg_rtx (SImode);
7001 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7003 /* If the constant is already valid, leave it alone. */
7004 if (!const_ok_for_arm (val))
7006 /* If setting all the top bits will make the constant
7007 loadable in a single instruction, then set them.
7008 Otherwise, sign extend the number. */
7010 if (const_ok_for_arm (~(val | ~0xffff)))
7012 else if (val & 0x8000)
7016 emit_insn (gen_movsi (reg, GEN_INT (val)));
7017 operands[1] = gen_lowpart (HImode, reg);
7019 else if (arm_arch4 && optimize && can_create_pseudo_p ()
7020 && MEM_P (operands[1]))
7022 rtx reg = gen_reg_rtx (SImode);
7024 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7025 operands[1] = gen_lowpart (HImode, reg);
7027 else if (!arm_arch4)
7029 if (MEM_P (operands[1]))
7032 rtx offset = const0_rtx;
7033 rtx reg = gen_reg_rtx (SImode);
7035 if ((REG_P (base = XEXP (operands[1], 0))
7036 || (GET_CODE (base) == PLUS
7037 && (CONST_INT_P (offset = XEXP (base, 1)))
7038 && ((INTVAL(offset) & 1) != 1)
7039 && REG_P (base = XEXP (base, 0))))
7040 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
7044 new_rtx = widen_memory_access (operands[1], SImode,
7045 ((INTVAL (offset) & ~3)
7046 - INTVAL (offset)));
7047 emit_insn (gen_movsi (reg, new_rtx));
7048 if (((INTVAL (offset) & 2) != 0)
7049 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
7051 rtx reg2 = gen_reg_rtx (SImode);
7053 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
7058 emit_insn (gen_movhi_bytes (reg, operands[1]));
7060 operands[1] = gen_lowpart (HImode, reg);
7064 /* Handle loading a large integer during reload. */
7065 else if (CONST_INT_P (operands[1])
7066 && !const_ok_for_arm (INTVAL (operands[1]))
7067 && !const_ok_for_arm (~INTVAL (operands[1])))
7069 /* Writing a constant to memory needs a scratch, which should
7070 be handled with SECONDARY_RELOADs. */
7071 gcc_assert (REG_P (operands[0]));
7073 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7074 emit_insn (gen_movsi (operands[0], operands[1]));
7078 else if (TARGET_THUMB2)
7080 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
7081 if (can_create_pseudo_p ())
7083 if (!REG_P (operands[0]))
7084 operands[1] = force_reg (HImode, operands[1]);
7085 /* Zero extend a constant, and keep it in an SImode reg. */
7086 else if (CONST_INT_P (operands[1]))
7088 rtx reg = gen_reg_rtx (SImode);
7089 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7091 emit_insn (gen_movsi (reg, GEN_INT (val)));
7092 operands[1] = gen_lowpart (HImode, reg);
7096 else /* TARGET_THUMB1 */
7098 if (can_create_pseudo_p ())
7100 if (CONST_INT_P (operands[1]))
7102 rtx reg = gen_reg_rtx (SImode);
7104 emit_insn (gen_movsi (reg, operands[1]));
7105 operands[1] = gen_lowpart (HImode, reg);
7108 /* ??? We shouldn't really get invalid addresses here, but this can
7109 happen if we are passed a SP (never OK for HImode/QImode) or
7110 virtual register (also rejected as illegitimate for HImode/QImode)
7111 relative address. */
7112 /* ??? This should perhaps be fixed elsewhere, for instance, in
7113 fixup_stack_1, by checking for other kinds of invalid addresses,
7114 e.g. a bare reference to a virtual register. This may confuse the
7115 alpha though, which must handle this case differently. */
7116 if (MEM_P (operands[0])
7117 && !memory_address_p (GET_MODE (operands[0]),
7118 XEXP (operands[0], 0)))
7120 = replace_equiv_address (operands[0],
7121 copy_to_reg (XEXP (operands[0], 0)));
7123 if (MEM_P (operands[1])
7124 && !memory_address_p (GET_MODE (operands[1]),
7125 XEXP (operands[1], 0)))
7127 = replace_equiv_address (operands[1],
7128 copy_to_reg (XEXP (operands[1], 0)));
7130 if (MEM_P (operands[1]) && optimize > 0)
7132 rtx reg = gen_reg_rtx (SImode);
7134 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7135 operands[1] = gen_lowpart (HImode, reg);
7138 if (MEM_P (operands[0]))
7139 operands[1] = force_reg (HImode, operands[1]);
7141 else if (CONST_INT_P (operands[1])
7142 && !satisfies_constraint_I (operands[1]))
7144 /* Handle loading a large integer during reload. */
7146 /* Writing a constant to memory needs a scratch, which should
7147 be handled with SECONDARY_RELOADs. */
7148 gcc_assert (REG_P (operands[0]));
7150 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7151 emit_insn (gen_movsi (operands[0], operands[1]));
7158 (define_expand "movhi_bytes"
7159 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7161 (zero_extend:SI (match_dup 6)))
7162 (set (match_operand:SI 0 "" "")
7163 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7168 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7170 mem1 = change_address (operands[1], QImode, addr);
7171 mem2 = change_address (operands[1], QImode,
7172 plus_constant (Pmode, addr, 1));
7173 operands[0] = gen_lowpart (SImode, operands[0]);
7175 operands[2] = gen_reg_rtx (SImode);
7176 operands[3] = gen_reg_rtx (SImode);
7179 if (BYTES_BIG_ENDIAN)
7181 operands[4] = operands[2];
7182 operands[5] = operands[3];
7186 operands[4] = operands[3];
7187 operands[5] = operands[2];
7192 (define_expand "movhi_bigend"
7194 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7197 (ashiftrt:SI (match_dup 2) (const_int 16)))
7198 (set (match_operand:HI 0 "s_register_operand")
7202 operands[2] = gen_reg_rtx (SImode);
7203 operands[3] = gen_reg_rtx (SImode);
7204 operands[4] = gen_lowpart (HImode, operands[3]);
7208 ;; Pattern to recognize insn generated default case above
7209 (define_insn "*movhi_insn_arch4"
7210 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7211 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
7213 && arm_arch4 && !TARGET_HARD_FLOAT
7214 && (register_operand (operands[0], HImode)
7215 || register_operand (operands[1], HImode))"
7217 mov%?\\t%0, %1\\t%@ movhi
7218 mvn%?\\t%0, #%B1\\t%@ movhi
7219 movw%?\\t%0, %L1\\t%@ movhi
7220 strh%?\\t%1, %0\\t%@ movhi
7221 ldrh%?\\t%0, %1\\t%@ movhi"
7222 [(set_attr "predicable" "yes")
7223 (set_attr "pool_range" "*,*,*,*,256")
7224 (set_attr "neg_pool_range" "*,*,*,*,244")
7225 (set_attr "arch" "*,*,v6t2,*,*")
7226 (set_attr_alternative "type"
7227 [(if_then_else (match_operand 1 "const_int_operand" "")
7228 (const_string "mov_imm" )
7229 (const_string "mov_reg"))
7230 (const_string "mvn_imm")
7231 (const_string "mov_imm")
7232 (const_string "store_4")
7233 (const_string "load_4")])]
7236 (define_insn "*movhi_bytes"
7237 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7238 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
7239 "TARGET_ARM && !TARGET_HARD_FLOAT"
7241 mov%?\\t%0, %1\\t%@ movhi
7242 mov%?\\t%0, %1\\t%@ movhi
7243 mvn%?\\t%0, #%B1\\t%@ movhi"
7244 [(set_attr "predicable" "yes")
7245 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7248 ;; We use a DImode scratch because we may occasionally need an additional
7249 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7250 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7251 ;; The reload_in<m> and reload_out<m> patterns require special constraints
7252 ;; to be correctly handled in default_secondary_reload function.
7253 (define_expand "reload_outhi"
7254 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7255 (match_operand:HI 1 "s_register_operand" "r")
7256 (match_operand:DI 2 "s_register_operand" "=&l")])]
7259 arm_reload_out_hi (operands);
7261 thumb_reload_out_hi (operands);
7266 (define_expand "reload_inhi"
7267 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7268 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7269 (match_operand:DI 2 "s_register_operand" "=&r")])]
7273 arm_reload_in_hi (operands);
7275 thumb_reload_out_hi (operands);
7279 (define_expand "movqi"
7280 [(set (match_operand:QI 0 "general_operand")
7281 (match_operand:QI 1 "general_operand"))]
7284 /* Everything except mem = const or mem = mem can be done easily */
7286 if (can_create_pseudo_p ())
7288 if (CONST_INT_P (operands[1]))
7290 rtx reg = gen_reg_rtx (SImode);
7292 /* For thumb we want an unsigned immediate, then we are more likely
7293 to be able to use a movs insn. */
7295 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7297 emit_insn (gen_movsi (reg, operands[1]));
7298 operands[1] = gen_lowpart (QImode, reg);
7303 /* ??? We shouldn't really get invalid addresses here, but this can
7304 happen if we are passed a SP (never OK for HImode/QImode) or
7305 virtual register (also rejected as illegitimate for HImode/QImode)
7306 relative address. */
7307 /* ??? This should perhaps be fixed elsewhere, for instance, in
7308 fixup_stack_1, by checking for other kinds of invalid addresses,
7309 e.g. a bare reference to a virtual register. This may confuse the
7310 alpha though, which must handle this case differently. */
7311 if (MEM_P (operands[0])
7312 && !memory_address_p (GET_MODE (operands[0]),
7313 XEXP (operands[0], 0)))
7315 = replace_equiv_address (operands[0],
7316 copy_to_reg (XEXP (operands[0], 0)));
7317 if (MEM_P (operands[1])
7318 && !memory_address_p (GET_MODE (operands[1]),
7319 XEXP (operands[1], 0)))
7321 = replace_equiv_address (operands[1],
7322 copy_to_reg (XEXP (operands[1], 0)));
7325 if (MEM_P (operands[1]) && optimize > 0)
7327 rtx reg = gen_reg_rtx (SImode);
7329 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7330 operands[1] = gen_lowpart (QImode, reg);
7333 if (MEM_P (operands[0]))
7334 operands[1] = force_reg (QImode, operands[1]);
7336 else if (TARGET_THUMB
7337 && CONST_INT_P (operands[1])
7338 && !satisfies_constraint_I (operands[1]))
7340 /* Handle loading a large integer during reload. */
7342 /* Writing a constant to memory needs a scratch, which should
7343 be handled with SECONDARY_RELOADs. */
7344 gcc_assert (REG_P (operands[0]));
7346 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7347 emit_insn (gen_movsi (operands[0], operands[1]));
7353 (define_insn "*arm_movqi_insn"
7354 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7355 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7357 && ( register_operand (operands[0], QImode)
7358 || register_operand (operands[1], QImode))"
7369 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7370 (set_attr "predicable" "yes")
7371 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7372 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7373 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7376 ;; HFmode and BFmode moves.
7377 (define_expand "mov<mode>"
7378 [(set (match_operand:HFBF 0 "general_operand")
7379 (match_operand:HFBF 1 "general_operand"))]
7382 gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7383 gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7386 if (MEM_P (operands[0]))
7387 operands[1] = force_reg (<MODE>mode, operands[1]);
7389 else /* TARGET_THUMB1 */
7391 if (can_create_pseudo_p ())
7393 if (!REG_P (operands[0]))
7394 operands[1] = force_reg (<MODE>mode, operands[1]);
7400 (define_insn "*arm32_mov<mode>"
7401 [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7402 (match_operand:HFBF 1 "general_operand" " m,r,r,F"))]
7404 && !TARGET_HARD_FLOAT
7406 && ( s_register_operand (operands[0], <MODE>mode)
7407 || s_register_operand (operands[1], <MODE>mode))"
7409 switch (which_alternative)
7411 case 0: /* ARM register from memory */
7412 return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7413 case 1: /* memory from ARM register */
7414 return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7415 case 2: /* ARM register from ARM register */
7416 return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7417 case 3: /* ARM register from constant */
7422 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7424 ops[0] = operands[0];
7425 ops[1] = GEN_INT (bits);
7426 ops[2] = GEN_INT (bits & 0xff00);
7427 ops[3] = GEN_INT (bits & 0x00ff);
7429 if (arm_arch_thumb2)
7430 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7432 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7439 [(set_attr "conds" "unconditional")
7440 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7441 (set_attr "length" "4,4,4,8")
7442 (set_attr "predicable" "yes")]
7445 (define_expand "movsf"
7446 [(set (match_operand:SF 0 "general_operand")
7447 (match_operand:SF 1 "general_operand"))]
7450 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7451 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7454 if (MEM_P (operands[0]))
7455 operands[1] = force_reg (SFmode, operands[1]);
7457 else /* TARGET_THUMB1 */
7459 if (can_create_pseudo_p ())
7461 if (!REG_P (operands[0]))
7462 operands[1] = force_reg (SFmode, operands[1]);
7466 /* Cannot load it directly, generate a load with clobber so that it can be
7467 loaded via GPR with MOV / MOVT. */
7468 if (arm_disable_literal_pool
7469 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7470 && CONST_DOUBLE_P (operands[1])
7472 && !vfp3_const_double_rtx (operands[1]))
7474 rtx clobreg = gen_reg_rtx (SFmode);
7475 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7482 ;; Transform a floating-point move of a constant into a core register into
7483 ;; an SImode operation.
7485 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7486 (match_operand:SF 1 "immediate_operand" ""))]
7489 && CONST_DOUBLE_P (operands[1])"
7490 [(set (match_dup 2) (match_dup 3))]
7492 operands[2] = gen_lowpart (SImode, operands[0]);
7493 operands[3] = gen_lowpart (SImode, operands[1]);
7494 if (operands[2] == 0 || operands[3] == 0)
7499 (define_insn "*arm_movsf_soft_insn"
7500 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7501 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7503 && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7504 && (!MEM_P (operands[0])
7505 || register_operand (operands[1], SFmode))"
7507 switch (which_alternative)
7509 case 0: return \"mov%?\\t%0, %1\";
7511 /* Cannot load it directly, split to load it via MOV / MOVT. */
7512 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7514 return \"ldr%?\\t%0, %1\\t%@ float\";
7515 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7516 default: gcc_unreachable ();
7519 [(set_attr "predicable" "yes")
7520 (set_attr "type" "mov_reg,load_4,store_4")
7521 (set_attr "arm_pool_range" "*,4096,*")
7522 (set_attr "thumb2_pool_range" "*,4094,*")
7523 (set_attr "arm_neg_pool_range" "*,4084,*")
7524 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7527 ;; Splitter for the above.
7529 [(set (match_operand:SF 0 "s_register_operand")
7530 (match_operand:SF 1 "const_double_operand"))]
7531 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7535 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7536 rtx cst = gen_int_mode (buf, SImode);
7537 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7542 (define_expand "movdf"
7543 [(set (match_operand:DF 0 "general_operand")
7544 (match_operand:DF 1 "general_operand"))]
7547 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7548 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7551 if (MEM_P (operands[0]))
7552 operands[1] = force_reg (DFmode, operands[1]);
7554 else /* TARGET_THUMB */
7556 if (can_create_pseudo_p ())
7558 if (!REG_P (operands[0]))
7559 operands[1] = force_reg (DFmode, operands[1]);
7563 /* Cannot load it directly, generate a load with clobber so that it can be
7564 loaded via GPR with MOV / MOVT. */
7565 if (arm_disable_literal_pool
7566 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7567 && CONSTANT_P (operands[1])
7569 && !arm_const_double_rtx (operands[1])
7570 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7572 rtx clobreg = gen_reg_rtx (DImode);
7573 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7580 ;; Reloading a df mode value stored in integer regs to memory can require a
7582 ;; Another reload_out<m> pattern that requires special constraints.
7583 (define_expand "reload_outdf"
7584 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7585 (match_operand:DF 1 "s_register_operand" "r")
7586 (match_operand:SI 2 "s_register_operand" "=&r")]
7590 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7593 operands[2] = XEXP (operands[0], 0);
7594 else if (code == POST_INC || code == PRE_DEC)
7596 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7597 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7598 emit_insn (gen_movdi (operands[0], operands[1]));
7601 else if (code == PRE_INC)
7603 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7605 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7608 else if (code == POST_DEC)
7609 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7611 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7612 XEXP (XEXP (operands[0], 0), 1)));
7614 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7617 if (code == POST_DEC)
7618 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7624 (define_insn "*movdf_soft_insn"
7625 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7626 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7627 "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7628 && ( register_operand (operands[0], DFmode)
7629 || register_operand (operands[1], DFmode))"
7631 switch (which_alternative)
7638 /* Cannot load it directly, split to load it via MOV / MOVT. */
7639 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7643 return output_move_double (operands, true, NULL);
7646 [(set_attr "length" "8,12,16,8,8")
7647 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7648 (set_attr "arm_pool_range" "*,*,*,1020,*")
7649 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7650 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7651 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7654 ;; Splitter for the above.
7656 [(set (match_operand:DF 0 "s_register_operand")
7657 (match_operand:DF 1 "const_double_operand"))]
7658 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7662 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7663 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7664 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7665 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7666 rtx cst = gen_int_mode (ival, DImode);
7667 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7673 ;; load- and store-multiple insns
7674 ;; The arm can load/store any set of registers, provided that they are in
7675 ;; ascending order, but these expanders assume a contiguous set.
7677 (define_expand "load_multiple"
7678 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7679 (match_operand:SI 1 "" ""))
7680 (use (match_operand:SI 2 "" ""))])]
7683 HOST_WIDE_INT offset = 0;
7685 /* Support only fixed point registers. */
7686 if (!CONST_INT_P (operands[2])
7687 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7688 || INTVAL (operands[2]) < 2
7689 || !MEM_P (operands[1])
7690 || !REG_P (operands[0])
7691 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7692 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7696 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7697 INTVAL (operands[2]),
7698 force_reg (SImode, XEXP (operands[1], 0)),
7699 FALSE, operands[1], &offset);
7702 (define_expand "store_multiple"
7703 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7704 (match_operand:SI 1 "" ""))
7705 (use (match_operand:SI 2 "" ""))])]
7708 HOST_WIDE_INT offset = 0;
7710 /* Support only fixed point registers. */
7711 if (!CONST_INT_P (operands[2])
7712 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7713 || INTVAL (operands[2]) < 2
7714 || !REG_P (operands[1])
7715 || !MEM_P (operands[0])
7716 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7717 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7721 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7722 INTVAL (operands[2]),
7723 force_reg (SImode, XEXP (operands[0], 0)),
7724 FALSE, operands[0], &offset);
7728 (define_expand "setmemsi"
7729 [(match_operand:BLK 0 "general_operand")
7730 (match_operand:SI 1 "const_int_operand")
7731 (match_operand:SI 2 "const_int_operand")
7732 (match_operand:SI 3 "const_int_operand")]
7735 if (arm_gen_setmem (operands))
7742 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7743 ;; We could let this apply for blocks of less than this, but it clobbers so
7744 ;; many registers that there is then probably a better way.
7746 (define_expand "cpymemqi"
7747 [(match_operand:BLK 0 "general_operand")
7748 (match_operand:BLK 1 "general_operand")
7749 (match_operand:SI 2 "const_int_operand")
7750 (match_operand:SI 3 "const_int_operand")]
7755 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7756 && !optimize_function_for_size_p (cfun))
7758 if (gen_cpymem_ldrd_strd (operands))
7763 if (arm_gen_cpymemqi (operands))
7767 else /* TARGET_THUMB1 */
7769 if ( INTVAL (operands[3]) != 4
7770 || INTVAL (operands[2]) > 48)
7773 thumb_expand_cpymemqi (operands);
7780 ;; Compare & branch insns
7781 ;; The range calculations are based as follows:
7782 ;; For forward branches, the address calculation returns the address of
7783 ;; the next instruction. This is 2 beyond the branch instruction.
7784 ;; For backward branches, the address calculation returns the address of
7785 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7786 ;; instruction for the shortest sequence, and 4 before the branch instruction
7787 ;; if we have to jump around an unconditional branch.
7788 ;; To the basic branch range the PC offset must be added (this is +4).
7789 ;; So for forward branches we have
7790 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7791 ;; And for backward branches we have
7792 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7794 ;; In 16-bit Thumb these ranges are:
7795 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7796 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7798 ;; In 32-bit Thumb these ranges are:
7799 ;; For a 'b' +/- 16MB is not checked for.
7800 ;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
7801 ;; (-1048568 -> 1048576).
7803 (define_expand "cbranchsi4"
7804 [(set (pc) (if_then_else
7805 (match_operator 0 "expandable_comparison_operator"
7806 [(match_operand:SI 1 "s_register_operand")
7807 (match_operand:SI 2 "nonmemory_operand")])
7808 (label_ref (match_operand 3 "" ""))
7814 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7816 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7820 if (thumb1_cmpneg_operand (operands[2], SImode))
7822 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7823 operands[3], operands[0]));
7826 if (!thumb1_cmp_operand (operands[2], SImode))
7827 operands[2] = force_reg (SImode, operands[2]);
7830 (define_expand "cbranchsf4"
7831 [(set (pc) (if_then_else
7832 (match_operator 0 "expandable_comparison_operator"
7833 [(match_operand:SF 1 "s_register_operand")
7834 (match_operand:SF 2 "vfp_compare_operand")])
7835 (label_ref (match_operand 3 "" ""))
7837 "TARGET_32BIT && TARGET_HARD_FLOAT"
7838 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7839 operands[3])); DONE;"
7842 (define_expand "cbranchdf4"
7843 [(set (pc) (if_then_else
7844 (match_operator 0 "expandable_comparison_operator"
7845 [(match_operand:DF 1 "s_register_operand")
7846 (match_operand:DF 2 "vfp_compare_operand")])
7847 (label_ref (match_operand 3 "" ""))
7849 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7850 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7851 operands[3])); DONE;"
7854 (define_expand "cbranchdi4"
7855 [(set (pc) (if_then_else
7856 (match_operator 0 "expandable_comparison_operator"
7857 [(match_operand:DI 1 "s_register_operand")
7858 (match_operand:DI 2 "reg_or_int_operand")])
7859 (label_ref (match_operand 3 "" ""))
7863 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7865 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7871 ;; Comparison and test insns
7873 (define_insn "*arm_cmpsi_insn"
7874 [(set (reg:CC CC_REGNUM)
7875 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7876 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7884 [(set_attr "conds" "set")
7885 (set_attr "arch" "t2,t2,any,any,any")
7886 (set_attr "length" "2,2,4,4,4")
7887 (set_attr "predicable" "yes")
7888 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7889 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7892 (define_insn "*cmpsi_shiftsi"
7893 [(set (reg:CC CC_REGNUM)
7894 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7895 (match_operator:SI 3 "shift_operator"
7896 [(match_operand:SI 1 "s_register_operand" "r,r")
7897 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7900 [(set_attr "conds" "set")
7901 (set_attr "shift" "1")
7902 (set_attr "arch" "32,a")
7903 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7905 (define_insn "*cmpsi_shiftsi_swp"
7906 [(set (reg:CC_SWP CC_REGNUM)
7907 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7908 [(match_operand:SI 1 "s_register_operand" "r,r")
7909 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7910 (match_operand:SI 0 "s_register_operand" "r,r")))]
7913 [(set_attr "conds" "set")
7914 (set_attr "shift" "1")
7915 (set_attr "arch" "32,a")
7916 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7918 (define_insn "*arm_cmpsi_negshiftsi_si"
7919 [(set (reg:CC_Z CC_REGNUM)
7921 (neg:SI (match_operator:SI 1 "shift_operator"
7922 [(match_operand:SI 2 "s_register_operand" "r,r")
7923 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7924 (match_operand:SI 0 "s_register_operand" "r,r")))]
7927 [(set_attr "conds" "set")
7928 (set_attr "arch" "32,a")
7929 (set_attr "shift" "2")
7930 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7931 (set_attr "predicable" "yes")]
7934 ; This insn allows redundant compares to be removed by cse, nothing should
7935 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7936 ; is deleted later on. The match_dup will match the mode here, so that
7937 ; mode changes of the condition codes aren't lost by this even though we don't
7938 ; specify what they are.
7940 (define_insn "*deleted_compare"
7941 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7943 "\\t%@ deleted compare"
7944 [(set_attr "conds" "set")
7945 (set_attr "length" "0")
7946 (set_attr "type" "no_insn")]
7950 ;; Conditional branch insns
7952 (define_expand "cbranch_cc"
7954 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7955 (match_operand 2 "" "")])
7956 (label_ref (match_operand 3 "" ""))
7959 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7960 operands[1], operands[2], NULL_RTX);
7961 operands[2] = const0_rtx;"
7965 ;; Patterns to match conditional branch insns.
7968 (define_insn "arm_cond_branch"
7970 (if_then_else (match_operator 1 "arm_comparison_operator"
7971 [(match_operand 2 "cc_register" "") (const_int 0)])
7972 (label_ref (match_operand 0 "" ""))
7976 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7978 arm_ccfsm_state += 2;
7981 switch (get_attr_length (insn))
7983 case 2: /* Thumb2 16-bit b{cond}. */
7984 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7988 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7989 unconditional branch b. */
7990 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7993 [(set_attr "conds" "use")
7994 (set_attr "type" "branch")
7995 (set (attr "length")
7996 (if_then_else (match_test "!TARGET_THUMB2")
7998 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
8001 ;; Check if target is within 16-bit Thumb2 b{cond} range.
8002 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8003 (le (minus (match_dup 0) (pc)) (const_int 256)))
8005 ;; Target is Thumb2, within narrow range.
8006 ;; Generate b{cond}.
8009 ;; Check if target is within 32-bit Thumb2 b{cond} range.
8010 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
8011 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
8013 ;; Target is Thumb2, within wide range.
8016 ;; Target is Thumb2, out of range.
8017 ;; Generate narrow b{cond} and unconditional branch b.
8021 (define_insn "*arm_cond_branch_reversed"
8023 (if_then_else (match_operator 1 "arm_comparison_operator"
8024 [(match_operand 2 "cc_register" "") (const_int 0)])
8026 (label_ref (match_operand 0 "" ""))))]
8029 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8031 arm_ccfsm_state += 2;
8034 switch (get_attr_length (insn))
8036 case 2: /* Thumb2 16-bit b{cond}. */
8037 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
8041 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
8042 unconditional branch b. */
8043 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
8046 [(set_attr "conds" "use")
8047 (set_attr "type" "branch")
8048 (set (attr "length")
8049 (if_then_else (match_test "!TARGET_THUMB2")
8051 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
8054 ;; Check if target is within 16-bit Thumb2 b{cond} range.
8055 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8056 (le (minus (match_dup 0) (pc)) (const_int 256)))
8058 ;; Target is Thumb2, within narrow range.
8059 ;; Generate b{cond}.
8062 ;; Check if target is within 32-bit Thumb2 b{cond} range.
8063 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
8064 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
8066 ;; Target is Thumb2, within wide range.
8067 ;; Generate b{cond}.
8069 ;; Target is Thumb2, out of range.
8070 ;; Generate narrow b{cond} and unconditional branch b.
8078 (define_expand "cstore_cc"
8079 [(set (match_operand:SI 0 "s_register_operand")
8080 (match_operator:SI 1 "" [(match_operand 2 "" "")
8081 (match_operand 3 "" "")]))]
8083 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8084 operands[2], operands[3], NULL_RTX);
8085 operands[3] = const0_rtx;"
8088 (define_insn_and_split "*mov_scc"
8089 [(set (match_operand:SI 0 "s_register_operand" "=r")
8090 (match_operator:SI 1 "arm_comparison_operator_mode"
8091 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8093 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8096 (if_then_else:SI (match_dup 1)
8100 [(set_attr "conds" "use")
8101 (set_attr "length" "8")
8102 (set_attr "type" "multiple")]
8105 (define_insn "*negscc_borrow"
8106 [(set (match_operand:SI 0 "s_register_operand" "=r")
8107 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8110 [(set_attr "conds" "use")
8111 (set_attr "length" "4")
8112 (set_attr "type" "adc_reg")]
8115 (define_insn_and_split "*mov_negscc"
8116 [(set (match_operand:SI 0 "s_register_operand" "=r")
8117 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8118 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8119 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8120 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8123 (if_then_else:SI (match_dup 1)
8127 operands[3] = GEN_INT (~0);
8129 [(set_attr "conds" "use")
8130 (set_attr "length" "8")
8131 (set_attr "type" "multiple")]
8134 (define_insn_and_split "*mov_notscc"
8135 [(set (match_operand:SI 0 "s_register_operand" "=r")
8136 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8137 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8139 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8142 (if_then_else:SI (match_dup 1)
8146 operands[3] = GEN_INT (~1);
8147 operands[4] = GEN_INT (~0);
8149 [(set_attr "conds" "use")
8150 (set_attr "length" "8")
8151 (set_attr "type" "multiple")]
8154 (define_expand "cstoresi4"
8155 [(set (match_operand:SI 0 "s_register_operand")
8156 (match_operator:SI 1 "expandable_comparison_operator"
8157 [(match_operand:SI 2 "s_register_operand")
8158 (match_operand:SI 3 "reg_or_int_operand")]))]
8159 "TARGET_32BIT || TARGET_THUMB1"
8161 rtx op3, scratch, scratch2;
8165 if (!arm_add_operand (operands[3], SImode))
8166 operands[3] = force_reg (SImode, operands[3]);
8167 emit_insn (gen_cstore_cc (operands[0], operands[1],
8168 operands[2], operands[3]));
8172 if (operands[3] == const0_rtx)
8174 switch (GET_CODE (operands[1]))
8177 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8181 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8185 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8186 NULL_RTX, 0, OPTAB_WIDEN);
8187 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8188 NULL_RTX, 0, OPTAB_WIDEN);
8189 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8190 operands[0], 1, OPTAB_WIDEN);
8194 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8196 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8197 NULL_RTX, 1, OPTAB_WIDEN);
8201 scratch = expand_binop (SImode, ashr_optab, operands[2],
8202 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8203 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8204 NULL_RTX, 0, OPTAB_WIDEN);
8205 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8209 /* LT is handled by generic code. No need for unsigned with 0. */
8216 switch (GET_CODE (operands[1]))
8219 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8220 NULL_RTX, 0, OPTAB_WIDEN);
8221 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8225 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8226 NULL_RTX, 0, OPTAB_WIDEN);
8227 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8231 op3 = force_reg (SImode, operands[3]);
8233 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8234 NULL_RTX, 1, OPTAB_WIDEN);
8235 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8236 NULL_RTX, 0, OPTAB_WIDEN);
8237 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8243 if (!thumb1_cmp_operand (op3, SImode))
8244 op3 = force_reg (SImode, op3);
8245 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8246 NULL_RTX, 0, OPTAB_WIDEN);
8247 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8248 NULL_RTX, 1, OPTAB_WIDEN);
8249 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8254 op3 = force_reg (SImode, operands[3]);
8255 scratch = force_reg (SImode, const0_rtx);
8256 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8262 if (!thumb1_cmp_operand (op3, SImode))
8263 op3 = force_reg (SImode, op3);
8264 scratch = force_reg (SImode, const0_rtx);
8265 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8271 if (!thumb1_cmp_operand (op3, SImode))
8272 op3 = force_reg (SImode, op3);
8273 scratch = gen_reg_rtx (SImode);
8274 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8278 op3 = force_reg (SImode, operands[3]);
8279 scratch = gen_reg_rtx (SImode);
8280 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8283 /* No good sequences for GT, LT. */
8290 (define_expand "cstorehf4"
8291 [(set (match_operand:SI 0 "s_register_operand")
8292 (match_operator:SI 1 "expandable_comparison_operator"
8293 [(match_operand:HF 2 "s_register_operand")
8294 (match_operand:HF 3 "vfp_compare_operand")]))]
8295 "TARGET_VFP_FP16INST"
8297 if (!arm_validize_comparison (&operands[1],
8302 emit_insn (gen_cstore_cc (operands[0], operands[1],
8303 operands[2], operands[3]));
8308 (define_expand "cstoresf4"
8309 [(set (match_operand:SI 0 "s_register_operand")
8310 (match_operator:SI 1 "expandable_comparison_operator"
8311 [(match_operand:SF 2 "s_register_operand")
8312 (match_operand:SF 3 "vfp_compare_operand")]))]
8313 "TARGET_32BIT && TARGET_HARD_FLOAT"
8314 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8315 operands[2], operands[3])); DONE;"
8318 (define_expand "cstoredf4"
8319 [(set (match_operand:SI 0 "s_register_operand")
8320 (match_operator:SI 1 "expandable_comparison_operator"
8321 [(match_operand:DF 2 "s_register_operand")
8322 (match_operand:DF 3 "vfp_compare_operand")]))]
8323 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8324 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8325 operands[2], operands[3])); DONE;"
8328 (define_expand "cstoredi4"
8329 [(set (match_operand:SI 0 "s_register_operand")
8330 (match_operator:SI 1 "expandable_comparison_operator"
8331 [(match_operand:DI 2 "s_register_operand")
8332 (match_operand:DI 3 "reg_or_int_operand")]))]
8335 if (!arm_validize_comparison (&operands[1],
8339 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8346 ;; Conditional move insns
8348 (define_expand "movsicc"
8349 [(set (match_operand:SI 0 "s_register_operand")
8350 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8351 (match_operand:SI 2 "arm_not_operand")
8352 (match_operand:SI 3 "arm_not_operand")))]
8359 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8360 &XEXP (operands[1], 1)))
8363 code = GET_CODE (operands[1]);
8364 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8365 XEXP (operands[1], 1), NULL_RTX);
8366 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8370 (define_expand "movhfcc"
8371 [(set (match_operand:HF 0 "s_register_operand")
8372 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8373 (match_operand:HF 2 "s_register_operand")
8374 (match_operand:HF 3 "s_register_operand")))]
8375 "TARGET_VFP_FP16INST"
8378 enum rtx_code code = GET_CODE (operands[1]);
8381 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8382 &XEXP (operands[1], 1)))
8385 code = GET_CODE (operands[1]);
8386 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8387 XEXP (operands[1], 1), NULL_RTX);
8388 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8392 (define_expand "movsfcc"
8393 [(set (match_operand:SF 0 "s_register_operand")
8394 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8395 (match_operand:SF 2 "s_register_operand")
8396 (match_operand:SF 3 "s_register_operand")))]
8397 "TARGET_32BIT && TARGET_HARD_FLOAT"
8400 enum rtx_code code = GET_CODE (operands[1]);
8403 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8404 &XEXP (operands[1], 1)))
8407 code = GET_CODE (operands[1]);
8408 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8409 XEXP (operands[1], 1), NULL_RTX);
8410 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8414 (define_expand "movdfcc"
8415 [(set (match_operand:DF 0 "s_register_operand")
8416 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8417 (match_operand:DF 2 "s_register_operand")
8418 (match_operand:DF 3 "s_register_operand")))]
8419 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8422 enum rtx_code code = GET_CODE (operands[1]);
8425 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8426 &XEXP (operands[1], 1)))
8428 code = GET_CODE (operands[1]);
8429 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8430 XEXP (operands[1], 1), NULL_RTX);
8431 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8435 (define_insn "*cmov<mode>"
8436 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8437 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8438 [(match_operand 2 "cc_register" "") (const_int 0)])
8439 (match_operand:SDF 3 "s_register_operand"
8441 (match_operand:SDF 4 "s_register_operand"
8442 "<F_constraint>")))]
8443 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8446 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8453 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8458 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8464 [(set_attr "conds" "use")
8465 (set_attr "type" "fcsel")]
8468 (define_insn "*cmovhf"
8469 [(set (match_operand:HF 0 "s_register_operand" "=t")
8470 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8471 [(match_operand 2 "cc_register" "") (const_int 0)])
8472 (match_operand:HF 3 "s_register_operand" "t")
8473 (match_operand:HF 4 "s_register_operand" "t")))]
8474 "TARGET_VFP_FP16INST"
8477 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8484 return \"vsel%d1.f16\\t%0, %3, %4\";
8489 return \"vsel%D1.f16\\t%0, %4, %3\";
8495 [(set_attr "conds" "use")
8496 (set_attr "type" "fcsel")]
8499 (define_insn_and_split "*movsicc_insn"
8500 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8502 (match_operator 3 "arm_comparison_operator"
8503 [(match_operand 4 "cc_register" "") (const_int 0)])
8504 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8505 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8516 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8517 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8518 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8519 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8520 "&& reload_completed"
8523 enum rtx_code rev_code;
8527 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8529 gen_rtx_SET (operands[0], operands[1])));
8531 rev_code = GET_CODE (operands[3]);
8532 mode = GET_MODE (operands[4]);
8533 if (mode == CCFPmode || mode == CCFPEmode)
8534 rev_code = reverse_condition_maybe_unordered (rev_code);
8536 rev_code = reverse_condition (rev_code);
8538 rev_cond = gen_rtx_fmt_ee (rev_code,
8542 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8544 gen_rtx_SET (operands[0], operands[2])));
8547 [(set_attr "length" "4,4,4,4,8,8,8,8")
8548 (set_attr "conds" "use")
8549 (set_attr_alternative "type"
8550 [(if_then_else (match_operand 2 "const_int_operand" "")
8551 (const_string "mov_imm")
8552 (const_string "mov_reg"))
8553 (const_string "mvn_imm")
8554 (if_then_else (match_operand 1 "const_int_operand" "")
8555 (const_string "mov_imm")
8556 (const_string "mov_reg"))
8557 (const_string "mvn_imm")
8558 (const_string "multiple")
8559 (const_string "multiple")
8560 (const_string "multiple")
8561 (const_string "multiple")])]
8564 (define_insn "*movsfcc_soft_insn"
8565 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8566 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8567 [(match_operand 4 "cc_register" "") (const_int 0)])
8568 (match_operand:SF 1 "s_register_operand" "0,r")
8569 (match_operand:SF 2 "s_register_operand" "r,0")))]
8570 "TARGET_ARM && TARGET_SOFT_FLOAT"
8574 [(set_attr "conds" "use")
8575 (set_attr "type" "mov_reg")]
8579 ;; Jump and linkage insns
8581 (define_expand "jump"
8583 (label_ref (match_operand 0 "" "")))]
8588 (define_insn "*arm_jump"
8590 (label_ref (match_operand 0 "" "")))]
8594 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8596 arm_ccfsm_state += 2;
8599 return \"b%?\\t%l0\";
8602 [(set_attr "predicable" "yes")
8603 (set (attr "length")
8605 (and (match_test "TARGET_THUMB2")
8606 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8607 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8610 (set_attr "type" "branch")]
8613 (define_expand "call"
8614 [(parallel [(call (match_operand 0 "memory_operand")
8615 (match_operand 1 "general_operand"))
8616 (use (match_operand 2 "" ""))
8617 (clobber (reg:SI LR_REGNUM))])]
8622 tree addr = MEM_EXPR (operands[0]);
8624 /* In an untyped call, we can get NULL for operand 2. */
8625 if (operands[2] == NULL_RTX)
8626 operands[2] = const0_rtx;
8628 /* Decide if we should generate indirect calls by loading the
8629 32-bit address of the callee into a register before performing the
8631 callee = XEXP (operands[0], 0);
8632 if (GET_CODE (callee) == SYMBOL_REF
8633 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8635 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8637 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8638 /* Indirect call: set r9 with FDPIC value of callee. */
8639 XEXP (operands[0], 0)
8640 = arm_load_function_descriptor (XEXP (operands[0], 0));
8642 if (detect_cmse_nonsecure_call (addr))
8644 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8646 emit_call_insn (pat);
8650 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8651 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8654 /* Restore FDPIC register (r9) after call. */
8657 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8658 rtx initial_fdpic_reg
8659 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8661 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8662 initial_fdpic_reg));
8669 (define_insn "restore_pic_register_after_call"
8670 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8671 (unspec:SI [(match_dup 0)
8672 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8673 UNSPEC_PIC_RESTORE))]
8680 (define_expand "call_internal"
8681 [(parallel [(call (match_operand 0 "memory_operand")
8682 (match_operand 1 "general_operand"))
8683 (use (match_operand 2 "" ""))
8684 (clobber (reg:SI LR_REGNUM))])])
8686 (define_expand "nonsecure_call_internal"
8687 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8688 UNSPEC_NONSECURE_MEM)
8689 (match_operand 1 "general_operand"))
8690 (use (match_operand 2 "" ""))
8691 (clobber (reg:SI LR_REGNUM))])]
8694 rtx addr = XEXP (operands[0], 0);
8695 rtx tmp = REG_P (addr) ? addr : force_reg (SImode, addr);
8697 if (!TARGET_HAVE_FPCXT_CMSE)
8699 rtx r4 = gen_rtx_REG (SImode, R4_REGNUM);
8700 emit_move_insn (r4, tmp);
8705 operands[0] = replace_equiv_address (operands[0], tmp);
8709 (define_insn "*call_reg_armv5"
8710 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8711 (match_operand 1 "" ""))
8712 (use (match_operand 2 "" ""))
8713 (clobber (reg:SI LR_REGNUM))]
8714 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8716 [(set_attr "type" "call")]
8719 (define_insn "*call_reg_arm"
8720 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8721 (match_operand 1 "" ""))
8722 (use (match_operand 2 "" ""))
8723 (clobber (reg:SI LR_REGNUM))]
8724 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8726 return output_call (operands);
8728 ;; length is worst case, normally it is only two
8729 [(set_attr "length" "12")
8730 (set_attr "type" "call")]
8734 (define_expand "call_value"
8735 [(parallel [(set (match_operand 0 "" "")
8736 (call (match_operand 1 "memory_operand")
8737 (match_operand 2 "general_operand")))
8738 (use (match_operand 3 "" ""))
8739 (clobber (reg:SI LR_REGNUM))])]
8744 tree addr = MEM_EXPR (operands[1]);
8746 /* In an untyped call, we can get NULL for operand 2. */
8747 if (operands[3] == 0)
8748 operands[3] = const0_rtx;
8750 /* Decide if we should generate indirect calls by loading the
8751 32-bit address of the callee into a register before performing the
8753 callee = XEXP (operands[1], 0);
8754 if (GET_CODE (callee) == SYMBOL_REF
8755 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8757 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8759 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8760 /* Indirect call: set r9 with FDPIC value of callee. */
8761 XEXP (operands[1], 0)
8762 = arm_load_function_descriptor (XEXP (operands[1], 0));
8764 if (detect_cmse_nonsecure_call (addr))
8766 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8767 operands[2], operands[3]);
8768 emit_call_insn (pat);
8772 pat = gen_call_value_internal (operands[0], operands[1],
8773 operands[2], operands[3]);
8774 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8777 /* Restore FDPIC register (r9) after call. */
8780 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8781 rtx initial_fdpic_reg
8782 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8784 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8785 initial_fdpic_reg));
8792 (define_expand "call_value_internal"
8793 [(parallel [(set (match_operand 0 "" "")
8794 (call (match_operand 1 "memory_operand")
8795 (match_operand 2 "general_operand")))
8796 (use (match_operand 3 "" ""))
8797 (clobber (reg:SI LR_REGNUM))])])
8799 (define_expand "nonsecure_call_value_internal"
8800 [(parallel [(set (match_operand 0 "" "")
8801 (call (unspec:SI [(match_operand 1 "memory_operand")]
8802 UNSPEC_NONSECURE_MEM)
8803 (match_operand 2 "general_operand")))
8804 (use (match_operand 3 "" ""))
8805 (clobber (reg:SI LR_REGNUM))])]
8809 if (!TARGET_HAVE_FPCXT_CMSE)
8812 copy_to_suggested_reg (XEXP (operands[1], 0),
8813 gen_rtx_REG (SImode, R4_REGNUM),
8816 operands[1] = replace_equiv_address (operands[1], tmp);
8820 (define_insn "*call_value_reg_armv5"
8821 [(set (match_operand 0 "" "")
8822 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8823 (match_operand 2 "" "")))
8824 (use (match_operand 3 "" ""))
8825 (clobber (reg:SI LR_REGNUM))]
8826 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8828 [(set_attr "type" "call")]
8831 (define_insn "*call_value_reg_arm"
8832 [(set (match_operand 0 "" "")
8833 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8834 (match_operand 2 "" "")))
8835 (use (match_operand 3 "" ""))
8836 (clobber (reg:SI LR_REGNUM))]
8837 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8839 return output_call (&operands[1]);
8841 [(set_attr "length" "12")
8842 (set_attr "type" "call")]
8845 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8846 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8848 (define_insn "*call_symbol"
8849 [(call (mem:SI (match_operand:SI 0 "" ""))
8850 (match_operand 1 "" ""))
8851 (use (match_operand 2 "" ""))
8852 (clobber (reg:SI LR_REGNUM))]
8854 && !SIBLING_CALL_P (insn)
8855 && (GET_CODE (operands[0]) == SYMBOL_REF)
8856 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8859 rtx op = operands[0];
8861 /* Switch mode now when possible. */
8862 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8863 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8864 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8866 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8868 [(set_attr "type" "call")]
8871 (define_insn "*call_value_symbol"
8872 [(set (match_operand 0 "" "")
8873 (call (mem:SI (match_operand:SI 1 "" ""))
8874 (match_operand:SI 2 "" "")))
8875 (use (match_operand 3 "" ""))
8876 (clobber (reg:SI LR_REGNUM))]
8878 && !SIBLING_CALL_P (insn)
8879 && (GET_CODE (operands[1]) == SYMBOL_REF)
8880 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8883 rtx op = operands[1];
8885 /* Switch mode now when possible. */
8886 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8887 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8888 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8890 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8892 [(set_attr "type" "call")]
8895 (define_expand "sibcall_internal"
8896 [(parallel [(call (match_operand 0 "memory_operand")
8897 (match_operand 1 "general_operand"))
8899 (use (match_operand 2 "" ""))])])
8901 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8902 (define_expand "sibcall"
8903 [(parallel [(call (match_operand 0 "memory_operand")
8904 (match_operand 1 "general_operand"))
8906 (use (match_operand 2 "" ""))])]
8912 if ((!REG_P (XEXP (operands[0], 0))
8913 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8914 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8915 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8916 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8918 if (operands[2] == NULL_RTX)
8919 operands[2] = const0_rtx;
8921 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8922 arm_emit_call_insn (pat, operands[0], true);
8927 (define_expand "sibcall_value_internal"
8928 [(parallel [(set (match_operand 0 "" "")
8929 (call (match_operand 1 "memory_operand")
8930 (match_operand 2 "general_operand")))
8932 (use (match_operand 3 "" ""))])])
8934 (define_expand "sibcall_value"
8935 [(parallel [(set (match_operand 0 "" "")
8936 (call (match_operand 1 "memory_operand")
8937 (match_operand 2 "general_operand")))
8939 (use (match_operand 3 "" ""))])]
8945 if ((!REG_P (XEXP (operands[1], 0))
8946 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8947 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8948 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8949 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8951 if (operands[3] == NULL_RTX)
8952 operands[3] = const0_rtx;
8954 pat = gen_sibcall_value_internal (operands[0], operands[1],
8955 operands[2], operands[3]);
8956 arm_emit_call_insn (pat, operands[1], true);
8961 (define_insn "*sibcall_insn"
8962 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8963 (match_operand 1 "" ""))
8965 (use (match_operand 2 "" ""))]
8966 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8968 if (which_alternative == 1)
8969 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8972 if (arm_arch5t || arm_arch4t)
8973 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8975 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8978 [(set_attr "type" "call")]
8981 (define_insn "*sibcall_value_insn"
8982 [(set (match_operand 0 "" "")
8983 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8984 (match_operand 2 "" "")))
8986 (use (match_operand 3 "" ""))]
8987 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8989 if (which_alternative == 1)
8990 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8993 if (arm_arch5t || arm_arch4t)
8994 return \"bx%?\\t%1\";
8996 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8999 [(set_attr "type" "call")]
9002 (define_expand "<return_str>return"
9004 "(TARGET_ARM || (TARGET_THUMB2
9005 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9006 && !IS_STACKALIGN (arm_current_func_type ())))
9007 <return_cond_false>"
9012 thumb2_expand_return (<return_simple_p>);
9019 ;; Often the return insn will be the same as loading from memory, so set attr
9020 (define_insn "*arm_return"
9022 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9025 if (arm_ccfsm_state == 2)
9027 arm_ccfsm_state += 2;
9030 return output_return_instruction (const_true_rtx, true, false, false);
9032 [(set_attr "type" "load_4")
9033 (set_attr "length" "12")
9034 (set_attr "predicable" "yes")]
9037 (define_insn "*cond_<return_str>return"
9039 (if_then_else (match_operator 0 "arm_comparison_operator"
9040 [(match_operand 1 "cc_register" "") (const_int 0)])
9043 "TARGET_ARM <return_cond_true>"
9046 if (arm_ccfsm_state == 2)
9048 arm_ccfsm_state += 2;
9051 return output_return_instruction (operands[0], true, false,
9054 [(set_attr "conds" "use")
9055 (set_attr "length" "12")
9056 (set_attr "type" "load_4")]
9059 (define_insn "*cond_<return_str>return_inverted"
9061 (if_then_else (match_operator 0 "arm_comparison_operator"
9062 [(match_operand 1 "cc_register" "") (const_int 0)])
9065 "TARGET_ARM <return_cond_true>"
9068 if (arm_ccfsm_state == 2)
9070 arm_ccfsm_state += 2;
9073 return output_return_instruction (operands[0], true, true,
9076 [(set_attr "conds" "use")
9077 (set_attr "length" "12")
9078 (set_attr "type" "load_4")]
9081 (define_insn "*arm_simple_return"
9086 if (arm_ccfsm_state == 2)
9088 arm_ccfsm_state += 2;
9091 return output_return_instruction (const_true_rtx, true, false, true);
9093 [(set_attr "type" "branch")
9094 (set_attr "length" "4")
9095 (set_attr "predicable" "yes")]
9098 ;; Generate a sequence of instructions to determine if the processor is
9099 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9102 (define_expand "return_addr_mask"
9104 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9106 (set (match_operand:SI 0 "s_register_operand")
9107 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9109 (const_int 67108860)))] ; 0x03fffffc
9112 operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
9115 (define_insn "*check_arch2"
9116 [(set (match_operand:CC_NZ 0 "cc_register" "")
9117 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9120 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9121 [(set_attr "length" "8")
9122 (set_attr "conds" "set")
9123 (set_attr "type" "multiple")]
9126 ;; Call subroutine returning any type.
9128 (define_expand "untyped_call"
9129 [(parallel [(call (match_operand 0 "" "")
9131 (match_operand 1 "" "")
9132 (match_operand 2 "" "")])]
9133 "TARGET_EITHER && !TARGET_FDPIC"
9137 rtx par = gen_rtx_PARALLEL (VOIDmode,
9138 rtvec_alloc (XVECLEN (operands[2], 0)));
9139 rtx addr = gen_reg_rtx (Pmode);
9143 emit_move_insn (addr, XEXP (operands[1], 0));
9144 mem = change_address (operands[1], BLKmode, addr);
9146 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9148 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9150 /* Default code only uses r0 as a return value, but we could
9151 be using anything up to 4 registers. */
9152 if (REGNO (src) == R0_REGNUM)
9153 src = gen_rtx_REG (TImode, R0_REGNUM);
9155 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9157 size += GET_MODE_SIZE (GET_MODE (src));
9160 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9164 for (i = 0; i < XVECLEN (par, 0); i++)
9166 HOST_WIDE_INT offset = 0;
9167 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9170 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9172 mem = change_address (mem, GET_MODE (reg), NULL);
9173 if (REGNO (reg) == R0_REGNUM)
9175 /* On thumb we have to use a write-back instruction. */
9176 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9177 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9178 size = TARGET_ARM ? 16 : 0;
9182 emit_move_insn (mem, reg);
9183 size = GET_MODE_SIZE (GET_MODE (reg));
9187 /* The optimizer does not know that the call sets the function value
9188 registers we stored in the result block. We avoid problems by
9189 claiming that all hard registers are used and clobbered at this
9191 emit_insn (gen_blockage ());
9197 (define_expand "untyped_return"
9198 [(match_operand:BLK 0 "memory_operand")
9199 (match_operand 1 "" "")]
9200 "TARGET_EITHER && !TARGET_FDPIC"
9204 rtx addr = gen_reg_rtx (Pmode);
9208 emit_move_insn (addr, XEXP (operands[0], 0));
9209 mem = change_address (operands[0], BLKmode, addr);
9211 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9213 HOST_WIDE_INT offset = 0;
9214 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9217 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9219 mem = change_address (mem, GET_MODE (reg), NULL);
9220 if (REGNO (reg) == R0_REGNUM)
9222 /* On thumb we have to use a write-back instruction. */
9223 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9224 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9225 size = TARGET_ARM ? 16 : 0;
9229 emit_move_insn (reg, mem);
9230 size = GET_MODE_SIZE (GET_MODE (reg));
9234 /* Emit USE insns before the return. */
9235 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9236 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9238 /* Construct the return. */
9239 expand_naked_return ();
9245 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9246 ;; all of memory. This blocks insns from being moved across this point.
9248 (define_insn "blockage"
9249 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9252 [(set_attr "length" "0")
9253 (set_attr "type" "block")]
9256 ;; Since we hard code r0 here use the 'o' constraint to prevent
9257 ;; provoking undefined behaviour in the hardware with putting out
9258 ;; auto-increment operations with potentially r0 as the base register.
9259 (define_insn "probe_stack"
9260 [(set (match_operand:SI 0 "memory_operand" "=o")
9261 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9264 [(set_attr "type" "store_4")
9265 (set_attr "predicable" "yes")]
9268 (define_insn "probe_stack_range"
9269 [(set (match_operand:SI 0 "register_operand" "=r")
9270 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9271 (match_operand:SI 2 "register_operand" "r")]
9272 VUNSPEC_PROBE_STACK_RANGE))]
9275 return output_probe_stack_range (operands[0], operands[2]);
9277 [(set_attr "type" "multiple")
9278 (set_attr "conds" "clob")]
9281 ;; Named patterns for stack smashing protection.
9282 (define_expand "stack_protect_combined_set"
9284 [(set (match_operand:SI 0 "memory_operand")
9285 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9287 (clobber (match_scratch:SI 2 ""))
9288 (clobber (match_scratch:SI 3 ""))])]
9289 "arm_stack_protector_guard == SSP_GLOBAL"
9293 ;; Use a separate insn from the above expand to be able to have the mem outside
9294 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
9295 ;; try to reload the guard since we need to control how PIC access is done in
9296 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9297 ;; legitimize_pic_address ()).
9298 (define_insn_and_split "*stack_protect_combined_set_insn"
9299 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9300 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9302 (clobber (match_scratch:SI 2 "=&l,&r"))
9303 (clobber (match_scratch:SI 3 "=&l,&r"))]
9307 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9309 (clobber (match_dup 2))])]
9317 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9319 pic_reg = operands[3];
9321 /* Forces recomputing of GOT base now. */
9322 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9323 true /*compute_now*/);
9327 if (address_operand (operands[1], SImode))
9328 operands[2] = operands[1];
9331 rtx mem = force_const_mem (SImode, operands[1]);
9332 if (!general_operand (mem, SImode))
9334 emit_move_insn (operands[2], XEXP (mem, 0));
9335 mem = replace_equiv_address (mem, operands[2], false);
9337 emit_move_insn (operands[2], mem);
9341 [(set_attr "arch" "t1,32")]
9344 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
9345 ;; canary value does not live beyond the life of this sequence.
9346 (define_insn "*stack_protect_set_insn"
9347 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9348 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9350 (clobber (match_dup 1))]
9353 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9354 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9355 [(set_attr "length" "8,12")
9356 (set_attr "conds" "clob,nocond")
9357 (set_attr "type" "multiple")
9358 (set_attr "arch" "t1,32")]
9361 (define_expand "stack_protect_combined_test"
9365 (eq (match_operand:SI 0 "memory_operand")
9366 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9368 (label_ref (match_operand 2))
9370 (clobber (match_scratch:SI 3 ""))
9371 (clobber (match_scratch:SI 4 ""))
9372 (clobber (reg:CC CC_REGNUM))])]
9373 "arm_stack_protector_guard == SSP_GLOBAL"
9377 ;; Use a separate insn from the above expand to be able to have the mem outside
9378 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
9379 ;; try to reload the guard since we need to control how PIC access is done in
9380 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9381 ;; legitimize_pic_address ()).
9382 (define_insn_and_split "*stack_protect_combined_test_insn"
9385 (eq (match_operand:SI 0 "memory_operand" "m,m")
9386 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9388 (label_ref (match_operand 2))
9390 (clobber (match_scratch:SI 3 "=&l,&r"))
9391 (clobber (match_scratch:SI 4 "=&l,&r"))
9392 (clobber (reg:CC CC_REGNUM))]
9405 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9407 pic_reg = operands[4];
9409 /* Forces recomputing of GOT base now. */
9410 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9411 true /*compute_now*/);
9415 if (address_operand (operands[1], SImode))
9416 operands[3] = operands[1];
9419 rtx mem = force_const_mem (SImode, operands[1]);
9420 if (!general_operand (mem, SImode))
9422 emit_move_insn (operands[3], XEXP (mem, 0));
9423 mem = replace_equiv_address (mem, operands[3], false);
9425 emit_move_insn (operands[3], mem);
9430 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9432 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9433 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9434 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9438 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9440 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9441 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9446 [(set_attr "arch" "t1,32")]
9449 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9450 ;; canary value does not live beyond the end of this sequence.
9451 (define_insn "arm_stack_protect_test_insn"
9452 [(set (reg:CC_Z CC_REGNUM)
9453 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9454 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9457 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9458 (clobber (match_dup 2))]
9460 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9461 [(set_attr "length" "12,16")
9462 (set_attr "conds" "set")
9463 (set_attr "type" "multiple")
9464 (set_attr "arch" "t,32")]
9467 (define_expand "stack_protect_set"
9468 [(match_operand:SI 0 "memory_operand")
9469 (match_operand:SI 1 "memory_operand")]
9470 "arm_stack_protector_guard == SSP_TLSREG"
9473 operands[1] = arm_stack_protect_tls_canary_mem (false /* reload */);
9474 emit_insn (gen_stack_protect_set_tls (operands[0], operands[1]));
9479 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9480 ;; canary value does not live beyond the life of this sequence.
9481 (define_insn "stack_protect_set_tls"
9482 [(set (match_operand:SI 0 "memory_operand" "=m")
9483 (unspec:SI [(match_operand:SI 1 "memory_operand" "m")]
9485 (set (match_scratch:SI 2 "=&r") (const_int 0))]
9487 "ldr\\t%2, %1\;str\\t%2, %0\;mov\t%2, #0"
9488 [(set_attr "length" "12")
9489 (set_attr "conds" "unconditional")
9490 (set_attr "type" "multiple")]
9493 (define_expand "stack_protect_test"
9494 [(match_operand:SI 0 "memory_operand")
9495 (match_operand:SI 1 "memory_operand")
9496 (match_operand:SI 2)]
9497 "arm_stack_protector_guard == SSP_TLSREG"
9500 operands[1] = arm_stack_protect_tls_canary_mem (true /* reload */);
9501 emit_insn (gen_stack_protect_test_tls (operands[0], operands[1]));
9503 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9504 rtx eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9505 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9510 (define_insn "stack_protect_test_tls"
9511 [(set (reg:CC_Z CC_REGNUM)
9512 (compare:CC_Z (unspec:SI [(match_operand:SI 0 "memory_operand" "m")
9513 (match_operand:SI 1 "memory_operand" "m")]
9516 (clobber (match_scratch:SI 2 "=&r"))
9517 (clobber (match_scratch:SI 3 "=&r"))]
9519 "ldr\t%2, %0\;ldr\t%3, %1\;eors\t%2, %3, %2\;mov\t%3, #0"
9520 [(set_attr "length" "16")
9521 (set_attr "conds" "set")
9522 (set_attr "type" "multiple")]
9525 (define_expand "casesi"
9526 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9527 (match_operand:SI 1 "const_int_operand") ; lower bound
9528 (match_operand:SI 2 "const_int_operand") ; total range
9529 (match_operand:SI 3 "" "") ; table label
9530 (match_operand:SI 4 "" "")] ; Out of range label
9532 || (!target_pure_code
9533 && (TARGET_THUMB2 || optimize_size || flag_pic)))"
9536 enum insn_code code;
9537 if (operands[1] != const0_rtx)
9539 rtx reg = gen_reg_rtx (SImode);
9541 emit_insn (gen_addsi3 (reg, operands[0],
9542 gen_int_mode (-INTVAL (operands[1]),
9548 code = CODE_FOR_arm_casesi_internal;
9549 else if (TARGET_THUMB1)
9550 code = CODE_FOR_thumb1_casesi_internal_pic;
9552 code = CODE_FOR_thumb2_casesi_internal_pic;
9554 code = CODE_FOR_thumb2_casesi_internal;
9556 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9557 operands[2] = force_reg (SImode, operands[2]);
9559 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9560 operands[3], operands[4]));
9565 ;; The USE in this pattern is needed to tell flow analysis that this is
9566 ;; a CASESI insn. It has no other purpose.
9567 (define_expand "arm_casesi_internal"
9568 [(parallel [(set (pc)
9570 (leu (match_operand:SI 0 "s_register_operand")
9571 (match_operand:SI 1 "arm_rhs_operand"))
9573 (label_ref:SI (match_operand 3 ""))))
9574 (clobber (reg:CC CC_REGNUM))
9575 (clobber (match_scratch:SI 5))
9576 (use (label_ref:SI (match_operand 2 "")))])]
9579 rtx vec_table_ref = force_reg (SImode, gen_rtx_LABEL_REF (SImode, operands[2]));
9580 rtx tmp = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9581 tmp = gen_rtx_PLUS (SImode, tmp,
9583 operands[4] = gen_rtx_MEM (SImode, tmp);
9584 MEM_READONLY_P (operands[4]) = 1;
9585 MEM_NOTRAP_P (operands[4]) = 1;
9588 (define_insn "*arm_casesi_internal"
9589 [(parallel [(set (pc)
9591 (leu (match_operand:SI 0 "s_register_operand" "r")
9592 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9593 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9594 (match_operand:SI 5 "s_register_operand" "r")))
9595 (label_ref:SI (match_operand 3 "" ""))))
9596 (clobber (reg:CC CC_REGNUM))
9597 (clobber (match_scratch:SI 4 "=r"))
9598 (use (label_ref:SI (match_operand 2 "")))])]
9601 return arm_output_casesi (operands);
9603 [(set_attr "conds" "clob")
9604 (set_attr "length" "24")
9605 (set_attr "type" "multiple")]
9608 (define_expand "indirect_jump"
9610 (match_operand:SI 0 "s_register_operand"))]
9613 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9614 address and use bx. */
9618 tmp = gen_reg_rtx (SImode);
9619 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9625 ;; NB Never uses BX.
9626 (define_insn "*arm_indirect_jump"
9628 (match_operand:SI 0 "s_register_operand" "r"))]
9630 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9631 [(set_attr "predicable" "yes")
9632 (set_attr "type" "branch")]
9635 (define_insn "*load_indirect_jump"
9637 (match_operand:SI 0 "memory_operand" "m"))]
9639 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9640 [(set_attr "type" "load_4")
9641 (set_attr "pool_range" "4096")
9642 (set_attr "neg_pool_range" "4084")
9643 (set_attr "predicable" "yes")]
9653 [(set (attr "length")
9654 (if_then_else (eq_attr "is_thumb" "yes")
9657 (set_attr "type" "mov_reg")]
9661 [(trap_if (const_int 1) (const_int 0))]
9665 return \".inst\\t0xe7f000f0\";
9667 return \".inst\\t0xdeff\";
9669 [(set (attr "length")
9670 (if_then_else (eq_attr "is_thumb" "yes")
9673 (set_attr "type" "trap")
9674 (set_attr "conds" "unconditional")]
9678 ;; Patterns to allow combination of arithmetic, cond code and shifts
9680 (define_insn "*<arith_shift_insn>_multsi"
9681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9683 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9684 (match_operand:SI 3 "power_of_two_operand" ""))
9685 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9687 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9688 [(set_attr "predicable" "yes")
9689 (set_attr "shift" "2")
9690 (set_attr "arch" "a,t2")
9691 (set_attr "autodetect_type" "alu_shift_mul_op3")])
9693 (define_insn "*<arith_shift_insn>_shiftsi"
9694 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9696 (match_operator:SI 2 "shift_nomul_operator"
9697 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9698 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9699 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9700 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9701 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9702 [(set_attr "predicable" "yes")
9703 (set_attr "shift" "3")
9704 (set_attr "arch" "a,t2,a")
9705 (set_attr "autodetect_type" "alu_shift_operator2")])
9708 [(set (match_operand:SI 0 "s_register_operand" "")
9709 (match_operator:SI 1 "shiftable_operator"
9710 [(match_operator:SI 2 "shiftable_operator"
9711 [(match_operator:SI 3 "shift_operator"
9712 [(match_operand:SI 4 "s_register_operand" "")
9713 (match_operand:SI 5 "reg_or_int_operand" "")])
9714 (match_operand:SI 6 "s_register_operand" "")])
9715 (match_operand:SI 7 "arm_rhs_operand" "")]))
9716 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9719 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9722 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9725 (define_insn "*arith_shiftsi_compare0"
9726 [(set (reg:CC_NZ CC_REGNUM)
9728 (match_operator:SI 1 "shiftable_operator"
9729 [(match_operator:SI 3 "shift_operator"
9730 [(match_operand:SI 4 "s_register_operand" "r,r")
9731 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9732 (match_operand:SI 2 "s_register_operand" "r,r")])
9734 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9735 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9738 "%i1s%?\\t%0, %2, %4%S3"
9739 [(set_attr "conds" "set")
9740 (set_attr "shift" "4")
9741 (set_attr "arch" "32,a")
9742 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9744 (define_insn "*arith_shiftsi_compare0_scratch"
9745 [(set (reg:CC_NZ CC_REGNUM)
9747 (match_operator:SI 1 "shiftable_operator"
9748 [(match_operator:SI 3 "shift_operator"
9749 [(match_operand:SI 4 "s_register_operand" "r,r")
9750 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9751 (match_operand:SI 2 "s_register_operand" "r,r")])
9753 (clobber (match_scratch:SI 0 "=r,r"))]
9755 "%i1s%?\\t%0, %2, %4%S3"
9756 [(set_attr "conds" "set")
9757 (set_attr "shift" "4")
9758 (set_attr "arch" "32,a")
9759 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9761 (define_insn "*sub_shiftsi"
9762 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9763 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9764 (match_operator:SI 2 "shift_operator"
9765 [(match_operand:SI 3 "s_register_operand" "r,r")
9766 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9768 "sub%?\\t%0, %1, %3%S2"
9769 [(set_attr "predicable" "yes")
9770 (set_attr "predicable_short_it" "no")
9771 (set_attr "shift" "3")
9772 (set_attr "arch" "32,a")
9773 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9775 (define_insn "*sub_shiftsi_compare0"
9776 [(set (reg:CC_NZ CC_REGNUM)
9778 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9779 (match_operator:SI 2 "shift_operator"
9780 [(match_operand:SI 3 "s_register_operand" "r,r")
9781 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9783 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9784 (minus:SI (match_dup 1)
9785 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9787 "subs%?\\t%0, %1, %3%S2"
9788 [(set_attr "conds" "set")
9789 (set_attr "shift" "3")
9790 (set_attr "arch" "32,a")
9791 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9793 (define_insn "*sub_shiftsi_compare0_scratch"
9794 [(set (reg:CC_NZ CC_REGNUM)
9796 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9797 (match_operator:SI 2 "shift_operator"
9798 [(match_operand:SI 3 "s_register_operand" "r,r")
9799 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9801 (clobber (match_scratch:SI 0 "=r,r"))]
9803 "subs%?\\t%0, %1, %3%S2"
9804 [(set_attr "conds" "set")
9805 (set_attr "shift" "3")
9806 (set_attr "arch" "32,a")
9807 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9810 (define_insn_and_split "*and_scc"
9811 [(set (match_operand:SI 0 "s_register_operand" "=r")
9812 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9813 [(match_operand 2 "cc_register" "") (const_int 0)])
9814 (match_operand:SI 3 "s_register_operand" "r")))]
9816 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9817 "&& reload_completed"
9818 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9819 (cond_exec (match_dup 4) (set (match_dup 0)
9820 (and:SI (match_dup 3) (const_int 1))))]
9822 machine_mode mode = GET_MODE (operands[2]);
9823 enum rtx_code rc = GET_CODE (operands[1]);
9825 /* Note that operands[4] is the same as operands[1],
9826 but with VOIDmode as the result. */
9827 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9828 if (mode == CCFPmode || mode == CCFPEmode)
9829 rc = reverse_condition_maybe_unordered (rc);
9831 rc = reverse_condition (rc);
9832 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9834 [(set_attr "conds" "use")
9835 (set_attr "type" "multiple")
9836 (set_attr "length" "8")]
9839 (define_insn_and_split "*ior_scc"
9840 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9841 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9842 [(match_operand 2 "cc_register" "") (const_int 0)])
9843 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9848 "&& reload_completed
9849 && REGNO (operands [0]) != REGNO (operands[3])"
9850 ;; && which_alternative == 1
9851 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9852 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9853 (cond_exec (match_dup 4) (set (match_dup 0)
9854 (ior:SI (match_dup 3) (const_int 1))))]
9856 machine_mode mode = GET_MODE (operands[2]);
9857 enum rtx_code rc = GET_CODE (operands[1]);
9859 /* Note that operands[4] is the same as operands[1],
9860 but with VOIDmode as the result. */
9861 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9862 if (mode == CCFPmode || mode == CCFPEmode)
9863 rc = reverse_condition_maybe_unordered (rc);
9865 rc = reverse_condition (rc);
9866 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9868 [(set_attr "conds" "use")
9869 (set_attr "length" "4,8")
9870 (set_attr "type" "logic_imm,multiple")]
9873 ; A series of splitters for the compare_scc pattern below. Note that
9874 ; order is important.
9876 [(set (match_operand:SI 0 "s_register_operand" "")
9877 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9879 (clobber (reg:CC CC_REGNUM))]
9880 "TARGET_32BIT && reload_completed"
9881 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9884 [(set (match_operand:SI 0 "s_register_operand" "")
9885 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9887 (clobber (reg:CC CC_REGNUM))]
9888 "TARGET_32BIT && reload_completed"
9889 [(set (match_dup 0) (not:SI (match_dup 1)))
9890 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9893 [(set (match_operand:SI 0 "s_register_operand" "")
9894 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9896 (clobber (reg:CC CC_REGNUM))]
9897 "arm_arch5t && TARGET_32BIT"
9898 [(set (match_dup 0) (clz:SI (match_dup 1)))
9899 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9903 [(set (match_operand:SI 0 "s_register_operand" "")
9904 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9906 (clobber (reg:CC CC_REGNUM))]
9907 "TARGET_32BIT && reload_completed"
9909 [(set (reg:CC CC_REGNUM)
9910 (compare:CC (const_int 1) (match_dup 1)))
9912 (minus:SI (const_int 1) (match_dup 1)))])
9913 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9914 (set (match_dup 0) (const_int 0)))])
9917 [(set (match_operand:SI 0 "s_register_operand" "")
9918 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9919 (match_operand:SI 2 "const_int_operand" "")))
9920 (clobber (reg:CC CC_REGNUM))]
9921 "TARGET_32BIT && reload_completed"
9923 [(set (reg:CC CC_REGNUM)
9924 (compare:CC (match_dup 1) (match_dup 2)))
9925 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9926 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9927 (set (match_dup 0) (const_int 1)))]
9929 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9933 [(set (match_operand:SI 0 "s_register_operand" "")
9934 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9935 (match_operand:SI 2 "arm_add_operand" "")))
9936 (clobber (reg:CC CC_REGNUM))]
9937 "TARGET_32BIT && reload_completed"
9939 [(set (reg:CC_NZ CC_REGNUM)
9940 (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9942 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9943 (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9944 (set (match_dup 0) (const_int 1)))])
9946 (define_insn_and_split "*compare_scc"
9947 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9948 (match_operator:SI 1 "arm_comparison_operator"
9949 [(match_operand:SI 2 "s_register_operand" "r,r")
9950 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9951 (clobber (reg:CC CC_REGNUM))]
9954 "&& reload_completed"
9955 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9956 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9957 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9960 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9961 operands[2], operands[3]);
9962 enum rtx_code rc = GET_CODE (operands[1]);
9964 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9966 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9967 if (mode == CCFPmode || mode == CCFPEmode)
9968 rc = reverse_condition_maybe_unordered (rc);
9970 rc = reverse_condition (rc);
9971 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9973 [(set_attr "type" "multiple")]
9976 ;; Attempt to improve the sequence generated by the compare_scc splitters
9977 ;; not to use conditional execution.
9979 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9983 [(set (reg:CC CC_REGNUM)
9984 (compare:CC (match_operand:SI 1 "register_operand" "")
9986 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9987 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9988 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9989 (set (match_dup 0) (const_int 1)))]
9990 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9991 [(set (match_dup 0) (clz:SI (match_dup 1)))
9992 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9995 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9999 [(set (reg:CC CC_REGNUM)
10000 (compare:CC (match_operand:SI 1 "register_operand" "")
10002 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10003 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10004 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10005 (set (match_dup 0) (const_int 1)))
10006 (match_scratch:SI 2 "r")]
10007 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10009 [(set (reg:CC CC_REGNUM)
10010 (compare:CC (const_int 0) (match_dup 1)))
10011 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
10013 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
10014 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
10017 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
10018 ;; sub Rd, Reg1, reg2
10022 [(set (reg:CC CC_REGNUM)
10023 (compare:CC (match_operand:SI 1 "register_operand" "")
10024 (match_operand:SI 2 "arm_rhs_operand" "")))
10025 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10026 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10027 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10028 (set (match_dup 0) (const_int 1)))]
10029 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
10030 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
10031 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
10032 (set (match_dup 0) (clz:SI (match_dup 0)))
10033 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10037 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
10038 ;; sub T1, Reg1, reg2
10042 [(set (reg:CC CC_REGNUM)
10043 (compare:CC (match_operand:SI 1 "register_operand" "")
10044 (match_operand:SI 2 "arm_rhs_operand" "")))
10045 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10046 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10047 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10048 (set (match_dup 0) (const_int 1)))
10049 (match_scratch:SI 3 "r")]
10050 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10051 [(set (match_dup 3) (match_dup 4))
10053 [(set (reg:CC CC_REGNUM)
10054 (compare:CC (const_int 0) (match_dup 3)))
10055 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10057 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10058 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
10060 if (CONST_INT_P (operands[2]))
10061 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
10063 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
10066 (define_insn "*cond_move"
10067 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10068 (if_then_else:SI (match_operator 3 "equality_operator"
10069 [(match_operator 4 "arm_comparison_operator"
10070 [(match_operand 5 "cc_register" "") (const_int 0)])
10072 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10073 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10076 if (GET_CODE (operands[3]) == NE)
10078 if (which_alternative != 1)
10079 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10080 if (which_alternative != 0)
10081 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10084 if (which_alternative != 0)
10085 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10086 if (which_alternative != 1)
10087 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10090 [(set_attr "conds" "use")
10091 (set_attr_alternative "type"
10092 [(if_then_else (match_operand 2 "const_int_operand" "")
10093 (const_string "mov_imm")
10094 (const_string "mov_reg"))
10095 (if_then_else (match_operand 1 "const_int_operand" "")
10096 (const_string "mov_imm")
10097 (const_string "mov_reg"))
10098 (const_string "multiple")])
10099 (set_attr "length" "4,4,8")]
10102 (define_insn "*cond_arith"
10103 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10104 (match_operator:SI 5 "shiftable_operator"
10105 [(match_operator:SI 4 "arm_comparison_operator"
10106 [(match_operand:SI 2 "s_register_operand" "r,r")
10107 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10108 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10109 (clobber (reg:CC CC_REGNUM))]
10112 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10113 return \"%i5\\t%0, %1, %2, lsr #31\";
10115 output_asm_insn (\"cmp\\t%2, %3\", operands);
10116 if (GET_CODE (operands[5]) == AND)
10117 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10118 else if (GET_CODE (operands[5]) == MINUS)
10119 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10120 else if (which_alternative != 0)
10121 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10122 return \"%i5%d4\\t%0, %1, #1\";
10124 [(set_attr "conds" "clob")
10125 (set_attr "length" "12")
10126 (set_attr "type" "multiple")]
10129 (define_insn "*cond_sub"
10130 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10131 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10132 (match_operator:SI 4 "arm_comparison_operator"
10133 [(match_operand:SI 2 "s_register_operand" "r,r")
10134 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10135 (clobber (reg:CC CC_REGNUM))]
10138 output_asm_insn (\"cmp\\t%2, %3\", operands);
10139 if (which_alternative != 0)
10140 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10141 return \"sub%d4\\t%0, %1, #1\";
10143 [(set_attr "conds" "clob")
10144 (set_attr "length" "8,12")
10145 (set_attr "type" "multiple")]
10148 (define_insn "*cmp_ite0"
10149 [(set (match_operand 6 "dominant_cc_register" "")
10152 (match_operator 4 "arm_comparison_operator"
10153 [(match_operand:SI 0 "s_register_operand"
10154 "l,l,l,r,r,r,r,r,r")
10155 (match_operand:SI 1 "arm_add_operand"
10156 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10157 (match_operator:SI 5 "arm_comparison_operator"
10158 [(match_operand:SI 2 "s_register_operand"
10159 "l,r,r,l,l,r,r,r,r")
10160 (match_operand:SI 3 "arm_add_operand"
10161 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10167 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10169 {\"cmp%d5\\t%0, %1\",
10170 \"cmp%d4\\t%2, %3\"},
10171 {\"cmn%d5\\t%0, #%n1\",
10172 \"cmp%d4\\t%2, %3\"},
10173 {\"cmp%d5\\t%0, %1\",
10174 \"cmn%d4\\t%2, #%n3\"},
10175 {\"cmn%d5\\t%0, #%n1\",
10176 \"cmn%d4\\t%2, #%n3\"}
10178 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10183 \"cmn\\t%0, #%n1\"},
10184 {\"cmn\\t%2, #%n3\",
10186 {\"cmn\\t%2, #%n3\",
10187 \"cmn\\t%0, #%n1\"}
10189 static const char * const ite[2] =
10194 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10195 CMP_CMP, CMN_CMP, CMP_CMP,
10196 CMN_CMP, CMP_CMN, CMN_CMN};
10198 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10200 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10201 if (TARGET_THUMB2) {
10202 output_asm_insn (ite[swap], operands);
10204 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10207 [(set_attr "conds" "set")
10208 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10209 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10210 (set_attr "type" "multiple")
10211 (set_attr_alternative "length"
10217 (if_then_else (eq_attr "is_thumb" "no")
10220 (if_then_else (eq_attr "is_thumb" "no")
10223 (if_then_else (eq_attr "is_thumb" "no")
10226 (if_then_else (eq_attr "is_thumb" "no")
10231 (define_insn "*cmp_ite1"
10232 [(set (match_operand 6 "dominant_cc_register" "")
10235 (match_operator 4 "arm_comparison_operator"
10236 [(match_operand:SI 0 "s_register_operand"
10237 "l,l,l,r,r,r,r,r,r")
10238 (match_operand:SI 1 "arm_add_operand"
10239 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10240 (match_operator:SI 5 "arm_comparison_operator"
10241 [(match_operand:SI 2 "s_register_operand"
10242 "l,r,r,l,l,r,r,r,r")
10243 (match_operand:SI 3 "arm_add_operand"
10244 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10250 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10254 {\"cmn\\t%0, #%n1\",
10257 \"cmn\\t%2, #%n3\"},
10258 {\"cmn\\t%0, #%n1\",
10259 \"cmn\\t%2, #%n3\"}
10261 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10263 {\"cmp%d4\\t%2, %3\",
10264 \"cmp%D5\\t%0, %1\"},
10265 {\"cmp%d4\\t%2, %3\",
10266 \"cmn%D5\\t%0, #%n1\"},
10267 {\"cmn%d4\\t%2, #%n3\",
10268 \"cmp%D5\\t%0, %1\"},
10269 {\"cmn%d4\\t%2, #%n3\",
10270 \"cmn%D5\\t%0, #%n1\"}
10272 static const char * const ite[2] =
10277 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10278 CMP_CMP, CMN_CMP, CMP_CMP,
10279 CMN_CMP, CMP_CMN, CMN_CMN};
10281 comparison_dominates_p (GET_CODE (operands[5]),
10282 reverse_condition (GET_CODE (operands[4])));
10284 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10285 if (TARGET_THUMB2) {
10286 output_asm_insn (ite[swap], operands);
10288 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10291 [(set_attr "conds" "set")
10292 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10293 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10294 (set_attr_alternative "length"
10300 (if_then_else (eq_attr "is_thumb" "no")
10303 (if_then_else (eq_attr "is_thumb" "no")
10306 (if_then_else (eq_attr "is_thumb" "no")
10309 (if_then_else (eq_attr "is_thumb" "no")
10312 (set_attr "type" "multiple")]
10315 (define_insn "*cmp_and"
10316 [(set (match_operand 6 "dominant_cc_register" "")
10319 (match_operator 4 "arm_comparison_operator"
10320 [(match_operand:SI 0 "s_register_operand"
10321 "l,l,l,r,r,r,r,r,r,r")
10322 (match_operand:SI 1 "arm_add_operand"
10323 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10324 (match_operator:SI 5 "arm_comparison_operator"
10325 [(match_operand:SI 2 "s_register_operand"
10326 "l,r,r,l,l,r,r,r,r,r")
10327 (match_operand:SI 3 "arm_add_operand"
10328 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10333 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10335 {\"cmp%d5\\t%0, %1\",
10336 \"cmp%d4\\t%2, %3\"},
10337 {\"cmn%d5\\t%0, #%n1\",
10338 \"cmp%d4\\t%2, %3\"},
10339 {\"cmp%d5\\t%0, %1\",
10340 \"cmn%d4\\t%2, #%n3\"},
10341 {\"cmn%d5\\t%0, #%n1\",
10342 \"cmn%d4\\t%2, #%n3\"}
10344 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10349 \"cmn\\t%0, #%n1\"},
10350 {\"cmn\\t%2, #%n3\",
10352 {\"cmn\\t%2, #%n3\",
10353 \"cmn\\t%0, #%n1\"}
10355 static const char *const ite[2] =
10360 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10361 CMP_CMP, CMN_CMP, CMP_CMP,
10362 CMP_CMP, CMN_CMP, CMP_CMN,
10365 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10367 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10368 if (TARGET_THUMB2) {
10369 output_asm_insn (ite[swap], operands);
10371 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10374 [(set_attr "conds" "set")
10375 (set_attr "predicable" "no")
10376 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10377 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10378 (set_attr_alternative "length"
10385 (if_then_else (eq_attr "is_thumb" "no")
10388 (if_then_else (eq_attr "is_thumb" "no")
10391 (if_then_else (eq_attr "is_thumb" "no")
10394 (if_then_else (eq_attr "is_thumb" "no")
10397 (set_attr "type" "multiple")]
10400 (define_insn "*cmp_ior"
10401 [(set (match_operand 6 "dominant_cc_register" "")
10404 (match_operator 4 "arm_comparison_operator"
10405 [(match_operand:SI 0 "s_register_operand"
10406 "l,l,l,r,r,r,r,r,r,r")
10407 (match_operand:SI 1 "arm_add_operand"
10408 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10409 (match_operator:SI 5 "arm_comparison_operator"
10410 [(match_operand:SI 2 "s_register_operand"
10411 "l,r,r,l,l,r,r,r,r,r")
10412 (match_operand:SI 3 "arm_add_operand"
10413 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10418 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10422 {\"cmn\\t%0, #%n1\",
10425 \"cmn\\t%2, #%n3\"},
10426 {\"cmn\\t%0, #%n1\",
10427 \"cmn\\t%2, #%n3\"}
10429 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10431 {\"cmp%D4\\t%2, %3\",
10432 \"cmp%D5\\t%0, %1\"},
10433 {\"cmp%D4\\t%2, %3\",
10434 \"cmn%D5\\t%0, #%n1\"},
10435 {\"cmn%D4\\t%2, #%n3\",
10436 \"cmp%D5\\t%0, %1\"},
10437 {\"cmn%D4\\t%2, #%n3\",
10438 \"cmn%D5\\t%0, #%n1\"}
10440 static const char *const ite[2] =
10445 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10446 CMP_CMP, CMN_CMP, CMP_CMP,
10447 CMP_CMP, CMN_CMP, CMP_CMN,
10450 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10452 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10453 if (TARGET_THUMB2) {
10454 output_asm_insn (ite[swap], operands);
10456 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10460 [(set_attr "conds" "set")
10461 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10462 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10463 (set_attr_alternative "length"
10470 (if_then_else (eq_attr "is_thumb" "no")
10473 (if_then_else (eq_attr "is_thumb" "no")
10476 (if_then_else (eq_attr "is_thumb" "no")
10479 (if_then_else (eq_attr "is_thumb" "no")
10482 (set_attr "type" "multiple")]
10485 (define_insn_and_split "*ior_scc_scc"
10486 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10487 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10488 [(match_operand:SI 1 "s_register_operand" "l,r")
10489 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10490 (match_operator:SI 6 "arm_comparison_operator"
10491 [(match_operand:SI 4 "s_register_operand" "l,r")
10492 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10493 (clobber (reg:CC CC_REGNUM))]
10495 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10498 "TARGET_32BIT && reload_completed"
10499 [(set (match_dup 7)
10502 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10503 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10505 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10507 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10510 [(set_attr "conds" "clob")
10511 (set_attr "enabled_for_short_it" "yes,no")
10512 (set_attr "length" "16")
10513 (set_attr "type" "multiple")]
10516 ; If the above pattern is followed by a CMP insn, then the compare is
10517 ; redundant, since we can rework the conditional instruction that follows.
10518 (define_insn_and_split "*ior_scc_scc_cmp"
10519 [(set (match_operand 0 "dominant_cc_register" "")
10520 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10521 [(match_operand:SI 1 "s_register_operand" "l,r")
10522 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10523 (match_operator:SI 6 "arm_comparison_operator"
10524 [(match_operand:SI 4 "s_register_operand" "l,r")
10525 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10527 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10528 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10529 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10532 "TARGET_32BIT && reload_completed"
10533 [(set (match_dup 0)
10536 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10537 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10539 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10541 [(set_attr "conds" "set")
10542 (set_attr "enabled_for_short_it" "yes,no")
10543 (set_attr "length" "16")
10544 (set_attr "type" "multiple")]
10547 (define_insn_and_split "*and_scc_scc"
10548 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10549 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10550 [(match_operand:SI 1 "s_register_operand" "l,r")
10551 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10552 (match_operator:SI 6 "arm_comparison_operator"
10553 [(match_operand:SI 4 "s_register_operand" "l,r")
10554 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10555 (clobber (reg:CC CC_REGNUM))]
10557 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10560 "TARGET_32BIT && reload_completed
10561 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10563 [(set (match_dup 7)
10566 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10567 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10569 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10571 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10574 [(set_attr "conds" "clob")
10575 (set_attr "enabled_for_short_it" "yes,no")
10576 (set_attr "length" "16")
10577 (set_attr "type" "multiple")]
10580 ; If the above pattern is followed by a CMP insn, then the compare is
10581 ; redundant, since we can rework the conditional instruction that follows.
10582 (define_insn_and_split "*and_scc_scc_cmp"
10583 [(set (match_operand 0 "dominant_cc_register" "")
10584 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10585 [(match_operand:SI 1 "s_register_operand" "l,r")
10586 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10587 (match_operator:SI 6 "arm_comparison_operator"
10588 [(match_operand:SI 4 "s_register_operand" "l,r")
10589 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10591 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10592 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10593 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10596 "TARGET_32BIT && reload_completed"
10597 [(set (match_dup 0)
10600 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10601 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10603 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10605 [(set_attr "conds" "set")
10606 (set_attr "enabled_for_short_it" "yes,no")
10607 (set_attr "length" "16")
10608 (set_attr "type" "multiple")]
10611 ;; If there is no dominance in the comparison, then we can still save an
10612 ;; instruction in the AND case, since we can know that the second compare
10613 ;; need only zero the value if false (if true, then the value is already
10615 (define_insn_and_split "*and_scc_scc_nodom"
10616 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10617 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10618 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10619 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10620 (match_operator:SI 6 "arm_comparison_operator"
10621 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10622 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10623 (clobber (reg:CC CC_REGNUM))]
10625 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10628 "TARGET_32BIT && reload_completed"
10629 [(parallel [(set (match_dup 0)
10630 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10631 (clobber (reg:CC CC_REGNUM))])
10632 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10634 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10637 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10638 operands[4], operands[5]),
10640 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10642 [(set_attr "conds" "clob")
10643 (set_attr "length" "20")
10644 (set_attr "type" "multiple")]
10648 [(set (reg:CC_NZ CC_REGNUM)
10649 (compare:CC_NZ (ior:SI
10650 (and:SI (match_operand:SI 0 "s_register_operand" "")
10652 (match_operator:SI 1 "arm_comparison_operator"
10653 [(match_operand:SI 2 "s_register_operand" "")
10654 (match_operand:SI 3 "arm_add_operand" "")]))
10656 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10658 [(set (match_dup 4)
10659 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10661 (set (reg:CC_NZ CC_REGNUM)
10662 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10667 [(set (reg:CC_NZ CC_REGNUM)
10668 (compare:CC_NZ (ior:SI
10669 (match_operator:SI 1 "arm_comparison_operator"
10670 [(match_operand:SI 2 "s_register_operand" "")
10671 (match_operand:SI 3 "arm_add_operand" "")])
10672 (and:SI (match_operand:SI 0 "s_register_operand" "")
10675 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10677 [(set (match_dup 4)
10678 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10680 (set (reg:CC_NZ CC_REGNUM)
10681 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10684 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10686 (define_insn_and_split "*negscc"
10687 [(set (match_operand:SI 0 "s_register_operand" "=r")
10688 (neg:SI (match_operator 3 "arm_comparison_operator"
10689 [(match_operand:SI 1 "s_register_operand" "r")
10690 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10691 (clobber (reg:CC CC_REGNUM))]
10694 "&& reload_completed"
10697 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10699 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10701 /* Emit mov\\t%0, %1, asr #31 */
10702 emit_insn (gen_rtx_SET (operands[0],
10703 gen_rtx_ASHIFTRT (SImode,
10708 else if (GET_CODE (operands[3]) == NE)
10710 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10711 if (CONST_INT_P (operands[2]))
10712 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10713 gen_int_mode (-INTVAL (operands[2]),
10716 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10718 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10719 gen_rtx_NE (SImode,
10722 gen_rtx_SET (operands[0],
10728 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10729 emit_insn (gen_rtx_SET (cc_reg,
10730 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10731 enum rtx_code rc = GET_CODE (operands[3]);
10733 rc = reverse_condition (rc);
10734 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10735 gen_rtx_fmt_ee (rc,
10739 gen_rtx_SET (operands[0], const0_rtx)));
10740 rc = GET_CODE (operands[3]);
10741 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10742 gen_rtx_fmt_ee (rc,
10746 gen_rtx_SET (operands[0],
10752 [(set_attr "conds" "clob")
10753 (set_attr "length" "12")
10754 (set_attr "type" "multiple")]
10757 (define_insn_and_split "movcond_addsi"
10758 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10760 (match_operator 5 "comparison_operator"
10761 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10762 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10764 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10765 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10766 (clobber (reg:CC CC_REGNUM))]
10769 "&& reload_completed"
10770 [(set (reg:CC_NZ CC_REGNUM)
10772 (plus:SI (match_dup 3)
10775 (set (match_dup 0) (match_dup 1))
10776 (cond_exec (match_dup 6)
10777 (set (match_dup 0) (match_dup 2)))]
10780 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10781 operands[3], operands[4]);
10782 enum rtx_code rc = GET_CODE (operands[5]);
10783 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10784 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10785 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10786 rc = reverse_condition (rc);
10788 std::swap (operands[1], operands[2]);
10790 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10793 [(set_attr "conds" "clob")
10794 (set_attr "enabled_for_short_it" "no,yes,yes")
10795 (set_attr "type" "multiple")]
10798 (define_insn "movcond"
10799 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10801 (match_operator 5 "arm_comparison_operator"
10802 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10803 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10804 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10805 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10806 (clobber (reg:CC CC_REGNUM))]
10809 if (GET_CODE (operands[5]) == LT
10810 && (operands[4] == const0_rtx))
10812 if (which_alternative != 1 && REG_P (operands[1]))
10814 if (operands[2] == const0_rtx)
10815 return \"and\\t%0, %1, %3, asr #31\";
10816 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10818 else if (which_alternative != 0 && REG_P (operands[2]))
10820 if (operands[1] == const0_rtx)
10821 return \"bic\\t%0, %2, %3, asr #31\";
10822 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10824 /* The only case that falls through to here is when both ops 1 & 2
10828 if (GET_CODE (operands[5]) == GE
10829 && (operands[4] == const0_rtx))
10831 if (which_alternative != 1 && REG_P (operands[1]))
10833 if (operands[2] == const0_rtx)
10834 return \"bic\\t%0, %1, %3, asr #31\";
10835 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10837 else if (which_alternative != 0 && REG_P (operands[2]))
10839 if (operands[1] == const0_rtx)
10840 return \"and\\t%0, %2, %3, asr #31\";
10841 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10843 /* The only case that falls through to here is when both ops 1 & 2
10846 if (CONST_INT_P (operands[4])
10847 && !const_ok_for_arm (INTVAL (operands[4])))
10848 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10850 output_asm_insn (\"cmp\\t%3, %4\", operands);
10851 if (which_alternative != 0)
10852 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10853 if (which_alternative != 1)
10854 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10857 [(set_attr "conds" "clob")
10858 (set_attr "length" "8,8,12")
10859 (set_attr "type" "multiple")]
10862 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10864 (define_insn "*ifcompare_plus_move"
10865 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10866 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10867 [(match_operand:SI 4 "s_register_operand" "r,r")
10868 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10870 (match_operand:SI 2 "s_register_operand" "r,r")
10871 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10872 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10873 (clobber (reg:CC CC_REGNUM))]
10876 [(set_attr "conds" "clob")
10877 (set_attr "length" "8,12")
10878 (set_attr "type" "multiple")]
10881 (define_insn "*if_plus_move"
10882 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10884 (match_operator 4 "arm_comparison_operator"
10885 [(match_operand 5 "cc_register" "") (const_int 0)])
10887 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10888 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10889 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10892 add%d4\\t%0, %2, %3
10893 sub%d4\\t%0, %2, #%n3
10894 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10895 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10896 [(set_attr "conds" "use")
10897 (set_attr "length" "4,4,8,8")
10898 (set_attr_alternative "type"
10899 [(if_then_else (match_operand 3 "const_int_operand" "")
10900 (const_string "alu_imm" )
10901 (const_string "alu_sreg"))
10902 (const_string "alu_imm")
10903 (const_string "multiple")
10904 (const_string "multiple")])]
10907 (define_insn "*ifcompare_move_plus"
10908 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10909 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10910 [(match_operand:SI 4 "s_register_operand" "r,r")
10911 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10912 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10914 (match_operand:SI 2 "s_register_operand" "r,r")
10915 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10916 (clobber (reg:CC CC_REGNUM))]
10919 [(set_attr "conds" "clob")
10920 (set_attr "length" "8,12")
10921 (set_attr "type" "multiple")]
10924 (define_insn "*if_move_plus"
10925 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10927 (match_operator 4 "arm_comparison_operator"
10928 [(match_operand 5 "cc_register" "") (const_int 0)])
10929 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10931 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10932 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10935 add%D4\\t%0, %2, %3
10936 sub%D4\\t%0, %2, #%n3
10937 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10938 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10939 [(set_attr "conds" "use")
10940 (set_attr "length" "4,4,8,8")
10941 (set_attr_alternative "type"
10942 [(if_then_else (match_operand 3 "const_int_operand" "")
10943 (const_string "alu_imm" )
10944 (const_string "alu_sreg"))
10945 (const_string "alu_imm")
10946 (const_string "multiple")
10947 (const_string "multiple")])]
10950 (define_insn "*ifcompare_arith_arith"
10951 [(set (match_operand:SI 0 "s_register_operand" "=r")
10952 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10953 [(match_operand:SI 5 "s_register_operand" "r")
10954 (match_operand:SI 6 "arm_add_operand" "rIL")])
10955 (match_operator:SI 8 "shiftable_operator"
10956 [(match_operand:SI 1 "s_register_operand" "r")
10957 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10958 (match_operator:SI 7 "shiftable_operator"
10959 [(match_operand:SI 3 "s_register_operand" "r")
10960 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10961 (clobber (reg:CC CC_REGNUM))]
10964 [(set_attr "conds" "clob")
10965 (set_attr "length" "12")
10966 (set_attr "type" "multiple")]
10969 (define_insn "*if_arith_arith"
10970 [(set (match_operand:SI 0 "s_register_operand" "=r")
10971 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10972 [(match_operand 8 "cc_register" "") (const_int 0)])
10973 (match_operator:SI 6 "shiftable_operator"
10974 [(match_operand:SI 1 "s_register_operand" "r")
10975 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10976 (match_operator:SI 7 "shiftable_operator"
10977 [(match_operand:SI 3 "s_register_operand" "r")
10978 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10980 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10981 [(set_attr "conds" "use")
10982 (set_attr "length" "8")
10983 (set_attr "type" "multiple")]
10986 (define_insn "*ifcompare_arith_move"
10987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10988 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10989 [(match_operand:SI 2 "s_register_operand" "r,r")
10990 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10991 (match_operator:SI 7 "shiftable_operator"
10992 [(match_operand:SI 4 "s_register_operand" "r,r")
10993 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10994 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10995 (clobber (reg:CC CC_REGNUM))]
10998 /* If we have an operation where (op x 0) is the identity operation and
10999 the conditional operator is LT or GE and we are comparing against zero and
11000 everything is in registers then we can do this in two instructions. */
11001 if (operands[3] == const0_rtx
11002 && GET_CODE (operands[7]) != AND
11003 && REG_P (operands[5])
11004 && REG_P (operands[1])
11005 && REGNO (operands[1]) == REGNO (operands[4])
11006 && REGNO (operands[4]) != REGNO (operands[0]))
11008 if (GET_CODE (operands[6]) == LT)
11009 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11010 else if (GET_CODE (operands[6]) == GE)
11011 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11013 if (CONST_INT_P (operands[3])
11014 && !const_ok_for_arm (INTVAL (operands[3])))
11015 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
11017 output_asm_insn (\"cmp\\t%2, %3\", operands);
11018 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11019 if (which_alternative != 0)
11020 return \"mov%D6\\t%0, %1\";
11023 [(set_attr "conds" "clob")
11024 (set_attr "length" "8,12")
11025 (set_attr "type" "multiple")]
11028 (define_insn "*if_arith_move"
11029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11030 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11031 [(match_operand 6 "cc_register" "") (const_int 0)])
11032 (match_operator:SI 5 "shiftable_operator"
11033 [(match_operand:SI 2 "s_register_operand" "r,r")
11034 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11035 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11038 %I5%d4\\t%0, %2, %3
11039 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
11040 [(set_attr "conds" "use")
11041 (set_attr "length" "4,8")
11042 (set_attr_alternative "type"
11043 [(if_then_else (match_operand 3 "const_int_operand" "")
11044 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
11045 (const_string "alu_shift_imm_lsl_1to4")
11046 (const_string "alu_shift_imm_other"))
11047 (const_string "alu_shift_reg"))
11048 (const_string "multiple")])]
11051 (define_insn "*ifcompare_move_arith"
11052 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11053 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11054 [(match_operand:SI 4 "s_register_operand" "r,r")
11055 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11056 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11057 (match_operator:SI 7 "shiftable_operator"
11058 [(match_operand:SI 2 "s_register_operand" "r,r")
11059 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11060 (clobber (reg:CC CC_REGNUM))]
11063 /* If we have an operation where (op x 0) is the identity operation and
11064 the conditional operator is LT or GE and we are comparing against zero and
11065 everything is in registers then we can do this in two instructions */
11066 if (operands[5] == const0_rtx
11067 && GET_CODE (operands[7]) != AND
11068 && REG_P (operands[3])
11069 && REG_P (operands[1])
11070 && REGNO (operands[1]) == REGNO (operands[2])
11071 && REGNO (operands[2]) != REGNO (operands[0]))
11073 if (GET_CODE (operands[6]) == GE)
11074 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11075 else if (GET_CODE (operands[6]) == LT)
11076 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11079 if (CONST_INT_P (operands[5])
11080 && !const_ok_for_arm (INTVAL (operands[5])))
11081 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11083 output_asm_insn (\"cmp\\t%4, %5\", operands);
11085 if (which_alternative != 0)
11086 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11087 return \"%I7%D6\\t%0, %2, %3\";
11089 [(set_attr "conds" "clob")
11090 (set_attr "length" "8,12")
11091 (set_attr "type" "multiple")]
11094 (define_insn "*if_move_arith"
11095 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11097 (match_operator 4 "arm_comparison_operator"
11098 [(match_operand 6 "cc_register" "") (const_int 0)])
11099 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11100 (match_operator:SI 5 "shiftable_operator"
11101 [(match_operand:SI 2 "s_register_operand" "r,r")
11102 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11105 %I5%D4\\t%0, %2, %3
11106 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11107 [(set_attr "conds" "use")
11108 (set_attr "length" "4,8")
11109 (set_attr_alternative "type"
11110 [(if_then_else (match_operand 3 "const_int_operand" "")
11111 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
11112 (const_string "alu_shift_imm_lsl_1to4")
11113 (const_string "alu_shift_imm_other"))
11114 (const_string "alu_shift_reg"))
11115 (const_string "multiple")])]
11118 (define_insn "*ifcompare_move_not"
11119 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11121 (match_operator 5 "arm_comparison_operator"
11122 [(match_operand:SI 3 "s_register_operand" "r,r")
11123 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11124 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11126 (match_operand:SI 2 "s_register_operand" "r,r"))))
11127 (clobber (reg:CC CC_REGNUM))]
11130 [(set_attr "conds" "clob")
11131 (set_attr "length" "8,12")
11132 (set_attr "type" "multiple")]
11135 (define_insn "*if_move_not"
11136 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11138 (match_operator 4 "arm_comparison_operator"
11139 [(match_operand 3 "cc_register" "") (const_int 0)])
11140 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11141 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11145 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11146 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11147 [(set_attr "conds" "use")
11148 (set_attr "type" "mvn_reg")
11149 (set_attr "length" "4,8,8")
11150 (set_attr "type" "mvn_reg,multiple,multiple")]
11153 (define_insn "*ifcompare_not_move"
11154 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11156 (match_operator 5 "arm_comparison_operator"
11157 [(match_operand:SI 3 "s_register_operand" "r,r")
11158 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11160 (match_operand:SI 2 "s_register_operand" "r,r"))
11161 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11162 (clobber (reg:CC CC_REGNUM))]
11165 [(set_attr "conds" "clob")
11166 (set_attr "length" "8,12")
11167 (set_attr "type" "multiple")]
11170 (define_insn "*if_not_move"
11171 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11173 (match_operator 4 "arm_comparison_operator"
11174 [(match_operand 3 "cc_register" "") (const_int 0)])
11175 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11176 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11180 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11181 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11182 [(set_attr "conds" "use")
11183 (set_attr "type" "mvn_reg,multiple,multiple")
11184 (set_attr "length" "4,8,8")]
11187 (define_insn "*ifcompare_shift_move"
11188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11190 (match_operator 6 "arm_comparison_operator"
11191 [(match_operand:SI 4 "s_register_operand" "r,r")
11192 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11193 (match_operator:SI 7 "shift_operator"
11194 [(match_operand:SI 2 "s_register_operand" "r,r")
11195 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11196 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11197 (clobber (reg:CC CC_REGNUM))]
11200 [(set_attr "conds" "clob")
11201 (set_attr "length" "8,12")
11202 (set_attr "type" "multiple")]
11205 (define_insn "*if_shift_move"
11206 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11208 (match_operator 5 "arm_comparison_operator"
11209 [(match_operand 6 "cc_register" "") (const_int 0)])
11210 (match_operator:SI 4 "shift_operator"
11211 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11212 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11213 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11217 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11218 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11219 [(set_attr "conds" "use")
11220 (set_attr "shift" "2")
11221 (set_attr "length" "4,8,8")
11222 (set_attr_alternative "type"
11223 [(if_then_else (match_operand 3 "const_int_operand" "")
11224 (const_string "mov_shift" )
11225 (const_string "mov_shift_reg"))
11226 (const_string "multiple")
11227 (const_string "multiple")])]
11230 (define_insn "*ifcompare_move_shift"
11231 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11233 (match_operator 6 "arm_comparison_operator"
11234 [(match_operand:SI 4 "s_register_operand" "r,r")
11235 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11236 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11237 (match_operator:SI 7 "shift_operator"
11238 [(match_operand:SI 2 "s_register_operand" "r,r")
11239 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11240 (clobber (reg:CC CC_REGNUM))]
11243 [(set_attr "conds" "clob")
11244 (set_attr "length" "8,12")
11245 (set_attr "type" "multiple")]
11248 (define_insn "*if_move_shift"
11249 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11251 (match_operator 5 "arm_comparison_operator"
11252 [(match_operand 6 "cc_register" "") (const_int 0)])
11253 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11254 (match_operator:SI 4 "shift_operator"
11255 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11256 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11260 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11261 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11262 [(set_attr "conds" "use")
11263 (set_attr "shift" "2")
11264 (set_attr "length" "4,8,8")
11265 (set_attr_alternative "type"
11266 [(if_then_else (match_operand 3 "const_int_operand" "")
11267 (const_string "mov_shift" )
11268 (const_string "mov_shift_reg"))
11269 (const_string "multiple")
11270 (const_string "multiple")])]
11273 (define_insn "*ifcompare_shift_shift"
11274 [(set (match_operand:SI 0 "s_register_operand" "=r")
11276 (match_operator 7 "arm_comparison_operator"
11277 [(match_operand:SI 5 "s_register_operand" "r")
11278 (match_operand:SI 6 "arm_add_operand" "rIL")])
11279 (match_operator:SI 8 "shift_operator"
11280 [(match_operand:SI 1 "s_register_operand" "r")
11281 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11282 (match_operator:SI 9 "shift_operator"
11283 [(match_operand:SI 3 "s_register_operand" "r")
11284 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11285 (clobber (reg:CC CC_REGNUM))]
11288 [(set_attr "conds" "clob")
11289 (set_attr "length" "12")
11290 (set_attr "type" "multiple")]
11293 (define_insn "*if_shift_shift"
11294 [(set (match_operand:SI 0 "s_register_operand" "=r")
11296 (match_operator 5 "arm_comparison_operator"
11297 [(match_operand 8 "cc_register" "") (const_int 0)])
11298 (match_operator:SI 6 "shift_operator"
11299 [(match_operand:SI 1 "s_register_operand" "r")
11300 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11301 (match_operator:SI 7 "shift_operator"
11302 [(match_operand:SI 3 "s_register_operand" "r")
11303 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11305 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11306 [(set_attr "conds" "use")
11307 (set_attr "shift" "1")
11308 (set_attr "length" "8")
11309 (set (attr "type") (if_then_else
11310 (and (match_operand 2 "const_int_operand" "")
11311 (match_operand 4 "const_int_operand" ""))
11312 (const_string "mov_shift")
11313 (const_string "mov_shift_reg")))]
11316 (define_insn "*ifcompare_not_arith"
11317 [(set (match_operand:SI 0 "s_register_operand" "=r")
11319 (match_operator 6 "arm_comparison_operator"
11320 [(match_operand:SI 4 "s_register_operand" "r")
11321 (match_operand:SI 5 "arm_add_operand" "rIL")])
11322 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11323 (match_operator:SI 7 "shiftable_operator"
11324 [(match_operand:SI 2 "s_register_operand" "r")
11325 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11326 (clobber (reg:CC CC_REGNUM))]
11329 [(set_attr "conds" "clob")
11330 (set_attr "length" "12")
11331 (set_attr "type" "multiple")]
11334 (define_insn "*if_not_arith"
11335 [(set (match_operand:SI 0 "s_register_operand" "=r")
11337 (match_operator 5 "arm_comparison_operator"
11338 [(match_operand 4 "cc_register" "") (const_int 0)])
11339 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11340 (match_operator:SI 6 "shiftable_operator"
11341 [(match_operand:SI 2 "s_register_operand" "r")
11342 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11344 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11345 [(set_attr "conds" "use")
11346 (set_attr "type" "mvn_reg")
11347 (set_attr "length" "8")]
11350 (define_insn "*ifcompare_arith_not"
11351 [(set (match_operand:SI 0 "s_register_operand" "=r")
11353 (match_operator 6 "arm_comparison_operator"
11354 [(match_operand:SI 4 "s_register_operand" "r")
11355 (match_operand:SI 5 "arm_add_operand" "rIL")])
11356 (match_operator:SI 7 "shiftable_operator"
11357 [(match_operand:SI 2 "s_register_operand" "r")
11358 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11359 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11360 (clobber (reg:CC CC_REGNUM))]
11363 [(set_attr "conds" "clob")
11364 (set_attr "length" "12")
11365 (set_attr "type" "multiple")]
11368 (define_insn "*if_arith_not"
11369 [(set (match_operand:SI 0 "s_register_operand" "=r")
11371 (match_operator 5 "arm_comparison_operator"
11372 [(match_operand 4 "cc_register" "") (const_int 0)])
11373 (match_operator:SI 6 "shiftable_operator"
11374 [(match_operand:SI 2 "s_register_operand" "r")
11375 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11376 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11378 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11379 [(set_attr "conds" "use")
11380 (set_attr "type" "multiple")
11381 (set_attr "length" "8")]
11384 (define_insn "*ifcompare_neg_move"
11385 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11387 (match_operator 5 "arm_comparison_operator"
11388 [(match_operand:SI 3 "s_register_operand" "r,r")
11389 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11390 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11391 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11392 (clobber (reg:CC CC_REGNUM))]
11395 [(set_attr "conds" "clob")
11396 (set_attr "length" "8,12")
11397 (set_attr "type" "multiple")]
11400 (define_insn_and_split "*if_neg_move"
11401 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11403 (match_operator 4 "arm_comparison_operator"
11404 [(match_operand 3 "cc_register" "") (const_int 0)])
11405 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11406 (match_operand:SI 1 "s_register_operand" "0,0")))]
11407 "TARGET_32BIT && !TARGET_COND_ARITH"
11409 "&& reload_completed"
11410 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11411 (set (match_dup 0) (neg:SI (match_dup 2))))]
11413 [(set_attr "conds" "use")
11414 (set_attr "length" "4")
11415 (set_attr "arch" "t2,32")
11416 (set_attr "enabled_for_short_it" "yes,no")
11417 (set_attr "type" "logic_shift_imm")]
11420 (define_insn "*ifcompare_move_neg"
11421 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11423 (match_operator 5 "arm_comparison_operator"
11424 [(match_operand:SI 3 "s_register_operand" "r,r")
11425 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11426 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11427 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11428 (clobber (reg:CC CC_REGNUM))]
11431 [(set_attr "conds" "clob")
11432 (set_attr "length" "8,12")
11433 (set_attr "type" "multiple")]
11436 (define_insn_and_split "*if_move_neg"
11437 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11439 (match_operator 4 "arm_comparison_operator"
11440 [(match_operand 3 "cc_register" "") (const_int 0)])
11441 (match_operand:SI 1 "s_register_operand" "0,0")
11442 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11445 "&& reload_completed"
11446 [(cond_exec (match_dup 5)
11447 (set (match_dup 0) (neg:SI (match_dup 2))))]
11449 machine_mode mode = GET_MODE (operands[3]);
11450 rtx_code rc = GET_CODE (operands[4]);
11452 if (mode == CCFPmode || mode == CCFPEmode)
11453 rc = reverse_condition_maybe_unordered (rc);
11455 rc = reverse_condition (rc);
11457 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11459 [(set_attr "conds" "use")
11460 (set_attr "length" "4")
11461 (set_attr "arch" "t2,32")
11462 (set_attr "enabled_for_short_it" "yes,no")
11463 (set_attr "type" "logic_shift_imm")]
11466 (define_insn "*arith_adjacentmem"
11467 [(set (match_operand:SI 0 "s_register_operand" "=r")
11468 (match_operator:SI 1 "shiftable_operator"
11469 [(match_operand:SI 2 "memory_operand" "m")
11470 (match_operand:SI 3 "memory_operand" "m")]))
11471 (clobber (match_scratch:SI 4 "=r"))]
11472 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11478 HOST_WIDE_INT val1 = 0, val2 = 0;
11480 if (REGNO (operands[0]) > REGNO (operands[4]))
11482 ldm[1] = operands[4];
11483 ldm[2] = operands[0];
11487 ldm[1] = operands[0];
11488 ldm[2] = operands[4];
11491 base_reg = XEXP (operands[2], 0);
11493 if (!REG_P (base_reg))
11495 val1 = INTVAL (XEXP (base_reg, 1));
11496 base_reg = XEXP (base_reg, 0);
11499 if (!REG_P (XEXP (operands[3], 0)))
11500 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11502 arith[0] = operands[0];
11503 arith[3] = operands[1];
11517 if (val1 !=0 && val2 != 0)
11521 if (val1 == 4 || val2 == 4)
11522 /* Other val must be 8, since we know they are adjacent and neither
11524 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11525 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11527 ldm[0] = ops[0] = operands[4];
11529 ops[2] = GEN_INT (val1);
11530 output_add_immediate (ops);
11532 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11534 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11538 /* Offset is out of range for a single add, so use two ldr. */
11541 ops[2] = GEN_INT (val1);
11542 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11544 ops[2] = GEN_INT (val2);
11545 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11548 else if (val1 != 0)
11551 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11553 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11558 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11560 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11562 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11565 [(set_attr "length" "12")
11566 (set_attr "predicable" "yes")
11567 (set_attr "type" "load_4")]
11570 ; This pattern is never tried by combine, so do it as a peephole
11573 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11574 (match_operand:SI 1 "arm_general_register_operand" ""))
11575 (set (reg:CC CC_REGNUM)
11576 (compare:CC (match_dup 1) (const_int 0)))]
11578 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11579 (set (match_dup 0) (match_dup 1))])]
11584 [(set (match_operand:SI 0 "s_register_operand" "")
11585 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11587 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11588 [(match_operand:SI 3 "s_register_operand" "")
11589 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11590 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11592 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11593 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11598 ;; This split can be used because CC_Z mode implies that the following
11599 ;; branch will be an equality, or an unsigned inequality, so the sign
11600 ;; extension is not needed.
11603 [(set (reg:CC_Z CC_REGNUM)
11605 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11607 (match_operand 1 "const_int_operand" "")))
11608 (clobber (match_scratch:SI 2 ""))]
11610 && ((UINTVAL (operands[1]))
11611 == ((UINTVAL (operands[1])) >> 24) << 24)"
11612 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11613 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11615 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11618 ;; ??? Check the patterns above for Thumb-2 usefulness
11620 (define_expand "prologue"
11621 [(clobber (const_int 0))]
11624 arm_expand_prologue ();
11626 thumb1_expand_prologue ();
11631 (define_expand "epilogue"
11632 [(clobber (const_int 0))]
11635 if (crtl->calls_eh_return)
11636 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11639 thumb1_expand_epilogue ();
11640 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11641 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11643 else if (HAVE_return)
11645 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11646 no need for explicit testing again. */
11647 emit_jump_insn (gen_return ());
11649 else if (TARGET_32BIT)
11651 arm_expand_epilogue (true);
11657 ;; Note - although unspec_volatile's USE all hard registers,
11658 ;; USEs are ignored after relaod has completed. Thus we need
11659 ;; to add an unspec of the link register to ensure that flow
11660 ;; does not think that it is unused by the sibcall branch that
11661 ;; will replace the standard function epilogue.
11662 (define_expand "sibcall_epilogue"
11663 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11664 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11667 arm_expand_epilogue (false);
11672 (define_expand "eh_epilogue"
11673 [(use (match_operand:SI 0 "register_operand"))
11674 (use (match_operand:SI 1 "register_operand"))
11675 (use (match_operand:SI 2 "register_operand"))]
11679 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11680 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11682 rtx ra = gen_rtx_REG (Pmode, 2);
11684 emit_move_insn (ra, operands[2]);
11687 /* This is a hack -- we may have crystalized the function type too
11689 cfun->machine->func_type = 0;
11693 ;; This split is only used during output to reduce the number of patterns
11694 ;; that need assembler instructions adding to them. We allowed the setting
11695 ;; of the conditions to be implicit during rtl generation so that
11696 ;; the conditional compare patterns would work. However this conflicts to
11697 ;; some extent with the conditional data operations, so we have to split them
11700 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11701 ;; conditional execution sufficient?
11704 [(set (match_operand:SI 0 "s_register_operand" "")
11705 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11706 [(match_operand 2 "" "") (match_operand 3 "" "")])
11708 (match_operand 4 "" "")))
11709 (clobber (reg:CC CC_REGNUM))]
11710 "TARGET_ARM && reload_completed"
11711 [(set (match_dup 5) (match_dup 6))
11712 (cond_exec (match_dup 7)
11713 (set (match_dup 0) (match_dup 4)))]
11716 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11717 operands[2], operands[3]);
11718 enum rtx_code rc = GET_CODE (operands[1]);
11720 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11721 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11722 if (mode == CCFPmode || mode == CCFPEmode)
11723 rc = reverse_condition_maybe_unordered (rc);
11725 rc = reverse_condition (rc);
11727 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11732 [(set (match_operand:SI 0 "s_register_operand" "")
11733 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11734 [(match_operand 2 "" "") (match_operand 3 "" "")])
11735 (match_operand 4 "" "")
11737 (clobber (reg:CC CC_REGNUM))]
11738 "TARGET_ARM && reload_completed"
11739 [(set (match_dup 5) (match_dup 6))
11740 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11741 (set (match_dup 0) (match_dup 4)))]
11744 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11745 operands[2], operands[3]);
11747 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11748 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11753 [(set (match_operand:SI 0 "s_register_operand" "")
11754 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11755 [(match_operand 2 "" "") (match_operand 3 "" "")])
11756 (match_operand 4 "" "")
11757 (match_operand 5 "" "")))
11758 (clobber (reg:CC CC_REGNUM))]
11759 "TARGET_ARM && reload_completed"
11760 [(set (match_dup 6) (match_dup 7))
11761 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11762 (set (match_dup 0) (match_dup 4)))
11763 (cond_exec (match_dup 8)
11764 (set (match_dup 0) (match_dup 5)))]
11767 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11768 operands[2], operands[3]);
11769 enum rtx_code rc = GET_CODE (operands[1]);
11771 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11772 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11773 if (mode == CCFPmode || mode == CCFPEmode)
11774 rc = reverse_condition_maybe_unordered (rc);
11776 rc = reverse_condition (rc);
11778 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11783 [(set (match_operand:SI 0 "s_register_operand" "")
11784 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11785 [(match_operand:SI 2 "s_register_operand" "")
11786 (match_operand:SI 3 "arm_add_operand" "")])
11787 (match_operand:SI 4 "arm_rhs_operand" "")
11789 (match_operand:SI 5 "s_register_operand" ""))))
11790 (clobber (reg:CC CC_REGNUM))]
11791 "TARGET_ARM && reload_completed"
11792 [(set (match_dup 6) (match_dup 7))
11793 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11794 (set (match_dup 0) (match_dup 4)))
11795 (cond_exec (match_dup 8)
11796 (set (match_dup 0) (not:SI (match_dup 5))))]
11799 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11800 operands[2], operands[3]);
11801 enum rtx_code rc = GET_CODE (operands[1]);
11803 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11804 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11805 if (mode == CCFPmode || mode == CCFPEmode)
11806 rc = reverse_condition_maybe_unordered (rc);
11808 rc = reverse_condition (rc);
11810 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11814 (define_insn "*cond_move_not"
11815 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11816 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11817 [(match_operand 3 "cc_register" "") (const_int 0)])
11818 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11820 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11824 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11825 [(set_attr "conds" "use")
11826 (set_attr "type" "mvn_reg,multiple")
11827 (set_attr "length" "4,8")]
11830 ;; The next two patterns occur when an AND operation is followed by a
11831 ;; scc insn sequence
11833 (define_insn "*sign_extract_onebit"
11834 [(set (match_operand:SI 0 "s_register_operand" "=r")
11835 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11837 (match_operand:SI 2 "const_int_operand" "n")))
11838 (clobber (reg:CC CC_REGNUM))]
11841 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11842 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11843 return \"mvnne\\t%0, #0\";
11845 [(set_attr "conds" "clob")
11846 (set_attr "length" "8")
11847 (set_attr "type" "multiple")]
11850 (define_insn "*not_signextract_onebit"
11851 [(set (match_operand:SI 0 "s_register_operand" "=r")
11853 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11855 (match_operand:SI 2 "const_int_operand" "n"))))
11856 (clobber (reg:CC CC_REGNUM))]
11859 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11860 output_asm_insn (\"tst\\t%1, %2\", operands);
11861 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11862 return \"movne\\t%0, #0\";
11864 [(set_attr "conds" "clob")
11865 (set_attr "length" "12")
11866 (set_attr "type" "multiple")]
11868 ;; ??? The above patterns need auditing for Thumb-2
11870 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11871 ;; expressions. For simplicity, the first register is also in the unspec
11873 ;; To avoid the usage of GNU extension, the length attribute is computed
11874 ;; in a C function arm_attr_length_push_multi.
11875 (define_insn "*push_multi"
11876 [(match_parallel 2 "multi_register_push"
11877 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11878 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11879 UNSPEC_PUSH_MULT))])]
11883 int num_saves = XVECLEN (operands[2], 0);
11885 /* For the StrongARM at least it is faster to
11886 use STR to store only a single register.
11887 In Thumb mode always use push, and the assembler will pick
11888 something appropriate. */
11889 if (num_saves == 1 && TARGET_ARM)
11890 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11897 strcpy (pattern, \"push%?\\t{%1\");
11899 strcpy (pattern, \"push\\t{%1\");
11901 for (i = 1; i < num_saves; i++)
11903 strcat (pattern, \", %|\");
11905 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11908 strcat (pattern, \"}\");
11909 output_asm_insn (pattern, operands);
11914 [(set_attr "type" "store_16")
11915 (set (attr "length")
11916 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11919 (define_insn "stack_tie"
11920 [(set (mem:BLK (scratch))
11921 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11922 (match_operand:SI 1 "s_register_operand" "rk")]
11926 [(set_attr "length" "0")
11927 (set_attr "type" "block")]
11930 ;; Pop (as used in epilogue RTL)
11932 (define_insn "*load_multiple_with_writeback"
11933 [(match_parallel 0 "load_multiple_operation"
11934 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11935 (plus:SI (match_dup 1)
11936 (match_operand:SI 2 "const_int_I_operand" "I")))
11937 (set (match_operand:SI 3 "s_register_operand" "=rk")
11938 (mem:SI (match_dup 1)))
11940 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11943 arm_output_multireg_pop (operands, /*return_pc=*/false,
11944 /*cond=*/const_true_rtx,
11950 [(set_attr "type" "load_16")
11951 (set_attr "predicable" "yes")
11952 (set (attr "length")
11953 (symbol_ref "arm_attr_length_pop_multi (operands,
11954 /*return_pc=*/false,
11955 /*write_back_p=*/true)"))]
11958 ;; Pop with return (as used in epilogue RTL)
11960 ;; This instruction is generated when the registers are popped at the end of
11961 ;; epilogue. Here, instead of popping the value into LR and then generating
11962 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
11964 (define_insn "*pop_multiple_with_writeback_and_return"
11965 [(match_parallel 0 "pop_multiple_return"
11967 (set (match_operand:SI 1 "s_register_operand" "+rk")
11968 (plus:SI (match_dup 1)
11969 (match_operand:SI 2 "const_int_I_operand" "I")))
11970 (set (match_operand:SI 3 "s_register_operand" "=rk")
11971 (mem:SI (match_dup 1)))
11973 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11976 arm_output_multireg_pop (operands, /*return_pc=*/true,
11977 /*cond=*/const_true_rtx,
11983 [(set_attr "type" "load_16")
11984 (set_attr "predicable" "yes")
11985 (set (attr "length")
11986 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11987 /*write_back_p=*/true)"))]
11990 (define_insn "*pop_multiple_with_return"
11991 [(match_parallel 0 "pop_multiple_return"
11993 (set (match_operand:SI 2 "s_register_operand" "=rk")
11994 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11996 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11999 arm_output_multireg_pop (operands, /*return_pc=*/true,
12000 /*cond=*/const_true_rtx,
12006 [(set_attr "type" "load_16")
12007 (set_attr "predicable" "yes")
12008 (set (attr "length")
12009 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
12010 /*write_back_p=*/false)"))]
12013 ;; Load into PC and return
12014 (define_insn "*ldr_with_return"
12016 (set (reg:SI PC_REGNUM)
12017 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12018 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12019 "ldr%?\t%|pc, [%0], #4"
12020 [(set_attr "type" "load_4")
12021 (set_attr "predicable" "yes")]
12023 ;; Pop for floating point registers (as used in epilogue RTL)
12024 (define_insn "*vfp_pop_multiple_with_writeback"
12025 [(match_parallel 0 "pop_multiple_fp"
12026 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12027 (plus:SI (match_dup 1)
12028 (match_operand:SI 2 "const_int_I_operand" "I")))
12029 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
12030 (mem:DF (match_dup 1)))])]
12031 "TARGET_32BIT && TARGET_VFP_BASE"
12034 int num_regs = XVECLEN (operands[0], 0);
12037 strcpy (pattern, \"vldm\\t\");
12038 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12039 strcat (pattern, \"!, {\");
12040 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12041 strcat (pattern, \"%P0\");
12042 if ((num_regs - 1) > 1)
12044 strcat (pattern, \"-%P1\");
12045 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12048 strcat (pattern, \"}\");
12049 output_asm_insn (pattern, op_list);
12053 [(set_attr "type" "load_16")
12054 (set_attr "conds" "unconditional")
12055 (set_attr "predicable" "no")]
12058 ;; Special patterns for dealing with the constant pool
12060 (define_insn "align_4"
12061 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12064 assemble_align (32);
12067 [(set_attr "type" "no_insn")]
12070 (define_insn "align_8"
12071 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12074 assemble_align (64);
12077 [(set_attr "type" "no_insn")]
12080 (define_insn "consttable_end"
12081 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12084 making_const_table = FALSE;
12087 [(set_attr "type" "no_insn")]
12090 (define_insn "consttable_1"
12091 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12094 making_const_table = TRUE;
12095 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12096 assemble_zeros (3);
12099 [(set_attr "length" "4")
12100 (set_attr "type" "no_insn")]
12103 (define_insn "consttable_2"
12104 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12108 rtx x = operands[0];
12109 making_const_table = TRUE;
12110 switch (GET_MODE_CLASS (GET_MODE (x)))
12113 arm_emit_fp16_const (x);
12116 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12117 assemble_zeros (2);
12122 [(set_attr "length" "4")
12123 (set_attr "type" "no_insn")]
12126 (define_insn "consttable_4"
12127 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12131 rtx x = operands[0];
12132 making_const_table = TRUE;
12133 scalar_float_mode float_mode;
12134 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
12135 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
12138 /* XXX: Sometimes gcc does something really dumb and ends up with
12139 a HIGH in a constant pool entry, usually because it's trying to
12140 load into a VFP register. We know this will always be used in
12141 combination with a LO_SUM which ignores the high bits, so just
12142 strip off the HIGH. */
12143 if (GET_CODE (x) == HIGH)
12145 assemble_integer (x, 4, BITS_PER_WORD, 1);
12146 mark_symbol_refs_as_used (x);
12150 [(set_attr "length" "4")
12151 (set_attr "type" "no_insn")]
12154 (define_insn "consttable_8"
12155 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12159 making_const_table = TRUE;
12160 scalar_float_mode float_mode;
12161 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12162 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12163 float_mode, BITS_PER_WORD);
12165 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12168 [(set_attr "length" "8")
12169 (set_attr "type" "no_insn")]
12172 (define_insn "consttable_16"
12173 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12177 making_const_table = TRUE;
12178 scalar_float_mode float_mode;
12179 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12180 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12181 float_mode, BITS_PER_WORD);
12183 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12186 [(set_attr "length" "16")
12187 (set_attr "type" "no_insn")]
12190 ;; V5 Instructions,
12192 (define_insn "clzsi2"
12193 [(set (match_operand:SI 0 "s_register_operand" "=r")
12194 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12195 "TARGET_32BIT && arm_arch5t"
12197 [(set_attr "predicable" "yes")
12198 (set_attr "type" "clz")])
12200 (define_insn "arm_rbit"
12201 [(set (match_operand:SI 0 "s_register_operand" "=r")
12202 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12203 "TARGET_32BIT && arm_arch_thumb2"
12205 [(set_attr "predicable" "yes")
12206 (set_attr "type" "clz")])
12208 ;; Keep this as a CTZ expression until after reload and then split
12209 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
12210 ;; to fold with any other expression.
12212 (define_insn_and_split "ctzsi2"
12213 [(set (match_operand:SI 0 "s_register_operand" "=r")
12214 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12215 "TARGET_32BIT && arm_arch_thumb2"
12217 "&& reload_completed"
12220 emit_insn (gen_arm_rbit (operands[0], operands[1]));
12221 emit_insn (gen_clzsi2 (operands[0], operands[0]));
12225 ;; V5E instructions.
12227 (define_insn "prefetch"
12228 [(prefetch (match_operand:SI 0 "address_operand" "p")
12229 (match_operand:SI 1 "" "")
12230 (match_operand:SI 2 "" ""))]
12231 "TARGET_32BIT && arm_arch5te"
12233 [(set_attr "type" "load_4")]
12236 ;; General predication pattern
12239 [(match_operator 0 "arm_comparison_operator"
12240 [(match_operand 1 "cc_register" "")
12243 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12245 [(set_attr "predicated" "yes")]
12248 (define_insn "force_register_use"
12249 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12252 [(set_attr "length" "0")
12253 (set_attr "type" "no_insn")]
12257 ;; Patterns for exception handling
12259 (define_expand "eh_return"
12260 [(use (match_operand 0 "general_operand"))]
12265 emit_insn (gen_arm_eh_return (operands[0]));
12267 emit_insn (gen_thumb_eh_return (operands[0]));
12272 ;; We can't expand this before we know where the link register is stored.
12273 (define_insn_and_split "arm_eh_return"
12274 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12276 (clobber (match_scratch:SI 1 "=&r"))]
12279 "&& reload_completed"
12283 arm_set_return_address (operands[0], operands[1]);
12291 (define_insn "load_tp_hard"
12292 [(set (match_operand:SI 0 "register_operand" "=r")
12293 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12295 "* return arm_output_load_tpidr (operands[0], true);"
12296 [(set_attr "predicable" "yes")
12297 (set_attr "type" "mrs")]
12300 ;; Used by the TLS register based stack protector
12301 (define_insn "reload_tp_hard"
12302 [(set (match_operand:SI 0 "register_operand" "=r")
12303 (unspec_volatile:SI [(const_int 0)] VUNSPEC_MRC))]
12305 "* return arm_output_load_tpidr (operands[0], false);"
12306 [(set_attr "type" "mrs")]
12309 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12310 (define_insn "load_tp_soft_fdpic"
12311 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12312 (clobber (reg:SI FDPIC_REGNUM))
12313 (clobber (reg:SI LR_REGNUM))
12314 (clobber (reg:SI IP_REGNUM))
12315 (clobber (reg:CC CC_REGNUM))]
12316 "TARGET_SOFT_TP && TARGET_FDPIC"
12317 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12318 [(set_attr "conds" "clob")
12319 (set_attr "type" "branch")]
12322 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12323 (define_insn "load_tp_soft"
12324 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12325 (clobber (reg:SI LR_REGNUM))
12326 (clobber (reg:SI IP_REGNUM))
12327 (clobber (reg:CC CC_REGNUM))]
12328 "TARGET_SOFT_TP && !TARGET_FDPIC"
12329 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12330 [(set_attr "conds" "clob")
12331 (set_attr "type" "branch")]
12334 ;; tls descriptor call
12335 (define_insn "tlscall"
12336 [(set (reg:SI R0_REGNUM)
12337 (unspec:SI [(reg:SI R0_REGNUM)
12338 (match_operand:SI 0 "" "X")
12339 (match_operand 1 "" "")] UNSPEC_TLS))
12340 (clobber (reg:SI R1_REGNUM))
12341 (clobber (reg:SI LR_REGNUM))
12342 (clobber (reg:SI CC_REGNUM))]
12345 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12346 INTVAL (operands[1]));
12347 return "bl\\t%c0(tlscall)";
12349 [(set_attr "conds" "clob")
12350 (set_attr "length" "4")
12351 (set_attr "type" "branch")]
12354 ;; For thread pointer builtin
12355 (define_expand "get_thread_pointersi"
12356 [(match_operand:SI 0 "s_register_operand")]
12360 arm_load_tp (operands[0]);
12366 ;; We only care about the lower 16 bits of the constant
12367 ;; being inserted into the upper 16 bits of the register.
12368 (define_insn "*arm_movtas_ze"
12369 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12372 (match_operand:SI 1 "const_int_operand" ""))]
12377 [(set_attr "arch" "32,v8mb")
12378 (set_attr "predicable" "yes")
12379 (set_attr "length" "4")
12380 (set_attr "type" "alu_sreg")]
12383 (define_insn "*arm_rev"
12384 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12385 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12391 [(set_attr "arch" "t1,t2,32")
12392 (set_attr "length" "2,2,4")
12393 (set_attr "predicable" "no,yes,yes")
12394 (set_attr "type" "rev")]
12397 (define_expand "arm_legacy_rev"
12398 [(set (match_operand:SI 2 "s_register_operand")
12399 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12403 (lshiftrt:SI (match_dup 2)
12405 (set (match_operand:SI 3 "s_register_operand")
12406 (rotatert:SI (match_dup 1)
12409 (and:SI (match_dup 2)
12410 (const_int -65281)))
12411 (set (match_operand:SI 0 "s_register_operand")
12412 (xor:SI (match_dup 3)
12418 ;; Reuse temporaries to keep register pressure down.
12419 (define_expand "thumb_legacy_rev"
12420 [(set (match_operand:SI 2 "s_register_operand")
12421 (ashift:SI (match_operand:SI 1 "s_register_operand")
12423 (set (match_operand:SI 3 "s_register_operand")
12424 (lshiftrt:SI (match_dup 1)
12427 (ior:SI (match_dup 3)
12429 (set (match_operand:SI 4 "s_register_operand")
12431 (set (match_operand:SI 5 "s_register_operand")
12432 (rotatert:SI (match_dup 1)
12435 (ashift:SI (match_dup 5)
12438 (lshiftrt:SI (match_dup 5)
12441 (ior:SI (match_dup 5)
12444 (rotatert:SI (match_dup 5)
12446 (set (match_operand:SI 0 "s_register_operand")
12447 (ior:SI (match_dup 5)
12453 ;; ARM-specific expansion of signed mod by power of 2
12454 ;; using conditional negate.
12455 ;; For r0 % n where n is a power of 2 produce:
12457 ;; and r0, r0, #(n - 1)
12458 ;; and r1, r1, #(n - 1)
12459 ;; rsbpl r0, r1, #0
12461 (define_expand "modsi3"
12462 [(match_operand:SI 0 "register_operand")
12463 (match_operand:SI 1 "register_operand")
12464 (match_operand:SI 2 "const_int_operand")]
12467 HOST_WIDE_INT val = INTVAL (operands[2]);
12470 || exact_log2 (val) <= 0)
12473 rtx mask = GEN_INT (val - 1);
12475 /* In the special case of x0 % 2 we can do the even shorter:
12478 rsblt r0, r0, #0. */
12482 rtx cc_reg = arm_gen_compare_reg (LT,
12483 operands[1], const0_rtx, NULL_RTX);
12484 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12485 rtx masked = gen_reg_rtx (SImode);
12487 emit_insn (gen_andsi3 (masked, operands[1], mask));
12488 emit_move_insn (operands[0],
12489 gen_rtx_IF_THEN_ELSE (SImode, cond,
12490 gen_rtx_NEG (SImode,
12496 rtx neg_op = gen_reg_rtx (SImode);
12497 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12500 /* Extract the condition register and mode. */
12501 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12502 rtx cc_reg = SET_DEST (cmp);
12503 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12505 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12507 rtx masked_neg = gen_reg_rtx (SImode);
12508 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12510 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12511 during expand does not always work. Do an IF_THEN_ELSE instead. */
12512 emit_move_insn (operands[0],
12513 gen_rtx_IF_THEN_ELSE (SImode, cond,
12514 gen_rtx_NEG (SImode, masked_neg),
12522 (define_expand "bswapsi2"
12523 [(set (match_operand:SI 0 "s_register_operand")
12524 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12525 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12529 rtx op2 = gen_reg_rtx (SImode);
12530 rtx op3 = gen_reg_rtx (SImode);
12534 rtx op4 = gen_reg_rtx (SImode);
12535 rtx op5 = gen_reg_rtx (SImode);
12537 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12538 op2, op3, op4, op5));
12542 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12551 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12552 ;; and unsigned variants, respectively. For rev16, expose
12553 ;; byte-swapping in the lower 16 bits only.
12554 (define_insn "*arm_revsh"
12555 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12556 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12562 [(set_attr "arch" "t1,t2,32")
12563 (set_attr "length" "2,2,4")
12564 (set_attr "predicable" "no,yes,yes")
12565 (set_attr "type" "rev")]
12568 (define_insn "*arm_rev16"
12569 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12570 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12576 [(set_attr "arch" "t1,t2,32")
12577 (set_attr "length" "2,2,4")
12578 (set_attr "predicable" "no,yes,yes")
12579 (set_attr "type" "rev")]
12582 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
12583 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
12584 ;; each valid permutation.
12586 (define_insn "arm_rev16si2_alt1"
12587 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12588 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12590 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12591 (and:SI (lshiftrt:SI (match_dup 1)
12593 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12595 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12596 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12601 [(set_attr "arch" "t1,t2,32")
12602 (set_attr "length" "2,2,4")
12603 (set_attr "predicable" "no,yes,yes")
12604 (set_attr "type" "rev")]
12607 (define_insn "*arm_rev16si2_alt2"
12608 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12609 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12611 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12612 (and:SI (ashift:SI (match_dup 1)
12614 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12616 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12617 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12622 [(set_attr "arch" "t1,t2,32")
12623 (set_attr "length" "2,2,4")
12624 (set_attr "predicable" "no,yes,yes")
12625 (set_attr "type" "rev")]
12628 ;; Similar pattern to match (rotate (bswap) 16)
12629 (define_insn "arm_rev16si2"
12630 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12631 (rotate:SI (bswap:SI (match_operand:SI 1 "register_operand" "l,l,r"))
12638 [(set_attr "arch" "t1,t2,32")
12639 (set_attr "length" "2,2,4")
12640 (set_attr "type" "rev")]
12643 (define_expand "bswaphi2"
12644 [(set (match_operand:HI 0 "s_register_operand")
12645 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12650 ;; Implement zero_extract using uxtb/uxth instruction with
12651 ;; the ror #N qualifier when applicable.
12653 (define_insn "*arm_zeroextractsi2_8_8"
12654 [(set (match_operand:SI 0 "s_register_operand" "=r")
12655 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12656 (const_int 8) (const_int 8)))]
12657 "TARGET_ARM && arm_arch6"
12658 "uxtb%?\\t%0, %1, ror #8"
12659 [(set_attr "predicable" "yes")
12660 (set_attr "type" "extend")]
12663 (define_insn "*arm_zeroextractsi2_8_16"
12664 [(set (match_operand:SI 0 "s_register_operand" "=r")
12665 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12666 (const_int 8) (const_int 16)))]
12667 "TARGET_ARM && arm_arch6"
12668 "uxtb%?\\t%0, %1, ror #16"
12669 [(set_attr "predicable" "yes")
12670 (set_attr "type" "extend")]
12673 (define_insn "*arm_zeroextractsi2_16_8"
12674 [(set (match_operand:SI 0 "s_register_operand" "=r")
12675 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12676 (const_int 16) (const_int 8)))]
12677 "TARGET_ARM && arm_arch6"
12678 "uxth%?\\t%0, %1, ror #8"
12679 [(set_attr "predicable" "yes")
12680 (set_attr "type" "extend")]
12683 ;; Implement sign_extract using sxtb/sxth instruction with
12684 ;; the ror #N qualifier when applicable.
12686 (define_insn "*arm_signextractsi2_8_8"
12687 [(set (match_operand:SI 0 "s_register_operand" "=r")
12688 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12689 (const_int 8) (const_int 8)))]
12690 "TARGET_ARM && arm_arch6"
12691 "sxtb%?\\t%0, %1, ror #8"
12692 [(set_attr "predicable" "yes")
12693 (set_attr "type" "extend")]
12696 (define_insn "*arm_signextractsi2_8_16"
12697 [(set (match_operand:SI 0 "s_register_operand" "=r")
12698 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12699 (const_int 8) (const_int 16)))]
12700 "TARGET_ARM && arm_arch6"
12701 "sxtb%?\\t%0, %1, ror #16"
12702 [(set_attr "predicable" "yes")
12703 (set_attr "type" "extend")]
12706 (define_insn "*arm_signextractsi2_16_8"
12707 [(set (match_operand:SI 0 "s_register_operand" "=r")
12708 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12709 (const_int 16) (const_int 8)))]
12710 "TARGET_ARM && arm_arch6"
12711 "sxth%?\\t%0, %1, ror #8"
12712 [(set_attr "predicable" "yes")
12713 (set_attr "type" "extend")]
12716 ;; Patterns for LDRD/STRD in Thumb2 mode
12718 (define_insn "*thumb2_ldrd"
12719 [(set (match_operand:SI 0 "s_register_operand" "=r")
12720 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12721 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12722 (set (match_operand:SI 3 "s_register_operand" "=r")
12723 (mem:SI (plus:SI (match_dup 1)
12724 (match_operand:SI 4 "const_int_operand" ""))))]
12725 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12726 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12727 && (operands_ok_ldrd_strd (operands[0], operands[3],
12728 operands[1], INTVAL (operands[2]),
12730 "ldrd%?\t%0, %3, [%1, %2]"
12731 [(set_attr "type" "load_8")
12732 (set_attr "predicable" "yes")])
12734 (define_insn "*thumb2_ldrd_base"
12735 [(set (match_operand:SI 0 "s_register_operand" "=r")
12736 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12737 (set (match_operand:SI 2 "s_register_operand" "=r")
12738 (mem:SI (plus:SI (match_dup 1)
12740 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12741 && (operands_ok_ldrd_strd (operands[0], operands[2],
12742 operands[1], 0, false, true))"
12743 "ldrd%?\t%0, %2, [%1]"
12744 [(set_attr "type" "load_8")
12745 (set_attr "predicable" "yes")])
12747 (define_insn "*thumb2_ldrd_base_neg"
12748 [(set (match_operand:SI 0 "s_register_operand" "=r")
12749 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12751 (set (match_operand:SI 2 "s_register_operand" "=r")
12752 (mem:SI (match_dup 1)))]
12753 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12754 && (operands_ok_ldrd_strd (operands[0], operands[2],
12755 operands[1], -4, false, true))"
12756 "ldrd%?\t%0, %2, [%1, #-4]"
12757 [(set_attr "type" "load_8")
12758 (set_attr "predicable" "yes")])
12760 (define_insn "*thumb2_strd"
12761 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12762 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12763 (match_operand:SI 2 "s_register_operand" "r"))
12764 (set (mem:SI (plus:SI (match_dup 0)
12765 (match_operand:SI 3 "const_int_operand" "")))
12766 (match_operand:SI 4 "s_register_operand" "r"))]
12767 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12768 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12769 && (operands_ok_ldrd_strd (operands[2], operands[4],
12770 operands[0], INTVAL (operands[1]),
12772 "strd%?\t%2, %4, [%0, %1]"
12773 [(set_attr "type" "store_8")
12774 (set_attr "predicable" "yes")])
12776 (define_insn "*thumb2_strd_base"
12777 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12778 (match_operand:SI 1 "s_register_operand" "r"))
12779 (set (mem:SI (plus:SI (match_dup 0)
12781 (match_operand:SI 2 "s_register_operand" "r"))]
12782 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12783 && (operands_ok_ldrd_strd (operands[1], operands[2],
12784 operands[0], 0, false, false))"
12785 "strd%?\t%1, %2, [%0]"
12786 [(set_attr "type" "store_8")
12787 (set_attr "predicable" "yes")])
12789 (define_insn "*thumb2_strd_base_neg"
12790 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12792 (match_operand:SI 1 "s_register_operand" "r"))
12793 (set (mem:SI (match_dup 0))
12794 (match_operand:SI 2 "s_register_operand" "r"))]
12795 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12796 && (operands_ok_ldrd_strd (operands[1], operands[2],
12797 operands[0], -4, false, false))"
12798 "strd%?\t%1, %2, [%0, #-4]"
12799 [(set_attr "type" "store_8")
12800 (set_attr "predicable" "yes")])
12802 ;; ARMv8 CRC32 instructions.
12803 (define_insn "arm_<crc_variant>"
12804 [(set (match_operand:SI 0 "s_register_operand" "=r")
12805 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12806 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12809 "<crc_variant>\\t%0, %1, %2"
12810 [(set_attr "type" "crc")
12811 (set_attr "conds" "unconditional")]
12814 ;; Load the load/store double peephole optimizations.
12815 (include "ldrdstrd.md")
12817 ;; Load the load/store multiple patterns
12818 (include "ldmstm.md")
12820 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12821 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12822 ;; The operands are validated through the load_multiple_operation
12823 ;; match_parallel predicate rather than through constraints so enable it only
12825 (define_insn "*load_multiple"
12826 [(match_parallel 0 "load_multiple_operation"
12827 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12828 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12830 "TARGET_32BIT && reload_completed"
12833 arm_output_multireg_pop (operands, /*return_pc=*/false,
12834 /*cond=*/const_true_rtx,
12840 [(set_attr "predicable" "yes")]
12843 (define_expand "copysignsf3"
12844 [(match_operand:SF 0 "register_operand")
12845 (match_operand:SF 1 "register_operand")
12846 (match_operand:SF 2 "register_operand")]
12847 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12849 emit_move_insn (operands[0], operands[2]);
12850 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12851 GEN_INT (31), GEN_INT (0),
12852 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
12857 (define_expand "copysigndf3"
12858 [(match_operand:DF 0 "register_operand")
12859 (match_operand:DF 1 "register_operand")
12860 (match_operand:DF 2 "register_operand")]
12861 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12863 rtx op0_low = gen_lowpart (SImode, operands[0]);
12864 rtx op0_high = gen_highpart (SImode, operands[0]);
12865 rtx op1_low = gen_lowpart (SImode, operands[1]);
12866 rtx op1_high = gen_highpart (SImode, operands[1]);
12867 rtx op2_high = gen_highpart (SImode, operands[2]);
12869 rtx scratch1 = gen_reg_rtx (SImode);
12870 rtx scratch2 = gen_reg_rtx (SImode);
12871 emit_move_insn (scratch1, op2_high);
12872 emit_move_insn (scratch2, op1_high);
12874 emit_insn(gen_rtx_SET(scratch1,
12875 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
12876 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
12877 emit_move_insn (op0_low, op1_low);
12878 emit_move_insn (op0_high, scratch2);
12884 ;; movmisalign for DImode
12885 (define_expand "movmisaligndi"
12886 [(match_operand:DI 0 "general_operand")
12887 (match_operand:DI 1 "general_operand")]
12890 rtx lo_op0 = gen_lowpart (SImode, operands[0]);
12891 rtx lo_op1 = gen_lowpart (SImode, operands[1]);
12892 rtx hi_op0 = gen_highpart_mode (SImode, DImode, operands[0]);
12893 rtx hi_op1 = gen_highpart_mode (SImode, DImode, operands[1]);
12895 if (aligned_operand (lo_op0, SImode) && aligned_operand (lo_op1, SImode))
12897 emit_move_insn (lo_op0, lo_op1);
12898 emit_move_insn (hi_op0, hi_op1);
12902 emit_insn (gen_movmisalignsi (lo_op0, lo_op1));
12903 emit_insn (gen_movmisalignsi (hi_op0, hi_op1));
12908 ;; movmisalign patterns for HImode and SImode.
12909 (define_expand "movmisalign<mode>"
12910 [(match_operand:HSI 0 "general_operand")
12911 (match_operand:HSI 1 "general_operand")]
12914 /* This pattern is not permitted to fail during expansion: if both arguments
12915 are non-registers (e.g. memory := constant), force operand 1 into a
12917 rtx (* gen_unaligned_load)(rtx, rtx);
12918 rtx tmp_dest = operands[0];
12919 if (!s_register_operand (operands[0], <MODE>mode)
12920 && !s_register_operand (operands[1], <MODE>mode))
12921 operands[1] = force_reg (<MODE>mode, operands[1]);
12923 if (<MODE>mode == HImode)
12925 gen_unaligned_load = gen_unaligned_loadhiu;
12926 tmp_dest = gen_reg_rtx (SImode);
12929 gen_unaligned_load = gen_unaligned_loadsi;
12931 if (MEM_P (operands[1]))
12933 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12934 if (<MODE>mode == HImode)
12935 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12938 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
12943 (define_insn "arm_<cdp>"
12944 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12945 (match_operand:SI 1 "immediate_operand" "n")
12946 (match_operand:SI 2 "immediate_operand" "n")
12947 (match_operand:SI 3 "immediate_operand" "n")
12948 (match_operand:SI 4 "immediate_operand" "n")
12949 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12950 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12952 arm_const_bounds (operands[0], 0, 16);
12953 arm_const_bounds (operands[1], 0, 16);
12954 arm_const_bounds (operands[2], 0, (1 << 5));
12955 arm_const_bounds (operands[3], 0, (1 << 5));
12956 arm_const_bounds (operands[4], 0, (1 << 5));
12957 arm_const_bounds (operands[5], 0, 8);
12958 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12960 [(set_attr "length" "4")
12961 (set_attr "type" "coproc")])
12963 (define_insn "*ldc"
12964 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12965 (match_operand:SI 1 "immediate_operand" "n")
12966 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12967 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12969 arm_const_bounds (operands[0], 0, 16);
12970 arm_const_bounds (operands[1], 0, (1 << 5));
12971 return "<ldc>\\tp%c0, CR%c1, %2";
12973 [(set_attr "length" "4")
12974 (set_attr "type" "coproc")])
12976 (define_insn "*stc"
12977 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12978 (match_operand:SI 1 "immediate_operand" "n")
12979 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12980 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12982 arm_const_bounds (operands[0], 0, 16);
12983 arm_const_bounds (operands[1], 0, (1 << 5));
12984 return "<stc>\\tp%c0, CR%c1, %2";
12986 [(set_attr "length" "4")
12987 (set_attr "type" "coproc")])
12989 (define_expand "arm_<ldc>"
12990 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12991 (match_operand:SI 1 "immediate_operand")
12992 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12993 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12995 (define_expand "arm_<stc>"
12996 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12997 (match_operand:SI 1 "immediate_operand")
12998 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12999 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
13001 (define_insn "arm_<mcr>"
13002 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
13003 (match_operand:SI 1 "immediate_operand" "n")
13004 (match_operand:SI 2 "s_register_operand" "r")
13005 (match_operand:SI 3 "immediate_operand" "n")
13006 (match_operand:SI 4 "immediate_operand" "n")
13007 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
13008 (use (match_dup 2))]
13009 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
13011 arm_const_bounds (operands[0], 0, 16);
13012 arm_const_bounds (operands[1], 0, 8);
13013 arm_const_bounds (operands[3], 0, (1 << 5));
13014 arm_const_bounds (operands[4], 0, (1 << 5));
13015 arm_const_bounds (operands[5], 0, 8);
13016 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
13018 [(set_attr "length" "4")
13019 (set_attr "type" "coproc")])
13021 (define_insn "arm_<mrc>"
13022 [(set (match_operand:SI 0 "s_register_operand" "=r")
13023 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
13024 (match_operand:SI 2 "immediate_operand" "n")
13025 (match_operand:SI 3 "immediate_operand" "n")
13026 (match_operand:SI 4 "immediate_operand" "n")
13027 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
13028 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
13030 arm_const_bounds (operands[1], 0, 16);
13031 arm_const_bounds (operands[2], 0, 8);
13032 arm_const_bounds (operands[3], 0, (1 << 5));
13033 arm_const_bounds (operands[4], 0, (1 << 5));
13034 arm_const_bounds (operands[5], 0, 8);
13035 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
13037 [(set_attr "length" "4")
13038 (set_attr "type" "coproc")])
13040 (define_insn "arm_<mcrr>"
13041 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
13042 (match_operand:SI 1 "immediate_operand" "n")
13043 (match_operand:DI 2 "s_register_operand" "r")
13044 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
13045 (use (match_dup 2))]
13046 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
13048 arm_const_bounds (operands[0], 0, 16);
13049 arm_const_bounds (operands[1], 0, 8);
13050 arm_const_bounds (operands[3], 0, (1 << 5));
13051 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
13053 [(set_attr "length" "4")
13054 (set_attr "type" "coproc")])
13056 (define_insn "arm_<mrrc>"
13057 [(set (match_operand:DI 0 "s_register_operand" "=r")
13058 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
13059 (match_operand:SI 2 "immediate_operand" "n")
13060 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
13061 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
13063 arm_const_bounds (operands[1], 0, 16);
13064 arm_const_bounds (operands[2], 0, 8);
13065 arm_const_bounds (operands[3], 0, (1 << 5));
13066 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
13068 [(set_attr "length" "4")
13069 (set_attr "type" "coproc")])
13071 (define_expand "speculation_barrier"
13072 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
13075 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
13076 have a usable barrier (and probably don't need one in practice).
13077 But to be safe if such code is run on later architectures, call a
13078 helper function in libgcc that will do the thing for the active
13080 if (!(arm_arch7 || arm_arch8))
13082 arm_emit_speculation_barrier_function ();
13088 ;; Generate a hard speculation barrier when we have not enabled speculation
13090 (define_insn "*speculation_barrier_insn"
13091 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
13092 "arm_arch7 || arm_arch8"
13094 [(set_attr "type" "block")
13095 (set_attr "length" "8")]
13098 (define_insn "pac_nop"
13099 [(set (reg:SI IP_REGNUM)
13100 (unspec:SI [(reg:SI SP_REGNUM) (reg:SI LR_REGNUM)]
13103 "pac\t%|ip, %|lr, %|sp"
13104 [(set_attr "conds" "unconditional")])
13106 (define_insn "pacbti_nop"
13107 [(set (reg:SI IP_REGNUM)
13108 (unspec_volatile:SI [(reg:SI SP_REGNUM) (reg:SI LR_REGNUM)]
13109 VUNSPEC_PACBTI_NOP))]
13111 "pacbti\t%|ip, %|lr, %|sp"
13112 [(set_attr "conds" "unconditional")])
13114 (define_insn "aut_nop"
13115 [(unspec_volatile:SI [(reg:SI IP_REGNUM) (reg:SI SP_REGNUM) (reg:SI LR_REGNUM)]
13118 "aut\t%|ip, %|lr, %|sp"
13119 [(set_attr "conds" "unconditional")])
13121 (define_insn "bti_nop"
13122 [(unspec_volatile [(const_int 0)] VUNSPEC_BTI_NOP)]
13125 [(set_attr "conds" "unconditional")
13126 (set_attr "type" "nop")])
13128 ;; Vector bits common to IWMMXT, Neon and MVE
13129 (include "vec-common.md")
13130 ;; Load the Intel Wireless Multimedia Extension patterns
13131 (include "iwmmxt.md")
13132 ;; Load the VFP co-processor patterns
13134 ;; Thumb-1 patterns
13135 (include "thumb1.md")
13136 ;; Thumb-2 patterns
13137 (include "thumb2.md")
13139 (include "neon.md")
13141 (include "crypto.md")
13142 ;; Synchronization Primitives
13143 (include "sync.md")
13144 ;; Fixed-point patterns
13145 (include "arm-fixed.md")
13146 ;; M-profile Vector Extension