1 ;; ARM Thumb-1 Machine Description
2 ;; Copyright (C) 2007-2016 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify it
7 ;; under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful, but
12 ;; WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 ;; General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
21 ;;---------------------------------------------------------------------------
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instructions such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instructions. Currently, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
;; 64-bit add.  ADDS sets the carry flag and ADCS consumes it, so both
;; instructions are emitted from this single pattern and can never be
;; separated by the scheduler -- see the flag-handling note at the top
;; of this file.
46 (define_insn "*thumb1_adddi3"
47 [(set (match_operand:DI 0 "register_operand" "=l")
48 (plus:DI (match_operand:DI 1 "register_operand" "%0")
49 (match_operand:DI 2 "register_operand" "l")))
50 (clobber (reg:CC CC_REGNUM))
53 "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
54 [(set_attr "length" "4")
55 (set_attr "type" "multiple")]
;; 32-bit add with ten alternatives covering low/high registers, SP,
;; and several immediate ranges.  The output routine special-cases a
;; negative constant in alternatives 2 and 6, emitting SUBS/SUB with
;; the negated value (#%n2) instead.  After reload, an immediate that
;; is out of range for a single instruction (outside [-255, 255], or
;; above 1020 when adding to the stack pointer) is split into two
;; consecutive adds via match_dup 3.
58 (define_insn_and_split "*thumb1_addsi3"
59 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
60 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
61 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
64 static const char * const asms[] =
66 \"adds\\t%0, %0, %2\",
67 \"subs\\t%0, %0, #%n2\",
68 \"adds\\t%0, %1, %2\",
77 if ((which_alternative == 2 || which_alternative == 6)
78 && CONST_INT_P (operands[2])
79 && INTVAL (operands[2]) < 0)
80 return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
81 return asms[which_alternative];
83 "&& reload_completed && CONST_INT_P (operands[2])
84 && ((operands[1] != stack_pointer_rtx
85 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
86 || (operands[1] == stack_pointer_rtx
87 && INTVAL (operands[2]) > 1020))"
88 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
89 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
91 HOST_WIDE_INT offset = INTVAL (operands[2]);
92 if (operands[1] == stack_pointer_rtx)
98 else if (offset < -255)
101 operands[3] = GEN_INT (offset);
102 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
104 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
105 (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
106 alus_sreg,alus_sreg,multiple,multiple,multiple")]
109 ;; Reloading and elimination of the frame pointer can
110 ;; sometimes cause this optimization to be missed.
112 [(set (match_operand:SI 0 "arm_general_register_operand" "")
113 (match_operand:SI 1 "const_int_operand" ""))
115 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
117 && UINTVAL (operands[1]) < 1024
118 && (UINTVAL (operands[1]) & 3) == 0"
119 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
;; 64-bit subtract.  SUBS sets the carry (borrow) flag and SBCS
;; consumes it, so both instructions stay in one pattern -- see the
;; flag-handling note at the top of this file.
123 (define_insn "*thumb_subdi3"
124 [(set (match_operand:DI 0 "register_operand" "=l")
125 (minus:DI (match_operand:DI 1 "register_operand" "0")
126 (match_operand:DI 2 "register_operand" "l")))
127 (clobber (reg:CC CC_REGNUM))]
129 "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
130 [(set_attr "length" "4")
131 (set_attr "type" "multiple")]
;; 32-bit subtract; operand 2 may be a low register or an immediate
;; satisfying constraint Pd.  Single 16-bit instruction; sets the
;; condition flags (conds "set").
134 (define_insn "thumb1_subsi3_insn"
135 [(set (match_operand:SI 0 "register_operand" "=l")
136 (minus:SI (match_operand:SI 1 "register_operand" "l")
137 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
140 [(set_attr "length" "2")
141 (set_attr "conds" "set")
142 (set_attr "type" "alus_sreg")]
145 ;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
146 ;; 1 and 2 are the same, because reload will make operand 0 match
147 ;; operand 1 without realizing that this conflicts with operand 2. We fix
148 ;; this by adding another alternative to match this case, and then `reload'
149 ;; it ourselves. This alternative must come first.
;; Multiply for cores without arm_arch6.  Alternatives 0 and 1 first
;; copy operand 1 into the earlyclobber destination and then MULS by
;; operand 2 (length 4); alternative 2 ties operand 1 to the
;; destination so a bare MULS suffices (length 2).  See the comment
;; above on why the extra alternative is needed.
150 (define_insn "*thumb_mulsi3"
151 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
152 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
153 (match_operand:SI 2 "register_operand" "l,l,l")))]
154 "TARGET_THUMB1 && !arm_arch6"
156 movs\\t%0, %1\;muls\\t%0, %2
157 mov\\t%0, %1\;muls\\t%0, %2
159 [(set_attr "length" "4,4,2")
160 (set_attr "type" "muls")]
;; Multiply for arm_arch6.  In every alternative one input is tied to
;; the destination, so each is a single 16-bit MULS (length 2).
163 (define_insn "*thumb_mulsi3_v6"
164 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
165 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
166 (match_operand:SI 2 "register_operand" "l,0,0")))]
167 "TARGET_THUMB1 && arm_arch6"
172 [(set_attr "length" "2")
173 (set_attr "type" "muls")]
;; Bitwise AND; commutative, with operand 1 tied to the destination.
;; Sets the condition flags.
176 (define_insn "*thumb1_andsi3_insn"
177 [(set (match_operand:SI 0 "register_operand" "=l")
178 (and:SI (match_operand:SI 1 "register_operand" "%0")
179 (match_operand:SI 2 "register_operand" "l")))]
182 [(set_attr "length" "2")
183 (set_attr "type" "logic_imm")
184 (set_attr "conds" "set")])
187 [(set (match_operand:SI 0 "s_register_operand" "")
188 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
189 (match_operand:SI 2 "const_int_operand" "")
190 (match_operand:SI 3 "const_int_operand" "")))
191 (clobber (match_operand:SI 4 "s_register_operand" ""))]
193 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
194 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
196 HOST_WIDE_INT temp = INTVAL (operands[2]);
198 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
199 operands[3] = GEN_INT (32 - temp);
204 [(set (match_operand:SI 0 "s_register_operand" "")
205 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
206 (match_operand:SI 2 "const_int_operand" "")
207 (match_operand:SI 3 "const_int_operand" "")))]
209 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
210 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
212 HOST_WIDE_INT temp = INTVAL (operands[2]);
214 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
215 operands[3] = GEN_INT (32 - temp);
;; Bit-clear (AND with complement).  Note that operand 2 -- not the
;; inverted operand 1 -- is tied to the destination, matching the
;; BICS operand order.  Sets the condition flags.
219 (define_insn "thumb1_bicsi3"
220 [(set (match_operand:SI 0 "register_operand" "=l")
221 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
222 (match_operand:SI 2 "register_operand" "0")))]
225 [(set_attr "length" "2")
226 (set_attr "conds" "set")
227 (set_attr "type" "logics_reg")]
;; Bitwise OR; commutative, with operand 1 tied to the destination.
;; Sets the condition flags.
230 (define_insn "*thumb1_iorsi3_insn"
231 [(set (match_operand:SI 0 "register_operand" "=l")
232 (ior:SI (match_operand:SI 1 "register_operand" "%0")
233 (match_operand:SI 2 "register_operand" "l")))]
236 [(set_attr "length" "2")
237 (set_attr "conds" "set")
238 (set_attr "type" "logics_reg")])
;; Bitwise XOR; commutative, with operand 1 tied to the destination.
;; Sets the condition flags.
240 (define_insn "*thumb1_xorsi3_insn"
241 [(set (match_operand:SI 0 "register_operand" "=l")
242 (xor:SI (match_operand:SI 1 "register_operand" "%0")
243 (match_operand:SI 2 "register_operand" "l")))]
246 [(set_attr "length" "2")
247 (set_attr "conds" "set")
248 (set_attr "type" "logics_reg")]
;; Left shift: by a small immediate (constraint N) or by a register,
;; in which case the destination is tied to operand 1.  Sets the
;; condition flags.
251 (define_insn "*thumb1_ashlsi3"
252 [(set (match_operand:SI 0 "register_operand" "=l,l")
253 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
254 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
257 [(set_attr "length" "2")
258 (set_attr "type" "shift_imm,shift_reg")
259 (set_attr "conds" "set")])
;; Arithmetic right shift: by a small immediate (constraint N) or by a
;; register, in which case the destination is tied to operand 1.
;; Sets the condition flags.
261 (define_insn "*thumb1_ashrsi3"
262 [(set (match_operand:SI 0 "register_operand" "=l,l")
263 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
264 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
267 [(set_attr "length" "2")
268 (set_attr "type" "shift_imm,shift_reg")
269 (set_attr "conds" "set")])
;; Logical right shift: by a small immediate (constraint N) or by a
;; register, in which case the destination is tied to operand 1.
;; Sets the condition flags.
271 (define_insn "*thumb1_lshrsi3"
272 [(set (match_operand:SI 0 "register_operand" "=l,l")
273 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
274 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
277 [(set_attr "length" "2")
278 (set_attr "type" "shift_imm,shift_reg")
279 (set_attr "conds" "set")])
;; Rotate right; the amount must be in a register (the constraints
;; accept no immediate form) and the destination is tied to operand 1.
281 (define_insn "*thumb1_rotrsi3"
282 [(set (match_operand:SI 0 "register_operand" "=l")
283 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
284 (match_operand:SI 2 "register_operand" "l")))]
287 [(set_attr "type" "shift_reg")
288 (set_attr "length" "2")]
;; 64-bit negate: zero the high word, RSBS the low word (setting
;; borrow), then SBCS the high word.  The destination is earlyclobbered
;; ("=&l") because %R0 is written before the source operands are fully
;; read.  Kept as one pattern so nothing can clobber the carry between
;; RSBS and SBCS.
291 (define_insn "*thumb1_negdi2"
292 [(set (match_operand:DI 0 "register_operand" "=&l")
293 (neg:DI (match_operand:DI 1 "register_operand" "l")))
294 (clobber (reg:CC CC_REGNUM))]
296 "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
297 [(set_attr "length" "6")
298 (set_attr "type" "multiple")]
;; 32-bit negate; a single 16-bit instruction (length 2).
301 (define_insn "*thumb1_negsi2"
302 [(set (match_operand:SI 0 "register_operand" "=l")
303 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
306 [(set_attr "length" "2")
307 (set_attr "type" "alu_imm")]
;; abs(x), split after reload into the branchless three-instruction
;; sequence:  t = x >> 31 (sign mask);  d = x + t;  d = d ^ t.
310 (define_insn_and_split "*thumb1_abssi2"
311 [(set (match_operand:SI 0 "s_register_operand" "=l")
312 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
313 (clobber (match_scratch:SI 2 "=&l"))]
316 "TARGET_THUMB1 && reload_completed"
317 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
318 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
319 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
321 [(set_attr "length" "6")
322 (set_attr "type" "multiple")]
;; -abs(x): as for abs but with  d = t - x  in place of the add,
;; using the same sign-mask scratch register:
;; t = x >> 31;  d = t - x;  d = d ^ t.
325 (define_insn_and_split "*thumb1_neg_abssi2"
326 [(set (match_operand:SI 0 "s_register_operand" "=l")
327 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
328 (clobber (match_scratch:SI 2 "=&l"))]
331 "TARGET_THUMB1 && reload_completed"
332 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
333 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
334 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
336 [(set_attr "length" "6")
337 (set_attr "type" "multiple")]
;; Bitwise NOT (MVN); a single 16-bit instruction.
340 (define_insn "*thumb1_one_cmplsi2"
341 [(set (match_operand:SI 0 "register_operand" "=l")
342 (not:SI (match_operand:SI 1 "register_operand" "l")))]
345 [(set_attr "length" "2")
346 (set_attr "type" "mvn_reg")]
349 (define_insn "*thumb1_zero_extendhisi2"
350 [(set (match_operand:SI 0 "register_operand" "=l,l")
351 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
356 if (which_alternative == 0 && arm_arch6)
357 return "uxth\t%0, %1";
358 if (which_alternative == 0)
361 mem = XEXP (operands[1], 0);
363 if (GET_CODE (mem) == CONST)
366 if (GET_CODE (mem) == PLUS)
368 rtx a = XEXP (mem, 0);
370 /* This can happen due to bugs in reload. */
371 if (REG_P (a) && REGNO (a) == SP_REGNUM)
374 ops[0] = operands[0];
377 output_asm_insn ("mov\t%0, %1", ops);
379 XEXP (mem, 0) = operands[0];
383 return "ldrh\t%0, %1";
385 [(set_attr_alternative "length"
386 [(if_then_else (eq_attr "is_arch6" "yes")
387 (const_int 2) (const_int 4))
389 (set_attr "type" "extend,load_byte")]
392 (define_insn "*thumb1_zero_extendqisi2"
393 [(set (match_operand:SI 0 "register_operand" "=l,l")
394 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
395 "TARGET_THUMB1 && !arm_arch6"
399 [(set_attr "length" "4,2")
400 (set_attr "type" "alu_shift_reg,load_byte")
401 (set_attr "pool_range" "*,32")]
404 (define_insn "*thumb1_zero_extendqisi2_v6"
405 [(set (match_operand:SI 0 "register_operand" "=l,l")
406 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
407 "TARGET_THUMB1 && arm_arch6"
411 [(set_attr "length" "2")
412 (set_attr "type" "extend,load_byte")]
415 ;; We used to have an early-clobber on the scratch register here.
416 ;; However, there's a bug somewhere in reload which means that this
417 ;; can be partially ignored during spill allocation if the memory
418 ;; address also needs reloading; this causes us to die later on when
419 ;; we try to verify the operands. Fortunately, we don't really need
420 ;; the early-clobber: we can always use operand 0 if operand 2
421 ;; overlaps the address.
422 (define_insn "thumb1_extendhisi2"
423 [(set (match_operand:SI 0 "register_operand" "=l,l")
424 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
425 (clobber (match_scratch:SI 2 "=X,l"))]
432 if (which_alternative == 0 && !arm_arch6)
434 if (which_alternative == 0)
435 return \"sxth\\t%0, %1\";
437 mem = XEXP (operands[1], 0);
439 /* This code used to try to use 'V', and fix the address only if it was
440 offsettable, but this fails for e.g. REG+48 because 48 is outside the
441 range of QImode offsets, and offsettable_address_p does a QImode
444 if (GET_CODE (mem) == CONST)
447 if (GET_CODE (mem) == LABEL_REF)
448 return \"ldr\\t%0, %1\";
450 if (GET_CODE (mem) == PLUS)
452 rtx a = XEXP (mem, 0);
453 rtx b = XEXP (mem, 1);
455 if (GET_CODE (a) == LABEL_REF
457 return \"ldr\\t%0, %1\";
460 return \"ldrsh\\t%0, %1\";
471 gcc_assert (REG_P (ops[1]));
473 ops[0] = operands[0];
474 if (reg_mentioned_p (operands[2], ops[1]))
477 ops[3] = operands[2];
478 output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
481 [(set_attr_alternative "length"
482 [(if_then_else (eq_attr "is_arch6" "yes")
483 (const_int 2) (const_int 4))
485 (set_attr "type" "extend,load_byte")
486 (set_attr "pool_range" "*,1018")]
490 [(set (match_operand:SI 0 "register_operand" "")
491 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
492 "TARGET_THUMB1 && reload_completed"
493 [(set (match_dup 0) (match_dup 2))
494 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
496 rtx addr = XEXP (operands[1], 0);
498 if (GET_CODE (addr) == CONST)
499 addr = XEXP (addr, 0);
501 if (GET_CODE (addr) == PLUS
502 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
503 /* No split necessary. */
506 if (GET_CODE (addr) == PLUS
507 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
510 if (reg_overlap_mentioned_p (operands[0], addr))
512 rtx t = gen_lowpart (QImode, operands[0]);
513 emit_move_insn (t, operands[1]);
514 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
520 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
521 operands[2] = const0_rtx;
523 else if (GET_CODE (addr) != PLUS)
525 else if (REG_P (XEXP (addr, 0)))
527 operands[2] = XEXP (addr, 1);
528 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
532 operands[2] = XEXP (addr, 0);
533 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
536 operands[3] = change_address (operands[1], QImode, addr);
540 [(set (match_operand:SI 0 "register_operand" "")
541 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
542 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
543 (set (match_operand:SI 3 "register_operand" "")
544 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
546 && GET_CODE (XEXP (operands[4], 0)) == PLUS
547 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
548 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
549 && (peep2_reg_dead_p (3, operands[0])
550 || rtx_equal_p (operands[0], operands[3]))
551 && (peep2_reg_dead_p (3, operands[2])
552 || rtx_equal_p (operands[2], operands[3]))"
553 [(set (match_dup 2) (match_dup 1))
554 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
556 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
557 operands[4] = change_address (operands[4], QImode, addr);
560 (define_insn "thumb1_extendqisi2"
561 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
562 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
567 if (which_alternative == 0 && arm_arch6)
568 return "sxtb\\t%0, %1";
569 if (which_alternative == 0)
572 addr = XEXP (operands[1], 0);
573 if (GET_CODE (addr) == PLUS
574 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
575 return "ldrsb\\t%0, %1";
579 [(set_attr_alternative "length"
580 [(if_then_else (eq_attr "is_arch6" "yes")
581 (const_int 2) (const_int 4))
583 (if_then_else (eq_attr "is_arch6" "yes")
584 (const_int 4) (const_int 6))])
585 (set_attr "type" "extend,load_byte,load_byte")]
588 ;;; ??? This should have alternatives for constants.
589 ;;; ??? This was originally identical to the movdf_insn pattern.
590 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
591 ;;; thumb_reorg with a memory reference.
;; Move a 64-bit value.  Register pairs are moved in whichever word
;; order avoids clobbering an overlapping source; small constants are
;; synthesised (I: mov low / zero high; J: mov, negate, sign-extend
;; via asrs); alternative 3 (arch v8mb) uses MOVW.  Memory forms use
;; ldmia/stmia or a pair of str instructions.
;; FIX: the MOVW alternative used to emit "movs\tR0, #0" -- the
;; literal register name r0 -- instead of "movs\t%R0, #0", the %R
;; operand-print modifier selecting the high word of operand 0.
592 (define_insn "*thumb1_movdi_insn"
593 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
594 (match_operand:DI 1 "general_operand" "l, I,J,j,>,l,mi,l,*r"))]
596 && ( register_operand (operands[0], DImode)
597 || register_operand (operands[1], DImode))"
600 switch (which_alternative)
604 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
605 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
606 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
608 return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
610 operands[1] = GEN_INT (- INTVAL (operands[1]));
611 return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
613 gcc_assert (TARGET_HAVE_MOVT);
614 return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
616 return \"ldmia\\t%1, {%0, %H0}\";
618 return \"stmia\\t%0, {%1, %H1}\";
620 return thumb_load_double_from_address (operands);
622 operands[2] = gen_rtx_MEM (SImode,
623 plus_constant (Pmode, XEXP (operands[0], 0), 4));
624 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
627 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
628 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
629 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
632 [(set_attr "length" "4,4,6,6,2,2,6,4,4")
633 (set_attr "type" "multiple,multiple,multiple,multiple,load2,store2,load2,store2,multiple")
634 (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
635 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
;; 32-bit move.  Ten alternatives: low/high register moves, immediate
;; forms (I directly; J and K synthesised, see the splits below; j for
;; MOVW on v8mb), and the Thumb-1 load/store addressing forms, with a
;; 1018-byte literal-pool range for the pool-load alternative.
638 (define_insn "*thumb1_movsi_insn"
639 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, m,*l*h*k")
640 (match_operand:SI 1 "general_operand" "l, I,j,J,K,>,l,mi,l,*l*h*k"))]
642 && ( register_operand (operands[0], SImode)
643 || register_operand (operands[1], SImode))"
655 [(set_attr "length" "2,2,4,4,4,2,2,2,2,2")
656 (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load1,store1,load1,store1,mov_reg")
657 (set_attr "pool_range" "*,*,*,*,*,*,*,1018,*,*")
658 (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1")
659 (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,nocond,nocond,nocond")])
661 ; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
662 ; to see if we can load them in fewer instructions or fewer cycles.
663 ; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
664 ; thumb1_movdi_insn has a better way to handle them.
666 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
667 (match_operand:ANY64 1 "immediate_operand" ""))]
668 "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
669 [(set (match_dup 0) (match_dup 1))
670 (set (match_dup 2) (match_dup 3))]
672 operands[2] = gen_highpart (SImode, operands[0]);
673 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
675 operands[0] = gen_lowpart (SImode, operands[0]);
676 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split for a constant satisfying J: load the negated (positive)
;; value with MOV, then negate it into the destination.
681 [(set (match_operand:SI 0 "register_operand" "")
682 (match_operand:SI 1 "const_int_operand" ""))]
683 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
684 [(set (match_dup 2) (match_dup 1))
685 (set (match_dup 0) (neg:SI (match_dup 2)))]
688 operands[1] = GEN_INT (- INTVAL (operands[1]));
689 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
694 [(set (match_operand:SI 0 "register_operand" "")
695 (match_operand:SI 1 "const_int_operand" ""))]
696 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
697 && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
698 [(set (match_dup 2) (match_dup 1))
699 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
702 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
703 unsigned HOST_WIDE_INT mask = 0xff;
706 for (i = 0; i < 25; i++)
707 if ((val & (mask << i)) == val)
710 /* Don't split if the shift is zero. */
714 operands[1] = GEN_INT (val >> i);
715 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
716 operands[3] = GEN_INT (i);
720 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
722 [(set (match_operand:SI 0 "register_operand" "")
723 (match_operand:SI 1 "const_int_operand" ""))]
724 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
725 && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
726 [(set (match_dup 2) (match_dup 1))
727 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
730 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
731 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
732 operands[3] = GEN_INT (255);
736 (define_insn "*thumb1_movhi_insn"
737 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
738 (match_operand:HI 1 "general_operand" "l,m,l,k*h,*r,I,n"))]
740 && ( register_operand (operands[0], HImode)
741 || register_operand (operands[1], HImode))"
743 switch (which_alternative)
745 case 0: return \"adds %0, %1, #0\";
746 case 2: return \"strh %1, %0\";
747 case 3: return \"mov %0, %1\";
748 case 4: return \"mov %0, %1\";
749 case 5: return \"movs %0, %1\";
750 case 6: gcc_assert (TARGET_HAVE_MOVT);
751 return \"movw %0, %L1\";
752 default: gcc_unreachable ();
754 /* The stack pointer can end up being taken as an index register.
755 Catch this case here and deal with it. */
756 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
757 && REG_P (XEXP (XEXP (operands[1], 0), 0))
758 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
761 ops[0] = operands[0];
762 ops[1] = XEXP (XEXP (operands[1], 0), 0);
764 output_asm_insn (\"mov %0, %1\", ops);
766 XEXP (XEXP (operands[1], 0), 0) = operands[0];
769 return \"ldrh %0, %1\";
771 [(set_attr "length" "2,4,2,2,2,2,4")
772 (set_attr "type" "alus_imm,load1,store1,mov_reg,mov_reg,mov_imm,mov_imm")
773 (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
774 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])
776 (define_expand "thumb_movhi_clobber"
777 [(set (match_operand:HI 0 "memory_operand" "")
778 (match_operand:HI 1 "register_operand" ""))
779 (clobber (match_operand:DI 2 "register_operand" ""))]
782 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
783 && REGNO (operands[1]) <= LAST_LO_REGNUM)
785 emit_insn (gen_movhi (operands[0], operands[1]));
788 /* XXX Fixme, need to handle other cases here as well. */
;; 8-bit move between low/high registers, memory and small immediates.
;; Every alternative is a single 16-bit instruction (length 2); the
;; load alternative has a 32-byte literal-pool range.
793 (define_insn "*thumb1_movqi_insn"
794 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
795 (match_operand:QI 1 "general_operand" "l,m,l,k*h,*r,I"))]
797 && ( register_operand (operands[0], QImode)
798 || register_operand (operands[1], QImode))"
806 [(set_attr "length" "2")
807 (set_attr "type" "alu_imm,load1,store1,mov_reg,mov_imm,mov_imm")
808 (set_attr "pool_range" "*,32,*,*,*,*")
809 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
811 (define_insn "*thumb1_movhf"
812 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
813 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
815 && ( s_register_operand (operands[0], HFmode)
816 || s_register_operand (operands[1], HFmode))"
818 switch (which_alternative)
821 return \"movs\\t%0, %1\";
825 gcc_assert (MEM_P (operands[1]));
826 addr = XEXP (operands[1], 0);
827 if (GET_CODE (addr) == LABEL_REF
828 || (GET_CODE (addr) == CONST
829 && GET_CODE (XEXP (addr, 0)) == PLUS
830 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
831 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
833 /* Constant pool entry. */
834 return \"ldr\\t%0, %1\";
836 return \"ldrh\\t%0, %1\";
838 case 2: return \"strh\\t%1, %0\";
839 default: return \"mov\\t%0, %1\";
842 [(set_attr "length" "2")
843 (set_attr "type" "mov_reg,load1,store1,mov_reg,mov_reg")
844 (set_attr "pool_range" "*,1018,*,*,*")
845 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
846 ;;; ??? This should have alternatives for constants.
;; 32-bit float move; values travel through core registers, as the
;; constraints show, with a 1018-byte literal-pool range for the
;; pool-load alternative.
847 (define_insn "*thumb1_movsf_insn"
848 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
849 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
851 && ( register_operand (operands[0], SFmode)
852 || register_operand (operands[1], SFmode))"
861 [(set_attr "length" "2")
862 (set_attr "type" "alus_imm,load1,store1,load1,store1,mov_reg,mov_reg")
863 (set_attr "pool_range" "*,*,*,1018,*,*,*")
864 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
867 ;;; ??? This should have alternatives for constants.
868 ;;; ??? This was originally identical to the movdi_insn pattern.
869 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
870 ;;; thumb_reorg with a memory reference.
871 (define_insn "*thumb_movdf_insn"
872 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
873 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
875 && ( register_operand (operands[0], DFmode)
876 || register_operand (operands[1], DFmode))"
878 switch (which_alternative)
882 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
883 return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
884 return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
886 return \"ldmia\\t%1, {%0, %H0}\";
888 return \"stmia\\t%0, {%1, %H1}\";
890 return thumb_load_double_from_address (operands);
892 operands[2] = gen_rtx_MEM (SImode,
893 plus_constant (Pmode,
894 XEXP (operands[0], 0), 4));
895 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
898 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
899 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
900 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
903 [(set_attr "length" "4,2,2,6,4,4")
904 (set_attr "type" "multiple,load2,store2,load2,store2,multiple")
905 (set_attr "pool_range" "*,*,*,1018,*,*")]
909 ;; Thumb block-move insns
;; Block move of three words through three scratch registers, with
;; both the destination and source pointers post-incremented by 12.
;; Output is produced by thumb_output_move_mem_multiple (3, ...).
911 (define_insn "movmem12b"
912 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
913 (mem:SI (match_operand:SI 3 "register_operand" "1")))
914 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
915 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
916 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
917 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
918 (set (match_operand:SI 0 "register_operand" "=l")
919 (plus:SI (match_dup 2) (const_int 12)))
920 (set (match_operand:SI 1 "register_operand" "=l")
921 (plus:SI (match_dup 3) (const_int 12)))
922 (clobber (match_scratch:SI 4 "=&l"))
923 (clobber (match_scratch:SI 5 "=&l"))
924 (clobber (match_scratch:SI 6 "=&l"))]
926 "* return thumb_output_move_mem_multiple (3, operands);"
927 [(set_attr "length" "4")
928 ; This isn't entirely accurate... It loads as well, but in terms of
929 ; scheduling the following insn it is better to consider it as a store
930 (set_attr "type" "store3")]
;; Block move of two words through two scratch registers, with both
;; pointers post-incremented by 8.  Output is produced by
;; thumb_output_move_mem_multiple (2, ...).
933 (define_insn "movmem8b"
934 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
935 (mem:SI (match_operand:SI 3 "register_operand" "1")))
936 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
937 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
938 (set (match_operand:SI 0 "register_operand" "=l")
939 (plus:SI (match_dup 2) (const_int 8)))
940 (set (match_operand:SI 1 "register_operand" "=l")
941 (plus:SI (match_dup 3) (const_int 8)))
942 (clobber (match_scratch:SI 4 "=&l"))
943 (clobber (match_scratch:SI 5 "=&l"))]
945 "* return thumb_output_move_mem_multiple (2, operands);"
946 [(set_attr "length" "4")
947 ; This isn't entirely accurate... It loads as well, but in terms of
948 ; scheduling the following insn it is better to consider it as a store
949 (set_attr "type" "store2")]
953 ;; A pattern to recognize a special situation and optimize for it.
954 ;; On the thumb, zero-extension from memory is preferable to sign-extension
955 ;; due to the available addressing modes. Hence, convert a signed comparison
956 ;; with zero into an unsigned comparison with 127 if possible.
957 (define_expand "cbranchqi4"
958 [(set (pc) (if_then_else
959 (match_operator 0 "lt_ge_comparison_operator"
960 [(match_operand:QI 1 "memory_operand" "")
961 (match_operand:QI 2 "const0_operand" "")])
962 (label_ref (match_operand 3 "" ""))
967 xops[1] = gen_reg_rtx (SImode);
968 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
969 xops[2] = GEN_INT (127);
970 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
971 VOIDmode, xops[1], xops[2]);
972 xops[3] = operands[3];
973 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
977 (define_insn "cbranchsi4_insn"
978 [(set (pc) (if_then_else
979 (match_operator 0 "arm_comparison_operator"
980 [(match_operand:SI 1 "s_register_operand" "l,l*h")
981 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
982 (label_ref (match_operand 3 "" ""))
986 rtx t = cfun->machine->thumb1_cc_insn;
989 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
990 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
992 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
994 if (!noov_comparison_operator (operands[0], VOIDmode))
997 else if (cfun->machine->thumb1_cc_mode != CCmode)
1002 output_asm_insn ("cmp\t%1, %2", operands);
1003 cfun->machine->thumb1_cc_insn = insn;
1004 cfun->machine->thumb1_cc_op0 = operands[1];
1005 cfun->machine->thumb1_cc_op1 = operands[2];
1006 cfun->machine->thumb1_cc_mode = CCmode;
1009 /* Ensure we emit the right type of condition code on the jump. */
1010 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1013 switch (get_attr_length (insn))
1015 case 4: return \"b%d0\\t%l3\";
1016 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1017 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1020 [(set (attr "far_jump")
1022 (eq_attr "length" "8")
1023 (const_string "yes")
1024 (const_string "no")))
1025 (set (attr "length")
1027 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1028 (le (minus (match_dup 3) (pc)) (const_int 256)))
1031 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1032 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1035 (set_attr "type" "multiple")]
1038 (define_insn "cbranchsi4_scratch"
1039 [(set (pc) (if_then_else
1040 (match_operator 4 "arm_comparison_operator"
1041 [(match_operand:SI 1 "s_register_operand" "l,0")
1042 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
1043 (label_ref (match_operand 3 "" ""))
1045 (clobber (match_scratch:SI 0 "=l,l"))]
1048 output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);
1050 switch (get_attr_length (insn))
1052 case 4: return \"b%d4\\t%l3\";
1053 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1054 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1057 [(set (attr "far_jump")
1059 (eq_attr "length" "8")
1060 (const_string "yes")
1061 (const_string "no")))
1062 (set (attr "length")
1064 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1065 (le (minus (match_dup 3) (pc)) (const_int 256)))
1068 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1069 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1072 (set_attr "type" "multiple")]
;; Equality branch on (reg == -reg): CMN adds the two operands purely
;; to set the flags (no scratch register needed), then a conditional
;; branch tests the result, with the usual long/far-jump expansions.
1075 (define_insn "*negated_cbranchsi4"
1078 (match_operator 0 "equality_operator"
1079 [(match_operand:SI 1 "s_register_operand" "l")
1080 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
1081 (label_ref (match_operand 3 "" ""))
1085 output_asm_insn (\"cmn\\t%1, %2\", operands);
1086 switch (get_attr_length (insn))
1088 case 4: return \"b%d0\\t%l3\";
1089 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1090 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1093 [(set (attr "far_jump")
1095 (eq_attr "length" "8")
1096 (const_string "yes")
1097 (const_string "no")))
1098 (set (attr "length")
1100 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1101 (le (minus (match_dup 3) (pc)) (const_int 256)))
1104 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1105 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1108 (set_attr "type" "multiple")]
;; Branch on an extracted bit: LSLS by (31 - bit position) into a
;; scratch moves the tested bit into the sign bit, so the flags set by
;; the shift drive the equality branch.
;; NOTE(review): assumes the zero_extract is a single-bit field — the
;; width operand is not visible here; confirm against the full pattern.
1111 (define_insn "*tbit_cbranch"
1114 (match_operator 0 "equality_operator"
1115 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1117 (match_operand:SI 2 "const_int_operand" "i"))
1119 (label_ref (match_operand 3 "" ""))
1121 (clobber (match_scratch:SI 4 "=l"))]
1126 op[0] = operands[4];
1127 op[1] = operands[1];
1128 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
1130 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1131 switch (get_attr_length (insn))
1133 case 4: return \"b%d0\\t%l3\";
1134 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1135 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1138 [(set (attr "far_jump")
1140 (eq_attr "length" "8")
1141 (const_string "yes")
1142 (const_string "no")))
1143 (set (attr "length")
1145 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1146 (le (minus (match_dup 3) (pc)) (const_int 256)))
1149 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1150 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1153 (set_attr "type" "multiple")]
;; Branch on the low N bits of a register: LSLS by (32 - N) into a
;; scratch shifts out everything above the tested field, so the flags
;; reflect only those low bits; then branch on equality.
1156 (define_insn "*tlobits_cbranch"
1159 (match_operator 0 "equality_operator"
1160 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1161 (match_operand:SI 2 "const_int_operand" "i")
1164 (label_ref (match_operand 3 "" ""))
1166 (clobber (match_scratch:SI 4 "=l"))]
1171 op[0] = operands[4];
1172 op[1] = operands[1];
1173 op[2] = GEN_INT (32 - INTVAL (operands[2]));
1175 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1176 switch (get_attr_length (insn))
1178 case 4: return \"b%d0\\t%l3\";
1179 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1180 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1183 [(set (attr "far_jump")
1185 (eq_attr "length" "8")
1186 (const_string "yes")
1187 (const_string "no")))
1188 (set (attr "length")
1190 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1191 (le (minus (match_dup 3) (pc)) (const_int 256)))
1194 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1195 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1198 (set_attr "type" "multiple")]
;; Test-and-branch: TST sets the flags from the AND of two low
;; registers without needing a destination, then a conditional branch
;; on the equality operator consumes them.
1201 (define_insn "*tstsi3_cbranch"
1204 (match_operator 3 "equality_operator"
1205 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
1206 (match_operand:SI 1 "s_register_operand" "l"))
1208 (label_ref (match_operand 2 "" ""))
1213 output_asm_insn (\"tst\\t%0, %1\", operands);
1214 switch (get_attr_length (insn))
1216 case 4: return \"b%d3\\t%l2\";
1217 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
1218 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
1221 [(set (attr "far_jump")
1223 (eq_attr "length" "8")
1224 (const_string "yes")
1225 (const_string "no")))
1226 (set (attr "length")
1228 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1229 (le (minus (match_dup 2) (pc)) (const_int 256)))
1232 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1233 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1236 (set_attr "type" "multiple")]
;; Decrement-and-branch: stores operand 2 minus 1 into operand 0 and
;; branches on the flags set by the SUBS; the branch condition is
;; rewritten as a compare of the pre-decrement value against 1.
;; Alternatives: (0) SUBS straight into a low-reg destination;
;; (1) high-reg destination via SUBS into a scratch then MOV (which
;; preserves the flags — see the comment below); (2,3) memory
;; destination via SUBS into a scratch then STR.
1239 (define_insn "*cbranchne_decr1"
1241 (if_then_else (match_operator 3 "equality_operator"
1242 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
1244 (label_ref (match_operand 4 "" ""))
1246 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
1247 (plus:SI (match_dup 2) (const_int -1)))
1248 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
1253 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1255 VOIDmode, operands[2], const1_rtx);
1256 cond[1] = operands[4];
1258 if (which_alternative == 0)
1259 output_asm_insn (\"subs\\t%0, %2, #1\", operands);
1260 else if (which_alternative == 1)
1262 /* We must provide an alternative for a hi reg because reload
1263 cannot handle output reloads on a jump instruction, but we
1264 can't subtract into that. Fortunately a mov from lo to hi
1265 does not clobber the condition codes. */
1266 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1267 output_asm_insn (\"mov\\t%0, %1\", operands);
1271 /* Similarly, but the target is memory. */
1272 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1273 output_asm_insn (\"str\\t%1, %0\", operands);
1276 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
1279 output_asm_insn (\"b%d0\\t%l1\", cond);
1282 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1283 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
1285 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1286 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1290 [(set (attr "far_jump")
1292 (ior (and (eq (symbol_ref ("which_alternative"))
1294 (eq_attr "length" "8"))
1295 (eq_attr "length" "10"))
1296 (const_string "yes")
1297 (const_string "no")))
1298 (set_attr_alternative "length"
1302 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1303 (le (minus (match_dup 4) (pc)) (const_int 256)))
1306 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1307 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1312 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1313 (le (minus (match_dup 4) (pc)) (const_int 256)))
1316 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1317 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1322 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1323 (le (minus (match_dup 4) (pc)) (const_int 256)))
1326 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1327 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1332 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1333 (le (minus (match_dup 4) (pc)) (const_int 256)))
1336 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1337 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1340 (set_attr "type" "multiple")]
;; Add-and-branch: the ADDS (or SUBS of a negated constant) both
;; stores the sum into operand 0 and sets the flags, then the branch
;; tests them.  Only EQ/NE/GE/LT comparisons are accepted (see the
;; insn condition).  Later alternatives place the result in a high
;; register via MOV or in memory via STR, computing into a scratch
;; first.
1343 (define_insn "*addsi3_cbranch"
1346 (match_operator 4 "arm_comparison_operator"
1348 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
1349 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
1351 (label_ref (match_operand 5 "" ""))
1354 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
1355 (plus:SI (match_dup 2) (match_dup 3)))
1356 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
1358 && (GET_CODE (operands[4]) == EQ
1359 || GET_CODE (operands[4]) == NE
1360 || GET_CODE (operands[4]) == GE
1361 || GET_CODE (operands[4]) == LT)"
1366 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
1367 cond[1] = operands[2];
1368 cond[2] = operands[3];
1370 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
1371 output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
1373 output_asm_insn (\"adds\\t%0, %1, %2\", cond);
1375 if (which_alternative >= 2
1376 && which_alternative < 4)
1377 output_asm_insn (\"mov\\t%0, %1\", operands);
1378 else if (which_alternative >= 4)
1379 output_asm_insn (\"str\\t%1, %0\", operands);
1381 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
1384 return \"b%d4\\t%l5\";
1386 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
1388 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
1392 [(set (attr "far_jump")
1394 (ior (and (lt (symbol_ref ("which_alternative"))
1396 (eq_attr "length" "8"))
1397 (eq_attr "length" "10"))
1398 (const_string "yes")
1399 (const_string "no")))
1400 (set (attr "length")
1402 (lt (symbol_ref ("which_alternative"))
1405 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
1406 (le (minus (match_dup 5) (pc)) (const_int 256)))
1409 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
1410 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1414 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
1415 (le (minus (match_dup 5) (pc)) (const_int 256)))
1418 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
1419 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1422 (set_attr "type" "multiple")]
;; As *addsi3_cbranch, but the sum itself is not needed: where possible
;; the flags are set with a plain CMP (against the negated constant) or
;; CMN, otherwise ADDS/SUBS into a scratch (or back into operand 1 when
;; tied).  Restricted to EQ/NE/GE/LT like the pattern above.
1425 (define_insn "*addsi3_cbranch_scratch"
1428 (match_operator 3 "arm_comparison_operator"
1430 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
1431 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
1433 (label_ref (match_operand 4 "" ""))
1435 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
1437 && (GET_CODE (operands[3]) == EQ
1438 || GET_CODE (operands[3]) == NE
1439 || GET_CODE (operands[3]) == GE
1440 || GET_CODE (operands[3]) == LT)"
1443 switch (which_alternative)
1446 output_asm_insn (\"cmp\t%1, #%n2\", operands);
1449 output_asm_insn (\"cmn\t%1, %2\", operands);
1452 if (INTVAL (operands[2]) < 0)
1453 output_asm_insn (\"subs\t%0, %1, %2\", operands);
1455 output_asm_insn (\"adds\t%0, %1, %2\", operands);
1458 if (INTVAL (operands[2]) < 0)
1459 output_asm_insn (\"subs\t%0, %0, %2\", operands);
1461 output_asm_insn (\"adds\t%0, %0, %2\", operands);
1465 switch (get_attr_length (insn))
1468 return \"b%d3\\t%l4\";
1470 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
1472 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1476 [(set (attr "far_jump")
1478 (eq_attr "length" "8")
1479 (const_string "yes")
1480 (const_string "no")))
1481 (set (attr "length")
1483 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1484 (le (minus (match_dup 4) (pc)) (const_int 256)))
1487 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1488 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1491 (set_attr "type" "multiple")]
;; Compare a 64-bit value against zero: ORRS of the low (%Q0) and high
;; (%R0) halves into a scratch sets Z exactly when both halves — and
;; hence the whole DImode value — are zero.
1494 (define_insn "*thumb_cmpdi_zero"
1495 [(set (reg:CC_Z CC_REGNUM)
1496 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
1498 (clobber (match_scratch:SI 1 "=l"))]
1500 "orrs\\t%1, %Q0, %R0"
1501 [(set_attr "conds" "set")
1502 (set_attr "length" "2")
1503 (set_attr "type" "logics_reg")]
;; Expand (x == 0) as an SImode 0/1 value; a fresh pseudo is created
;; here as the scratch that the matching insn below may clobber.
1506 (define_expand "cstoresi_eq0_thumb1"
1508 [(set (match_operand:SI 0 "s_register_operand" "")
1509 (eq:SI (match_operand:SI 1 "s_register_operand" "")
1511 (clobber (match_dup:SI 2))])]
1513 "operands[2] = gen_reg_rtx (SImode);"
;; Expand (x != 0) as an SImode 0/1 value; as for the EQ variant, the
;; scratch pseudo is materialized here for the insn below to clobber.
1516 (define_expand "cstoresi_ne0_thumb1"
1518 [(set (match_operand:SI 0 "s_register_operand" "")
1519 (ne:SI (match_operand:SI 1 "s_register_operand" "")
1521 (clobber (match_dup:SI 2))])]
1523 "operands[2] = gen_reg_rtx (SImode);"
;; (x == 0) via the carry trick: RSBS computes 0 - x, setting the carry
;; exactly when x is zero, and the following ADCS adds x back plus that
;; carry, leaving 1 for x == 0 and 0 otherwise.  The second alternative
;; uses a scratch when the result must not clobber the input early.
1526 (define_insn "*cstoresi_eq0_thumb1_insn"
1527 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
1528 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
1530 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
1533 rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
1534 rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
1535 [(set_attr "length" "4")
1536 (set_attr "type" "multiple")]
;; (x != 0): SUBS #1 borrows exactly when x is zero, and the SBCS
;; x - (x - 1) - !C then yields 0 for x == 0 and 1 otherwise.
1539 (define_insn "*cstoresi_ne0_thumb1_insn"
1540 [(set (match_operand:SI 0 "s_register_operand" "=l")
1541 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
1543 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
1545 "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
1546 [(set_attr "length" "4")]
1549 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; CMP sets the borrow and "sbcs %0, %0, %0" broadcasts it: the result
;; is -1 when %1 < %2 (unsigned) and 0 otherwise, i.e. -(ltu).
1550 (define_insn "cstoresi_nltu_thumb1"
1551 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1552 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1553 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
1555 "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
1556 [(set_attr "length" "4")
1557 (set_attr "type" "multiple")]
;; LTU as a 0/1 value: split into the -(ltu) pattern above (producing
;; 0 or -1 in a fresh pseudo) followed by a negation into the result.
1560 (define_insn_and_split "cstoresi_ltu_thumb1"
1561 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1562 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1563 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
1568 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
1569 (set (match_dup 0) (neg:SI (match_dup 3)))]
1570 "operands[3] = gen_reg_rtx (SImode);"
1571 [(set_attr "length" "4")
1572 (set_attr "type" "multiple")]
1575 ;; Used as part of the expansion of thumb les sequence.
;; CMP %3, %4 leaves the carry set exactly when %3 >= %4 (unsigned),
;; and ADCS folds that carry into the sum, computing
;; %1 + %2 + (%3 geu %4) in a single flag-carrying pair.
1576 (define_insn "thumb1_addsi3_addgeu"
1577 [(set (match_operand:SI 0 "s_register_operand" "=l")
1578 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
1579 (match_operand:SI 2 "s_register_operand" "l"))
1580 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
1581 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
1583 "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
1584 [(set_attr "length" "4")
1585 (set_attr "type" "multiple")]
;; Unconditional jump: a 2-byte B when the target is within the short
;; branch range, otherwise BL is used purely as a longer-range branch
;; (flagged by the far_jump attribute).
1589 (define_insn "*thumb_jump"
1591 (label_ref (match_operand 0 "" "")))]
1594 if (get_attr_length (insn) == 2)
1596 return \"bl\\t%l0\\t%@ far jump\";
1598 [(set (attr "far_jump")
1600 (eq_attr "length" "4")
1601 (const_string "yes")
1602 (const_string "no")))
1603 (set (attr "length")
1605 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
1606 (le (minus (match_dup 0) (pc)) (const_int 2048)))
1609 (set_attr "type" "branch")]
;; Indirect call for architectures with BLX (arm_arch5); excluded for
;; sibling calls, which are handled elsewhere.  Clobbers LR as usual
;; for a call.
1612 (define_insn "*call_reg_thumb1_v5"
1613 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1614 (match_operand 1 "" ""))
1615 (use (match_operand 2 "" ""))
1616 (clobber (reg:SI LR_REGNUM))]
1617 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
1619 [(set_attr "length" "2")
1620 (set_attr "type" "call")]
;; Indirect call without BLX (pre-arm_arch5): calls via
;; thumb_call_via_reg when caller interworking is off, otherwise
;; through an __interwork_*_call_via_* library veneer, selecting the
;; r7 variant when a frame pointer is needed and r11 otherwise.
1623 (define_insn "*call_reg_thumb1"
1624 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1625 (match_operand 1 "" ""))
1626 (use (match_operand 2 "" ""))
1627 (clobber (reg:SI LR_REGNUM))]
1628 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
1631 if (!TARGET_CALLER_INTERWORKING)
1632 return thumb_call_via_reg (operands[0]);
1633 else if (operands[1] == const0_rtx)
1634 return \"bl\\t%__interwork_call_via_%0\";
1635 else if (frame_pointer_needed)
1636 return \"bl\\t%__interwork_r7_call_via_%0\";
1638 return \"bl\\t%__interwork_r11_call_via_%0\";
1640 [(set_attr "type" "call")]
;; Value-returning indirect call for BLX-capable cores (arm_arch5);
;; the companion of *call_reg_thumb1_v5 with a result register.
1643 (define_insn "*call_value_reg_thumb1_v5"
1644 [(set (match_operand 0 "" "")
1645 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1646 (match_operand 2 "" "")))
1647 (use (match_operand 3 "" ""))
1648 (clobber (reg:SI LR_REGNUM))]
1649 "TARGET_THUMB1 && arm_arch5"
1651 [(set_attr "length" "2")
1652 (set_attr "type" "call")]
;; Value-returning indirect call without BLX: same veneer selection as
;; *call_reg_thumb1 (direct via thumb_call_via_reg, or through the
;; __interwork_{,r7_,r11_}call_via_* helpers), with a result register.
1655 (define_insn "*call_value_reg_thumb1"
1656 [(set (match_operand 0 "" "")
1657 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1658 (match_operand 2 "" "")))
1659 (use (match_operand 3 "" ""))
1660 (clobber (reg:SI LR_REGNUM))]
1661 "TARGET_THUMB1 && !arm_arch5"
1664 if (!TARGET_CALLER_INTERWORKING)
1665 return thumb_call_via_reg (operands[1]);
1666 else if (operands[2] == const0_rtx)
1667 return \"bl\\t%__interwork_call_via_%1\";
1668 else if (frame_pointer_needed)
1669 return \"bl\\t%__interwork_r7_call_via_%1\";
1671 return \"bl\\t%__interwork_r11_call_via_%1\";
1673 [(set_attr "type" "call")]
;; Direct call: the callee address must be a SYMBOL_REF that is not
;; subject to long-call treatment, so a 4-byte direct call reaches it.
1676 (define_insn "*call_insn"
1677 [(call (mem:SI (match_operand:SI 0 "" ""))
1678 (match_operand:SI 1 "" ""))
1679 (use (match_operand 2 "" ""))
1680 (clobber (reg:SI LR_REGNUM))]
1682 && GET_CODE (operands[0]) == SYMBOL_REF
1683 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
1685 [(set_attr "length" "4")
1686 (set_attr "type" "call")]
;; Value-returning direct call; same SYMBOL_REF / not-long-call
;; restrictions as *call_insn above.
1689 (define_insn "*call_value_insn"
1690 [(set (match_operand 0 "" "")
1691 (call (mem:SI (match_operand 1 "" ""))
1692 (match_operand 2 "" "")))
1693 (use (match_operand 3 "" ""))
1694 (clobber (reg:SI LR_REGNUM))]
1696 && GET_CODE (operands[1]) == SYMBOL_REF
1697 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
1699 [(set_attr "length" "4")
1700 (set_attr "type" "call")]
;; PIC casesi expansion: emit an unsigned (GTU) bounds check on the
;; index against the table limit — presumably branching to the default
;; label (operand 3); the branch target line is not visible here, so
;; confirm — then copy the index into r0 and jump through
;; thumb1_casesi_dispatch, which expects it there.
1703 (define_expand "thumb1_casesi_internal_pic"
1704 [(match_operand:SI 0 "s_register_operand" "")
1705 (match_operand:SI 1 "thumb1_cmp_operand" "")
1706 (match_operand 2 "" "")
1707 (match_operand 3 "" "")]
1711 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
1712 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
1714 reg0 = gen_rtx_REG (SImode, 0);
1715 emit_move_insn (reg0, operands[0]);
1716 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; Dispatch step of casesi: jumps through the table using the index
;; previously placed in r0; the assembly comes from
;; thumb1_output_casesi.  IP and LR are clobbered by the sequence.
1721 (define_insn "thumb1_casesi_dispatch"
1722 [(parallel [(set (pc) (unspec [(reg:SI 0)
1723 (label_ref (match_operand 0 "" ""))
1724 ;; (label_ref (match_operand 1 "" ""))
1726 UNSPEC_THUMB1_CASESI))
1727 (clobber (reg:SI IP_REGNUM))
1728 (clobber (reg:SI LR_REGNUM))])]
1730 "* return thumb1_output_casesi(operands);"
1731 [(set_attr "length" "4")
1732 (set_attr "type" "multiple")]
1735 ;; NB Never uses BX.
;; Indirect jump through a register; marked as clobbering the
;; condition codes.
1736 (define_insn "*thumb1_indirect_jump"
1738 (match_operand:SI 0 "register_operand" "l*r"))]
1741 [(set_attr "conds" "clob")
1742 (set_attr "length" "2")
1743 (set_attr "type" "branch")]
;; Interworking prologue marker; the actual assembly sequence is
;; produced at output time by thumb1_output_interwork.
1747 (define_insn "prologue_thumb1_interwork"
1748 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
1750 "* return thumb1_output_interwork ();"
1751 [(set_attr "length" "8")
1752 (set_attr "type" "multiple")]
;; Function epilogue for Thumb-1; the full sequence is generated at
;; output time by thumb1_unexpanded_epilogue.
1755 (define_insn "*epilogue_insns"
1756 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
1759 return thumb1_unexpanded_epilogue ();
1761 ; Length is absolute worst case
1762 [(set_attr "length" "44")
1763 (set_attr "type" "block")
1764 ;; We don't clobber the conditions, but the potential length of this
1765 ;; operation is sufficient to make conditionalizing the sequence
1766 ;; unlikely to be profitable.
1767 (set_attr "conds" "clob")]
1770 ;; Miscellaneous Thumb patterns
;; Jump-table dispatch: the table holds label-relative offsets, so the
;; expansion adds the table base address (reg1) to the offset in
;; operand 0 before jumping — presumably through reg2; the emitted
;; jump itself is not visible in this excerpt.
1771 (define_expand "tablejump"
1772 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
1773 (use (label_ref (match_operand 1 "" "")))])]
1778 /* Hopefully, CSE will eliminate this copy. */
1779 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
1780 rtx reg2 = gen_reg_rtx (SImode);
1782 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
;; Copy the program counter into a low register.
1788 (define_insn "*thumb1_movpc_insn"
1789 [(set (match_operand:SI 0 "s_register_operand" "=l")
1790 (reg:SI PC_REGNUM))]
1793 [(set_attr "length" "2")
1794 (set_attr "conds" "nocond")
1795 (set_attr "type" "mov_reg")]
1798 ;; NB never uses BX.
;; 2-byte indirect jump used by jump tables (the table label is kept
;; live via the USE).
1799 (define_insn "*thumb1_tablejump"
1800 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
1801 (use (label_ref (match_operand 1 "" "")))]
1804 [(set_attr "length" "2")
1805 (set_attr "type" "no_insn")]
;; Exception-handler return: split after reload into the code emitted
;; by thumb_set_return_address, which installs the new return address
;; (operand 0) using the scratch low register (operand 1).
1808 (define_insn_and_split "thumb_eh_return"
1809 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
1811 (clobber (match_scratch:SI 1 "=&l"))]
1814 "&& reload_completed"
1818 thumb_set_return_address (operands[0], operands[1]);
1821 [(set_attr "type" "mov_reg")]