1 ;; ARM Thumb-1 Machine Description
2 ;; Copyright (C) 2007-2015 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify it
7 ;; under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful, but
12 ;; WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 ;; General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>. */
21 ;;---------------------------------------------------------------------------
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instruction such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instructions. Currently, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
;; 64-bit add: ADDS on the low word sets the carry flag, which the
;; following ADCS consumes for the high word (see the header comment on
;; why this must stay a single two-instruction pattern).
;; NOTE(review): gaps in the embedded line numbering (e.g. 51-52) show
;; lines are missing from this copy of the file; the condition string
;; and closing paren of this pattern are not visible here.
46 (define_insn "*thumb1_adddi3"
47 [(set (match_operand:DI 0 "register_operand" "=l")
48 (plus:DI (match_operand:DI 1 "register_operand" "%0")
49 (match_operand:DI 2 "register_operand" "l")))
50 (clobber (reg:CC CC_REGNUM))
53 "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
54 [(set_attr "length" "4")
55 (set_attr "type" "multiple")]
;; 32-bit add with ten alternatives (lo-reg, negative immediate as SUBS,
;; SP-relative forms, and out-of-range immediates Pa/Pb/Pc).  After
;; reload, large immediates are split into two chained adds; SP-relative
;; adds above 1020 are likewise split (SP offsets must stay in range).
58 (define_insn_and_split "*thumb1_addsi3"
59 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
60 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
61 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
64 static const char * const asms[] =
66 \"adds\\t%0, %0, %2\",
67 \"subs\\t%0, %0, #%n2\",
68 \"adds\\t%0, %1, %2\",
;; Alternatives 2 and 6 may see a negative constant: emit SUBS/SUB of
;; the negated value instead (%n2 prints the negation).
77 if ((which_alternative == 2 || which_alternative == 6)
78 && CONST_INT_P (operands[2])
79 && INTVAL (operands[2]) < 0)
80 return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
81 return asms[which_alternative];
;; Split condition: only after reload, and only when the immediate is
;; outside the single-instruction range for the base register used.
83 "&& reload_completed && CONST_INT_P (operands[2])
84 && ((operands[1] != stack_pointer_rtx
85 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
86 || (operands[1] == stack_pointer_rtx
87 && INTVAL (operands[2]) > 1020))"
88 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
89 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
;; NOTE(review): lines 93-100 are missing here — the clamping of
;; `offset` between the if/else-if arms is not visible in this copy.
91 HOST_WIDE_INT offset = INTVAL (operands[2]);
92 if (operands[1] == stack_pointer_rtx)
98 else if (offset < -255)
101 operands[3] = GEN_INT (offset);
102 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
104 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
105 (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
106 alus_sreg,alus_sreg,multiple,multiple,multiple")]
109 ;; Reloading and elimination of the frame pointer can
110 ;; sometimes cause this optimization to be missed.
;; Rewrites "reg = const; reg = reg + SP" into the single SP-relative
;; add form (ADD rd, sp, #imm), valid for word-aligned offsets < 1024.
;; NOTE(review): the define_peephole2/define_split header line of this
;; pattern is missing from this copy (embedded-numbering gap at 110-111).
112 [(set (match_operand:SI 0 "arm_general_register_operand" "")
113 (match_operand:SI 1 "const_int_operand" ""))
115 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
117 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
118 && (INTVAL (operands[1]) & 3) == 0"
119 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
;; 64-bit subtract: SUBS sets the carry (borrow) flag consumed by SBCS.
;; Kept as one pattern so nothing can clobber the flags in between
;; (see the file header comment).
123 (define_insn "*thumb_subdi3"
124 [(set (match_operand:DI 0 "register_operand" "=l")
125 (minus:DI (match_operand:DI 1 "register_operand" "0")
126 (match_operand:DI 2 "register_operand" "l")))
127 (clobber (reg:CC CC_REGNUM))]
129 "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
130 [(set_attr "length" "4")
131 (set_attr "type" "multiple")]
;; 32-bit subtract, register or small-immediate (Pd) second operand;
;; flag-setting ("conds" "set").  Output template line is missing from
;; this copy (numbering gap 137->140).
134 (define_insn "thumb1_subsi3_insn"
135 [(set (match_operand:SI 0 "register_operand" "=l")
136 (minus:SI (match_operand:SI 1 "register_operand" "l")
137 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
140 [(set_attr "length" "2")
141 (set_attr "conds" "set")
142 (set_attr "type" "alus_sreg")]
145 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
146 ; 1 and 2 are the same, because reload will make operand 0 match
147 ; operand 1 without realizing that this conflicts with operand 2. We fix
148 ; this by adding another alternative to match this case, and then `reload'
149 ; it ourselves. This alternative must come first.
;; Pre-v6 multiply.  MUL requires dest == one source, so alternatives
;; 0/1 first copy operand 1 into the (earlyclobber) destination; the
;; alternative with "0" ties dest to operand 1 directly (length 2).
150 (define_insn "*thumb_mulsi3"
151 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
152 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
153 (match_operand:SI 2 "register_operand" "l,l,l")))]
154 "TARGET_THUMB1 && !arm_arch6"
156 movs\\t%0, %1\;muls\\t%0, %2
157 mov\\t%0, %1\;muls\\t%0, %2
159 [(set_attr "length" "4,4,2")
160 (set_attr "type" "muls")]
;; v6 multiply: dest is tied to one of the sources in every alternative,
;; so a single 16-bit MULS suffices (templates missing in this copy,
;; numbering gap 167->172).
163 (define_insn "*thumb_mulsi3_v6"
164 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
165 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
166 (match_operand:SI 2 "register_operand" "l,0,0")))]
167 "TARGET_THUMB1 && arm_arch6"
172 [(set_attr "length" "2")
173 (set_attr "type" "muls")]
;; Register-register AND (ANDS), flag-setting.
;; NOTE(review): "type" is "logic_imm" although both operands are
;; registers — upstream uses "logics_reg" here; verify against the
;; original file before trusting this attribute.
176 (define_insn "*thumb1_andsi3_insn"
177 [(set (match_operand:SI 0 "register_operand" "=l")
178 (and:SI (match_operand:SI 1 "register_operand" "%0")
179 (match_operand:SI 2 "register_operand" "l")))]
182 [(set_attr "length" "2")
183 (set_attr "type" "logic_imm")
184 (set_attr "conds" "set")])
;; Split zero_extract into shift-left then logical-shift-right through a
;; scratch register: left shift discards the high bits, right shift
;; moves the field to bit 0 (header define_split line missing here).
187 [(set (match_operand:SI 0 "s_register_operand" "")
188 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
189 (match_operand:SI 2 "const_int_operand" "")
190 (match_operand:SI 3 "const_int_operand" "")))
191 (clobber (match_operand:SI 4 "s_register_operand" ""))]
193 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
194 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
;; temp = field width; shift amounts derived from width and position.
196 HOST_WIDE_INT temp = INTVAL (operands[2]);
198 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
199 operands[3] = GEN_INT (32 - temp);
;; Split sign_extract the same way, but with an arithmetic right shift
;; so the field is sign-extended; no scratch needed since operand 0 can
;; hold the intermediate (header define_split line missing here).
204 [(set (match_operand:SI 0 "s_register_operand" "")
205 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
206 (match_operand:SI 2 "const_int_operand" "")
207 (match_operand:SI 3 "const_int_operand" "")))]
209 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
210 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
212 HOST_WIDE_INT temp = INTVAL (operands[2]);
214 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
215 operands[3] = GEN_INT (32 - temp);
;; Bit-clear (BICS): dest tied to operand 2, operand 1 is the mask that
;; gets inverted.  Output template missing in this copy (gap 222->225).
219 (define_insn "thumb1_bicsi3"
220 [(set (match_operand:SI 0 "register_operand" "=l")
221 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
222 (match_operand:SI 2 "register_operand" "0")))]
225 [(set_attr "length" "2")
226 (set_attr "conds" "set")
227 (set_attr "type" "logics_reg")]
;; Register-register inclusive OR (ORRS), flag-setting.
230 (define_insn "*thumb1_iorsi3_insn"
231 [(set (match_operand:SI 0 "register_operand" "=l")
232 (ior:SI (match_operand:SI 1 "register_operand" "%0")
233 (match_operand:SI 2 "register_operand" "l")))]
236 [(set_attr "length" "2")
237 (set_attr "conds" "set")
238 (set_attr "type" "logics_reg")])
;; Register-register exclusive OR (EORS), flag-setting.
240 (define_insn "*thumb1_xorsi3_insn"
241 [(set (match_operand:SI 0 "register_operand" "=l")
242 (xor:SI (match_operand:SI 1 "register_operand" "%0")
243 (match_operand:SI 2 "register_operand" "l")))]
246 [(set_attr "length" "2")
247 (set_attr "conds" "set")
248 (set_attr "type" "logics_reg")]
;; Left shift: immediate form (constraint N) or register form with dest
;; tied to the shifted value.  Flag-setting (LSLS).
251 (define_insn "*thumb1_ashlsi3"
252 [(set (match_operand:SI 0 "register_operand" "=l,l")
253 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
254 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
257 [(set_attr "length" "2")
258 (set_attr "type" "shift_imm,shift_reg")
259 (set_attr "conds" "set")])
;; Arithmetic right shift (ASRS), immediate or register form.
261 (define_insn "*thumb1_ashrsi3"
262 [(set (match_operand:SI 0 "register_operand" "=l,l")
263 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
264 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
267 [(set_attr "length" "2")
268 (set_attr "type" "shift_imm,shift_reg")
269 (set_attr "conds" "set")])
;; Logical right shift (LSRS), immediate or register form.
271 (define_insn "*thumb1_lshrsi3"
272 [(set (match_operand:SI 0 "register_operand" "=l,l")
273 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
274 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
277 [(set_attr "length" "2")
278 (set_attr "type" "shift_imm,shift_reg")
279 (set_attr "conds" "set")])
;; Rotate right (RORS): Thumb-1 only has the register-count form, with
;; dest tied to the rotated operand.
281 (define_insn "*thumb1_rotrsi3"
282 [(set (match_operand:SI 0 "register_operand" "=l")
283 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
284 (match_operand:SI 2 "register_operand" "l")))]
287 [(set_attr "type" "shift_reg")
288 (set_attr "length" "2")]
;; 64-bit negate: clear the high word, RSBS negates the low word setting
;; borrow, SBCS folds it into the high word.  Earlyclobber dest because
;; %R0 is written before %Q1/%R1 are fully consumed.
291 (define_insn "*thumb1_negdi2"
292 [(set (match_operand:DI 0 "register_operand" "=&l")
293 (neg:DI (match_operand:DI 1 "register_operand" "l")))
294 (clobber (reg:CC CC_REGNUM))]
296 "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
297 [(set_attr "length" "6")
298 (set_attr "type" "multiple")]
;; 32-bit negate (output template missing in this copy, gap 303->306).
301 (define_insn "*thumb1_negsi2"
302 [(set (match_operand:SI 0 "register_operand" "=l")
303 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
306 [(set_attr "length" "2")
307 (set_attr "type" "alu_imm")]
;; abs(x) via the classic branch-free sequence: t = x >> 31 (all ones if
;; negative), result = (x + t) ^ t.  Split after reload into the three
;; component insns.
310 (define_insn_and_split "*thumb1_abssi2"
311 [(set (match_operand:SI 0 "s_register_operand" "=l")
312 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
313 (clobber (match_scratch:SI 2 "=&l"))]
316 "TARGET_THUMB1 && reload_completed"
317 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
318 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
319 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
321 [(set_attr "length" "6")
322 (set_attr "type" "multiple")]
;; -abs(x): same sign-mask trick with the add replaced by a reversed
;; subtract, so the result is negated.
325 (define_insn_and_split "*thumb1_neg_abssi2"
326 [(set (match_operand:SI 0 "s_register_operand" "=l")
327 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
328 (clobber (match_scratch:SI 2 "=&l"))]
331 "TARGET_THUMB1 && reload_completed"
332 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
333 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
334 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
336 [(set_attr "length" "6")
337 (set_attr "type" "multiple")]
;; Bitwise NOT (MVNS); output template missing in this copy.
340 (define_insn "*thumb1_one_cmplsi2"
341 [(set (match_operand:SI 0 "register_operand" "=l")
342 (not:SI (match_operand:SI 1 "register_operand" "l")))]
345 [(set_attr "length" "2")
346 (set_attr "type" "mvn_reg")]
;; HI->SI zero extend: UXTH on v6+ for the register alternative; the
;; memory alternative uses LDRH, but must first rewrite SP-based
;; addresses (LDRH cannot index off SP) by copying SP into the dest.
349 (define_insn "*thumb1_zero_extendhisi2"
350 [(set (match_operand:SI 0 "register_operand" "=l,l")
351 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
356 if (which_alternative == 0 && arm_arch6)
357 return "uxth\t%0, %1";
358 if (which_alternative == 0)
361 mem = XEXP (operands[1], 0);
363 if (GET_CODE (mem) == CONST)
366 if (GET_CODE (mem) == PLUS)
368 rtx a = XEXP (mem, 0);
370 /* This can happen due to bugs in reload. */
371 if (REG_P (a) && REGNO (a) == SP_REGNUM)
;; Copy SP into the destination and re-point the address at it, so the
;; following LDRH uses a lo register as the base.
374 ops[0] = operands[0];
377 output_asm_insn ("mov\t%0, %1", ops);
379 XEXP (mem, 0) = operands[0];
383 return "ldrh\t%0, %1";
385 [(set_attr_alternative "length"
386 [(if_then_else (eq_attr "is_arch6" "yes")
387 (const_int 2) (const_int 4))
389 (set_attr "type" "extend,load_byte")]
;; QI->SI zero extend, pre-v6: two-shift sequence for registers, LDRB
;; for memory (templates missing in this copy, gap 395->399).
392 (define_insn "*thumb1_zero_extendqisi2"
393 [(set (match_operand:SI 0 "register_operand" "=l,l")
394 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
395 "TARGET_THUMB1 && !arm_arch6"
399 [(set_attr "length" "4,2")
400 (set_attr "type" "alu_shift_reg,load_byte")
401 (set_attr "pool_range" "*,32")]
;; QI->SI zero extend, v6+: single UXTB for registers, LDRB for memory
;; (templates missing in this copy, gap 407->411).
404 (define_insn "*thumb1_zero_extendqisi2_v6"
405 [(set (match_operand:SI 0 "register_operand" "=l,l")
406 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
407 "TARGET_THUMB1 && arm_arch6"
411 [(set_attr "length" "2")
412 (set_attr "type" "extend,load_byte")]
415 ;; We used to have an early-clobber on the scratch register here.
416 ;; However, there's a bug somewhere in reload which means that this
417 ;; can be partially ignored during spill allocation if the memory
418 ;; address also needs reloading; this causes us to die later on when
419 ;; we try to verify the operands. Fortunately, we don't really need
420 ;; the early-clobber: we can always use operand 0 if operand 2
421 ;; overlaps the address.
;; HI->SI sign extend.  SXTH for the register alternative; LDRSH only
;; supports [reg, reg] addressing, so for other address forms the
;; offset is first materialised into the scratch (or the destination,
;; per the comment above about the dropped earlyclobber).
422 (define_insn "thumb1_extendhisi2"
423 [(set (match_operand:SI 0 "register_operand" "=l,l")
424 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
425 (clobber (match_scratch:SI 2 "=X,l"))]
432 if (which_alternative == 0 && !arm_arch6)
434 if (which_alternative == 0)
435 return \"sxth\\t%0, %1\";
437 mem = XEXP (operands[1], 0);
439 /* This code used to try to use 'V', and fix the address only if it was
440 offsettable, but this fails for e.g. REG+48 because 48 is outside the
441 range of QImode offsets, and offsettable_address_p does a QImode
444 if (GET_CODE (mem) == CONST)
;; Constant-pool references are loaded as a full word with LDR.
447 if (GET_CODE (mem) == LABEL_REF)
448 return \"ldr\\t%0, %1\";
450 if (GET_CODE (mem) == PLUS)
452 rtx a = XEXP (mem, 0);
453 rtx b = XEXP (mem, 1);
455 if (GET_CODE (a) == LABEL_REF
457 return \"ldr\\t%0, %1\";
;; [reg, reg] addressing: LDRSH handles it directly.
460 return \"ldrsh\\t%0, %1\";
471 gcc_assert (REG_P (ops[1]));
473 ops[0] = operands[0];
;; If the scratch overlaps the base register, fall through to the
;; alternative sequence (lines missing here); otherwise load the offset
;; into the scratch and use the two-register LDRSH form.
474 if (reg_mentioned_p (operands[2], ops[1]))
477 ops[3] = operands[2];
478 output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
481 [(set_attr_alternative "length"
482 [(if_then_else (eq_attr "is_arch6" "yes")
483 (const_int 2) (const_int 4))
485 (set_attr "type" "extend,load_byte")
486 (set_attr "pool_range" "*,1018")]
;; Post-reload split of a QImode sign-extending load whose address is
;; not the [reg, reg] form LDRSB needs: move one address component into
;; the destination, then load via a reg+reg address (define_split header
;; line missing from this copy).
490 [(set (match_operand:SI 0 "register_operand" "")
491 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
492 "TARGET_THUMB1 && reload_completed"
493 [(set (match_dup 0) (match_dup 2))
494 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
496 rtx addr = XEXP (operands[1], 0);
498 if (GET_CODE (addr) == CONST)
499 addr = XEXP (addr, 0);
501 if (GET_CODE (addr) == PLUS
502 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
503 /* No split necessary. */
506 if (GET_CODE (addr) == PLUS
507 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
;; If the destination is part of the address, load the byte first and
;; sign-extend in place via the register-only pattern.
510 if (reg_overlap_mentioned_p (operands[0], addr))
512 rtx t = gen_lowpart (QImode, operands[0]);
513 emit_move_insn (t, operands[1]);
514 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
520 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
521 operands[2] = const0_rtx;
523 else if (GET_CODE (addr) != PLUS)
525 else if (REG_P (XEXP (addr, 0)))
527 operands[2] = XEXP (addr, 1);
528 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
532 operands[2] = XEXP (addr, 0);
533 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
536 operands[3] = change_address (operands[1], QImode, addr);
;; Peephole: "base += const; tmp = 0; dst = sext(mem[base+tmp])" becomes
;; "tmp = const; dst = sext(mem[base+tmp])", avoiding the base-register
;; update when both helper registers die (define_peephole2 header line
;; missing from this copy).
540 [(set (match_operand:SI 0 "register_operand" "")
541 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
542 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
543 (set (match_operand:SI 3 "register_operand" "")
544 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
546 && GET_CODE (XEXP (operands[4], 0)) == PLUS
547 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
548 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
549 && (peep2_reg_dead_p (3, operands[0])
550 || rtx_equal_p (operands[0], operands[3]))
551 && (peep2_reg_dead_p (3, operands[2])
552 || rtx_equal_p (operands[2], operands[3]))"
553 [(set (match_dup 2) (match_dup 1))
554 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
556 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
557 operands[4] = change_address (operands[4], QImode, addr);
;; QI->SI sign extend: SXTB on v6+, LDRSB for the [reg, reg] memory
;; form; other address shapes take the longer sequences (template lines
;; missing in this copy, gaps after 575).
560 (define_insn "thumb1_extendqisi2"
561 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
562 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
567 if (which_alternative == 0 && arm_arch6)
568 return "sxtb\\t%0, %1";
569 if (which_alternative == 0)
572 addr = XEXP (operands[1], 0);
573 if (GET_CODE (addr) == PLUS
574 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
575 return "ldrsb\\t%0, %1";
579 [(set_attr_alternative "length"
580 [(if_then_else (eq_attr "is_arch6" "yes")
581 (const_int 2) (const_int 4))
583 (if_then_else (eq_attr "is_arch6" "yes")
584 (const_int 4) (const_int 6))])
585 (set_attr "type" "extend,load_byte,load_byte")]
588 ;;; ??? This should have alternatives for constants.
589 ;;; ??? This was originally identical to the movdf_insn pattern.
590 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
591 ;;; thumb_reorg with a memory reference.
;; 64-bit move.  Register-register copies order the two word moves by
;; register overlap; small constants use MOVS(+RSBS/ASRS for negatives);
;; LDMIA/STMIA handle auto-increment memory; stores split into two STRs.
592 (define_insn "*thumb1_movdi_insn"
593 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
594 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
596 && ( register_operand (operands[0], DImode)
597 || register_operand (operands[1], DImode))"
600 switch (which_alternative)
;; Overlapping lo-reg pair: copy in the order that avoids clobbering
;; the not-yet-read half.
604 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
605 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
606 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
608 return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
;; Negative constant (J): load magnitude, negate, sign-fill high word.
610 operands[1] = GEN_INT (- INTVAL (operands[1]));
611 return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
613 return \"ldmia\\t%1, {%0, %H0}\";
615 return \"stmia\\t%0, {%1, %H1}\";
617 return thumb_load_double_from_address (operands);
619 operands[2] = gen_rtx_MEM (SImode,
620 plus_constant (Pmode, XEXP (operands[0], 0), 4));
621 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
624 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
625 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
626 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
629 [(set_attr "length" "4,4,6,2,2,6,4,4")
630 (set_attr "type" "multiple,multiple,multiple,load2,store2,load2,store2,multiple")
631 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
;; 32-bit move, nine alternatives (register, small/negated/shifted
;; constants, auto-inc memory, load/store, hi-reg copy).  The output
;; templates are missing from this copy (numbering gap 639->650).
634 (define_insn "*thumb1_movsi_insn"
635 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
636 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
638 && ( register_operand (operands[0], SImode)
639 || register_operand (operands[1], SImode))"
650 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
651 (set_attr "type" "mov_reg,mov_imm,multiple,multiple,load1,store1,load1,store1,mov_reg")
652 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
653 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
655 ; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
656 ; to see if we can load them in fewer instructions or fewer cycles.
657 ; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
658 ; thumb1_movdi_insn has a better way to handle them.
;; Split a 64-bit immediate move into independent low-part and
;; high-part SImode moves (cheaper than one DImode constant), except for
;; J-range constants which thumb1_movdi_insn handles better — see the
;; comment above (define_split header line missing from this copy).
660 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
661 (match_operand:ANY64 1 "immediate_operand" ""))]
662 "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
663 [(set (match_dup 0) (match_dup 1))
664 (set (match_dup 2) (match_dup 3))]
666 operands[2] = gen_highpart (SImode, operands[0]);
667 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
669 operands[0] = gen_lowpart (SImode, operands[0]);
670 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a J-range (negative) constant load into MOVS of the magnitude
;; followed by a negate (define_split header line missing here).
675 [(set (match_operand:SI 0 "register_operand" "")
676 (match_operand:SI 1 "const_int_operand" ""))]
677 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
678 [(set (match_dup 2) (match_dup 1))
679 (set (match_dup 0) (neg:SI (match_dup 2)))]
682 operands[1] = GEN_INT (- INTVAL (operands[1]));
683 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Split a K-range constant (an 8-bit value shifted left) into MOVS of
;; the base value plus a left shift.  The loop finds the smallest shift
;; i such that val fits in the shifted 8-bit mask (define_split header
;; line missing here; lines between 700 and 707 also missing).
688 [(set (match_operand:SI 0 "register_operand" "")
689 (match_operand:SI 1 "const_int_operand" ""))]
690 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
691 [(set (match_dup 2) (match_dup 1))
692 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
695 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
696 unsigned HOST_WIDE_INT mask = 0xff;
699 for (i = 0; i < 25; i++)
700 if ((val & (mask << i)) == val)
703 /* Don't split if the shift is zero. */
707 operands[1] = GEN_INT (val >> i);
708 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
709 operands[3] = GEN_INT (i);
713 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
;; Split an immediate in [256, 510] (constraint Pe) into
;; MOVS #(n-255) followed by ADDS #255 (define_split header line
;; missing from this copy).
715 [(set (match_operand:SI 0 "register_operand" "")
716 (match_operand:SI 1 "const_int_operand" ""))]
717 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
718 [(set (match_dup 2) (match_dup 1))
719 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
722 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
723 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
724 operands[3] = GEN_INT (255);
;; 16-bit move.  Register copies use ADDS #0 / MOV / MOVS depending on
;; register class; loads must rewrite SP-based index addresses before
;; LDRH (SP is not a valid LDRH base on Thumb-1).
728 (define_insn "*thumb1_movhi_insn"
729 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
730 (match_operand:HI 1 "general_operand" "l,m,l,k*h,*r,I"))]
732 && ( register_operand (operands[0], HImode)
733 || register_operand (operands[1], HImode))"
735 switch (which_alternative)
737 case 0: return \"adds %0, %1, #0\";
738 case 2: return \"strh %1, %0\";
739 case 3: return \"mov %0, %1\";
740 case 4: return \"mov %0, %1\";
741 case 5: return \"movs %0, %1\";
742 default: gcc_unreachable ();
744 /* The stack pointer can end up being taken as an index register.
745 Catch this case here and deal with it. */
746 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
747 && REG_P (XEXP (XEXP (operands[1], 0), 0))
748 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
751 ops[0] = operands[0];
752 ops[1] = XEXP (XEXP (operands[1], 0), 0);
;; Copy SP into the destination register, then use it as the base.
754 output_asm_insn (\"mov %0, %1\", ops);
756 XEXP (XEXP (operands[1], 0), 0) = operands[0];
759 return \"ldrh %0, %1\";
761 [(set_attr "length" "2,4,2,2,2,2")
762 (set_attr "type" "alus_imm,load1,store1,mov_reg,mov_reg,mov_imm")
763 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Expander used when an HImode store needs a scratch DI register pair:
;; falls back to a plain movhi when the address is already valid and
;; the source is a lo register; other cases are unfinished (see XXX).
765 (define_expand "thumb_movhi_clobber"
766 [(set (match_operand:HI 0 "memory_operand" "")
767 (match_operand:HI 1 "register_operand" ""))
768 (clobber (match_operand:DI 2 "register_operand" ""))]
771 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
772 && REGNO (operands[1]) <= LAST_LO_REGNUM)
774 emit_insn (gen_movhi (operands[0], operands[1]));
777 /* XXX Fixme, need to handle other cases here as well. */
;; 8-bit move; alternatives mirror movhi (output templates missing in
;; this copy, numbering gap 787->795).
782 (define_insn "*thumb1_movqi_insn"
783 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
784 (match_operand:QI 1 "general_operand" "l,m,l,k*h,*r,I"))]
786 && ( register_operand (operands[0], QImode)
787 || register_operand (operands[1], QImode))"
795 [(set_attr "length" "2")
796 (set_attr "type" "alu_imm,load1,store1,mov_reg,mov_imm,mov_imm")
797 (set_attr "pool_range" "*,32,*,*,*,*")
798 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Half-float move.  Loads distinguish constant-pool addresses (LABEL_REF
;; or label+offset CONST), which need a word LDR, from ordinary memory,
;; which uses LDRH; stores use STRH; register copies use MOV/MOVS.
800 (define_insn "*thumb1_movhf"
801 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
802 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
804 && ( s_register_operand (operands[0], HFmode)
805 || s_register_operand (operands[1], HFmode))"
807 switch (which_alternative)
810 return \"movs\\t%0, %1\";
814 gcc_assert (MEM_P (operands[1]));
815 addr = XEXP (operands[1], 0);
816 if (GET_CODE (addr) == LABEL_REF
817 || (GET_CODE (addr) == CONST
818 && GET_CODE (XEXP (addr, 0)) == PLUS
819 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
820 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
822 /* Constant pool entry. */
823 return \"ldr\\t%0, %1\";
825 return \"ldrh\\t%0, %1\";
827 case 2: return \"strh\\t%1, %0\";
828 default: return \"mov\\t%0, %1\";
831 [(set_attr "length" "2")
832 (set_attr "type" "mov_reg,load1,store1,mov_reg,mov_reg")
833 (set_attr "pool_range" "*,1018,*,*,*")
834 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
835 ;;; ??? This should have alternatives for constants.
;; Single-float move in integer registers (soft-float); templates are
;; missing from this copy (numbering gap 841->850).
836 (define_insn "*thumb1_movsf_insn"
837 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
838 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
840 && ( register_operand (operands[0], SFmode)
841 || register_operand (operands[1], SFmode))"
850 [(set_attr "length" "2")
851 (set_attr "type" "alus_imm,load1,store1,load1,store1,mov_reg,mov_reg")
852 (set_attr "pool_range" "*,*,*,1018,*,*,*")
853 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
856 ;;; ??? This should have alternatives for constants.
857 ;;; ??? This was originally identical to the movdi_insn pattern.
858 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
859 ;;; thumb_reorg with a memory reference.
;; Double-float move in integer register pairs (soft-float); structure
;; parallels *thumb1_movdi_insn: overlap-ordered word copies,
;; LDMIA/STMIA for auto-inc memory, double STR for plain stores.
860 (define_insn "*thumb_movdf_insn"
861 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
862 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
864 && ( register_operand (operands[0], DFmode)
865 || register_operand (operands[1], DFmode))"
867 switch (which_alternative)
;; Copy word pair in the order that avoids clobbering unread input.
871 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
872 return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
873 return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
875 return \"ldmia\\t%1, {%0, %H0}\";
877 return \"stmia\\t%0, {%1, %H1}\";
879 return thumb_load_double_from_address (operands);
881 operands[2] = gen_rtx_MEM (SImode,
882 plus_constant (Pmode,
883 XEXP (operands[0], 0), 4));
884 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
887 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
888 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
889 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
892 [(set_attr "length" "4,2,2,6,4,4")
893 (set_attr "type" "multiple,load2,store2,load2,store2,multiple")
894 (set_attr "pool_range" "*,*,*,1018,*,*")]
898 ;; Thumb block-move insns
;; Copy three words from [r1] to [r0] with post-increment of both
;; pointers (LDMIA/STMIA sequence emitted by
;; thumb_output_move_mem_multiple); needs three scratch lo registers.
900 (define_insn "movmem12b"
901 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
902 (mem:SI (match_operand:SI 3 "register_operand" "1")))
903 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
904 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
905 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
906 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
907 (set (match_operand:SI 0 "register_operand" "=l")
908 (plus:SI (match_dup 2) (const_int 12)))
909 (set (match_operand:SI 1 "register_operand" "=l")
910 (plus:SI (match_dup 3) (const_int 12)))
911 (clobber (match_scratch:SI 4 "=&l"))
912 (clobber (match_scratch:SI 5 "=&l"))
913 (clobber (match_scratch:SI 6 "=&l"))]
915 "* return thumb_output_move_mem_multiple (3, operands);"
916 [(set_attr "length" "4")
917 ; This isn't entirely accurate... It loads as well, but in terms of
918 ; scheduling the following insn it is better to consider it as a store
919 (set_attr "type" "store3")]
;; Two-word variant of movmem12b: copies 8 bytes and advances both
;; pointers by 8; needs two scratch lo registers.
922 (define_insn "movmem8b"
923 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
924 (mem:SI (match_operand:SI 3 "register_operand" "1")))
925 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
926 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
927 (set (match_operand:SI 0 "register_operand" "=l")
928 (plus:SI (match_dup 2) (const_int 8)))
929 (set (match_operand:SI 1 "register_operand" "=l")
930 (plus:SI (match_dup 3) (const_int 8)))
931 (clobber (match_scratch:SI 4 "=&l"))
932 (clobber (match_scratch:SI 5 "=&l"))]
934 "* return thumb_output_move_mem_multiple (2, operands);"
935 [(set_attr "length" "4")
936 ; This isn't entirely accurate... It loads as well, but in terms of
937 ; scheduling the following insn it is better to consider it as a store
938 (set_attr "type" "store2")]
942 ;; A pattern to recognize a special situation and optimize for it.
943 ;; On the thumb, zero-extension from memory is preferable to sign-extension
944 ;; due to the available addressing modes. Hence, convert a signed comparison
945 ;; with zero into an unsigned comparison with 127 if possible.
;; QImode conditional branch against zero: zero-extend the byte and
;; convert the signed test into an unsigned compare with 127
;; (GE 0 <=> LEU 127, LT 0 <=> GTU 127) — see the comment above about
;; zero- vs sign-extension cost on Thumb.
946 (define_expand "cbranchqi4"
947 [(set (pc) (if_then_else
948 (match_operator 0 "lt_ge_comparison_operator"
949 [(match_operand:QI 1 "memory_operand" "")
950 (match_operand:QI 2 "const0_operand" "")])
951 (label_ref (match_operand 3 "" ""))
956 xops[1] = gen_reg_rtx (SImode);
957 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
958 xops[2] = GEN_INT (127);
959 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
960 VOIDmode, xops[1], xops[2]);
961 xops[3] = operands[3];
962 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; Compare-and-branch.  Skips re-emitting CMP when the per-function
;; thumb1_cc_* cache shows the flags already hold a compatible
;; comparison of the same operands; otherwise emits CMP and updates the
;; cache.  Branch form depends on computed length (short, long, or
;; BL-based far jump).
966 (define_insn "cbranchsi4_insn"
967 [(set (pc) (if_then_else
968 (match_operator 0 "arm_comparison_operator"
969 [(match_operand:SI 1 "s_register_operand" "l,l*h")
970 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
971 (label_ref (match_operand 3 "" ""))
975 rtx t = cfun->machine->thumb1_cc_insn;
;; Cached flags are stale if the operands differ, or the cached mode is
;; too weak for this comparison (CC_NOOV only serves noov comparisons).
978 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
979 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
981 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
983 if (!noov_comparison_operator (operands[0], VOIDmode))
986 else if (cfun->machine->thumb1_cc_mode != CCmode)
991 output_asm_insn ("cmp\t%1, %2", operands);
992 cfun->machine->thumb1_cc_insn = insn;
993 cfun->machine->thumb1_cc_op0 = operands[1];
994 cfun->machine->thumb1_cc_op1 = operands[2];
995 cfun->machine->thumb1_cc_mode = CCmode;
998 /* Ensure we emit the right type of condition code on the jump. */
999 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1002 switch (get_attr_length (insn))
1004 case 4: return \"b%d0\\t%l3\";
1005 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1006 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1009 [(set (attr "far_jump")
1011 (eq_attr "length" "8")
1012 (const_string "yes")
1013 (const_string "no")))
;; Length chosen from branch displacement: conditional-branch range
;; first, then unconditional-branch range, else the far-jump sequence.
1014 (set (attr "length")
1016 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1017 (le (minus (match_dup 3) (pc)) (const_int 256)))
1020 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1021 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1024 (set_attr "type" "multiple")]
;; Compare-and-branch against a negated immediate: ADDS of the negated
;; constant into a scratch sets the flags (equivalent to CMP with the
;; original value), then branch on condition 4.
1027 (define_insn "cbranchsi4_scratch"
1028 [(set (pc) (if_then_else
1029 (match_operator 4 "arm_comparison_operator"
1030 [(match_operand:SI 1 "s_register_operand" "l,0")
1031 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
1032 (label_ref (match_operand 3 "" ""))
1034 (clobber (match_scratch:SI 0 "=l,l"))]
1037 output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);
1039 switch (get_attr_length (insn))
1041 case 4: return \"b%d4\\t%l3\";
1042 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1043 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1046 [(set (attr "far_jump")
1048 (eq_attr "length" "8")
1049 (const_string "yes")
1050 (const_string "no")))
1051 (set (attr "length")
1053 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1054 (le (minus (match_dup 3) (pc)) (const_int 256)))
1057 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1058 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1061 (set_attr "type" "multiple")]
;; Equality branch of reg against -reg: CMN adds the operands, setting
;; flags as if comparing against the negation, then branch.
1064 (define_insn "*negated_cbranchsi4"
1067 (match_operator 0 "equality_operator"
1068 [(match_operand:SI 1 "s_register_operand" "l")
1069 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
1070 (label_ref (match_operand 3 "" ""))
1074 output_asm_insn (\"cmn\\t%1, %2\", operands);
1075 switch (get_attr_length (insn))
1077 case 4: return \"b%d0\\t%l3\";
1078 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1079 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1082 [(set (attr "far_jump")
1084 (eq_attr "length" "8")
1085 (const_string "yes")
1086 (const_string "no")))
1087 (set (attr "length")
1089 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1090 (le (minus (match_dup 3) (pc)) (const_int 256)))
1093 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1094 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1097 (set_attr "type" "multiple")]
;; Branch on a single extracted bit of operand 1: LSLS the tested bit into
;; the sign/zero-flag position of scratch operand 4 (shift count is
;; 31 - bit-number, computed below), then branches on the resulting flags.
1100 (define_insn "*tbit_cbranch"
1103 (match_operator 0 "equality_operator"
1104 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1106 (match_operand:SI 2 "const_int_operand" "i"))
1108 (label_ref (match_operand 3 "" ""))
1110 (clobber (match_scratch:SI 4 "=l"))]
1115 op[0] = operands[4];
1116 op[1] = operands[1];
1117 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
1119 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1120 switch (get_attr_length (insn))
1122 case 4: return \"b%d0\\t%l3\";
1123 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1124 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1127 [(set (attr "far_jump")
1129 (eq_attr "length" "8")
1130 (const_string "yes")
1131 (const_string "no")))
1132 (set (attr "length")
1134 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1135 (le (minus (match_dup 3) (pc)) (const_int 256)))
1138 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1139 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1142 (set_attr "type" "multiple")]
;; Branch on the low bits of operand 1: LSLS by 32 - width (computed below)
;; shifts the tested low-bit field to the top of the scratch so the flags
;; reflect whether the field is zero; then branch as in *tbit_cbranch.
1145 (define_insn "*tlobits_cbranch"
1148 (match_operator 0 "equality_operator"
1149 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1150 (match_operand:SI 2 "const_int_operand" "i")
1153 (label_ref (match_operand 3 "" ""))
1155 (clobber (match_scratch:SI 4 "=l"))]
1160 op[0] = operands[4];
1161 op[1] = operands[1];
1162 op[2] = GEN_INT (32 - INTVAL (operands[2]));
1164 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1165 switch (get_attr_length (insn))
1167 case 4: return \"b%d0\\t%l3\";
1168 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1169 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1172 [(set (attr "far_jump")
1174 (eq_attr "length" "8")
1175 (const_string "yes")
1176 (const_string "no")))
1177 (set (attr "length")
1179 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1180 (le (minus (match_dup 3) (pc)) (const_int 256)))
1183 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1184 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1187 (set_attr "type" "multiple")]
;; Branch on an equality test of (AND %0 %1): emitted as TST %0, %1 (sets
;; flags without writing a result; the "%" on operand 0 marks the AND as
;; commutative), followed by the standard length-dependent branch.
1190 (define_insn "*tstsi3_cbranch"
1193 (match_operator 3 "equality_operator"
1194 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
1195 (match_operand:SI 1 "s_register_operand" "l"))
1197 (label_ref (match_operand 2 "" ""))
1202 output_asm_insn (\"tst\\t%0, %1\", operands);
1203 switch (get_attr_length (insn))
1205 case 4: return \"b%d3\\t%l2\";
1206 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
1207 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
1210 [(set (attr "far_jump")
1212 (eq_attr "length" "8")
1213 (const_string "yes")
1214 (const_string "no")))
1215 (set (attr "length")
1217 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1218 (le (minus (match_dup 2) (pc)) (const_int 256)))
1221 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1222 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1225 (set_attr "type" "multiple")]
;; Decrement-and-branch: stores operand 2 minus 1 into operand 0 and
;; branches on the comparison (the rebuilt condition tests operand 2
;; against const1_rtx, i.e. the pre-decrement value).  Four alternatives:
;; lo-reg destination (direct SUBS), hi-reg destination (SUBS into the lo
;; scratch then MOV -- see the reload note inline), and two memory
;; destinations (SUBS into the scratch then STR).  Alternatives that need
;; the extra MOV/STR are 2 bytes longer, which the length arithmetic and
;; far_jump condition below account for.
1228 (define_insn "*cbranchne_decr1"
1230 (if_then_else (match_operator 3 "equality_operator"
1231 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
1233 (label_ref (match_operand 4 "" ""))
1235 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
1236 (plus:SI (match_dup 2) (const_int -1)))
1237 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
;; Rebuild the branch condition against the original (pre-decrement)
;; value: NE stays NE, otherwise the inverse equality is used.
1242 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1244 VOIDmode, operands[2], const1_rtx);
1245 cond[1] = operands[4];
1247 if (which_alternative == 0)
1248 output_asm_insn (\"subs\\t%0, %2, #1\", operands);
1249 else if (which_alternative == 1)
1251 /* We must provide an alternative for a hi reg because reload
1252 cannot handle output reloads on a jump instruction, but we
1253 can't subtract into that. Fortunately a mov from lo to hi
1254 does not clobber the condition codes. */
1255 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1256 output_asm_insn (\"mov\\t%0, %1\", operands);
1260 /* Similarly, but the target is memory. */
1261 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1262 output_asm_insn (\"str\\t%1, %0\", operands);
;; Normalise the length by the 2-byte MOV/STR overhead of alternatives
;; 1-3 before choosing the branch form.
1265 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
1268 output_asm_insn (\"b%d0\\t%l1\", cond);
1271 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1272 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
1274 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1275 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; far_jump: length 8 in alternative 0, or length 10 in the longer
;; alternatives (which carry the extra MOV/STR).
1279 [(set (attr "far_jump")
1281 (ior (and (eq (symbol_ref ("which_alternative"))
1283 (eq_attr "length" "8"))
1284 (eq_attr "length" "10"))
1285 (const_string "yes")
1286 (const_string "no")))
;; Per-alternative lengths; alternatives 1-3 use tighter displacement
;; bounds (-248/-2038) to absorb their extra 2-byte instruction.
1287 (set_attr_alternative "length"
1291 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1292 (le (minus (match_dup 4) (pc)) (const_int 256)))
1295 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1296 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1301 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1302 (le (minus (match_dup 4) (pc)) (const_int 256)))
1305 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1306 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1311 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1312 (le (minus (match_dup 4) (pc)) (const_int 256)))
1315 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1316 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1321 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1322 (le (minus (match_dup 4) (pc)) (const_int 256)))
1325 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1326 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1329 (set_attr "type" "multiple")]
;; Add-and-branch: computes operand 2 + operand 3 into operand 0 and
;; branches on the flags set by the addition.  A negative constant addend
;; is emitted as SUBS of its negation.  Alternatives 0-1 write a lo reg
;; directly; 2-3 go via scratch operand 1 then MOV (hi-reg destination);
;; 4-5 go via the scratch then STR (memory destination).  Restricted to
;; EQ/NE/GE/LT since only those conditions are valid after ADDS/SUBS here.
1332 (define_insn "*addsi3_cbranch"
1335 (match_operator 4 "arm_comparison_operator"
1337 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
1338 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
1340 (label_ref (match_operand 5 "" ""))
1343 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
1344 (plus:SI (match_dup 2) (match_dup 3)))
1345 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
1347 && (GET_CODE (operands[4]) == EQ
1348 || GET_CODE (operands[4]) == NE
1349 || GET_CODE (operands[4]) == GE
1350 || GET_CODE (operands[4]) == LT)"
;; Destination of the arithmetic: operand 0 for the lo-reg alternatives,
;; otherwise the scratch.
1355 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
1356 cond[1] = operands[2];
1357 cond[2] = operands[3];
1359 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
1360 output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
1362 output_asm_insn (\"adds\\t%0, %1, %2\", cond);
1364 if (which_alternative >= 2
1365 && which_alternative < 4)
1366 output_asm_insn (\"mov\\t%0, %1\", operands);
1367 else if (which_alternative >= 4)
1368 output_asm_insn (\"str\\t%1, %0\", operands);
;; Discount the 2-byte MOV/STR of alternatives >= 2 before picking the
;; branch form by length.
1370 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
1373 return \"b%d4\\t%l5\";
1375 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
1377 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
;; far_jump: length 8 in the short alternatives, length 10 in those
;; carrying the extra MOV/STR.
1381 [(set (attr "far_jump")
1383 (ior (and (lt (symbol_ref ("which_alternative"))
1385 (eq_attr "length" "8"))
1386 (eq_attr "length" "10"))
1387 (const_string "yes")
1388 (const_string "no")))
;; Length by alternative group and branch displacement (tighter bounds
;; for the longer alternatives, as in *cbranchne_decr1).
1389 (set (attr "length")
1391 (lt (symbol_ref ("which_alternative"))
1394 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
1395 (le (minus (match_dup 5) (pc)) (const_int 256)))
1398 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
1399 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1403 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
1404 (le (minus (match_dup 5) (pc)) (const_int 256)))
1407 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
1408 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1411 (set_attr "type" "multiple")]
;; As *addsi3_cbranch but the sum itself is dead -- only the flags matter.
;; Alternatives: 0 = CMP with the negated constant, 1 = CMN against a
;; register, 2 = ADDS/SUBS into a dedicated scratch, 3 = ADDS/SUBS in
;; place on operand 0 (tied to operand 1).  Same EQ/NE/GE/LT restriction
;; as the non-scratch pattern.
1414 (define_insn "*addsi3_cbranch_scratch"
1417 (match_operator 3 "arm_comparison_operator"
1419 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
1420 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
1422 (label_ref (match_operand 4 "" ""))
1424 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
1426 && (GET_CODE (operands[3]) == EQ
1427 || GET_CODE (operands[3]) == NE
1428 || GET_CODE (operands[3]) == GE
1429 || GET_CODE (operands[3]) == LT)"
1432 switch (which_alternative)
1435 output_asm_insn (\"cmp\t%1, #%n2\", operands);
1438 output_asm_insn (\"cmn\t%1, %2\", operands);
1441 if (INTVAL (operands[2]) < 0)
1442 output_asm_insn (\"subs\t%0, %1, %2\", operands);
1444 output_asm_insn (\"adds\t%0, %1, %2\", operands);
1447 if (INTVAL (operands[2]) < 0)
1448 output_asm_insn (\"subs\t%0, %0, %2\", operands);
1450 output_asm_insn (\"adds\t%0, %0, %2\", operands);
1454 switch (get_attr_length (insn))
1457 return \"b%d3\\t%l4\";
1459 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
1461 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1465 [(set (attr "far_jump")
1467 (eq_attr "length" "8")
1468 (const_string "yes")
1469 (const_string "no")))
1470 (set (attr "length")
1472 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1473 (le (minus (match_dup 4) (pc)) (const_int 256)))
1476 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1477 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1480 (set_attr "type" "multiple")]
;; Compare a DImode register against zero by ORRing its low (%Q0) and
;; high (%R0) words into a scratch; only the Z flag result is meaningful
;; (CC_Z mode).  Single 2-byte instruction; sets the condition codes.
1483 (define_insn "*thumb_cmpdi_zero"
1484 [(set (reg:CC_Z CC_REGNUM)
1485 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
1487 (clobber (match_scratch:SI 1 "=l"))]
1489 "orrs\\t%1, %Q0, %R0"
1490 [(set_attr "conds" "set")
1491 (set_attr "length" "2")
1492 (set_attr "type" "logics_reg")]
;; Store (x == 0) into a register.  The expander allocates the SImode
;; scratch the matching insn clobbers.
1495 (define_expand "cstoresi_eq0_thumb1"
1497 [(set (match_operand:SI 0 "s_register_operand" "")
1498 (eq:SI (match_operand:SI 1 "s_register_operand" "")
1500 (clobber (match_dup:SI 2))])]
1502 "operands[2] = gen_reg_rtx (SImode);"
;; Store (x != 0) into a register; scratch allocated as above.
1505 (define_expand "cstoresi_ne0_thumb1"
1507 [(set (match_operand:SI 0 "s_register_operand" "")
1508 (ne:SI (match_operand:SI 1 "s_register_operand" "")
1510 (clobber (match_dup:SI 2))])]
1512 "operands[2] = gen_reg_rtx (SImode);"
;; (x == 0) via the carry trick: RSBS negates x setting carry iff x == 0,
;; then ADCS folds the carry into the result.  Alternative 1 uses the
;; scratch when the destination is tied to the input.
;; Beware: the RSBS/ADCS pair must not be split (see the head comment on
;; flag-consuming Thumb-1 sequences).
1515 (define_insn "*cstoresi_eq0_thumb1_insn"
1516 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
1517 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
1519 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
1522 rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
1523 rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
1524 [(set_attr "length" "4")
1525 (set_attr "type" "multiple")]
;; (x != 0) via borrow: SUBS x-1 borrows iff x == 0; SBCS then yields
;; 1 for nonzero x and 0 for zero.  Same no-split caveat as above.
1528 (define_insn "*cstoresi_ne0_thumb1_insn"
1529 [(set (match_operand:SI 0 "s_register_operand" "=l")
1530 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
1532 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
1534 "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
1535 [(set_attr "length" "4")]
1538 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; Computes -(a <u b): CMP sets carry iff a >= b (unsigned), and
;; SBCS %0, %0, %0 materialises 0 or -1 from the borrow.
1539 (define_insn "cstoresi_nltu_thumb1"
1540 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1541 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1542 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
1544 "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
1545 [(set_attr "length" "4")
1546 (set_attr "type" "multiple")]
;; (a <u b) as a 0/1 value: split after reload into the negated form
;; above followed by a negation of the temporary.
1549 (define_insn_and_split "cstoresi_ltu_thumb1"
1550 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1551 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1552 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
1557 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
1558 (set (match_dup 0) (neg:SI (match_dup 3)))]
1559 "operands[3] = gen_reg_rtx (SImode);"
1560 [(set_attr "length" "4")
1561 (set_attr "type" "multiple")]
1564 ;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): CMP sets carry for the unsigned GEU
;; result, ADCS adds it in.  CMP/ADCS must stay adjacent (flags).
1565 (define_insn "thumb1_addsi3_addgeu"
1566 [(set (match_operand:SI 0 "s_register_operand" "=l")
1567 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
1568 (match_operand:SI 2 "s_register_operand" "l"))
1569 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
1570 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
1572 "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
1573 [(set_attr "length" "4")
1574 (set_attr "type" "multiple")]
;; Unconditional jump: a 2-byte B when the target is within the
;; [-2044, 2048) displacement window, otherwise a far jump via BL
;; (which sets the far_jump attribute, length 4).
1578 (define_insn "*thumb_jump"
1580 (label_ref (match_operand 0 "" "")))]
1583 if (get_attr_length (insn) == 2)
1585 return \"bl\\t%l0\\t%@ far jump\";
1587 [(set (attr "far_jump")
1589 (eq_attr "length" "4")
1590 (const_string "yes")
1591 (const_string "no")))
1592 (set (attr "length")
1594 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
1595 (le (minus (match_dup 0) (pc)) (const_int 2048)))
1598 (set_attr "type" "branch")]
;; Indirect call through a register, ARMv5+: a single 2-byte call
;; instruction (output template elided in this listing -- presumably a
;; BLX; verify against the full thumb1.md).  Clobbers LR; not for
;; sibling calls.
1601 (define_insn "*call_reg_thumb1_v5"
1602 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1603 (match_operand 1 "" ""))
1604 (use (match_operand 2 "" ""))
1605 (clobber (reg:SI LR_REGNUM))]
1606 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
1608 [(set_attr "length" "2")
1609 (set_attr "type" "call")]
;; Pre-ARMv5 indirect call: routed through thumb_call_via_reg, or, when
;; caller interworking is enabled, through one of the
;; _interwork_*_call_via_rN library veneers (r7 or r11 variant chosen by
;; whether a frame pointer is in use).
1612 (define_insn "*call_reg_thumb1"
1613 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1614 (match_operand 1 "" ""))
1615 (use (match_operand 2 "" ""))
1616 (clobber (reg:SI LR_REGNUM))]
1617 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
1620 if (!TARGET_CALLER_INTERWORKING)
1621 return thumb_call_via_reg (operands[0]);
1622 else if (operands[1] == const0_rtx)
1623 return \"bl\\t%__interwork_call_via_%0\";
1624 else if (frame_pointer_needed)
1625 return \"bl\\t%__interwork_r7_call_via_%0\";
1627 return \"bl\\t%__interwork_r11_call_via_%0\";
1629 [(set_attr "type" "call")]
;; Value-returning variant of *call_reg_thumb1_v5 (ARMv5+, 2 bytes).
1632 (define_insn "*call_value_reg_thumb1_v5"
1633 [(set (match_operand 0 "" "")
1634 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1635 (match_operand 2 "" "")))
1636 (use (match_operand 3 "" ""))
1637 (clobber (reg:SI LR_REGNUM))]
1638 "TARGET_THUMB1 && arm_arch5"
1640 [(set_attr "length" "2")
1641 (set_attr "type" "call")]
;; Value-returning variant of the pre-ARMv5 indirect call; same helper /
;; interworking-veneer selection as *call_reg_thumb1.
1644 (define_insn "*call_value_reg_thumb1"
1645 [(set (match_operand 0 "" "")
1646 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1647 (match_operand 2 "" "")))
1648 (use (match_operand 3 "" ""))
1649 (clobber (reg:SI LR_REGNUM))]
1650 "TARGET_THUMB1 && !arm_arch5"
1653 if (!TARGET_CALLER_INTERWORKING)
1654 return thumb_call_via_reg (operands[1]);
1655 else if (operands[2] == const0_rtx)
1656 return \"bl\\t%__interwork_call_via_%1\";
1657 else if (frame_pointer_needed)
1658 return \"bl\\t%__interwork_r7_call_via_%1\";
1660 return \"bl\\t%__interwork_r11_call_via_%1\";
1662 [(set_attr "type" "call")]
;; Direct call to a symbol that is not a long call; 4 bytes.  Clobbers LR.
1665 (define_insn "*call_insn"
1666 [(call (mem:SI (match_operand:SI 0 "" ""))
1667 (match_operand:SI 1 "" ""))
1668 (use (match_operand 2 "" ""))
1669 (clobber (reg:SI LR_REGNUM))]
1671 && GET_CODE (operands[0]) == SYMBOL_REF
1672 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
1674 [(set_attr "length" "4")
1675 (set_attr "type" "call")]
;; Value-returning variant of *call_insn.
1678 (define_insn "*call_value_insn"
1679 [(set (match_operand 0 "" "")
1680 (call (mem:SI (match_operand 1 "" ""))
1681 (match_operand 2 "" "")))
1682 (use (match_operand 3 "" ""))
1683 (clobber (reg:SI LR_REGNUM))]
1685 && GET_CODE (operands[1]) == SYMBOL_REF
1686 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
1688 [(set_attr "length" "4")
1689 (set_attr "type" "call")]
;; PIC case-dispatch expander: bounds-checks the index with an unsigned
;; GTU compare-and-branch (out-of-range goes to the default label),
;; copies the index into r0, and dispatches.  Operand 3 is currently
;; unused (note the commented-out argument).
1692 (define_expand "thumb1_casesi_internal_pic"
1693 [(match_operand:SI 0 "s_register_operand" "")
1694 (match_operand:SI 1 "thumb1_cmp_operand" "")
1695 (match_operand 2 "" "")
1696 (match_operand 3 "" "")]
1700 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
1701 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
1703 reg0 = gen_rtx_REG (SImode, 0);
1704 emit_move_insn (reg0, operands[0]);
1705 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; The dispatch insn proper: takes the index in r0 and the table label,
;; clobbers IP and LR; assembly produced by thumb1_output_casesi.
1710 (define_insn "thumb1_casesi_dispatch"
1711 [(parallel [(set (pc) (unspec [(reg:SI 0)
1712 (label_ref (match_operand 0 "" ""))
1713 ;; (label_ref (match_operand 1 "" ""))
1715 UNSPEC_THUMB1_CASESI))
1716 (clobber (reg:SI IP_REGNUM))
1717 (clobber (reg:SI LR_REGNUM))])]
1719 "* return thumb1_output_casesi(operands);"
1720 [(set_attr "length" "4")
1721 (set_attr "type" "multiple")]
1724 ;; NB Never uses BX.
;; Indirect jump through a register; 2 bytes, clobbers the condition
;; codes.
1725 (define_insn "*thumb1_indirect_jump"
1727 (match_operand:SI 0 "register_operand" "l*r"))]
1730 [(set_attr "conds" "clob")
1731 (set_attr "length" "2")
1732 (set_attr "type" "branch")]
;; ARM->Thumb interworking prologue stub; assembly produced by
;; thumb1_output_interwork.
1736 (define_insn "prologue_thumb1_interwork"
1737 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
1739 "* return thumb1_output_interwork ();"
1740 [(set_attr "length" "8")
1741 (set_attr "type" "multiple")]
;; Epilogue sequence emitted by thumb1_unexpanded_epilogue.
1744 (define_insn "*epilogue_insns"
1745 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
1748 return thumb1_unexpanded_epilogue ();
1750 ; Length is absolute worst case
1751 [(set_attr "length" "44")
1752 (set_attr "type" "block")
1753 ;; We don't clobber the conditions, but the potential length of this
1754 ;; operation is sufficient to make conditionalizing the sequence
1755 ;; unlikely to be profitable.
1756 (set_attr "conds" "clob")]
1759 ;; Miscellaneous Thumb patterns
;; tablejump for position-relative jump tables: materialises the table
;; base label address in a register, adds it to the (scaled) index and
;; jumps through the result.
1760 (define_expand "tablejump"
1761 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
1762 (use (label_ref (match_operand 1 "" "")))])]
1767 /* Hopefully, CSE will eliminate this copy. */
1768 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
1769 rtx reg2 = gen_reg_rtx (SImode);
1771 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
;; Copy the PC into a lo register; 2 bytes, no condition-code effect.
1777 (define_insn "*thumb1_movpc_insn"
1778 [(set (match_operand:SI 0 "s_register_operand" "=l")
1779 (reg:SI PC_REGNUM))]
1782 [(set_attr "length" "2")
1783 (set_attr "conds" "nocond")
1784 (set_attr "type" "mov_reg")]
1787 ;; NB never uses BX.
;; The jump-table dispatch jump itself: 2-byte indirect jump with a USE
;; of the table label so the table is kept alive.
1788 (define_insn "*thumb1_tablejump"
1789 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
1790 (use (label_ref (match_operand 1 "" "")))]
1793 [(set_attr "length" "2")
1794 (set_attr "type" "no_insn")]
1797 (define_insn_and_split "thumb_eh_return"
1798 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
1800 (clobber (match_scratch:SI 1 "=&l"))]
1803 "&& reload_completed"
1807 thumb_set_return_address (operands[0], operands[1]);
1810 [(set_attr "type" "mov_reg")]