;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004-2013 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.
;; Return true if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; Return true if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return true if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "STACK_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))
;; Return true if OP is a Q_REGS class register.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  /* Look through a paradoxical/narrowing subreg to the real register.  */
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})
;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return false;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return (REG_P (op)
	  && (REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) <= BX_REG));
})
;; Match nonimmediate operands, but exclude memory operands
;; on 64bit targets.
(define_predicate "nonimmediate_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match general operands, but exclude memory operands
;; on 64bit targets.
(define_predicate "general_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonmemory_operand")
    (match_operand 0 "general_operand")))
;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (match_test "QI_REG_P (op)"))

;; Return true if op is a QImode register operand other than
;; %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT")
       (match_test "REGNO (op) > BX_REG")))
;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  /* On 32-bit targets every immediate fits; defer to the generic test.  */
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
	 to be at least 32 and this all acceptable constants are
	 represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
	return true;
      else
	{
	  HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
	  return trunc_int_for_mode (val, SImode) == val;
	}
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
	 in CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);
	  HOST_WIDE_INT offset;

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;
	  offset = trunc_int_for_mode (INTVAL (op2), DImode);
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      /* For CM_KERNEL we know that all object resist in the
		 negative half of 32bits address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case UNSPEC:
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  if (trunc_int_for_mode (offset, SImode) == offset)
		    return true;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})
;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      /* Only possible when a HOST_WIDE_INT cannot hold 64 bits.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
	return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
	return false;

    case CONST_INT:
      if (HOST_BITS_PER_WIDE_INT == 32)
	return INTVAL (op) >= 0;
      else
	return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is representable on x86_64 as zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})
;; Return true if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})
;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  /* Strip off a constant offset; locality is a property of the symbol.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL an invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols.
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))
;; Test for a pc-relative call operand
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  /* Large code models require loading the address into a register.  */
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  /* Dll-imported calls go through the import stub.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})
;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.

(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})

;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")))
;; Return true if OP is a call from MS ABI to SYSV ABI function.
(define_predicate "call_rex64_ms_sysv_operation"
  (match_code "parallel")
{
  unsigned creg_size = ARRAY_SIZE (x86_64_ms_sysv_extra_clobbered_registers);
  unsigned i;

  /* The parallel holds the call, the flag use and one clobber per
     extra-clobbered register.  */
  if ((unsigned) XVECLEN (op, 0) != creg_size + 2)
    return false;

  for (i = 0; i < creg_size; i++)
    {
      rtx elt = XVECEXP (op, 0, i+2);
      enum machine_mode mode;
      unsigned regno;

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG)
	return false;

      regno = x86_64_ms_sysv_extra_clobbered_registers[i];
      mode = SSE_REGNO_P (regno) ? TImode : DImode;

      if (GET_MODE (SET_DEST (elt)) != mode
	  || REGNO (SET_DEST (elt)) != regno)
	return false;
    }
  return true;
})
;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match one or vector filled with ones.
(define_predicate "const1_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})
;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

;; Match 1, 2, 4, or 8
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations causes extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;

  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Every element except the first must be zero.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
/* Return true if operand is a vector constant that is all ones. */
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int nunits = GET_MODE_NUNITS (mode);

  if (GET_CODE (op) == CONST_VECTOR
      && CONST_VECTOR_NUNITS (op) == nunits)
    {
      int i;
      for (i = 0; i < nunits; ++i)
	{
	  rtx x = CONST_VECTOR_ELT (op, i);
	  if (x != constm1_rtx)
	    return false;
	}
      return true;
    }

  return false;
})
;; Return true when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return true;
  if (standard_sse_constant_p (op) > 0)
    return true;
  return false;
})

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))
;; Return true if op if a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "lea_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})

;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
;; Return true if op if a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  if (parts.index || parts.seg != SEG_DEFAULT)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})

(define_predicate "vsib_mem_operator"
  (match_code "mem"))
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands ends up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && GET_CODE (parts.base) == SUBREG)
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && GET_CODE (parts.index) == SUBREG)
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Return true if OP is memory operand with a displacement only.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})
;; Return true if OP is memory operand which will need zero or
;; one register at most, not counting stack pointer or frame pointer.
(define_predicate "cmpxchg8b_pic_memory_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT || !flag_pic)
    return true;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base && GET_CODE (parts.base) == SUBREG)
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && GET_CODE (parts.index) == SUBREG)
    parts.index = SUBREG_REG (parts.index);

  if (parts.base == NULL_RTX
      || parts.base == arg_pointer_rtx
      || parts.base == frame_pointer_rtx
      || parts.base == hard_frame_pointer_rtx
      || parts.base == stack_pointer_rtx)
    return true;

  if (parts.index == NULL_RTX
      || parts.index == arg_pointer_rtx
      || parts.index == frame_pointer_rtx
      || parts.index == hard_frame_pointer_rtx
      || parts.index == stack_pointer_rtx)
    return true;

  return false;
})
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})
;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.

(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))

(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      return true;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return true;
      return false;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  return code == LTU;
})
;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))

;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do it.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
			     == IX86_FPCMP_ARITH")
    (match_operand 0 "comparison_operator")
    (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1166 ;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
;; Temporarily swaps the condition code on OP in place, queries
;; ix86_fp_comparison_operator, then restores the original code.
;; NOTE(review): the declaration of `ret' and the final `return ret;'
;; are missing from this extract.
1167 (define_predicate "ix86_swapped_fp_comparison_operator"
1168 (match_operand 0 "comparison_operator")
1170 enum rtx_code code = GET_CODE (op);
1173 PUT_CODE (op, swap_condition (code));
1174 ret = ix86_fp_comparison_operator (op, mode);
1175 PUT_CODE (op, code);
1179 ;; Nearly general operand, but accept any const_double, since we wish
1180 ;; to be able to drop them into memory rather than have them get pulled
;; into registers.  (The comment tail is truncated in this extract.)
1182 (define_predicate "cmp_fp_expander_operand"
1183 (ior (match_code "const_double")
1184 (match_operand 0 "general_operand")))
1186 ;; Return true if this is a valid binary floating-point operation.
;; (A PLUS, MINUS, MULT or DIV rtx; FP-ness is imposed by the users.)
1187 (define_predicate "binary_fp_operator"
1188 (match_code "plus,minus,mult,div"))
1190 ;; Return true if this is a multiply operation.
;; (A MULT rtx.)
1191 (define_predicate "mult_operator"
1192 (match_code "mult"))
;; Return true if this is a division operation.
;; (A DIV rtx.)  The match_code body line was missing from this copy,
;; leaving the define_predicate form unterminated; restored here.
(define_predicate "div_operator"
  (match_code "div"))
1198 ;; Return true if this is a plus, minus, and, ior or xor operation.
;; (Used to match the two-address arithmetic/logic instruction forms.)
;; NOTE(review): purpose of use sites inferred from the name -- verify.
1199 (define_predicate "plusminuslogic_operator"
1200 (match_code "plus,minus,and,ior,xor"))
1202 ;; Return true if this is a float extend operation.
;; (Matches the FLOAT rtx code, i.e. an integer-to-float conversion.)
1203 (define_predicate "float_operator"
1204 (match_code "float"))
1206 ;; Return true for ARITHMETIC_P.
;; Any binary arithmetic, logical, min/max, compare, division or
;; shift/rotate rtx code.
1207 (define_predicate "arith_or_logical_operator"
1208 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1209 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1211 ;; Return true for COMMUTATIVE_P.
;; The subset of arith_or_logical_operator whose operands may be swapped.
1212 (define_predicate "commutative_operator"
1213 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1215 ;; Return true if OP is a binary operator that can be promoted to wider mode.
;; MULT qualifies only when the TARGET_TUNE_PROMOTE_HIMODE_IMUL tuning
;; flag is set; the others qualify unconditionally.
1216 (define_predicate "promotable_binary_operator"
1217 (ior (match_code "plus,minus,and,ior,xor,ashift")
1218 (and (match_code "mult")
1219 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
;; Return true if OP is a COMPARE rtx.
1221 (define_predicate "compare_operator"
1222 (match_code "compare"))
;; Return true if OP is an ABS or NEG rtx.
1224 (define_predicate "absneg_operator"
1225 (match_code "abs,neg"))
1227 ;; Return true if OP is misaligned memory operand
;; i.e. a MEM whose recorded alignment is below the natural alignment
;; of MODE.
1228 (define_predicate "misaligned_operand"
1229 (and (match_code "mem")
1230 (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1232 ;; Return true if OP is a emms operation, known to be a PARALLEL.
;; Requires a 17-element PARALLEL whose elements 1-8 are CLOBBERs of
;; the XFmode x87 stack registers (FIRST_STACK_REG..+7) and whose
;; elements 9-16 are CLOBBERs of the DImode MMX registers
;; (FIRST_MMX_REG..+7).  Element 0 is not checked here -- presumably
;; the emms unspec itself; verify at the pattern definition.
;; NOTE(review): the loop braces and `return false'/`return true'
;; lines are missing from this extract.
1233 (define_predicate "emms_operation"
1234 (match_code "parallel")
1238 if (XVECLEN (op, 0) != 17)
1241 for (i = 0; i < 8; i++)
1243 rtx elt = XVECEXP (op, 0, i+1);
1245 if (GET_CODE (elt) != CLOBBER
1246 || GET_CODE (SET_DEST (elt)) != REG
1247 || GET_MODE (SET_DEST (elt)) != XFmode
1248 || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
1251 elt = XVECEXP (op, 0, i+9);
1253 if (GET_CODE (elt) != CLOBBER
1254 || GET_CODE (SET_DEST (elt)) != REG
1255 || GET_MODE (SET_DEST (elt)) != DImode
1256 || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
1262 ;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
;; Requires a (1 + nregs)-element PARALLEL (nregs = 16 in 64-bit mode,
;; 8 otherwise) whose elements 1..nregs each SET an SSE register to a
;; V8SImode zero vector.  Element 0 is not checked here.
;; NOTE(review): the `return false'/`return true' lines are missing
;; from this extract.
1263 (define_predicate "vzeroall_operation"
1264 (match_code "parallel")
1266 unsigned i, nregs = TARGET_64BIT ? 16 : 8;
1268 if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
1271 for (i = 0; i < nregs; i++)
1273 rtx elt = XVECEXP (op, 0, i+1);
1275 if (GET_CODE (elt) != SET
1276 || GET_CODE (SET_DEST (elt)) != REG
1277 || GET_MODE (SET_DEST (elt)) != V8SImode
1278 || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
1279 || SET_SRC (elt) != CONST0_RTX (V8SImode))
1285 ;; return true if OP is a vzeroupper operation.
;; Matches an UNSPEC_VOLATILE whose index is UNSPECV_VZEROUPPER.
1286 (define_predicate "vzeroupper_operation"
1287 (and (match_code "unspec_volatile")
1288 (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
1290 ;; Return true if OP is a parallel for a vbroadcast permute.
;; Every vector element must be a CONST_INT (the "a" applies the code
;; test to all elements) identical to element 0; the element count
;; itself is deliberately not validated (see comment in body).
;; NOTE(review): the `return false'/`return true' lines are missing
;; from this extract.
1292 (define_predicate "avx_vbroadcast_operand"
1293 (and (match_code "parallel")
1294 (match_code "const_int" "a"))
1296 rtx elt = XVECEXP (op, 0, 0);
1297 int i, nelt = XVECLEN (op, 0);
1299 /* Don't bother checking there are the right number of operands,
1300 merely that they're all identical. */
1301 for (i = 1; i < nelt; ++i)
1302 if (XVECEXP (op, 0, i) != elt)
1307 ;; Return true if OP is a proper third operand to vpblendw256.
;; The 16-bit blend mask must consist of the same byte repeated twice,
;; i.e. lanes 0-7 and lanes 8-15 are selected identically.
;; NOTE(review): the closing `})' of this predicate is missing from
;; this extract.
1308 (define_predicate "avx2_pblendw_operand"
1309 (match_code "const_int")
1311 HOST_WIDE_INT val = INTVAL (op);
1312 HOST_WIDE_INT low = val & 0xff;
1313 return val == ((low << 8) | low);
1316 ;; Return true if OP is nonimmediate_operand or CONST_VECTOR.
1317 (define_predicate "general_vector_operand"
1318 (ior (match_operand 0 "nonimmediate_operand")
1319 (match_code "const_vector")))