1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
;; Return true if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register (an x87 stack register).
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; Return true if OP is a non-fp register_operand, i.e. neither an i387
;; nor an SSE fp register.  The match_code guard ensures REGNO is only
;; applied to a REG rtx.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REG_P (op)")))

;; Return true if OP is a register operand other than an i387 fp
;; register (SSE registers are still accepted here).
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "STACK_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is one of the extended SSE registers introduced
;; by AVX-512 (the EXT_REX set).
(define_predicate "ext_sse_reg_operand"
  (and (match_code "reg")
       (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 mask register.
(define_predicate "mask_reg_operand"
  (and (match_code "reg")
       (match_test "MASK_REGNO_P (REGNO (op))")))
65 ;; True if the operand is a Q_REGS class register.
66 (define_predicate "q_regs_operand"
67 (match_operand 0 "register_operand")
69 if (GET_CODE (op) == SUBREG)
71 return ANY_QI_REG_P (op);
74 ;; Match an SI or HImode register for a zero_extract.
75 (define_special_predicate "ext_register_operand"
76 (match_operand 0 "register_operand")
78 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
79 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
81 if (GET_CODE (op) == SUBREG)
84 /* Be careful to accept only registers having upper parts. */
86 && (REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) <= BX_REG));
;; Match nonimmediate operands, but exclude memory operands on 64bit
;; targets (only registers are allowed there).
(define_predicate "nonimmediate_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match general operands, but exclude memory operands on 64bit
;; targets (only registers and constants are allowed there).
(define_predicate "general_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonmemory_operand")
    (match_operand 0 "general_operand")))

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))
;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (match_test "QI_REG_P (op)"))

;; Return true if op is a QImode register operand other than %[abcd][hl],
;; i.e. a register with number above BX that is only reachable with a
;; REX prefix (64-bit targets only).
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT")
       (match_test "REGNO (op) > BX_REG")))
122 ;; Return true if VALUE can be stored in a sign extended immediate field.
123 (define_predicate "x86_64_immediate_operand"
124 (match_code "const_int,symbol_ref,label_ref,const")
127 return immediate_operand (op, mode);
129 switch (GET_CODE (op))
132 /* CONST_DOUBLES never match, since HOST_BITS_PER_WIDE_INT is known
133 to be at least 32 and this all acceptable constants are
134 represented as CONST_INT. */
135 if (HOST_BITS_PER_WIDE_INT == 32)
139 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
140 return trunc_int_for_mode (val, SImode) == val;
145 /* For certain code models, the symbolic references are known to fit.
146 in CM_SMALL_PIC model we know it fits if it is local to the shared
147 library. Don't count TLS SYMBOL_REFs here, since they should fit
148 only if inside of UNSPEC handled below. */
149 /* TLS symbols are not constant. */
150 if (SYMBOL_REF_TLS_MODEL (op))
152 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
153 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
156 /* For certain code models, the code is near as well. */
157 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
158 || ix86_cmodel == CM_KERNEL);
161 /* We also may accept the offsetted memory references in certain
163 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
164 switch (XINT (XEXP (op, 0), 1))
166 case UNSPEC_GOTPCREL:
168 case UNSPEC_GOTNTPOFF:
175 if (GET_CODE (XEXP (op, 0)) == PLUS)
177 rtx op1 = XEXP (XEXP (op, 0), 0);
178 rtx op2 = XEXP (XEXP (op, 0), 1);
179 HOST_WIDE_INT offset;
181 if (ix86_cmodel == CM_LARGE)
183 if (!CONST_INT_P (op2))
185 offset = trunc_int_for_mode (INTVAL (op2), DImode);
186 switch (GET_CODE (op1))
189 /* TLS symbols are not constant. */
190 if (SYMBOL_REF_TLS_MODEL (op1))
192 /* For CM_SMALL assume that latest object is 16MB before
193 end of 31bits boundary. We may also accept pretty
194 large negative constants knowing that all objects are
195 in the positive half of address space. */
196 if ((ix86_cmodel == CM_SMALL
197 || (ix86_cmodel == CM_MEDIUM
198 && !SYMBOL_REF_FAR_ADDR_P (op1)))
199 && offset < 16*1024*1024
200 && trunc_int_for_mode (offset, SImode) == offset)
202 /* For CM_KERNEL we know that all object resist in the
203 negative half of 32bits address space. We may not
204 accept negative offsets, since they may be just off
205 and we may accept pretty large positive ones. */
206 if (ix86_cmodel == CM_KERNEL
208 && trunc_int_for_mode (offset, SImode) == offset)
213 /* These conditions are similar to SYMBOL_REF ones, just the
214 constraints for code models differ. */
215 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
216 && offset < 16*1024*1024
217 && trunc_int_for_mode (offset, SImode) == offset)
219 if (ix86_cmodel == CM_KERNEL
221 && trunc_int_for_mode (offset, SImode) == offset)
226 switch (XINT (op1, 1))
230 if (trunc_int_for_mode (offset, SImode) == offset)
248 ;; Return true if VALUE can be stored in the zero extended immediate field.
249 (define_predicate "x86_64_zext_immediate_operand"
250 (match_code "const_double,const_int,symbol_ref,label_ref,const")
252 switch (GET_CODE (op))
255 if (HOST_BITS_PER_WIDE_INT == 32)
256 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
261 if (HOST_BITS_PER_WIDE_INT == 32)
262 return INTVAL (op) >= 0;
264 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
267 /* For certain code models, the symbolic references are known to fit. */
268 /* TLS symbols are not constant. */
269 if (SYMBOL_REF_TLS_MODEL (op))
271 return (ix86_cmodel == CM_SMALL
272 || (ix86_cmodel == CM_MEDIUM
273 && !SYMBOL_REF_FAR_ADDR_P (op)));
276 /* For certain code models, the code is near as well. */
277 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
280 /* We also may accept the offsetted memory references in certain
282 if (GET_CODE (XEXP (op, 0)) == PLUS)
284 rtx op1 = XEXP (XEXP (op, 0), 0);
285 rtx op2 = XEXP (XEXP (op, 0), 1);
287 if (ix86_cmodel == CM_LARGE)
289 switch (GET_CODE (op1))
292 /* TLS symbols are not constant. */
293 if (SYMBOL_REF_TLS_MODEL (op1))
295 /* For small code model we may accept pretty large positive
296 offsets, since one bit is available for free. Negative
297 offsets are limited by the size of NULL pointer area
298 specified by the ABI. */
299 if ((ix86_cmodel == CM_SMALL
300 || (ix86_cmodel == CM_MEDIUM
301 && !SYMBOL_REF_FAR_ADDR_P (op1)))
303 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
304 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
306 /* ??? For the kernel, we may accept adjustment of
307 -0x10000000, since we know that it will just convert
308 negative address space to positive, but perhaps this
309 is not worthwhile. */
313 /* These conditions are similar to SYMBOL_REF ones, just the
314 constraints for code models differ. */
315 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
317 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
318 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
;; Return true if OP is a general operand representable on x86_64,
;; i.e. any nonimmediate operand, or an immediate that fits in a
;; sign-extended 32-bit field.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is a non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is a non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))

;; Return true if OP is representable on x86_64 as a zero-extended
;; operand.  This predicate is used in zero-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is a general operand representable on x86_64
;; as either a sign-extended or a zero-extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is a nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is a nonmemory operand representable on x86_64
;; as either a sign-extended or a zero-extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
389 ;; Return true when operand is PIC expression that can be computed by lea
391 (define_predicate "pic_32bit_operand"
392 (match_code "const,symbol_ref,label_ref")
397 /* Rule out relocations that translate into 64bit constants. */
398 if (TARGET_64BIT && GET_CODE (op) == CONST)
401 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
403 if (GET_CODE (op) == UNSPEC
404 && (XINT (op, 1) == UNSPEC_GOTOFF
405 || XINT (op, 1) == UNSPEC_GOT))
409 return symbolic_operand (op, mode);
;; Return true if OP is a nonmemory operand acceptable by the movabs
;; patterns: any nonmemory operand that is not a 32-bit PIC symbolic
;; reference.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
417 ;; Return true if OP is either a symbol reference or a sum of a symbol
418 ;; reference and a constant.
419 (define_predicate "symbolic_operand"
420 (match_code "symbol_ref,label_ref,const")
422 switch (GET_CODE (op))
430 if (GET_CODE (op) == SYMBOL_REF
431 || GET_CODE (op) == LABEL_REF
432 || (GET_CODE (op) == UNSPEC
433 && (XINT (op, 1) == UNSPEC_GOT
434 || XINT (op, 1) == UNSPEC_GOTOFF
435 || XINT (op, 1) == UNSPEC_PCREL
436 || XINT (op, 1) == UNSPEC_GOTPCREL)))
438 if (GET_CODE (op) != PLUS
439 || !CONST_INT_P (XEXP (op, 1)))
443 if (GET_CODE (op) == SYMBOL_REF
444 || GET_CODE (op) == LABEL_REF)
446 /* Only @GOTOFF gets offsets. */
447 if (GET_CODE (op) != UNSPEC
448 || XINT (op, 1) != UNSPEC_GOTOFF)
451 op = XVECEXP (op, 0, 0);
452 if (GET_CODE (op) == SYMBOL_REF
453 || GET_CODE (op) == LABEL_REF)
462 ;; Return true if OP is a symbolic operand that resolves locally.
463 (define_predicate "local_symbolic_operand"
464 (match_code "const,label_ref,symbol_ref")
466 if (GET_CODE (op) == CONST
467 && GET_CODE (XEXP (op, 0)) == PLUS
468 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
469 op = XEXP (XEXP (op, 0), 0);
471 if (GET_CODE (op) == LABEL_REF)
474 if (GET_CODE (op) != SYMBOL_REF)
477 if (SYMBOL_REF_TLS_MODEL (op))
480 /* Dll-imported symbols are always external. */
481 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
483 if (SYMBOL_REF_LOCAL_P (op))
486 /* There is, however, a not insubstantial body of code in the rest of
487 the compiler that assumes it can just stick the results of
488 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
489 /* ??? This is a hack. Should update the body of the compiler to
490 always create a DECL an invoke targetm.encode_section_info. */
491 if (strncmp (XSTR (op, 0), internal_label_prefix,
492 internal_label_prefix_len) == 0)
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols (any SYMBOL_REF with a
;; nonzero TLS model).
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

;; Test for the SYMBOL_REF that serves as this module's TLS base.
(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))
519 ;; Test for a pc-relative call operand
520 (define_predicate "constant_call_address_operand"
521 (match_code "symbol_ref")
523 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
525 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
530 ;; P6 processors will jump to the address after the decrement when %esp
531 ;; is used as a call operand, so they will execute return address as a code.
532 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
534 (define_predicate "call_register_no_elim_operand"
535 (match_operand 0 "register_operand")
537 if (GET_CODE (op) == SUBREG)
538 op = SUBREG_REG (op);
540 if (!TARGET_64BIT && op == stack_pointer_rtx)
543 return register_no_elim_operand (op, mode);
546 ;; True for any non-virtual or eliminable register. Used in places where
547 ;; instantiation of such a register may cause the pattern to not be recognized.
548 (define_predicate "register_no_elim_operand"
549 (match_operand 0 "register_operand")
551 if (GET_CODE (op) == SUBREG)
552 op = SUBREG_REG (op);
553 return !(op == arg_pointer_rtx
554 || op == frame_pointer_rtx
555 || IN_RANGE (REGNO (op),
556 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
559 ;; Similarly, but include the stack pointer. This is used to prevent esp
560 ;; from being used as an index reg.
561 (define_predicate "index_register_operand"
562 (match_operand 0 "register_operand")
564 if (GET_CODE (op) == SUBREG)
565 op = SUBREG_REG (op);
566 if (reload_in_progress || reload_completed)
567 return REG_OK_FOR_INDEX_STRICT_P (op);
569 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.  Memory operands are
;; only allowed when not targeting x32.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")))
605 ;; Return true if OP is a call from MS ABI to SYSV ABI function.
606 (define_predicate "call_rex64_ms_sysv_operation"
607 (match_code "parallel")
609 unsigned creg_size = ARRAY_SIZE (x86_64_ms_sysv_extra_clobbered_registers);
612 if ((unsigned) XVECLEN (op, 0) != creg_size + 2)
615 for (i = 0; i < creg_size; i++)
617 rtx elt = XVECEXP (op, 0, i+2);
618 enum machine_mode mode;
621 if (GET_CODE (elt) != CLOBBER
622 || GET_CODE (SET_DEST (elt)) != REG)
625 regno = x86_64_ms_sysv_extra_clobbered_registers[i];
626 mode = SSE_REGNO_P (regno) ? TImode : DImode;
628 if (GET_MODE (SET_DEST (elt)) != mode
629 || REGNO (SET_DEST (elt)) != regno)
635 ;; Match exactly zero.
636 (define_predicate "const0_operand"
637 (match_code "const_int,const_double,const_vector")
639 if (mode == VOIDmode)
640 mode = GET_MODE (op);
641 return op == CONST0_RTX (mode);
644 ;; Match one or vector filled with ones.
645 (define_predicate "const1_operand"
646 (match_code "const_int,const_double,const_vector")
648 if (mode == VOIDmode)
649 mode = GET_MODE (op);
650 return op == CONST1_RTX (mode);
;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation.
;; The constant is canonicalized through trunc_int_for_mode so the
;; comparison does not depend on the host's HOST_WIDE_INT width.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))
669 ;; Match 2, 4, or 8. Used for leal multiplicands.
670 (define_predicate "const248_operand"
671 (match_code "const_int")
673 HOST_WIDE_INT i = INTVAL (op);
674 return i == 2 || i == 4 || i == 8;
677 ;; Match 2, 3, 6, or 7
678 (define_predicate "const2367_operand"
679 (match_code "const_int")
681 HOST_WIDE_INT i = INTVAL (op);
682 return i == 2 || i == 3 || i == 6 || i == 7;
685 ;; Match 1, 2, 4, or 8
686 (define_predicate "const1248_operand"
687 (match_code "const_int")
689 HOST_WIDE_INT i = INTVAL (op);
690 return i == 1 || i == 2 || i == 4 || i == 8;
693 ;; Match 3, 5, or 9. Used for leal multiplicands.
694 (define_predicate "const359_operand"
695 (match_code "const_int")
697 HOST_WIDE_INT i = INTVAL (op);
698 return i == 3 || i == 5 || i == 9;
701 ;; Match 4 or 8 to 11. Used for embeded rounding.
702 (define_predicate "const_4_or_8_to_11_operand"
703 (match_code "const_int")
705 HOST_WIDE_INT i = INTVAL (op);
706 return i == 4 || (i >= 8 && i <= 11);
709 ;; Match 4 or 8. Used for SAE.
710 (define_predicate "const48_operand"
711 (match_code "const_int")
713 HOST_WIDE_INT i = INTVAL (op);
714 return i == 4 || i == 8;
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 4.
(define_predicate "const_0_to_4_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 4)")))

;; Match 0 to 5.
(define_predicate "const_0_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 5)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
763 ;; Match (0 to 255) * 8
764 (define_predicate "const_0_to_255_mul_8_operand"
765 (match_code "const_int")
767 unsigned HOST_WIDE_INT val = INTVAL (op);
768 return val <= 255*8 && val % 8 == 0;
;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 or 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 or 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 or 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 or 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
863 ;; True if this is a constant appropriate for an increment or decrement.
864 (define_predicate "incdec_operand"
865 (match_code "const_int")
867 /* On Pentium4, the inc and dec operations causes extra dependency on flag
868 registers, since carry flag is not set. */
869 if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
871 return op == const1_rtx || op == constm1_rtx;
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as the shifted input of the DImode
;; ashift expander; on 32-bit targets the constants 1 and -1 are
;; additionally accepted.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
892 ;; Return true if OP is a vector load from the constant pool with just
893 ;; the first element nonzero.
894 (define_predicate "zero_extended_scalar_load_operand"
898 op = maybe_get_pool_constant (op);
900 if (!(op && GET_CODE (op) == CONST_VECTOR))
903 n_elts = CONST_VECTOR_NUNITS (op);
905 for (n_elts--; n_elts > 0; n_elts--)
907 rtx elt = CONST_VECTOR_ELT (op, n_elts);
908 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
914 /* Return true if operand is a vector constant that is all ones. */
915 (define_predicate "vector_all_ones_operand"
916 (match_code "const_vector")
918 int nunits = GET_MODE_NUNITS (mode);
920 if (GET_CODE (op) == CONST_VECTOR
921 && CONST_VECTOR_NUNITS (op) == nunits)
924 for (i = 0; i < nunits; ++i)
926 rtx x = CONST_VECTOR_ELT (op, i);
927 if (x != constm1_rtx)
;; Return true when OP is an operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either a nonimmediate operand, or any
;; constant vector.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))
947 ;; Return true when OP is nonimmediate or standard SSE constant.
948 (define_predicate "nonimmediate_or_sse_const_operand"
949 (match_operand 0 "general_operand")
951 if (nonimmediate_operand (op, mode))
953 if (standard_sse_constant_p (op) > 0)
;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
967 ;; Return true if op if a valid address for LEA, and does not contain
968 ;; a segment override. Defined as a special predicate to allow
969 ;; mode-less const_int operands pass to address_operand.
970 (define_special_predicate "address_no_seg_operand"
971 (match_operand 0 "address_operand")
973 struct ix86_address parts;
976 ok = ix86_decompose_address (op, &parts);
978 return parts.seg == SEG_DEFAULT;
981 ;; Return true if op if a valid base register, displacement or
982 ;; sum of base register and displacement for VSIB addressing.
983 (define_predicate "vsib_address_operand"
984 (match_operand 0 "address_operand")
986 struct ix86_address parts;
990 ok = ix86_decompose_address (op, &parts);
992 if (parts.index || parts.seg != SEG_DEFAULT)
995 /* VSIB addressing doesn't support (%rip). */
999 if (GET_CODE (disp) == CONST)
1001 disp = XEXP (disp, 0);
1002 if (GET_CODE (disp) == PLUS)
1003 disp = XEXP (disp, 0);
1004 if (GET_CODE (disp) == UNSPEC)
1005 switch (XINT (disp, 1))
1007 case UNSPEC_GOTPCREL:
1009 case UNSPEC_GOTNTPOFF:
1015 && (GET_CODE (disp) == SYMBOL_REF
1016 || GET_CODE (disp) == LABEL_REF))
1023 (define_predicate "vsib_mem_operator"
1026 ;; Return true if the rtx is known to be at least 32 bits aligned.
1027 (define_predicate "aligned_operand"
1028 (match_operand 0 "general_operand")
1030 struct ix86_address parts;
1033 /* Registers and immediate operands are always "aligned". */
1037 /* All patterns using aligned_operand on memory operands ends up
1038 in promoting memory operand to 64bit and thus causing memory mismatch. */
1039 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
1042 /* Don't even try to do any aligned optimizations with volatiles. */
1043 if (MEM_VOLATILE_P (op))
1046 if (MEM_ALIGN (op) >= 32)
1051 /* Pushes and pops are only valid on the stack pointer. */
1052 if (GET_CODE (op) == PRE_DEC
1053 || GET_CODE (op) == POST_INC)
1056 /* Decode the address. */
1057 ok = ix86_decompose_address (op, &parts);
1060 if (parts.base && GET_CODE (parts.base) == SUBREG)
1061 parts.base = SUBREG_REG (parts.base);
1062 if (parts.index && GET_CODE (parts.index) == SUBREG)
1063 parts.index = SUBREG_REG (parts.index);
1065 /* Look for some component that isn't known to be aligned. */
1068 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
1073 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
1078 if (!CONST_INT_P (parts.disp)
1079 || (INTVAL (parts.disp) & 3))
1083 /* Didn't find one -- this must be an aligned address. */
1087 ;; Return true if OP is memory operand with a displacement.
1088 (define_predicate "memory_displacement_operand"
1089 (match_operand 0 "memory_operand")
1091 struct ix86_address parts;
1094 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1096 return parts.disp != NULL_RTX;
1099 ;; Return true if OP is memory operand with a displacement only.
1100 (define_predicate "memory_displacement_only_operand"
1101 (match_operand 0 "memory_operand")
1103 struct ix86_address parts;
1109 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1112 if (parts.base || parts.index)
1115 return parts.disp != NULL_RTX;
1118 ;; Return true if OP is memory operand which will need zero or
1119 ;; one register at most, not counting stack pointer or frame pointer.
1120 (define_predicate "cmpxchg8b_pic_memory_operand"
1121 (match_operand 0 "memory_operand")
1123 struct ix86_address parts;
1126 if (TARGET_64BIT || !flag_pic)
1129 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1132 if (parts.base && GET_CODE (parts.base) == SUBREG)
1133 parts.base = SUBREG_REG (parts.base);
1134 if (parts.index && GET_CODE (parts.index) == SUBREG)
1135 parts.index = SUBREG_REG (parts.index);
1137 if (parts.base == NULL_RTX
1138 || parts.base == arg_pointer_rtx
1139 || parts.base == frame_pointer_rtx
1140 || parts.base == hard_frame_pointer_rtx
1141 || parts.base == stack_pointer_rtx)
1144 if (parts.index == NULL_RTX
1145 || parts.index == arg_pointer_rtx
1146 || parts.index == frame_pointer_rtx
1147 || parts.index == hard_frame_pointer_rtx
1148 || parts.index == stack_pointer_rtx)
;; Return true if OP is a memory operand whose address cannot be
;; represented by the short (zero-length) form of the modRM array,
;; i.e. memory_address_length returns a nonzero length.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
1161 ;; Return true if OP is a comparison operator that can be issued by fcmov.
1162 (define_predicate "fcmov_comparison_operator"
1163 (match_operand 0 "comparison_operator")
1165 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1166 enum rtx_code code = GET_CODE (op);
/* For FP compare modes, first require a trivially testable FP
   comparison, then map it to the equivalent integer condition code.  */
1168 if (inmode == CCFPmode || inmode == CCFPUmode)
1170 if (!ix86_trivial_fp_comparison_operator (op, mode))
1172 code = ix86_fp_compare_code_to_integer (code);
1174 /* i387 supports just a limited set of condition codes.  */
/* Unsigned codes are valid only in the CC modes listed below.  */
1177 case LTU: case GTU: case LEU: case GEU:
1178 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
1179 || inmode == CCCmode)
1182 case ORDERED: case UNORDERED:
1190 ;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
1191 ;; The first set are supported directly; the second set can't be done with
1192 ;; full IEEE support, i.e. NaNs.
1193 ;; The second set of codes is accepted only when AVX is available.
1194 (define_predicate "sse_comparison_operator"
1195 (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
1196 (and (match_test "TARGET_AVX")
1197 (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
;; Return true if OP is an equality or signed integer comparison code.
1199 (define_predicate "ix86_comparison_int_operator"
1200 (match_code "ne,eq,ge,gt,le,lt"))
;; Return true if OP is an equality or unsigned integer comparison code.
1202 (define_predicate "ix86_comparison_uns_operator"
1203 (match_code "ne,eq,geu,gtu,leu,ltu"))
;; Return true if OP is an equality comparison (NE or EQ only).
1205 (define_predicate "bt_comparison_operator"
1206 (match_code "ne,eq"))
1208 ;; Return true if OP is a valid comparison operator in valid mode.
1209 (define_predicate "ix86_comparison_operator"
1210 (match_operand 0 "comparison_operator")
1212 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1213 enum rtx_code code = GET_CODE (op);
/* FP compare modes delegate to the trivial-FP-comparison check.  */
1215 if (inmode == CCFPmode || inmode == CCFPUmode)
1216 return ix86_trivial_fp_comparison_operator (op, mode);
/* Which CC modes are acceptable depends on the comparison code.  */
1223 if (inmode == CCmode || inmode == CCGCmode
1224 || inmode == CCGOCmode || inmode == CCNOmode)
1227 case LTU: case GTU: case LEU: case GEU:
1228 if (inmode == CCmode || inmode == CCCmode)
1231 case ORDERED: case UNORDERED:
1232 if (inmode == CCmode)
1236 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1244 ;; Return true if OP is a valid comparison operator
1245 ;; testing carry flag to be set.
1246 (define_predicate "ix86_carry_flag_operator"
1247 (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
1249 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1250 enum rtx_code code = GET_CODE (op);
/* For FP compares, translate the FP code to the integer condition
   actually tested after the flags transfer.  */
1252 if (inmode == CCFPmode || inmode == CCFPUmode)
1254 if (!ix86_trivial_fp_comparison_operator (op, mode))
1256 code = ix86_fp_compare_code_to_integer (code);
/* In CCCmode only LTU/GTU test the carry flag.  */
1258 else if (inmode == CCCmode)
1259 return code == LTU || code == GTU;
1260 else if (inmode != CCmode)
1266 ;; Return true if this comparison only requires testing one flag bit.
1267 (define_predicate "ix86_trivial_fp_comparison_operator"
1268 (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1270 ;; Return true if we know how to do this comparison. Others require
1271 ;; testing more than one flag bit, and we let the generic middle-end
1272 ;; code handle it.
1273 (define_predicate "ix86_fp_comparison_operator"
1274 (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
1275 == IX86_FPCMP_ARITH")
1276 (match_operand 0 "comparison_operator")
1277 (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1279 ;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
1280 (define_predicate "ix86_swapped_fp_comparison_operator"
1281 (match_operand 0 "comparison_operator")
1283 enum rtx_code code = GET_CODE (op);
/* Temporarily swap the comparison code in place, test the swapped
   form with ix86_fp_comparison_operator, then restore the original
   code before returning.  */
1286 PUT_CODE (op, swap_condition (code));
1287 ret = ix86_fp_comparison_operator (op, mode);
1288 PUT_CODE (op, code);
1292 ;; Nearly general operand, but accept any const_double, since we wish
1293 ;; to be able to drop them into memory rather than have them get pulled
1294 ;; into registers.
1295 (define_predicate "cmp_fp_expander_operand"
1296 (ior (match_code "const_double")
1297 (match_operand 0 "general_operand")))
1299 ;; Return true if this is a valid binary floating-point operation.
1300 (define_predicate "binary_fp_operator"
1301 (match_code "plus,minus,mult,div"))
1303 ;; Return true if this is a multiply operation.
1304 (define_predicate "mult_operator"
1305 (match_code "mult"))
1307 ;; Return true if this is a division operation.
1308 ;; NOTE(review): the match_code body line is not visible in this excerpt;
1308 ;; presumably (match_code "div") -- confirm against the full file.
1308 (define_predicate "div_operator"
1311 ;; Return true if this is a plus, minus, and, ior or xor operation.
1312 (define_predicate "plusminuslogic_operator"
1313 (match_code "plus,minus,and,ior,xor"))
1315 ;; Return true if this is a float extend operation.
1316 (define_predicate "float_operator"
1317 (match_code "float"))
1319 ;; Return true for ARITHMETIC_P.
1320 (define_predicate "arith_or_logical_operator"
1321 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1322 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1324 ;; Return true for COMMUTATIVE_P.
1325 (define_predicate "commutative_operator"
1326 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1328 ;; Return true if OP is a binary operator that can be promoted to wider mode.
1329 ;; Multiplication is promotable only when the tuning flag says HImode
1330 ;; imul should be widened.
1329 (define_predicate "promotable_binary_operator"
1330 (ior (match_code "plus,minus,and,ior,xor,ashift")
1331 (and (match_code "mult")
1332 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
;; Return true if OP is a COMPARE rtx.
1334 (define_predicate "compare_operator"
1335 (match_code "compare"))
;; Return true if OP is an ABS or NEG rtx.
1337 (define_predicate "absneg_operator"
1338 (match_code "abs,neg"))
1340 ;; Return true if OP is misaligned memory operand
1341 ;; (i.e. its recorded alignment is below the natural alignment of MODE).
1341 (define_predicate "misaligned_operand"
1342 (and (match_code "mem")
1343 (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1345 ;; Return true if OP is a emms operation, known to be a PARALLEL.
1346 (define_predicate "emms_operation"
1347 (match_code "parallel")
/* The PARALLEL must have exactly 17 elements: element 0 plus 16
   clobbers checked below.  */
1351 if (XVECLEN (op, 0) != 17)
1354 for (i = 0; i < 8; i++)
/* Elements 1..8: clobbers of the eight x87 stack registers in
   XFmode, starting at FIRST_STACK_REG.  */
1356 rtx elt = XVECEXP (op, 0, i+1);
1358 if (GET_CODE (elt) != CLOBBER
1359 || GET_CODE (SET_DEST (elt)) != REG
1360 || GET_MODE (SET_DEST (elt)) != XFmode
1361 || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
/* Elements 9..16: clobbers of the eight MMX registers in DImode,
   starting at FIRST_MMX_REG.  */
1364 elt = XVECEXP (op, 0, i+9);
1366 if (GET_CODE (elt) != CLOBBER
1367 || GET_CODE (SET_DEST (elt)) != REG
1368 || GET_MODE (SET_DEST (elt)) != DImode
1369 || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
1375 ;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
1376 (define_predicate "vzeroall_operation"
1377 (match_code "parallel")
/* 16 SSE registers in 64-bit mode, 8 otherwise.  */
1379 unsigned i, nregs = TARGET_64BIT ? 16 : 8;
1381 if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
1384 for (i = 0; i < nregs; i++)
/* Each element after the first must set an SSE register to the
   V8SImode zero vector.  */
1386 rtx elt = XVECEXP (op, 0, i+1);
1388 if (GET_CODE (elt) != SET
1389 || GET_CODE (SET_DEST (elt)) != REG
1390 || GET_MODE (SET_DEST (elt)) != V8SImode
1391 || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
1392 || SET_SRC (elt) != CONST0_RTX (V8SImode))
1398 ;; Return true if OP is a vzeroupper operation,
1399 ;; i.e. an UNSPEC_VOLATILE tagged UNSPECV_VZEROUPPER.
1399 (define_predicate "vzeroupper_operation"
1400 (and (match_code "unspec_volatile")
1401 (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
1403 ;; Return true if OP is a parallel for a vbroadcast permute.
1404 ;; (All vector elements must be the same const_int.)
1405 (define_predicate "avx_vbroadcast_operand"
1406 (and (match_code "parallel")
1407 (match_code "const_int" "a"))
1409 rtx elt = XVECEXP (op, 0, 0);
1410 int i, nelt = XVECLEN (op, 0);
1412 /* Don't bother checking there are the right number of operands,
1413 merely that they're all identical. */
1414 for (i = 1; i < nelt; ++i)
1415 if (XVECEXP (op, 0, i) != elt)
1420 ;; Return true if OP is a parallel for a palignr permute.
1421 ;; (The selector must be a rotation: element i equals (elt0 + i) mod nelt.)
1421 (define_predicate "palignr_operand"
1422 (and (match_code "parallel")
1423 (match_code "const_int" "a"))
1425 int elt = INTVAL (XVECEXP (op, 0, 0));
1426 int i, nelt = XVECLEN (op, 0);
1428 /* Check that an order in the permutation is suitable for palignr.
1429 For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm". */
1430 for (i = 1; i < nelt; ++i)
1431 if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
1436 ;; Return true if OP is a proper third operand to vpblendw256.
1437 (define_predicate "avx2_pblendw_operand"
1438 (match_code "const_int")
/* Valid iff the 16-bit mask has identical low and high bytes.  */
1440 HOST_WIDE_INT val = INTVAL (op);
1441 HOST_WIDE_INT low = val & 0xff;
1442 return val == ((low << 8) | low);
1445 ;; Return true if OP is nonimmediate_operand or CONST_VECTOR.
1446 (define_predicate "general_vector_operand"
1447 (ior (match_operand 0 "nonimmediate_operand")
1448 (match_code "const_vector")))
1450 ;; Return true if OP is either -1 constant or stored in register.
1451 (define_predicate "register_or_constm1_operand"
1452 (ior (match_operand 0 "register_operand")
1453 (and (match_code "const_int")
1454 (match_test "op == constm1_rtx"))))