1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004-2021 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
20 ;; Return true if OP is either a i387 or SSE fp register.
21 (define_predicate "any_fp_register_operand"
22 (and (match_code "reg")
23 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25 ;; Return true if OP is an i387 fp register.
26 (define_predicate "fp_register_operand"
27 (and (match_code "reg")
28 (match_test "STACK_REGNO_P (REGNO (op))")))
30 ;; True if the operand is a GENERAL class register.
31 (define_predicate "general_reg_operand"
32 (and (match_code "reg")
33 (match_test "GENERAL_REGNO_P (REGNO (op))")))
35 ;; True if the operand is a nonimmediate operand with GENERAL class register.
36 (define_predicate "nonimmediate_gr_operand"
37 (if_then_else (match_code "reg")
38 (match_test "GENERAL_REGNO_P (REGNO (op))")
39 (match_operand 0 "nonimmediate_operand")))
41 ;; True if the operand is a general operand with GENERAL class register.
42 (define_predicate "general_gr_operand"
43 (if_then_else (match_code "reg")
44 (match_test "GENERAL_REGNO_P (REGNO (op))")
45 (match_operand 0 "general_operand")))
47 ;; True if the operand is an MMX register.
48 (define_predicate "mmx_reg_operand"
49 (and (match_code "reg")
50 (match_test "MMX_REGNO_P (REGNO (op))")))
52 ;; Match register operands, but include memory operands for
53 ;; !TARGET_MMX_WITH_SSE.
;; Accept register operands; when MMX insns are not implemented on top of
;; SSE (!TARGET_MMX_WITH_SSE), additionally accept memory operands.
(define_predicate "register_mmxmem_operand"
  (if_then_else (match_test "TARGET_MMX_WITH_SSE")
    (match_operand 0 "register_operand")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "memory_operand"))))
59 ;; True if the operand is an SSE register.
60 (define_predicate "sse_reg_operand"
61 (and (match_code "reg")
62 (match_test "SSE_REGNO_P (REGNO (op))")))
64 ;; Return true if op is a QImode register.
65 (define_predicate "any_QIreg_operand"
66 (and (match_code "reg")
67 (match_test "ANY_QI_REGNO_P (REGNO (op))")))
69 ;; Return true if op is one of QImode registers: %[abcd][hl].
70 (define_predicate "QIreg_operand"
71 (and (match_code "reg")
72 (match_test "QI_REGNO_P (REGNO (op))")))
74 ;; Return true if op is a QImode register operand other than %[abcd][hl].
;; Must be a REG (the dropped match_code line); only 64-bit mode has
;; QImode registers outside %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT")
       (not (match_test "QI_REGNO_P (REGNO (op))"))))
80 ;; Return true if op is the AX register.
81 (define_predicate "ax_reg_operand"
82 (and (match_code "reg")
83 (match_test "REGNO (op) == AX_REG")))
85 ;; Return true if op is the flags register.
86 (define_predicate "flags_reg_operand"
87 (and (match_code "reg")
88 (match_test "REGNO (op) == FLAGS_REG")))
90 ;; True if the operand is a MASK register.
91 (define_predicate "mask_reg_operand"
92 (and (match_code "reg")
93 (match_test "MASK_REGNO_P (REGNO (op))")))
95 ;; Match a DI, SI, HI or QImode nonimmediate_operand.
96 (define_special_predicate "int_nonimmediate_operand"
97 (and (match_operand 0 "nonimmediate_operand")
98 (ior (and (match_test "TARGET_64BIT")
99 (match_test "GET_MODE (op) == DImode"))
100 (match_test "GET_MODE (op) == SImode")
101 (match_test "GET_MODE (op) == HImode")
102 (match_test "GET_MODE (op) == QImode"))))
104 ;; Match register operands, but include memory operands for TARGET_SSE_MATH.
;; When the mode is an SSE FP mode and SSE math is in use, memory operands
;; are fine too; otherwise require a register.
(define_predicate "register_ssemem_operand"
  (if_then_else
    (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))
111 ;; Match nonimmediate operands, but exclude memory operands
112 ;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
;; For pure-SSE math (no i387 mixing) memory operands are excluded,
;; since the insn will be matched by an SSE pattern that wants a register.
(define_predicate "nonimm_ssenomem_operand"
  (if_then_else
    (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
	 (not (match_test "TARGET_MIX_SSE_I387")))
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))
120 ;; The above predicate, suitable for x87 arithmetic operators.
;; Like nonimm_ssenomem_operand, but the i387 fallback is only considered
;; when x87 arithmetic is actually enabled for this mode.
(define_predicate "x87nonimm_ssenomem_operand"
  (if_then_else
    (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
	 (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))
128 ;; Match register operands, include memory operand for TARGET_SSE4_1.
129 (define_predicate "register_sse4nonimm_operand"
130 (if_then_else (match_test "TARGET_SSE4_1")
131 (match_operand 0 "nonimmediate_operand")
132 (match_operand 0 "register_operand")))
134 ;; Return true if VALUE is symbol reference
135 (define_predicate "symbol_operand"
136 (match_code "symbol_ref"))
138 ;; Return true if VALUE is an ENDBR opcode in immediate field.
(define_predicate "ix86_endbr_immediate_operand"
  (match_code "const_int")
{
  /* Reject any immediate that contains the ENDBR32/ENDBR64 opcode bytes,
     so that CET-protected code cannot materialize a spurious ENDBR
     landing pad via an immediate operand.  */
  if (flag_cf_protection & CF_BRANCH)
     {
       unsigned HOST_WIDE_INT imm = UINTVAL (op);
       unsigned HOST_WIDE_INT val = TARGET_64BIT ? 0xfa1e0ff3 : 0xfb1e0ff3;

       if (imm == val)
	 return true;

       /* NB: Encoding is byte based.  */
       if (TARGET_64BIT)
	 for (; imm >= val; imm >>= 8)
	   if (imm == val)
	     return true;
     }

  return false;
})
160 ;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (ix86_endbr_immediate_operand (op, VOIDmode))
    return false;

  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      {
	HOST_WIDE_INT val = INTVAL (op);
	return trunc_int_for_mode (val, SImode) == val;
      }

    case SYMBOL_REF:
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;

      /* Load the external function address via the GOT slot.  */
      if (ix86_force_load_from_GOT_p (op))
	return false;

      /* For certain code models, the symbolic references are known to fit.
	 in CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */

      /* TLS/GOT-relative relocations always fit in a signed 32-bit
	 immediate.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE && GET_CODE (op1) != UNSPEC)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;

	  HOST_WIDE_INT offset = INTVAL (op2);
	  if (trunc_int_for_mode (offset, SImode) != offset)
	    return false;

	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;

	      /* Load the external function address via the GOT slot.  */
	      if (ix86_force_load_from_GOT_p (op1))
		return false;

	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024)
		return true;
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of 32bits address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0)
		return true;
	      break;

	    case UNSPEC:
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  return true;
		default:
		  return false;
		}

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})
289 ;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (ix86_endbr_immediate_operand (op, VOIDmode))
    return false;

  switch (GET_CODE (op))
    {
    case CONST_INT:
      return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;

      /* Load the external function address via the GOT slot.  */
      if (ix86_force_load_from_GOT_p (op))
	return false;

      /* For certain code models, the symbolic references are known to fit.  */
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;

	  HOST_WIDE_INT offset = INTVAL (op2);
	  if (trunc_int_for_mode (offset, SImode) != offset)
	    return false;

	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;

	      /* Load the external function address via the GOT slot.  */
	      if (ix86_force_load_from_GOT_p (op1))
		return false;

	      /* For small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset > -0x10000)
		return true;

	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset > -0x10000)
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
382 ;; Return true if VALUE is a constant integer whose low and high words satisfy
383 ;; x86_64_immediate_operand.
(define_predicate "x86_64_hilo_int_operand"
  (match_code "const_int,const_wide_int")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return x86_64_immediate_operand (op, mode);

    case CONST_WIDE_INT:
      /* A double-word constant: both halves must individually be valid
	 sign-extended 32-bit immediates.  */
      gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
      return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
					DImode)
	      && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
									1)),
					   DImode));

    default:
      gcc_unreachable ();
    }
})
405 ;; Return true if VALUE is a constant integer whose value is
406 ;; x86_64_immediate_operand value zero extended from word mode to mode.
(define_predicate "x86_64_dwzext_immediate_operand"
  (match_code "const_int,const_wide_int")
{
  if (ix86_endbr_immediate_operand (op, VOIDmode))
    return false;

  switch (GET_CODE (op))
    {
    case CONST_INT:
      if (!TARGET_64BIT)
	return UINTVAL (op) <= HOST_WIDE_INT_UC (0xffffffff);
      return UINTVAL (op) <= HOST_WIDE_INT_UC (0x7fffffff);

    case CONST_WIDE_INT:
      if (!TARGET_64BIT)
	return false;
      /* High word must be zero and the low word must be representable
	 as a sign-extended 32-bit value.  */
      return (CONST_WIDE_INT_NUNITS (op) == 2
	      && CONST_WIDE_INT_ELT (op, 1) == 0
	      && (trunc_int_for_mode (CONST_WIDE_INT_ELT (op, 0), SImode)
		  == (HOST_WIDE_INT) CONST_WIDE_INT_ELT (op, 0)));

    default:
      gcc_unreachable ();
    }
})
433 ;; Return true if size of VALUE can be stored in a sign
434 ;; extended immediate field.
435 (define_predicate "x86_64_immediate_size_operand"
436 (and (match_code "symbol_ref")
437 (ior (not (match_test "TARGET_64BIT"))
438 (match_test "ix86_cmodel == CM_SMALL")
439 (match_test "ix86_cmodel == CM_KERNEL"))))
441 ;; Return true if OP is general operand representable on x86_64.
442 (define_predicate "x86_64_general_operand"
443 (if_then_else (match_test "TARGET_64BIT")
444 (ior (match_operand 0 "nonimmediate_operand")
445 (match_operand 0 "x86_64_immediate_operand"))
446 (match_operand 0 "general_operand")))
;; Return true if OP's both words are general operands representable
;; on x86_64.
450 (define_predicate "x86_64_hilo_general_operand"
451 (if_then_else (match_test "TARGET_64BIT")
452 (ior (match_operand 0 "nonimmediate_operand")
453 (match_operand 0 "x86_64_hilo_int_operand"))
454 (match_operand 0 "general_operand")))
456 ;; Return true if OP is non-VOIDmode general operand representable
457 ;; on x86_64. This predicate is used in sign-extending conversion
458 ;; operations that require non-VOIDmode immediate operands.
459 (define_predicate "x86_64_sext_operand"
460 (and (match_test "GET_MODE (op) != VOIDmode")
461 (match_operand 0 "x86_64_general_operand")))
463 ;; Return true if OP is non-VOIDmode general operand. This predicate
464 ;; is used in sign-extending conversion operations that require
465 ;; non-VOIDmode immediate operands.
466 (define_predicate "sext_operand"
467 (and (match_test "GET_MODE (op) != VOIDmode")
468 (match_operand 0 "general_operand")))
470 ;; Return true if OP is representable on x86_64 as zero-extended operand.
471 ;; This predicate is used in zero-extending conversion operations that
472 ;; require non-VOIDmode immediate operands.
473 (define_predicate "x86_64_zext_operand"
474 (if_then_else (match_test "TARGET_64BIT")
475 (ior (match_operand 0 "nonimmediate_operand")
476 (and (match_operand 0 "x86_64_zext_immediate_operand")
477 (match_test "GET_MODE (op) != VOIDmode")))
478 (match_operand 0 "nonimmediate_operand")))
480 ;; Return true if OP is general operand representable on x86_64
481 ;; as either sign extended or zero extended constant.
482 (define_predicate "x86_64_szext_general_operand"
483 (if_then_else (match_test "TARGET_64BIT")
484 (ior (match_operand 0 "nonimmediate_operand")
485 (match_operand 0 "x86_64_immediate_operand")
486 (match_operand 0 "x86_64_zext_immediate_operand"))
487 (match_operand 0 "general_operand")))
489 ;; Return true if OP is nonmemory operand representable on x86_64.
490 (define_predicate "x86_64_nonmemory_operand"
491 (if_then_else (match_test "TARGET_64BIT")
492 (ior (match_operand 0 "register_operand")
493 (match_operand 0 "x86_64_immediate_operand"))
494 (match_operand 0 "nonmemory_operand")))
496 ;; Return true if OP is nonmemory operand representable on x86_64.
497 (define_predicate "x86_64_szext_nonmemory_operand"
498 (if_then_else (match_test "TARGET_64BIT")
499 (ior (match_operand 0 "register_operand")
500 (match_operand 0 "x86_64_immediate_operand")
501 (match_operand 0 "x86_64_zext_immediate_operand"))
502 (match_operand 0 "nonmemory_operand")))
;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})
527 ;; Return true if OP is nonmemory operand acceptable by movabs patterns.
528 (define_predicate "x86_64_movabs_operand"
529 (and (match_operand 0 "nonmemory_operand")
530 (not (match_operand 0 "pic_32bit_operand"))))
532 ;; Return true if OP is either a symbol reference or a sum of a symbol
533 ;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})
577 ;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  /* Strip a CONST (PLUS sym const_int) wrapper down to the symbol.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL an invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})
613 ;; Test for a legitimate @GOTOFF operand.
615 ;; VxWorks does not impose a fixed gap between segments; the run-time
616 ;; gap can be different from the object-file gap. We therefore can't
617 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
618 ;; same segment as the GOT. Unfortunately, the flexibility of linker
619 ;; scripts means that we can't be sure of that in general, so assume
620 ;; that @GOTOFF is never valid on VxWorks.
621 (define_predicate "gotoff_operand"
622 (and (not (match_test "TARGET_VXWORKS_RTP"))
623 (match_operand 0 "local_symbolic_operand")))
625 ;; Test for various thread-local symbols.
626 (define_special_predicate "tls_symbolic_operand"
627 (and (match_code "symbol_ref")
628 (match_test "SYMBOL_REF_TLS_MODEL (op)")))
630 (define_special_predicate "tls_modbase_operand"
631 (and (match_code "symbol_ref")
632 (match_test "op == ix86_tls_module_base ()")))
634 (define_predicate "tls_address_pattern"
635 (and (match_code "set,parallel,unspec,unspec_volatile")
636 (match_test "ix86_tls_address_pattern_p (op)")))
638 ;; Test for a pc-relative call operand
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  /* Large code models and -mforce-indirect-call cannot use direct
     pc-relative calls.  */
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC
      || flag_force_indirect_call)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})
650 ;; P6 processors will jump to the address after the decrement when %esp
651 ;; is used as a call operand, so they will execute return address as a code.
652 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  /* See the P6 %esp-as-call-operand erratum described above.  */
  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})
666 ;; True for any non-virtual or eliminable register. Used in places where
667 ;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})
679 ;; Similarly, but include the stack pointer. This is used to prevent esp
680 ;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  /* Strict check only once register allocation is final.  */
  if (reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})
692 ;; Return false if this is any eliminable register. Otherwise general_operand.
693 (define_predicate "general_no_elim_operand"
694 (if_then_else (match_code "reg,subreg")
695 (match_operand 0 "register_no_elim_operand")
696 (match_operand 0 "general_operand")))
698 ;; Return false if this is any eliminable register. Otherwise
699 ;; register_operand or a constant.
700 (define_predicate "nonmemory_no_elim_operand"
701 (ior (match_operand 0 "register_no_elim_operand")
702 (match_operand 0 "immediate_operand")))
704 ;; Test for a valid operand for indirect branch.
705 (define_predicate "indirect_branch_operand"
706 (ior (match_operand 0 "register_operand")
707 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
708 (not (match_test "TARGET_X32"))
709 (match_operand 0 "memory_operand"))))
;; Return true if OP is a memory operand that can be used in sibcalls.
712 ;; Since sibcall never returns, we can only use call-clobbered register
713 ;; as GOT base. Allow GOT slot here only with pseudo register as GOT
714 ;; base. Properly handle sibcall over GOT slot with *sibcall_GOT_32
715 ;; and *sibcall_value_GOT_32 patterns.
(define_predicate "sibcall_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  if (CONSTANT_P (op))
    return true;
  if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
    {
      int regno = REGNO (XEXP (op, 0));
      /* Only a pseudo or a call-clobbered hard register may serve as
	 the GOT base, since the sibcall never returns.  */
      if (!HARD_REGISTER_NUM_P (regno) || call_used_or_fixed_reg_p (regno))
	{
	  op = XEXP (op, 1);
	  if (GOT32_symbol_operand (op, VOIDmode))
	    return true;
	}
    }
  return false;
})
735 ;; Return true if OP is a GOT memory operand.
(define_predicate "GOT_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  return (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC
	  && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
})
745 ;; Test for a valid operand for a call instruction.
746 ;; Allow constant call address operands in Pmode only.
747 (define_special_predicate "call_insn_operand"
748 (ior (match_test "constant_call_address_operand
749 (op, mode == VOIDmode ? mode : Pmode)")
750 (match_operand 0 "call_register_no_elim_operand")
751 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
752 (ior (and (not (match_test "TARGET_X32"))
753 (match_operand 0 "memory_operand"))
754 (and (match_test "TARGET_X32 && Pmode == DImode")
755 (match_operand 0 "GOT_memory_operand"))))))
757 ;; Similarly, but for tail calls, in which we cannot allow memory references.
758 (define_special_predicate "sibcall_insn_operand"
759 (ior (match_test "constant_call_address_operand
760 (op, mode == VOIDmode ? mode : Pmode)")
761 (match_operand 0 "register_no_elim_operand")
762 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
763 (ior (and (not (match_test "TARGET_X32"))
764 (match_operand 0 "sibcall_memory_operand"))
765 (and (match_test "TARGET_X32 && Pmode == DImode")
766 (match_operand 0 "GOT_memory_operand"))))))
768 ;; Return true if OP is a 32-bit GOT symbol operand.
769 (define_predicate "GOT32_symbol_operand"
770 (match_test "GET_CODE (op) == CONST
771 && GET_CODE (XEXP (op, 0)) == UNSPEC
772 && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
774 ;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})
783 ;; Match one or a vector with all elements equal to one.
(define_predicate "const1_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})
;; Match exactly -1.
(define_predicate "constm1_operand"
  (and (match_code "const_int")
       (match_test "op == constm1_rtx")))
797 ;; Match exactly eight.
798 (define_predicate "const8_operand"
799 (and (match_code "const_int")
800 (match_test "INTVAL (op) == 8")))
802 ;; Match exactly 128.
803 (define_predicate "const128_operand"
804 (and (match_code "const_int")
805 (match_test "INTVAL (op) == 128")))
807 ;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation
808 (define_predicate "const_32bit_mask"
809 (and (match_code "const_int")
810 (match_test "trunc_int_for_mode (INTVAL (op), DImode)
811 == (HOST_WIDE_INT) 0xffffffff")))
813 ;; Match 2, 4, or 8. Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})
821 ;; Match 1, 2, or 3. Used for lea shift amounts.
(define_predicate "const123_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 3;
})
829 ;; Match 2, 3, 6, or 7
(define_predicate "const2367_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 3 || i == 6 || i == 7;
})
837 ;; Match 1, 2, 4, or 8
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})
845 ;; Match 3, 5, or 9. Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})
;; Match 4 or 8 to 11.  Used for embedded rounding.
(define_predicate "const_4_or_8_to_11_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || (i >= 8 && i <= 11);
})
861 ;; Match 4 or 8. Used for SAE.
(define_predicate "const48_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || i == 8;
})
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))
876 (define_predicate "const_0_to_3_operand"
877 (and (match_code "const_int")
878 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
881 (define_predicate "const_0_to_4_operand"
882 (and (match_code "const_int")
883 (match_test "IN_RANGE (INTVAL (op), 0, 4)")))
886 (define_predicate "const_0_to_5_operand"
887 (and (match_code "const_int")
888 (match_test "IN_RANGE (INTVAL (op), 0, 5)")))
891 (define_predicate "const_0_to_7_operand"
892 (and (match_code "const_int")
893 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
896 (define_predicate "const_0_to_15_operand"
897 (and (match_code "const_int")
898 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
901 (define_predicate "const_0_to_31_operand"
902 (and (match_code "const_int")
903 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
906 (define_predicate "const_0_to_63_operand"
907 (and (match_code "const_int")
908 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
911 (define_predicate "const_0_to_255_operand"
912 (and (match_code "const_int")
913 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
915 ;; Match (0 to 255) * 8
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})
923 ;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
924 ;; for shift & compare patterns, as shifting by 0 does not change flags).
925 (define_predicate "const_1_to_31_operand"
926 (and (match_code "const_int")
927 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
929 ;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
930 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
931 (define_predicate "const_1_to_63_operand"
932 (and (match_code "const_int")
933 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
936 (define_predicate "const_2_to_3_operand"
937 (and (match_code "const_int")
938 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
941 (define_predicate "const_4_to_5_operand"
942 (and (match_code "const_int")
943 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
946 (define_predicate "const_4_to_7_operand"
947 (and (match_code "const_int")
948 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
951 (define_predicate "const_6_to_7_operand"
952 (and (match_code "const_int")
953 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
956 (define_predicate "const_8_to_9_operand"
957 (and (match_code "const_int")
958 (match_test "IN_RANGE (INTVAL (op), 8, 9)")))
961 (define_predicate "const_8_to_11_operand"
962 (and (match_code "const_int")
963 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
966 (define_predicate "const_8_to_15_operand"
967 (and (match_code "const_int")
968 (match_test "IN_RANGE (INTVAL (op), 8, 15)")))
971 (define_predicate "const_10_to_11_operand"
972 (and (match_code "const_int")
973 (match_test "IN_RANGE (INTVAL (op), 10, 11)")))
976 (define_predicate "const_12_to_13_operand"
977 (and (match_code "const_int")
978 (match_test "IN_RANGE (INTVAL (op), 12, 13)")))
981 (define_predicate "const_12_to_15_operand"
982 (and (match_code "const_int")
983 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
986 (define_predicate "const_14_to_15_operand"
987 (and (match_code "const_int")
988 (match_test "IN_RANGE (INTVAL (op), 14, 15)")))
991 (define_predicate "const_16_to_19_operand"
992 (and (match_code "const_int")
993 (match_test "IN_RANGE (INTVAL (op), 16, 19)")))
996 (define_predicate "const_16_to_31_operand"
997 (and (match_code "const_int")
998 (match_test "IN_RANGE (INTVAL (op), 16, 31)")))
1001 (define_predicate "const_20_to_23_operand"
1002 (and (match_code "const_int")
1003 (match_test "IN_RANGE (INTVAL (op), 20, 23)")))
1006 (define_predicate "const_24_to_27_operand"
1007 (and (match_code "const_int")
1008 (match_test "IN_RANGE (INTVAL (op), 24, 27)")))
1011 (define_predicate "const_28_to_31_operand"
1012 (and (match_code "const_int")
1013 (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
1015 ;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations causes extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})
1026 ;; True for registers, or const_int_operand, used to vec_setm expander.
1027 (define_predicate "vec_setm_operand"
1028 (ior (and (match_operand 0 "register_operand")
1029 (match_test "TARGET_AVX2"))
1030 (match_code "const_int")))
1032 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
1033 (define_predicate "reg_or_pm1_operand"
1034 (ior (match_operand 0 "register_operand")
1035 (and (match_code "const_int")
1036 (ior (match_test "op == const1_rtx")
1037 (match_test "op == constm1_rtx")))))
1039 ;; True if OP is acceptable as operand of DImode shift expander.
1040 (define_predicate "shiftdi_operand"
1041 (if_then_else (match_test "TARGET_64BIT")
1042 (match_operand 0 "nonimmediate_operand")
1043 (match_operand 0 "register_operand")))
1045 (define_predicate "ashldi_input_operand"
1046 (if_then_else (match_test "TARGET_64BIT")
1047 (match_operand 0 "nonimmediate_operand")
1048 (match_operand 0 "reg_or_pm1_operand")))
1050 ;; Return true if OP is a vector load from the constant pool with just
1051 ;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (and (match_code "mem")
       (match_test "reload_completed"))
{
  unsigned n_elts;

  op = avoid_constant_pool_reference (op);

  if (GET_CODE (op) != CONST_VECTOR)
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* All elements except element 0 must be zero.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }

  return true;
})
1072 /* Return true if operand is a vector constant that is all ones. */
1073 (define_predicate "vector_all_ones_operand"
1074 (and (match_code "const_vector")
1075 (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
1076 (match_test "op == CONSTM1_RTX (GET_MODE (op))")))
1078 ; Return true when OP is operand acceptable for vector memory operand.
1079 ; Only AVX can have misaligned memory operand.
1080 (define_predicate "vector_memory_operand"
1081 (and (match_operand 0 "memory_operand")
1082 (ior (match_test "TARGET_AVX")
1083 (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))
1085 ; Return true when OP is register_operand or vector_memory_operand.
1086 (define_predicate "vector_operand"
1087 (ior (match_operand 0 "register_operand")
1088 (match_operand 0 "vector_memory_operand")))
;; A (vec_duplicate (mem ...)) usable as an AVX512 embedded-broadcast
;; operand: needs AVX512F, plus either AVX512VL or a full 64-byte
;; vector; the element mode must be a valid broadcast mode and the
;; duplicated operand must be memory.
(define_predicate "bcst_mem_operand"
  (and (match_code "vec_duplicate")
       (match_test "TARGET_AVX512F")
       (ior (match_test "TARGET_AVX512VL")
	    (match_test "GET_MODE_SIZE (GET_MODE (op)) == 64"))
       (match_test "VALID_BCST_MODE_P (GET_MODE_INNER (GET_MODE (op)))")
       (match_test "memory_operand (XEXP (op, 0), GET_MODE (XEXP (op, 0)))")))

; Either a bcst_mem_operand or a vector_operand.
(define_predicate "bcst_vector_operand"
  (ior (match_operand 0 "bcst_mem_operand")
       (match_operand 0 "vector_operand")))
;; Return true when OP is a nonimmediate operand or any CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_code "const_vector")
       (match_operand 0 "nonimmediate_operand")))

;; Return true when OP is nonimmediate or a standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (ior (match_test "standard_sse_constant_p (op, mode)")
       (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is either the constant zero or a register.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "const0_operand")
       (match_operand 0 "register_operand")))

; Return true when OP is either the constant zero or nonimmediate.
(define_predicate "nonimm_or_0_operand"
  (ior (match_operand 0 "const0_operand")
       (match_operand 0 "nonimmediate_operand")))
;; A memory operand that mentions no extended (REX) register anywhere
;; in its address.  NOTE(review): semantics inferred from the helper's
;; name — confirm against x86_extended_reg_mentioned_p.
(define_predicate "norex_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "!x86_extended_reg_mentioned_p (op)")))

;; RTX codes that force a SImode address.
(define_predicate "SImode_address_operand"
  (match_code "and,subreg,zero_extend"))
1132 ;; Return true if op is a valid address for LEA, and does not contain
1133 ;; a segment override. Defined as a special predicate to allow
1134 ;; mode-less const_int operands pass to address_operand.
1135 (define_special_predicate "address_no_seg_operand"
1136 (match_test "address_operand (op, VOIDmode)")
;; NOTE(review): embedded numbering has gaps (1137, 1139-1140, ...);
;; braces and intermediate statements of the C body are missing here.
1138 struct ix86_address parts;
/* A const_int is acceptable regardless of mode; any other rtx must
   match the requested mode.  */
1141 if (!CONST_INT_P (op)
1143 && GET_MODE (op) != mode)
1146 ok = ix86_decompose_address (op, &parts);
/* Accept only addresses in the generic (no segment override) space.  */
1148 return parts.seg == ADDR_SPACE_GENERIC;
1151 ;; Return true if op is a valid base register, displacement or
1152 ;; sum of base register and displacement for VSIB addressing.
1153 (define_predicate "vsib_address_operand"
1154 (match_test "address_operand (op, VOIDmode)")
;; NOTE(review): embedded numbering has gaps (1155, 1157-1159, ...);
;; much of the C body, including returns and braces, is missing here.
1156 struct ix86_address parts;
1160 ok = ix86_decompose_address (op, &parts);
/* VSIB cannot use an index register or a segment override.  */
1162 if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
1165 /* VSIB addressing doesn't support (%rip). */
/* Peel CONST/PLUS wrappers to inspect the underlying displacement.  */
1169 if (GET_CODE (disp) == CONST)
1171 disp = XEXP (disp, 0);
1172 if (GET_CODE (disp) == PLUS)
1173 disp = XEXP (disp, 0);
/* PC-relative UNSPEC displacements (GOTPCREL/GOTNTPOFF) are rejected.  */
1174 if (GET_CODE (disp) == UNSPEC)
1175 switch (XINT (disp, 1))
1177 case UNSPEC_GOTPCREL:
1179 case UNSPEC_GOTNTPOFF:
/* Bare symbol/label displacements are also examined — the governing
   condition on line 1184 is missing from this listing.  */
1185 && (GET_CODE (disp) == SYMBOL_REF
1186 || GET_CODE (disp) == LABEL_REF))
;; NOTE(review): the body of vsib_mem_operator (original line 1194+)
;; is missing from this listing; only the header survives.
1193 (define_predicate "vsib_mem_operator"
1196 ;; Return true if the rtx is known to be at least 32 bits aligned.
1197 (define_predicate "aligned_operand"
1198 (match_operand 0 "general_operand")
;; NOTE(review): embedded numbering has gaps (1199, 1201-1202, ...);
;; braces and return statements of the C body are missing here.
1200 struct ix86_address parts;
1203 /* Registers and immediate operands are always "aligned". */
1207 /* All patterns using aligned_operand on memory operands end up
1208 promoting memory operand to 64bit and thus causing memory mismatch. */
1209 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
1212 /* Don't even try to do any aligned optimizations with volatiles. */
1213 if (MEM_VOLATILE_P (op))
/* A recorded alignment of 32 bits or more is good enough as-is.  */
1216 if (MEM_ALIGN (op) >= 32)
1221 /* Pushes and pops are only valid on the stack pointer. */
1222 if (GET_CODE (op) == PRE_DEC
1223 || GET_CODE (op) == POST_INC)
1226 /* Decode the address. */
1227 ok = ix86_decompose_address (op, &parts);
/* Strip SUBREGs so register alignment info can be queried below.  */
1230 if (parts.base && SUBREG_P (parts.base))
1231 parts.base = SUBREG_REG (parts.base);
1232 if (parts.index && SUBREG_P (parts.index))
1233 parts.index = SUBREG_REG (parts.index);
1235 /* Look for some component that isn't known to be aligned. */
1238 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
1243 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
/* The displacement must be a multiple of 4 (32 bits).  */
1248 if (!CONST_INT_P (parts.disp)
1249 || (INTVAL (parts.disp) & 3))
1253 /* Didn't find one -- this must be an aligned address. */
1257 ;; Return true if OP is memory operand with a displacement.
1258 (define_predicate "memory_displacement_operand"
1259 (match_operand 0 "memory_operand")
;; NOTE(review): embedded numbering has gaps; braces and some
;; statements of both C bodies below are missing from this listing.
1261 struct ix86_address parts;
/* Decompose the MEM's address and test for a displacement part.  */
1264 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1266 return parts.disp != NULL_RTX;
1269 ;; Return true if OP is memory operand with a displacement only.
1270 (define_predicate "memory_displacement_only_operand"
1271 (match_operand 0 "memory_operand")
1273 struct ix86_address parts;
1279 ok = ix86_decompose_address (XEXP (op, 0), &parts);
/* Reject addresses that also have a base or index component.  */
1282 if (parts.base || parts.index)
1285 return parts.disp != NULL_RTX;
;; A memory operand whose address cannot be represented by the modRM
;; byte alone, i.e. memory_address_length reports extra bytes.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false) != 0")))
1294 ;; Return true if OP is a comparison operator that can be issued by fcmov.
1295 (define_predicate "fcmov_comparison_operator"
1296 (match_operand 0 "comparison_operator")
;; NOTE(review): embedded numbering has gaps (1297, 1300, 1302, ...);
;; braces, returns and the tail of the switch are missing here.
1298 machine_mode inmode = GET_MODE (XEXP (op, 0));
1299 enum rtx_code code = GET_CODE (op);
/* For FP compares, first map the FP code to an integer condition.  */
1301 if (inmode == CCFPmode)
1303 if (!ix86_trivial_fp_comparison_operator (op, mode))
1305 code = ix86_fp_compare_code_to_integer (code);
1307 /* i387 supports just limited amount of conditional codes. */
1310 case LTU: case GTU: case LEU: case GEU:
1311 if (inmode == CCmode || inmode == CCFPmode || inmode == CCCmode)
1314 case ORDERED: case UNORDERED:
;; Comparison codes usable in the CMPSS/CMPPS insns.  The first group
;; is supported directly; the second group is available with AVX but
;; can't be done with full IEEE support, i.e. NaNs.
(define_predicate "sse_comparison_operator"
  (ior (match_code "ne,eq,le,lt,unge,ungt,ordered,unordered")
       (and (match_code "gt,ge,unle,unlt,uneq,ltgt")
	    (match_test "TARGET_AVX"))))

;; Signed integer comparison codes.
(define_predicate "ix86_comparison_int_operator"
  (match_code "eq,ne,lt,le,gt,ge"))

;; Unsigned integer comparison codes (plus eq/ne).
(define_predicate "ix86_comparison_uns_operator"
  (match_code "eq,ne,ltu,leu,gtu,geu"))

;; Codes usable with the bit-test (BT) patterns.
(define_predicate "bt_comparison_operator"
  (match_code "eq,ne"))

;; Codes usable with the SHR-based comparison patterns.
(define_predicate "shr_comparison_operator"
  (match_code "leu,gtu"))

;; Codes usable with the add-with-carry comparison patterns.
(define_predicate "add_comparison_operator"
  (match_code "ltu,geu"))
1346 ;; Return true if OP is a valid comparison operator in valid mode.
1347 (define_predicate "ix86_comparison_operator"
1348 (match_operand 0 "comparison_operator")
;; NOTE(review): embedded numbering has gaps (1349, 1352, 1355-1358,
;; ...); the switch skeleton, braces and returns are missing here.
1350 machine_mode inmode = GET_MODE (XEXP (op, 0));
1351 enum rtx_code code = GET_CODE (op);
1353 if (inmode == CCFPmode)
1354 return ix86_trivial_fp_comparison_operator (op, mode);
/* The checks below pair comparison codes (switch cases are missing
   from this listing) with the CC modes that can represent them.  */
1359 if (inmode == CCGZmode)
1363 if (inmode == CCmode || inmode == CCGCmode
1364 || inmode == CCGOCmode || inmode == CCNOmode || inmode == CCGZmode)
1368 if (inmode == CCGZmode)
1372 if (inmode == CCmode || inmode == CCCmode || inmode == CCGZmode)
1375 case ORDERED: case UNORDERED:
1376 if (inmode == CCmode)
1380 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1388 ;; Return true if OP is a valid comparison operator
1389 ;; testing carry flag to be set.
1390 (define_predicate "ix86_carry_flag_operator"
1391 (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
;; NOTE(review): lines 1392, 1395, 1397, 1399, 1401 and 1405+ are
;; missing from this listing.
1393 machine_mode inmode = GET_MODE (XEXP (op, 0));
1394 enum rtx_code code = GET_CODE (op);
/* FP compares map their code to an integer condition first.  */
1396 if (inmode == CCFPmode)
1398 if (!ix86_trivial_fp_comparison_operator (op, mode))
1400 code = ix86_fp_compare_code_to_integer (code);
/* In CCCmode only LTU/GTU test the carry flag directly.  */
1402 else if (inmode == CCCmode)
1403 return code == LTU || code == GTU;
1404 else if (inmode != CCmode)
;; True for FP comparisons that require testing only one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "ordered,unordered,gt,ge,unlt,unle,uneq,ltgt"))
1414 ;; Return true if we know how to do this comparison. Others require
1415 ;; testing more than one flag bit, and we let the generic middle-end
;; NOTE(review): the last line of this comment (original line 1416)
;; is missing from this listing.
1417 (define_predicate "ix86_fp_comparison_operator"
1418 (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
1419 == IX86_FPCMP_ARITH")
1420 (match_operand 0 "comparison_operator")
1421 (match_operand 0 "ix86_trivial_fp_comparison_operator")))
;; Nearly a general operand, but additionally accept any const_double,
;; since we wish to be able to drop those into memory rather than have
;; them get pulled into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_operand 0 "general_operand")
       (match_code "const_double")))

;; True for the four basic binary floating-point operations.
(define_predicate "binary_fp_operator"
  (match_code "div,mult,minus,plus"))
1434 ;; Return true if this is a multiply operation.
1435 (define_predicate "mult_operator"
1436 (match_code "mult"))
1438 ;; Return true if this is a division operation.
;; NOTE(review): div_operator's match_code line (original line 1440)
;; is missing from this listing.
1439 (define_predicate "div_operator"
;; True for plus, minus, and the three bitwise logical operations.
(define_predicate "plusminuslogic_operator"
  (match_code "and,ior,xor,plus,minus"))

;; True for any code satisfying ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,minus,mult,div,mod,udiv,umod,and,ior,xor,
	       smin,smax,umin,umax,compare,ashift,ashiftrt,
	       lshiftrt,rotate,rotatert"))

;; True for any code satisfying COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "and,ior,xor,plus,mult,smin,smax,umin,umax"))

;; Binary operators that may be promoted to a wider mode; HImode
;; multiply only when the tuning asks for IMUL promotion.
(define_predicate "promotable_binary_operator"
  (ior (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))
       (match_code "plus,minus,and,ior,xor,ashift")))
;; True for a bare COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; A memory operand whose recorded alignment is below the natural
;; alignment of its mode.  (The mem code check must come first:
;; MEM_ALIGN is only valid on MEMs.)
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "GET_MODE_BITSIZE (mode) > MEM_ALIGN (op)")))
1470 ;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
1471 (define_predicate "vzeroall_operation"
1472 (match_code "parallel")
;; NOTE(review): embedded numbering has gaps (1473, 1475, 1477-1478,
;; ...); braces and return statements are missing from this listing.
/* Expect one leading element plus one SET per SSE register.  */
1474 unsigned i, nregs = TARGET_64BIT ? 16 : 8;
1476 if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
1479 for (i = 0; i < nregs; i++)
1481 rtx elt = XVECEXP (op, 0, i+1);
/* Each element must be (set (reg:V8SI ssereg_i) (const0)).  */
1483 if (GET_CODE (elt) != SET
1484 || GET_CODE (SET_DEST (elt)) != REG
1485 || GET_MODE (SET_DEST (elt)) != V8SImode
1486 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1487 || SET_SRC (elt) != CONST0_RTX (V8SImode))
1493 ;; Return true if OP is a vzeroall pattern.
1494 (define_predicate "vzeroall_pattern"
1495 (and (match_code "parallel")
1496 (match_code "unspec_volatile" "a")
1497 (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROALL")))
1499 ;; Return true if OP is a vzeroupper pattern.
1500 (define_predicate "vzeroupper_pattern"
1501 (and (match_code "parallel")
1502 (match_code "unspec_volatile" "a")
1503 (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROUPPER")))
1505 ;; Return true if OP is an addsub vec_merge operation
1506 (define_predicate "addsub_vm_operator"
1507 (match_code "vec_merge")
;; NOTE(review): embedded numbering jumps 1507 -> 1518; the local
;; declarations and op0/op1 extraction are missing from this listing,
;; as are various braces and returns throughout these four predicates.
/* Determine operand order: MINUS/PLUS or (swapped) PLUS/MINUS.  */
1518 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1520 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1525 mask = INTVAL (XEXP (op, 2));
1526 nunits = GET_MODE_NUNITS (mode);
/* Each lane's merge-mask bit must match its expected add/sub slot.  */
1528 for (elt = 0; elt < nunits; elt++)
1530 /* bit clear: take from op0, set: take from op1 */
1531 int bit = !(mask & (HOST_WIDE_INT_1U << elt));
1533 if (bit != ((elt & 1) ^ swapped))
1540 ;; Return true if OP is an addsub vec_select/vec_concat operation
1541 (define_predicate "addsub_vs_operator"
1542 (and (match_code "vec_select")
1543 (match_code "vec_concat" "0"))
1549 op0 = XEXP (XEXP (op, 0), 0);
1550 op1 = XEXP (XEXP (op, 0), 1);
1553 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1555 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1560 nunits = GET_MODE_NUNITS (mode);
1561 if (XVECLEN (XEXP (op, 1), 0) != nunits)
1564 /* We already checked that permutation is suitable for addsub,
1565 so only look at the first element of the parallel. */
1566 elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));
1568 return elt == (swapped ? nunits : 0);
1571 ;; Return true if OP is a parallel for an addsub vec_select.
1572 (define_predicate "addsub_vs_parallel"
1573 (and (match_code "parallel")
1574 (match_code "const_int" "a"))
1576 int nelt = XVECLEN (op, 0);
1582 /* Check that the permutation is suitable for addsub.
1583 For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }. */
1584 elt = INTVAL (XVECEXP (op, 0, 0));
1587 for (i = 1; i < nelt; ++i)
1588 if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
1591 else if (elt == nelt)
1593 for (i = 1; i < nelt; ++i)
1594 if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
1603 ;; Return true if OP is a parallel for a pmovz{bw,wd,dq} vec_select,
1604 ;; where one of the two operands of the vec_concat is const0_operand.
1605 (define_predicate "pmovzx_parallel"
1606 (and (match_code "parallel")
1607 (match_code "const_int" "a"))
1609 int nelt = XVECLEN (op, 0);
1615 /* Check that the permutation is suitable for pmovz{bw,wd,dq}.
1616 For example { 0 16 1 17 2 18 3 19 4 20 5 21 6 22 7 23 }. */
1617 elt = INTVAL (XVECEXP (op, 0, 0));
1620 for (i = 1; i < nelt; ++i)
/* Odd positions must index into the (zero) upper half; even
   positions must step through the low half.  */
1623 if (INTVAL (XVECEXP (op, 0, i)) < nelt)
1626 else if (INTVAL (XVECEXP (op, 0, i)) != i / 2)
1635 ;; Return true if OP is a parallel for a vbroadcast permute.
1636 (define_predicate "avx_vbroadcast_operand"
1637 (and (match_code "parallel")
1638 (match_code "const_int" "a"))
;; NOTE(review): embedded numbering has gaps (1639, 1642, 1647-1650,
;; ...); braces and final returns are missing from this listing.
1640 rtx elt = XVECEXP (op, 0, 0);
1641 int i, nelt = XVECLEN (op, 0);
1643 /* Don't bother checking there are the right number of operands,
1644 merely that they're all identical. */
1645 for (i = 1; i < nelt; ++i)
1646 if (XVECEXP (op, 0, i) != elt)
1651 ;; Return true if OP is a parallel for a palignr permute.
1652 (define_predicate "palignr_operand"
1653 (and (match_code "parallel")
1654 (match_code "const_int" "a"))
1656 int elt = INTVAL (XVECEXP (op, 0, 0));
1657 int i, nelt = XVECLEN (op, 0);
1659 /* Check that an order in the permutation is suitable for palignr.
1660 For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm". */
1661 for (i = 1; i < nelt; ++i)
1662 if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
1667 ;; Return true if OP is a proper third operand to vpblendw256.
1668 (define_predicate "avx2_pblendw_operand"
1669 (match_code "const_int")
1671 HOST_WIDE_INT val = INTVAL (op);
1672 HOST_WIDE_INT low = val & 0xff;
/* The low byte must be duplicated into the second byte.  */
1673 return val == ((low << 8) | low);
;; Return true if OP is either a vector_operand or a CONST_VECTOR.
(define_predicate "general_vector_operand"
  (ior (match_code "const_vector")
       (match_operand 0 "vector_operand")))

;; Return true if OP is the constant -1 or held in a register.
(define_predicate "register_or_constm1_operand"
  (ior (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))
       (match_operand 0 "register_operand")))
1687 ;; Return true if the vector ends with between 12 and 18 register saves using
1688 ;; RAX as the base address.
1689 (define_predicate "save_multiple"
1690 (match_code "parallel")
;; NOTE(review): embedded numbering has gaps (1691, 1693-1694, ...);
;; braces, continue/break statements and returns are missing here.
1692 const unsigned len = XVECLEN (op, 0);
1695 /* Starting from end of vector, count register saves. */
1696 for (i = 0; i < len; ++i)
1698 rtx src, dest, addr;
1699 rtx e = XVECEXP (op, 0, len - 1 - i);
1701 if (GET_CODE (e) != SET
1705 dest = SET_DEST (e);
/* A save is a register stored into memory.  */
1707 if (!REG_P (src) || !MEM_P (dest))
1710 addr = XEXP (dest, 0);
1712 /* Good if dest address is in RAX. */
1713 if (REG_P (addr) && REGNO (addr) == AX_REG)
1716 /* Good if dest address is offset of RAX. */
1717 if (GET_CODE (addr) == PLUS
1718 && REG_P (XEXP (addr, 0))
1719 && REGNO (XEXP (addr, 0)) == AX_REG)
1724 return (i >= 12 && i <= 18);
1728 ;; Return true if the vector ends with between 12 and 18 register loads using
1729 ;; RSI as the base address.
1730 (define_predicate "restore_multiple"
1731 (match_code "parallel")
1733 const unsigned len = XVECLEN (op, 0);
1736 /* Starting from end of vector, count register restores. */
1737 for (i = 0; i < len; ++i)
1739 rtx src, dest, addr;
1740 rtx e = XVECEXP (op, 0, len - 1 - i);
1742 if (GET_CODE (e) != SET
1746 dest = SET_DEST (e);
/* A restore is memory loaded into a register.  */
1748 if (!MEM_P (src) || !REG_P (dest))
1751 addr = XEXP (src, 0);
1753 /* Good if src address is in RSI. */
1754 if (REG_P (addr) && REGNO (addr) == SI_REG)
1757 /* Good if src address is offset of RSI. */
1758 if (GET_CODE (addr) == PLUS
1759 && REG_P (XEXP (addr, 0))
1760 && REGNO (XEXP (addr, 0)) == SI_REG)
1765 return (i >= 12 && i <= 18);
1768 ;; Keylocker specific predicates
1769 (define_predicate "encodekey128_operation"
1770 (match_code "parallel")
;; NOTE(review): embedded numbering has gaps (1771-1774, 1776-1777,
;; ...); braces and return statements are missing from this listing.
/* The PARALLEL must have exactly 8 elements for ENCODEKEY128.  */
1775 if (XVECLEN (op, 0) != 8)
/* Elements 1..3: xmm0-xmm2 set from an UNSPEC_VOLATILE on const0.  */
1778 for(i = 0; i < 3; i++)
1780 elt = XVECEXP (op, 0, i + 1);
1781 if (GET_CODE (elt) != SET
1782 || GET_CODE (SET_DEST (elt)) != REG
1783 || GET_MODE (SET_DEST (elt)) != V2DImode
1784 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1785 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
1786 || GET_MODE (SET_SRC (elt)) != V2DImode
1787 || XVECLEN(SET_SRC (elt), 0) != 1
1788 || XVECEXP(SET_SRC (elt), 0, 0) != const0_rtx)
/* Elements 4..6: xmm4-xmm6 cleared to zero.  */
1792 for(i = 4; i < 7; i++)
1794 elt = XVECEXP (op, 0, i);
1795 if (GET_CODE (elt) != SET
1796 || GET_CODE (SET_DEST (elt)) != REG
1797 || GET_MODE (SET_DEST (elt)) != V2DImode
1798 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1799 || SET_SRC (elt) != CONST0_RTX (V2DImode))
/* The final element must clobber the flags register.  */
1803 elt = XVECEXP (op, 0, 7);
1804 if (GET_CODE (elt) != CLOBBER
1805 || GET_MODE (elt) != VOIDmode
1806 || GET_CODE (XEXP (elt, 0)) != REG
1807 || GET_MODE (XEXP (elt, 0)) != CCmode
1808 || REGNO (XEXP (elt, 0)) != FLAGS_REG)
1813 (define_predicate "encodekey256_operation"
1814 (match_code "parallel")
/* ENCODEKEY256 uses a 9-element PARALLEL.  */
1819 if (XVECLEN (op, 0) != 9)
/* The first SET's source UNSPEC must reference xmm1 as operand 2.  */
1822 elt = SET_SRC (XVECEXP (op, 0, 0));
1823 elt = XVECEXP (elt, 0, 2);
1825 || REGNO(elt) != GET_SSE_REGNO (1))
1828 for(i = 0; i < 4; i++)
1830 elt = XVECEXP (op, 0, i + 1);
1831 if (GET_CODE (elt) != SET
1832 || GET_CODE (SET_DEST (elt)) != REG
1833 || GET_MODE (SET_DEST (elt)) != V2DImode
1834 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1835 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
1836 || GET_MODE (SET_SRC (elt)) != V2DImode
1837 || XVECLEN(SET_SRC (elt), 0) != 1
1838 || XVECEXP(SET_SRC (elt), 0, 0) != const0_rtx)
1842 for(i = 4; i < 7; i++)
1844 elt = XVECEXP (op, 0, i + 1);
1845 if (GET_CODE (elt) != SET
1846 || GET_CODE (SET_DEST (elt)) != REG
1847 || GET_MODE (SET_DEST (elt)) != V2DImode
1848 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1849 || SET_SRC (elt) != CONST0_RTX (V2DImode))
/* The final element must clobber the flags register.  */
1853 elt = XVECEXP (op, 0, 8);
1854 if (GET_CODE (elt) != CLOBBER
1855 || GET_MODE (elt) != VOIDmode
1856 || GET_CODE (XEXP (elt, 0)) != REG
1857 || GET_MODE (XEXP (elt, 0)) != CCmode
1858 || REGNO (XEXP (elt, 0)) != FLAGS_REG)
1864 (define_predicate "aeswidekl_operation"
1865 (match_code "parallel")
1870 for (i = 0; i < 8; i++)
1872 elt = XVECEXP (op, 0, i + 1);
1873 if (GET_CODE (elt) != SET
1874 || GET_CODE (SET_DEST (elt)) != REG
1875 || GET_MODE (SET_DEST (elt)) != V2DImode
1876 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1877 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
1878 || GET_MODE (SET_SRC (elt)) != V2DImode
1879 || XVECLEN (SET_SRC (elt), 0) != 1
1880 || REGNO (XVECEXP (SET_SRC (elt), 0, 0)) != GET_SSE_REGNO (i))