0a3ab4dce68f23247bc53cd2a50d4d681fcd18f4
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
19
20 ;; Return true if OP is either an i387 or SSE fp register.
21 (define_predicate "any_fp_register_operand"
22 (and (match_code "reg")
23 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
24
25 ;; Return true if OP is an i387 fp register.
26 (define_predicate "fp_register_operand"
27 (and (match_code "reg")
28 (match_test "STACK_REGNO_P (REGNO (op))")))
29
30 ;; True if the operand is a GENERAL class register.
31 (define_predicate "general_reg_operand"
32 (and (match_code "reg")
33 (match_test "GENERAL_REGNO_P (REGNO (op))")))
34
35 ;; True if the operand is a nonimmediate operand with GENERAL class register.
36 (define_predicate "nonimmediate_gr_operand"
37 (if_then_else (match_code "reg")
38 (match_test "GENERAL_REGNO_P (REGNO (op))")
39 (match_operand 0 "nonimmediate_operand")))
40
41 ;; True if the operand is a general operand with GENERAL class register.
42 (define_predicate "general_gr_operand"
43 (if_then_else (match_code "reg")
44 (match_test "GENERAL_REGNO_P (REGNO (op))")
45 (match_operand 0 "general_operand")))
46
47 ;; True if the operand is an MMX register.
48 (define_predicate "mmx_reg_operand"
49 (and (match_code "reg")
50 (match_test "MMX_REGNO_P (REGNO (op))")))
51
52 ;; Match register operands, but include memory operands for
53 ;; !TARGET_MMX_WITH_SSE.
54 (define_predicate "register_mmxmem_operand"
55 (ior (match_operand 0 "register_operand")
56 (and (not (match_test "TARGET_MMX_WITH_SSE"))
57 (match_operand 0 "memory_operand"))))
58
59 ;; True if the operand is an SSE register.
60 (define_predicate "sse_reg_operand"
61 (and (match_code "reg")
62 (match_test "SSE_REGNO_P (REGNO (op))")))
63
64 ;; Return true if op is a QImode register.
65 (define_predicate "any_QIreg_operand"
66 (and (match_code "reg")
67 (match_test "ANY_QI_REGNO_P (REGNO (op))")))
68
69 ;; Return true if op is one of QImode registers: %[abcd][hl].
70 (define_predicate "QIreg_operand"
71 (and (match_code "reg")
72 (match_test "QI_REGNO_P (REGNO (op))")))
73
74 ;; Return true if op is a QImode register operand other than %[abcd][hl].
75 (define_predicate "ext_QIreg_operand"
76 (and (match_test "TARGET_64BIT")
77 (match_code "reg")
78 (not (match_test "QI_REGNO_P (REGNO (op))"))))
79
80 ;; Return true if op is the AX register.
81 (define_predicate "ax_reg_operand"
82 (and (match_code "reg")
83 (match_test "REGNO (op) == AX_REG")))
84
85 ;; Return true if op is the flags register.
86 (define_predicate "flags_reg_operand"
87 (and (match_code "reg")
88 (match_test "REGNO (op) == FLAGS_REG")))
89
90 ;; True if the operand is a MASK register.
91 (define_predicate "mask_reg_operand"
92 (and (match_code "reg")
93 (match_test "MASK_REGNO_P (REGNO (op))")))
94
95 ;; Match a DI, SI, HI or QImode nonimmediate_operand.
96 (define_special_predicate "int_nonimmediate_operand"
97 (and (match_operand 0 "nonimmediate_operand")
98 (ior (and (match_test "TARGET_64BIT")
99 (match_test "GET_MODE (op) == DImode"))
100 (match_test "GET_MODE (op) == SImode")
101 (match_test "GET_MODE (op) == HImode")
102 (match_test "GET_MODE (op) == QImode"))))
103
104 ;; Match register operands, but include memory operands for TARGET_SSE_MATH.
105 (define_predicate "register_ssemem_operand"
106 (if_then_else
107 (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
108 (match_operand 0 "nonimmediate_operand")
109 (match_operand 0 "register_operand")))
110
111 ;; Match nonimmediate operands, but exclude memory operands
112 ;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
113 (define_predicate "nonimm_ssenomem_operand"
114 (if_then_else
115 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
116 (not (match_test "TARGET_MIX_SSE_I387")))
117 (match_operand 0 "register_operand")
118 (match_operand 0 "nonimmediate_operand")))
119
120 ;; The above predicate, suitable for x87 arithmetic operators.
121 (define_predicate "x87nonimm_ssenomem_operand"
122 (if_then_else
123 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
124 (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
125 (match_operand 0 "register_operand")
126 (match_operand 0 "nonimmediate_operand")))
127
128 ;; Match register operands, but include memory operands for TARGET_SSE4_1.
129 (define_predicate "register_sse4nonimm_operand"
130 (if_then_else (match_test "TARGET_SSE4_1")
131 (match_operand 0 "nonimmediate_operand")
132 (match_operand 0 "register_operand")))
133
134 ;; Return true if VALUE is a symbol reference.
135 (define_predicate "symbol_operand"
136 (match_code "symbol_ref"))
137
138 ;; Return true if VALUE is an ENDBR opcode in immediate field.
139 (define_predicate "ix86_endbr_immediate_operand"
140 (match_code "const_int")
141 {
142 if (flag_cf_protection & CF_BRANCH)
143 {
144 unsigned HOST_WIDE_INT imm = UINTVAL (op);
145 unsigned HOST_WIDE_INT val = TARGET_64BIT ? 0xfa1e0ff3 : 0xfb1e0ff3;
146
147 if (imm == val)
148 return 1;
149
150 /* NB: Encoding is byte based.  Scan every byte position of the
   immediate for the ENDBR pattern, shifting right a byte at a time.  */
151 if (TARGET_64BIT)
152 for (; imm >= val; imm >>= 8)
153 if (imm == val)
154 return 1;
155 }
156
157 return 0;
158 })
159
160 ;; Return true if VALUE can be stored in a sign extended immediate field.
161 (define_predicate "x86_64_immediate_operand"
162 (match_code "const_int,symbol_ref,label_ref,const")
163 {
164 if (ix86_endbr_immediate_operand (op, VOIDmode))
165 return false;
166
167 if (!TARGET_64BIT)
168 return immediate_operand (op, mode);
169
170 switch (GET_CODE (op))
171 {
172 case CONST_INT:
173 {
174 HOST_WIDE_INT val = INTVAL (op);
175 return trunc_int_for_mode (val, SImode) == val;
176 }
177 case SYMBOL_REF:
178 /* TLS symbols are not constant. */
179 if (SYMBOL_REF_TLS_MODEL (op))
180 return false;
181
182 /* Load the external function address via the GOT slot. */
183 if (ix86_force_load_from_GOT_p (op))
184 return false;
185
186 /* For certain code models, the symbolic references are known to fit:
187 in CM_SMALL_PIC model we know it fits if it is local to the shared
188 library. Don't count TLS SYMBOL_REFs here, since they should fit
189 only if inside of UNSPEC handled below. */
190 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
191 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
192
193 case LABEL_REF:
194 /* For certain code models, the code is near as well. */
195 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
196 || ix86_cmodel == CM_KERNEL);
197
198 case CONST:
199 /* We also may accept the offsetted memory references in certain
200 special cases. */
201 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
202 switch (XINT (XEXP (op, 0), 1))
203 {
204 case UNSPEC_GOTPCREL:
205 case UNSPEC_DTPOFF:
206 case UNSPEC_GOTNTPOFF:
207 case UNSPEC_NTPOFF:
208 return true;
209 default:
210 break;
211 }
212
213 if (GET_CODE (XEXP (op, 0)) == PLUS)
214 {
215 rtx op1 = XEXP (XEXP (op, 0), 0);
216 rtx op2 = XEXP (XEXP (op, 0), 1);
217
218 if (ix86_cmodel == CM_LARGE && GET_CODE (op1) != UNSPEC)
219 return false;
220 if (!CONST_INT_P (op2))
221 return false;
222
223 HOST_WIDE_INT offset = INTVAL (op2);
224 if (trunc_int_for_mode (offset, SImode) != offset)
225 return false;
226
227 switch (GET_CODE (op1))
228 {
229 case SYMBOL_REF:
230 /* TLS symbols are not constant. */
231 if (SYMBOL_REF_TLS_MODEL (op1))
232 return false;
233
234 /* Load the external function address via the GOT slot. */
235 if (ix86_force_load_from_GOT_p (op1))
236 return false;
237
238 /* For CM_SMALL assume that latest object is 16MB before
239 end of 31bits boundary. We may also accept pretty
240 large negative constants knowing that all objects are
241 in the positive half of address space. */
242 if ((ix86_cmodel == CM_SMALL
243 || (ix86_cmodel == CM_MEDIUM
244 && !SYMBOL_REF_FAR_ADDR_P (op1)))
245 && offset < 16*1024*1024)
246 return true;
247 /* For CM_KERNEL we know that all objects reside in the
248 negative half of 32bits address space. We may not
249 accept negative offsets, since they may be just off
250 and we may accept pretty large positive ones. */
251 if (ix86_cmodel == CM_KERNEL
252 && offset > 0)
253 return true;
254 break;
255
256 case LABEL_REF:
257 /* These conditions are similar to SYMBOL_REF ones, just the
258 constraints for code models differ. */
259 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
260 && offset < 16*1024*1024)
261 return true;
262 if (ix86_cmodel == CM_KERNEL
263 && offset > 0)
264 return true;
265 break;
266
267 case UNSPEC:
268 switch (XINT (op1, 1))
269 {
270 case UNSPEC_DTPOFF:
271 case UNSPEC_NTPOFF:
272 return true;
273 }
274 break;
275
276 default:
277 break;
278 }
279 }
280 break;
281
282 default:
283 gcc_unreachable ();
284 }
285
286 return false;
287 })
288
289 ;; Return true if VALUE can be stored in the zero extended immediate field.
290 (define_predicate "x86_64_zext_immediate_operand"
291 (match_code "const_int,symbol_ref,label_ref,const")
292 {
293 if (ix86_endbr_immediate_operand (op, VOIDmode))
294 return false;
295
296 switch (GET_CODE (op))
297 {
298 case CONST_INT:
/* The value must have no bits set above bit 31.  */
299 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
300
301 case SYMBOL_REF:
302 /* TLS symbols are not constant. */
303 if (SYMBOL_REF_TLS_MODEL (op))
304 return false;
305
306 /* Load the external function address via the GOT slot. */
307 if (ix86_force_load_from_GOT_p (op))
308 return false;
309
310 /* For certain code models, the symbolic references are known to fit. */
311 return (ix86_cmodel == CM_SMALL
312 || (ix86_cmodel == CM_MEDIUM
313 && !SYMBOL_REF_FAR_ADDR_P (op)));
314
315 case LABEL_REF:
316 /* For certain code models, the code is near as well. */
317 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
318
319 case CONST:
320 /* We also may accept the offsetted memory references in certain
321 special cases. */
322 if (GET_CODE (XEXP (op, 0)) == PLUS)
323 {
324 rtx op1 = XEXP (XEXP (op, 0), 0);
325 rtx op2 = XEXP (XEXP (op, 0), 1);
326
327 if (ix86_cmodel == CM_LARGE)
328 return false;
329 if (!CONST_INT_P (op2))
330 return false;
331
332 HOST_WIDE_INT offset = INTVAL (op2);
333 if (trunc_int_for_mode (offset, SImode) != offset)
334 return false;
335
336 switch (GET_CODE (op1))
337 {
338 case SYMBOL_REF:
339 /* TLS symbols are not constant. */
340 if (SYMBOL_REF_TLS_MODEL (op1))
341 return false;
342
343 /* Load the external function address via the GOT slot. */
344 if (ix86_force_load_from_GOT_p (op1))
345 return false;
346
347 /* For small code model we may accept pretty large positive
348 offsets, since one bit is available for free. Negative
349 offsets are limited by the size of NULL pointer area
350 specified by the ABI. */
351 if ((ix86_cmodel == CM_SMALL
352 || (ix86_cmodel == CM_MEDIUM
353 && !SYMBOL_REF_FAR_ADDR_P (op1)))
354 && offset > -0x10000)
355 return true;
356 /* ??? For the kernel, we may accept adjustment of
357 -0x10000000, since we know that it will just convert
358 negative address space to positive, but perhaps this
359 is not worthwhile. */
360 break;
361
362 case LABEL_REF:
363 /* These conditions are similar to SYMBOL_REF ones, just the
364 constraints for code models differ. */
365 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
366 && offset > -0x10000)
367 return true;
368 break;
369
370 default:
371 return false;
372 }
373 }
374 break;
375
376 default:
377 gcc_unreachable ();
378 }
379 return false;
380 })
381
382 ;; Return true if VALUE is a constant integer whose low and high words satisfy
383 ;; x86_64_immediate_operand.
384 (define_predicate "x86_64_hilo_int_operand"
385 (match_code "const_int,const_wide_int")
386 {
387 switch (GET_CODE (op))
388 {
389 case CONST_INT:
390 return x86_64_immediate_operand (op, mode);
391
392 case CONST_WIDE_INT:
393 gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
394 return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
395 DImode)
396 && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
397 1)),
398 DImode));
399
400 default:
401 gcc_unreachable ();
402 }
403 })
404
405 ;; Return true if VALUE is a constant integer whose value is
406 ;; x86_64_immediate_operand value zero extended from word mode to mode.
407 (define_predicate "x86_64_dwzext_immediate_operand"
408 (match_code "const_int,const_wide_int")
409 {
410 if (ix86_endbr_immediate_operand (op, VOIDmode))
411 return false;
412
413 switch (GET_CODE (op))
414 {
415 case CONST_INT:
416 if (!TARGET_64BIT)
417 return UINTVAL (op) <= HOST_WIDE_INT_UC (0xffffffff);
418 return UINTVAL (op) <= HOST_WIDE_INT_UC (0x7fffffff);
419
420 case CONST_WIDE_INT:
421 if (!TARGET_64BIT)
422 return false;
/* The high word must be zero and the low word must be a
   sign-extended 32-bit value.  */
423 return (CONST_WIDE_INT_NUNITS (op) == 2
424 && CONST_WIDE_INT_ELT (op, 1) == 0
425 && (trunc_int_for_mode (CONST_WIDE_INT_ELT (op, 0), SImode)
426 == (HOST_WIDE_INT) CONST_WIDE_INT_ELT (op, 0)));
427
428 default:
429 gcc_unreachable ();
430 }
431 })
432
433 ;; Return true if size of VALUE can be stored in a sign
434 ;; extended immediate field.
435 (define_predicate "x86_64_immediate_size_operand"
436 (and (match_code "symbol_ref")
437 (ior (not (match_test "TARGET_64BIT"))
438 (match_test "ix86_cmodel == CM_SMALL")
439 (match_test "ix86_cmodel == CM_KERNEL"))))
440
441 ;; Return true if OP is general operand representable on x86_64.
442 (define_predicate "x86_64_general_operand"
443 (if_then_else (match_test "TARGET_64BIT")
444 (ior (match_operand 0 "nonimmediate_operand")
445 (match_operand 0 "x86_64_immediate_operand"))
446 (match_operand 0 "general_operand")))
447
448 ;; Return true if OP's both words are general operands representable
449 ;; on x86_64.
450 (define_predicate "x86_64_hilo_general_operand"
451 (if_then_else (match_test "TARGET_64BIT")
452 (ior (match_operand 0 "nonimmediate_operand")
453 (match_operand 0 "x86_64_hilo_int_operand"))
454 (match_operand 0 "general_operand")))
455
456 ;; Return true if OP is non-VOIDmode general operand representable
457 ;; on x86_64. This predicate is used in sign-extending conversion
458 ;; operations that require non-VOIDmode immediate operands.
459 (define_predicate "x86_64_sext_operand"
460 (and (match_test "GET_MODE (op) != VOIDmode")
461 (match_operand 0 "x86_64_general_operand")))
462
463 ;; Return true if OP is non-VOIDmode general operand. This predicate
464 ;; is used in sign-extending conversion operations that require
465 ;; non-VOIDmode immediate operands.
466 (define_predicate "sext_operand"
467 (and (match_test "GET_MODE (op) != VOIDmode")
468 (match_operand 0 "general_operand")))
469
470 ;; Return true if OP is representable on x86_64 as zero-extended operand.
471 ;; This predicate is used in zero-extending conversion operations that
472 ;; require non-VOIDmode immediate operands.
473 (define_predicate "x86_64_zext_operand"
474 (if_then_else (match_test "TARGET_64BIT")
475 (ior (match_operand 0 "nonimmediate_operand")
476 (and (match_operand 0 "x86_64_zext_immediate_operand")
477 (match_test "GET_MODE (op) != VOIDmode")))
478 (match_operand 0 "nonimmediate_operand")))
479
480 ;; Return true if OP is general operand representable on x86_64
481 ;; as either sign extended or zero extended constant.
482 (define_predicate "x86_64_szext_general_operand"
483 (if_then_else (match_test "TARGET_64BIT")
484 (ior (match_operand 0 "nonimmediate_operand")
485 (match_operand 0 "x86_64_immediate_operand")
486 (match_operand 0 "x86_64_zext_immediate_operand"))
487 (match_operand 0 "general_operand")))
488
489 ;; Return true if OP is nonmemory operand representable on x86_64.
490 (define_predicate "x86_64_nonmemory_operand"
491 (if_then_else (match_test "TARGET_64BIT")
492 (ior (match_operand 0 "register_operand")
493 (match_operand 0 "x86_64_immediate_operand"))
494 (match_operand 0 "nonmemory_operand")))
495
496 ;; Return true if OP is nonmemory operand representable on x86_64
;; as either sign extended or zero extended constant.
497 (define_predicate "x86_64_szext_nonmemory_operand"
498 (if_then_else (match_test "TARGET_64BIT")
499 (ior (match_operand 0 "register_operand")
500 (match_operand 0 "x86_64_immediate_operand")
501 (match_operand 0 "x86_64_zext_immediate_operand"))
502 (match_operand 0 "nonmemory_operand")))
503
504 ;; Return true when operand is PIC expression that can be computed by the
505 ;; lea instruction.
506 (define_predicate "pic_32bit_operand"
507 (match_code "const,symbol_ref,label_ref")
508 {
509 if (!flag_pic)
510 return false;
511
512 /* Rule out relocations that translate into 64bit constants. */
513 if (TARGET_64BIT && GET_CODE (op) == CONST)
514 {
515 op = XEXP (op, 0);
516 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
517 op = XEXP (op, 0);
518 if (GET_CODE (op) == UNSPEC
519 && (XINT (op, 1) == UNSPEC_GOTOFF
520 || XINT (op, 1) == UNSPEC_GOT))
521 return false;
522 }
523
524 return symbolic_operand (op, mode);
525 })
526
527 ;; Return true if OP is nonmemory operand acceptable by movabs patterns.
528 (define_predicate "x86_64_movabs_operand"
529 (and (match_operand 0 "nonmemory_operand")
530 (not (match_operand 0 "pic_32bit_operand"))))
531
532 ;; Return true if OP is either a symbol reference or a sum of a symbol
533 ;; reference and a constant.
534 (define_predicate "symbolic_operand"
535 (match_code "symbol_ref,label_ref,const")
536 {
537 switch (GET_CODE (op))
538 {
539 case SYMBOL_REF:
540 case LABEL_REF:
541 return true;
542
543 case CONST:
544 op = XEXP (op, 0);
545 if (GET_CODE (op) == SYMBOL_REF
546 || GET_CODE (op) == LABEL_REF
547 || (GET_CODE (op) == UNSPEC
548 && (XINT (op, 1) == UNSPEC_GOT
549 || XINT (op, 1) == UNSPEC_GOTOFF
550 || XINT (op, 1) == UNSPEC_PCREL
551 || XINT (op, 1) == UNSPEC_GOTPCREL)))
552 return true;
553 if (GET_CODE (op) != PLUS
554 || !CONST_INT_P (XEXP (op, 1)))
555 return false;
556
557 op = XEXP (op, 0);
558 if (GET_CODE (op) == SYMBOL_REF
559 || GET_CODE (op) == LABEL_REF)
560 return true;
561 /* Only @GOTOFF gets offsets. */
562 if (GET_CODE (op) != UNSPEC
563 || XINT (op, 1) != UNSPEC_GOTOFF)
564 return false;
565
566 op = XVECEXP (op, 0, 0);
567 if (GET_CODE (op) == SYMBOL_REF
568 || GET_CODE (op) == LABEL_REF)
569 return true;
570 return false;
571
572 default:
573 gcc_unreachable ();
574 }
575 })
576
577 ;; Return true if OP is a symbolic operand that resolves locally.
578 (define_predicate "local_symbolic_operand"
579 (match_code "const,label_ref,symbol_ref")
580 {
581 if (GET_CODE (op) == CONST
582 && GET_CODE (XEXP (op, 0)) == PLUS
583 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
584 op = XEXP (XEXP (op, 0), 0);
585
586 if (GET_CODE (op) == LABEL_REF)
587 return true;
588
589 if (GET_CODE (op) != SYMBOL_REF)
590 return false;
591
592 if (SYMBOL_REF_TLS_MODEL (op))
593 return false;
594
595 /* Dll-imported symbols are always external. */
596 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
597 return false;
598 if (SYMBOL_REF_LOCAL_P (op))
599 return true;
600
601 /* There is, however, a not insubstantial body of code in the rest of
602 the compiler that assumes it can just stick the results of
603 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
604 /* ??? This is a hack. Should update the body of the compiler to
605 always create a DECL and invoke targetm.encode_section_info. */
606 if (strncmp (XSTR (op, 0), internal_label_prefix,
607 internal_label_prefix_len) == 0)
608 return true;
609
610 return false;
611 })
612
613 ;; Test for a legitimate @GOTOFF operand.
614 ;;
615 ;; VxWorks does not impose a fixed gap between segments; the run-time
616 ;; gap can be different from the object-file gap. We therefore can't
617 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
618 ;; same segment as the GOT. Unfortunately, the flexibility of linker
619 ;; scripts means that we can't be sure of that in general, so assume
620 ;; that @GOTOFF is never valid on VxWorks.
621 (define_predicate "gotoff_operand"
622 (and (not (match_test "TARGET_VXWORKS_RTP"))
623 (match_operand 0 "local_symbolic_operand")))
624
625 ;; Test for various thread-local symbols.
626 (define_special_predicate "tls_symbolic_operand"
627 (and (match_code "symbol_ref")
628 (match_test "SYMBOL_REF_TLS_MODEL (op)")))
629
630 (define_special_predicate "tls_modbase_operand"
631 (and (match_code "symbol_ref")
632 (match_test "op == ix86_tls_module_base ()")))
633
;; True if OP is a pattern accepted by ix86_tls_address_pattern_p,
;; i.e. one that contains a TLS address computation.
634 (define_predicate "tls_address_pattern"
635 (and (match_code "set,parallel,unspec,unspec_volatile")
636 (match_test "ix86_tls_address_pattern_p (op)")))
637
638 ;; Test for a pc-relative call operand
639 (define_predicate "constant_call_address_operand"
640 (match_code "symbol_ref")
641 {
642 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC
643 || flag_force_indirect_call)
644 return false;
645 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
646 return false;
647 return true;
648 })
649
650 ;; P6 processors will jump to the address after the decrement when %esp
651 ;; is used as a call operand, so they will execute return address as code.
652 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
653
654 (define_predicate "call_register_no_elim_operand"
655 (match_operand 0 "register_operand")
656 {
657 if (SUBREG_P (op))
658 op = SUBREG_REG (op);
659
660 if (!TARGET_64BIT && op == stack_pointer_rtx)
661 return false;
662
663 return register_no_elim_operand (op, mode);
664 })
665
666 ;; True for any register that is neither virtual nor eliminable. Used in
667 ;; places where instantiation of such a register may cause the pattern to
668 ;; not be recognized.
668 (define_predicate "register_no_elim_operand"
669 (match_operand 0 "register_operand")
670 {
671 if (SUBREG_P (op))
672 op = SUBREG_REG (op);
673 return !(op == arg_pointer_rtx
674 || op == frame_pointer_rtx
675 || IN_RANGE (REGNO (op),
676 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
677 })
678
679 ;; Similarly, but include the stack pointer. This is used to prevent esp
680 ;; from being used as an index reg.
681 (define_predicate "index_register_operand"
682 (match_operand 0 "register_operand")
683 {
684 if (SUBREG_P (op))
685 op = SUBREG_REG (op);
686 if (reload_completed)
687 return REG_OK_FOR_INDEX_STRICT_P (op);
688 else
689 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
690 })
691
692 ;; Return false if this is any eliminable register. Otherwise general_operand.
693 (define_predicate "general_no_elim_operand"
694 (if_then_else (match_code "reg,subreg")
695 (match_operand 0 "register_no_elim_operand")
696 (match_operand 0 "general_operand")))
697
698 ;; Return false if this is any eliminable register. Otherwise
699 ;; register_operand or a constant.
700 (define_predicate "nonmemory_no_elim_operand"
701 (ior (match_operand 0 "register_no_elim_operand")
702 (match_operand 0 "immediate_operand")))
703
704 ;; Test for a valid operand for indirect branch.
705 (define_predicate "indirect_branch_operand"
706 (ior (match_operand 0 "register_operand")
707 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
708 (not (match_test "TARGET_X32"))
709 (match_operand 0 "memory_operand"))))
710
711 ;; Return true if OP is a memory operand that can be used in sibcalls.
712 ;; Since sibcall never returns, we can only use call-clobbered register
713 ;; as GOT base. Allow GOT slot here only with pseudo register as GOT
714 ;; base. Properly handle sibcall over GOT slot with *sibcall_GOT_32
715 ;; and *sibcall_value_GOT_32 patterns.
716 (define_predicate "sibcall_memory_operand"
717 (match_operand 0 "memory_operand")
718 {
719 op = XEXP (op, 0);
720 if (CONSTANT_P (op))
721 return true;
722 if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
723 {
724 int regno = REGNO (XEXP (op, 0));
725 if (!HARD_REGISTER_NUM_P (regno) || call_used_or_fixed_reg_p (regno))
726 {
727 op = XEXP (op, 1);
728 if (GOT32_symbol_operand (op, VOIDmode))
729 return true;
730 }
731 }
732 return false;
733 })
734
735 ;; Return true if OP is a GOT memory operand.
736 (define_predicate "GOT_memory_operand"
737 (match_operand 0 "memory_operand")
738 {
739 op = XEXP (op, 0);
740 return (GET_CODE (op) == CONST
741 && GET_CODE (XEXP (op, 0)) == UNSPEC
742 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
743 })
744
745 ;; Test for a valid operand for a call instruction.
746 ;; Allow constant call address operands in Pmode only.
747 (define_special_predicate "call_insn_operand"
748 (ior (match_test "constant_call_address_operand
749 (op, mode == VOIDmode ? mode : Pmode)")
750 (match_operand 0 "call_register_no_elim_operand")
751 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
752 (ior (and (not (match_test "TARGET_X32"))
753 (match_operand 0 "memory_operand"))
754 (and (match_test "TARGET_X32 && Pmode == DImode")
755 (match_operand 0 "GOT_memory_operand"))))))
756
757 ;; Similarly, but for tail calls, in which we cannot allow memory references.
758 (define_special_predicate "sibcall_insn_operand"
759 (ior (match_test "constant_call_address_operand
760 (op, mode == VOIDmode ? mode : Pmode)")
761 (match_operand 0 "register_no_elim_operand")
762 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
763 (ior (and (not (match_test "TARGET_X32"))
764 (match_operand 0 "sibcall_memory_operand"))
765 (and (match_test "TARGET_X32 && Pmode == DImode")
766 (match_operand 0 "GOT_memory_operand"))))))
767
768 ;; Return true if OP is a 32-bit GOT symbol operand.
769 (define_predicate "GOT32_symbol_operand"
770 (match_test "GET_CODE (op) == CONST
771 && GET_CODE (XEXP (op, 0)) == UNSPEC
772 && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
773
774 ;; Match exactly zero.
775 (define_predicate "const0_operand"
776 (match_code "const_int,const_double,const_vector")
777 {
778 if (mode == VOIDmode)
779 mode = GET_MODE (op);
780 return op == CONST0_RTX (mode);
781 })
782
783 ;; Match one or a vector with all elements equal to one.
784 (define_predicate "const1_operand"
785 (match_code "const_int,const_double,const_vector")
786 {
787 if (mode == VOIDmode)
788 mode = GET_MODE (op);
789 return op == CONST1_RTX (mode);
790 })
791
792 ;; Match exactly -1.
793 (define_predicate "constm1_operand"
794 (and (match_code "const_int")
795 (match_test "op == constm1_rtx")))
796
797 ;; Match exactly eight.
798 (define_predicate "const8_operand"
799 (and (match_code "const_int")
800 (match_test "INTVAL (op) == 8")))
801
802 ;; Match exactly 128.
803 (define_predicate "const128_operand"
804 (and (match_code "const_int")
805 (match_test "INTVAL (op) == 128")))
806
807 ;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation.
808 (define_predicate "const_32bit_mask"
809 (and (match_code "const_int")
810 (match_test "trunc_int_for_mode (INTVAL (op), DImode)
811 == (HOST_WIDE_INT) 0xffffffff")))
812
813 ;; Match 2, 4, or 8. Used for leal multiplicands.
814 (define_predicate "const248_operand"
815 (match_code "const_int")
816 {
817 HOST_WIDE_INT i = INTVAL (op);
818 return i == 2 || i == 4 || i == 8;
819 })
820
821 ;; Match 1, 2, or 3. Used for lea shift amounts.
822 (define_predicate "const123_operand"
823 (match_code "const_int")
824 {
825 HOST_WIDE_INT i = INTVAL (op);
826 return i == 1 || i == 2 || i == 3;
827 })
828
829 ;; Match 2, 3, 6, or 7.
830 (define_predicate "const2367_operand"
831 (match_code "const_int")
832 {
833 HOST_WIDE_INT i = INTVAL (op);
834 return i == 2 || i == 3 || i == 6 || i == 7;
835 })
836
837 ;; Match 1, 2, 4, or 8.
838 (define_predicate "const1248_operand"
839 (match_code "const_int")
840 {
841 HOST_WIDE_INT i = INTVAL (op);
842 return i == 1 || i == 2 || i == 4 || i == 8;
843 })
844
845 ;; Match 3, 5, or 9. Used for leal multiplicands.
846 (define_predicate "const359_operand"
847 (match_code "const_int")
848 {
849 HOST_WIDE_INT i = INTVAL (op);
850 return i == 3 || i == 5 || i == 9;
851 })
852
853 ;; Match 4 or 8 to 11. Used for embedded rounding.
854 (define_predicate "const_4_or_8_to_11_operand"
855 (match_code "const_int")
856 {
857 HOST_WIDE_INT i = INTVAL (op);
858 return i == 4 || (i >= 8 && i <= 11);
859 })
860
861 ;; Match 4 or 8. Used for SAE.
862 (define_predicate "const48_operand"
863 (match_code "const_int")
864 {
865 HOST_WIDE_INT i = INTVAL (op);
866 return i == 4 || i == 8;
867 })
868
869 ;; Match 0 or 1.
870 (define_predicate "const_0_to_1_operand"
871 (and (match_code "const_int")
872 (ior (match_test "op == const0_rtx")
873 (match_test "op == const1_rtx"))))
874
875 ;; Match 0 to 3.
876 (define_predicate "const_0_to_3_operand"
877 (and (match_code "const_int")
878 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
879
880 ;; Match 0 to 4.
881 (define_predicate "const_0_to_4_operand"
882 (and (match_code "const_int")
883 (match_test "IN_RANGE (INTVAL (op), 0, 4)")))
884
885 ;; Match 0 to 5.
886 (define_predicate "const_0_to_5_operand"
887 (and (match_code "const_int")
888 (match_test "IN_RANGE (INTVAL (op), 0, 5)")))
889
890 ;; Match 0 to 7.
891 (define_predicate "const_0_to_7_operand"
892 (and (match_code "const_int")
893 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
894
895 ;; Match 0 to 15.
896 (define_predicate "const_0_to_15_operand"
897 (and (match_code "const_int")
898 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
899
900 ;; Match 0 to 31.
901 (define_predicate "const_0_to_31_operand"
902 (and (match_code "const_int")
903 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
904
905 ;; Match 0 to 63.
906 (define_predicate "const_0_to_63_operand"
907 (and (match_code "const_int")
908 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
909
910 ;; Match 0 to 255.
911 (define_predicate "const_0_to_255_operand"
912 (and (match_code "const_int")
913 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
914
915 ;; Match (0 to 255) * 8
916 (define_predicate "const_0_to_255_mul_8_operand"
917 (match_code "const_int")
918 {
919 unsigned HOST_WIDE_INT val = INTVAL (op);
920 return val <= 255*8 && val % 8 == 0;
921 })
922
923 ;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
924 ;; for shift & compare patterns, as shifting by 0 does not change flags).
925 (define_predicate "const_1_to_31_operand"
926 (and (match_code "const_int")
927 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
928
929 ;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
930 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
931 (define_predicate "const_1_to_63_operand"
932 (and (match_code "const_int")
933 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
934
935 ;; Match 2 or 3.
936 (define_predicate "const_2_to_3_operand"
937 (and (match_code "const_int")
938 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
939
940 ;; Match 4 to 5.
941 (define_predicate "const_4_to_5_operand"
942 (and (match_code "const_int")
943 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
944
945 ;; Match 4 to 7.
946 (define_predicate "const_4_to_7_operand"
947 (and (match_code "const_int")
948 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
949
950 ;; Match 6 to 7.
951 (define_predicate "const_6_to_7_operand"
952 (and (match_code "const_int")
953 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
954
955 ;; Match 8 to 9.
956 (define_predicate "const_8_to_9_operand"
957 (and (match_code "const_int")
958 (match_test "IN_RANGE (INTVAL (op), 8, 9)")))
959
960 ;; Match 8 to 11.
961 (define_predicate "const_8_to_11_operand"
962 (and (match_code "const_int")
963 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
964
;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 8, 15);
})
969
;; Match 10 to 11.
(define_predicate "const_10_to_11_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 10, 11);
})
974
;; Match 12 to 13.
(define_predicate "const_12_to_13_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 12, 13);
})
979
;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 12, 15);
})
984
;; Match 14 to 15.
(define_predicate "const_14_to_15_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 14, 15);
})
989
;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 16, 19);
})
994
;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 16, 31);
})
999
;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 20, 23);
})
1004
;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 24, 27);
})
1009
;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (match_code "const_int")
{
  return IN_RANGE (INTVAL (op), 28, 31);
})
1014
;; True if this is a constant appropriate for an increment or decrement,
;; i.e. CONST_INT 1 or -1, and the target wants inc/dec at all.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause an extra dependency on
     the flags register, since the carry flag is not set.  Only allow
     inc/dec when the target asks for it or when optimizing for size.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})
1025
;; True for registers, or const_int_operand.  Used by the vec_setm expander;
;; a variable (register) element index is only valid with AVX2.
(define_predicate "vec_setm_operand"
  (ior (and (match_operand 0 "register_operand")
	    (match_test "TARGET_AVX2"))
       (match_code "const_int")))
1031
;; True for registers, or the constants 1 or -1.  Used to optimize
;; double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))
1038
;; True if OP is acceptable as operand of DImode shift expander.
;; On 32-bit targets the double-word shift sequence needs the value
;; in a register; 64-bit targets shift in one insn and allow memory.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))
1044
;; True if OP is acceptable as the shifted operand of a DImode ashift
;; expander; 32-bit targets additionally accept the constants 1 and -1
;; (see reg_or_pm1_operand) for the double-word shift optimization.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
1049
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  /* Resolve a constant-pool reference to the constant it holds.  */
  op = avoid_constant_pool_reference (op);

  if (GET_CODE (op) != CONST_VECTOR)
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Elements 1 .. n-1 must all be zero; element 0 is unconstrained.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
1071
/* Return true if operand is an integral-mode vector constant with
   all bits set (CONSTM1 of its mode).  */
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))
1077
; Return true when OP is operand acceptable for vector memory operand.
; Only AVX can have misaligned memory operand; pre-AVX SSE requires the
; memory to be aligned to at least the mode's natural alignment.
(define_predicate "vector_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_test "TARGET_AVX")
	    (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))
1084
; Return true when OP is register_operand or vector_memory_operand.
(define_predicate "vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")))
1089
;; Return true for a (vec_duplicate (mem ...)) that can be used as an
;; AVX512 embedded-broadcast source: requires AVX512F, plus AVX512VL
;; for vectors narrower than 512 bits, and an element mode the EVEX
;; broadcast encoding supports.
(define_predicate "bcst_mem_operand"
  (and (match_code "vec_duplicate")
       (and (match_test "TARGET_AVX512F")
	    (ior (match_test "TARGET_AVX512VL")
		 (match_test "GET_MODE_SIZE (GET_MODE (op)) == 64")))
       (match_test "VALID_BCST_MODE_P (GET_MODE_INNER (GET_MODE (op)))")
       (match_test "memory_operand (XEXP (op, 0), GET_MODE (XEXP (op, 0)))")))
1097
; Return true when OP is vector_operand or bcst_mem_operand.
(define_predicate "bcst_vector_operand"
  (ior (match_operand 0 "vector_operand")
       (match_operand 0 "bcst_mem_operand")))
1102
;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))
1108
;; Return true when OP is nonimmediate or a standard SSE constant
;; (one that can be materialized without a load, e.g. all-zeros).
(define_predicate "nonimmediate_or_sse_const_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_test "standard_sse_constant_p (op, mode)")))
1113
;; Return true if OP is a register or a zero constant.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))
1118
; Return true when OP is a nonimmediate operand or a zero constant.
(define_predicate "nonimm_or_0_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))
1123
;; Return true for a memory operand whose address mentions no extended
;; (REX-only, r8-r15/xmm8+) registers.
(define_predicate "norex_memory_operand"
  (and (match_operand 0 "memory_operand")
       (not (match_test "x86_extended_reg_mentioned_p (op)"))))
1127
;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
1131
;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  /* Non-CONST_INT operands must agree with the requested mode;
     CONST_INTs are mode-less and always acceptable.  */
  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  /* Decomposition must succeed, since address_operand accepted OP.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == ADDR_SPACE_GENERIC;
})
1150
;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing
;; (i.e. no index register, no segment override, no RIP-relative forms).
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* The index slot is taken by the vector index register.  */
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  /* These unspecs are emitted as RIP-relative references.  */
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      /* Bare symbols under 64-bit PIC also become RIP-relative.  */
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
1192
;; Return true for any MEM; used as the operator predicate for the
;; gather/scatter VSIB memory wrapper.
(define_predicate "vsib_mem_operator"
  (match_code "mem"))
1195
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands end up
     promoting the memory operand to 64bit and thus causing a memory
     mismatch stall.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  /* Recorded alignment proves it directly.  */
  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      /* The displacement must be a multiple of 4 bytes.  */
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
1256
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  /* memory_operand guarantees the address decomposes.  */
  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})
1268
;; Return true if OP is memory operand with a displacement only
;; (no base and no index register).  Never true for 64-bit targets.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})
1287
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array, i.e. one needing extra address bytes.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
1293
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP comparisons are first mapped to the integer condition code
     that the FP compare sequence actually produces.  */
  if (inmode == CCFPmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})
1321
;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs, and require AVX's extended immediates.
(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
1330
;; Return true for equality and signed ordering comparisons.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))
1333
;; Return true for equality and unsigned ordering comparisons.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))
1336
;; Return true for EQ/NE, the comparisons usable after a BT instruction.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1339
;; Return true for GTU/LEU comparisons.
(define_predicate "shr_comparison_operator"
  (match_code "gtu,leu"))
1342
;; Return true for GEU/LTU comparisons.
(define_predicate "add_comparison_operator"
  (match_code "geu,ltu"))
1345
;; Return true if OP is a valid comparison operator in valid mode,
;; i.e. the condition can be tested from the flags the given CC mode
;; actually provides.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      if (inmode == CCGZmode)
	return false;
      return true;
    case GE: case LT:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode || inmode == CCGZmode)
	return true;
      return false;
    case GEU: case LTU:
      if (inmode == CCGZmode)
	return true;
      /* FALLTHRU */
    case GTU: case LEU:
      if (inmode == CCmode || inmode == CCCmode || inmode == CCGZmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
1387
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Map FP comparisons to the integer condition the FP compare
     sequence produces before checking for a carry test.  */
  if (inmode == CCFPmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  /* In CCmode, only LTU reads exactly the carry flag.  */
  return code == LTU;
})
1409
;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1413
;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.  When the arithmetic strategy is in use, any
;; comparison operator is acceptable.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
               (match_operand 0 "comparison_operator")
               (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1422
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))
1429
;; Return true if this is a valid binary floating-point operation,
;; i.e. PLUS, MINUS, MULT or DIV.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))
1433
;; Return true if this is a multiply operation (MULT rtx code only).
(define_predicate "mult_operator"
  (match_code "mult"))
1437
;; Return true if this is a division operation (DIV rtx code only).
(define_predicate "div_operator"
  (match_code "div"))
1441
;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))
1445
;; Return true for codes for which ARITHMETIC_P holds.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1450
;; Return true for codes for which COMMUTATIVE_P holds.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1454
;; Return true if OP is a binary operator that can be promoted to wider mode.
;; MULT qualifies only when the tuning prefers promoting HImode imul.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
1460
;; Return true if OP is a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))
1463
;; Return true if OP is a memory operand, aligned to
;; less than its natural alignment.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
1469
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  /* One leading element (the unspec, not examined here) followed by one
     clearing SET per SSE register: 16 on 64-bit, 8 on 32-bit.  */
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      /* Each element must set SSE register I to a V8SImode zero.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})
1492
;; Return true if OP is a vzeroall pattern: a PARALLEL whose first
;; element is the UNSPECV_VZEROALL unspec_volatile.
(define_predicate "vzeroall_pattern"
  (and (match_code "parallel")
       (match_code "unspec_volatile" "a")
       (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROALL")))
1498
;; Return true if OP is a vzeroupper pattern: a PARALLEL whose first
;; element is the UNSPECV_VZEROUPPER unspec_volatile.
(define_predicate "vzeroupper_pattern"
  (and (match_code "parallel")
       (match_code "unspec_volatile" "a")
       (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROUPPER")))
1504
;; Return true if OP is an addsub vec_merge operation: a vec_merge of a
;; MINUS and a PLUS (in either order) whose mask selects the MINUS for
;; even elements and the PLUS for odd elements.
(define_predicate "addsub_vm_operator"
  (match_code "vec_merge")
{
  rtx op0, op1;
  int swapped;
  HOST_WIDE_INT mask;
  int nunits, elt;

  op0 = XEXP (op, 0);
  op1 = XEXP (op, 1);

  /* Sanity check.  NOTE(review): any other operand shape hits
     gcc_unreachable — assumes callers only apply this predicate to
     vec_merge of plus/minus; confirm against uses in sse.md.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = 0;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = 1;
  else
    gcc_unreachable ();

  mask = INTVAL (XEXP (op, 2));
  nunits = GET_MODE_NUNITS (mode);

  for (elt = 0; elt < nunits; elt++)
    {
      /* bit clear: take from op0, set: take from op1  */
      int bit = !(mask & (HOST_WIDE_INT_1U << elt));

      if (bit != ((elt & 1) ^ swapped))
	return false;
    }

  return true;
})
1539
;; Return true if OP is an addsub vec_select/vec_concat operation:
;; a vec_select over the concatenation of a MINUS and a PLUS whose
;; permutation interleaves them addsub-style.
(define_predicate "addsub_vs_operator"
  (and (match_code "vec_select")
       (match_code "vec_concat" "0"))
{
  rtx op0, op1;
  bool swapped;
  int nunits, elt;

  op0 = XEXP (XEXP (op, 0), 0);
  op1 = XEXP (XEXP (op, 0), 1);

  /* Sanity check.  NOTE(review): any other operand shape hits
     gcc_unreachable — assumes callers constrain the operands.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = false;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = true;
  else
    gcc_unreachable ();

  nunits = GET_MODE_NUNITS (mode);
  if (XVECLEN (XEXP (op, 1), 0) != nunits)
    return false;

  /* We already checked that permutation is suitable for addsub,
     so only look at the first element of the parallel.  */
  elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));

  return elt == (swapped ? nunits : 0);
})
1570
;; Return true if OP is a parallel for an addsub vec_select.
(define_predicate "addsub_vs_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int nelt = XVECLEN (op, 0);
  int elt, i;

  if (nelt < 2)
    return false;

  /* Check that the permutation is suitable for addsub.
     For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }.
     The first element decides which of the two interleavings applies.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  if (elt == 0)
    {
      /* Even positions from the low half, odd from the high half.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
	  return false;
    }
  else if (elt == nelt)
    {
      /* Even positions from the high half, odd from the low half.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
	  return false;
    }
  else
    return false;

  return true;
})
1602
;; Return true if OP is a parallel for a pmovz{bw,wd,dq} vec_select,
;; where one of the two operands of the vec_concat is const0_operand.
(define_predicate "pmovzx_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int nelt = XVECLEN (op, 0);
  int elt, i;

  if (nelt < 2)
    return false;

  /* Check that the permutation is suitable for pmovz{bw,wd,dq}.
     For example { 0 16 1 17 2 18 3 19 4 20 5 21 6 22 7 23 }:
     even positions select element i/2 of the first operand, odd
     positions select (any) element of the zero operand.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  if (elt == 0)
    {
      for (i = 1; i < nelt; ++i)
	if ((i & 1) != 0)
	  {
	    if (INTVAL (XVECEXP (op, 0, i)) < nelt)
	      return false;
	  }
	else if (INTVAL (XVECEXP (op, 0, i)) != i / 2)
	  return false;
    }
  else
    return false;

  return true;
})
1634
;; Return true if OP is a parallel for a vbroadcast permute, i.e. all
;; selector elements are the identical rtx.
(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})
1650
;; Return true if OP is a parallel for a palignr permute, i.e. a
;; rotation of the element indices.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int elt = INTVAL (XVECEXP (op, 0, 0));
  int i, nelt = XVECLEN (op, 0);

  /* Check that an order in the permutation is suitable for palignr.
     For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm".  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
      return false;
  return true;
})
1666
;; Return true if OP is a proper third operand to vpblendw256:
;; a 16-bit mask whose high byte duplicates its low byte.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT mask = INTVAL (op);
  return mask == (mask & 0xff) * 0x0101;
})
1675
;; Return true if OP is vector_operand or CONST_VECTOR.
(define_predicate "general_vector_operand"
  (ior (match_operand 0 "vector_operand")
       (match_code "const_vector")))
1680
;; Return true if OP is either the constant -1 or stored in a register.
(define_predicate "register_or_constm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))))
1686
;; Return true if the vector ends with between 12 and 18 register saves using
;; RAX as the base address.
(define_predicate "save_multiple"
  (match_code "parallel")
{
  const unsigned len = XVECLEN (op, 0);
  unsigned i;

  /* Starting from end of vector, count register saves.  */
  for (i = 0; i < len; ++i)
    {
      rtx src, dest, addr;
      rtx e = XVECEXP (op, 0, len - 1 - i);

      if (GET_CODE (e) != SET)
	break;

      src  = SET_SRC (e);
      dest = SET_DEST (e);

      /* A save is a store of a register into memory.  */
      if (!REG_P (src) || !MEM_P (dest))
	break;

      addr = XEXP (dest, 0);

      /* Good if dest address is in RAX.  */
      if (REG_P (addr) && REGNO (addr) == AX_REG)
	continue;

      /* Good if dest address is offset of RAX.  */
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && REGNO (XEXP (addr, 0)) == AX_REG)
	continue;

      break;
    }
  return (i >= 12 && i <= 18);
})
1726
1727
;; Return true if the vector ends with between 12 and 18 register loads using
;; RSI as the base address.
(define_predicate "restore_multiple"
  (match_code "parallel")
{
  const unsigned len = XVECLEN (op, 0);
  unsigned i;

  /* Starting from end of vector, count register restores.  */
  for (i = 0; i < len; ++i)
    {
      rtx src, dest, addr;
      rtx e = XVECEXP (op, 0, len - 1 - i);

      if (GET_CODE (e) != SET)
	break;

      src  = SET_SRC (e);
      dest = SET_DEST (e);

      /* A restore is a load from memory into a register.  */
      if (!MEM_P (src) || !REG_P (dest))
	break;

      addr = XEXP (src, 0);

      /* Good if src address is in RSI.  */
      if (REG_P (addr) && REGNO (addr) == SI_REG)
	continue;

      /* Good if src address is offset of RSI.  */
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && REGNO (XEXP (addr, 0)) == SI_REG)
	continue;

      break;
    }
  return (i >= 12 && i <= 18);
})
1767
;; Keylocker specific predicates
;; Return true if OP matches the ENCODEKEY128 pattern: an 8-element
;; PARALLEL where elements 1-3 set %xmm0-%xmm2 from an unspec_volatile,
;; elements 4-6 clear %xmm4-%xmm6, and element 7 clobbers the flags.
;; (Elements 0 and 3 are not examined here.)
(define_predicate "encodekey128_operation"
  (match_code "parallel")
{
  unsigned i;
  rtx elt;

  if (XVECLEN (op, 0) != 8)
    return false;

  /* Elements 1-3: %xmm0-%xmm2 set from a one-operand unspec_volatile
     whose operand is const0_rtx.  */
  for(i = 0; i < 3; i++)
    {
      elt = XVECEXP (op, 0, i + 1);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V2DImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
	  || GET_MODE (SET_SRC (elt)) != V2DImode
	  || XVECLEN(SET_SRC (elt), 0) != 1
	  || XVECEXP(SET_SRC (elt), 0, 0) != const0_rtx)
	return false;
    }

  /* Elements 4-6: %xmm4-%xmm6 cleared.  */
  for(i = 4; i < 7; i++)
    {
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V2DImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V2DImode))
	return false;
    }

  /* Element 7: clobber of the flags register.  */
  elt = XVECEXP (op, 0, 7);
  if (GET_CODE (elt) != CLOBBER
      || GET_MODE (elt) != VOIDmode
      || GET_CODE (XEXP (elt, 0)) != REG
      || GET_MODE (XEXP (elt, 0)) != CCmode
      || REGNO (XEXP (elt, 0)) != FLAGS_REG)
    return false;
  return true;
})
1812
;; Return true if OP matches the ENCODEKEY256 pattern: a 9-element
;; PARALLEL where element 0's source has %xmm1 as its third unspec
;; operand, elements 1-4 set %xmm0-%xmm3 from an unspec_volatile,
;; elements 5-7 clear %xmm4-%xmm6, and element 8 clobbers the flags.
(define_predicate "encodekey256_operation"
  (match_code "parallel")
{
  unsigned i;
  rtx elt;

  if (XVECLEN (op, 0) != 9)
    return false;

  /* Element 0: its SET_SRC unspec must carry %xmm1 as operand 2.  */
  elt = SET_SRC (XVECEXP (op, 0, 0));
  elt = XVECEXP (elt, 0, 2);
  if (!REG_P (elt)
      || REGNO(elt) != GET_SSE_REGNO (1))
    return false;

  /* Elements 1-4: %xmm0-%xmm3 set from a one-operand unspec_volatile
     whose operand is const0_rtx.  */
  for(i = 0; i < 4; i++)
    {
      elt = XVECEXP (op, 0, i + 1);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V2DImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
	  || GET_MODE (SET_SRC (elt)) != V2DImode
	  || XVECLEN(SET_SRC (elt), 0) != 1
	  || XVECEXP(SET_SRC (elt), 0, 0) != const0_rtx)
	return false;
    }

  /* Elements 5-7: %xmm4-%xmm6 cleared.  */
  for(i = 4; i < 7; i++)
    {
      elt = XVECEXP (op, 0, i + 1);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V2DImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V2DImode))
	return false;
    }

  /* Element 8: clobber of the flags register.  */
  elt = XVECEXP (op, 0, 8);
  if (GET_CODE (elt) != CLOBBER
      || GET_MODE (elt) != VOIDmode
      || GET_CODE (XEXP (elt, 0)) != REG
      || GET_MODE (XEXP (elt, 0)) != CCmode
      || REGNO (XEXP (elt, 0)) != FLAGS_REG)
    return false;
  return true;
})
1862
1863
;; Return true if OP matches an AES wide-KL pattern: a PARALLEL whose
;; elements 1-8 each set %xmm0-%xmm7 from a one-operand unspec_volatile
;; reading back the same register.  (Element 0 is not examined here.)
(define_predicate "aeswidekl_operation"
  (match_code "parallel")
{
  unsigned i;
  rtx elt;

  /* Unlike the encodekey predicates above, the loop below indexes
     elements 1-8 directly, so reject vectors too short to hold them
     instead of reading out of bounds.  */
  if (XVECLEN (op, 0) < 9)
    return false;

  for (i = 0; i < 8; i++)
    {
      elt = XVECEXP (op, 0, i + 1);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V2DImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
	  || GET_MODE (SET_SRC (elt)) != V2DImode
	  || XVECLEN (SET_SRC (elt), 0) != 1
	  /* Verify the unspec operand is a register before taking
	     its REGNO.  */
	  || !REG_P (XVECEXP (SET_SRC (elt), 0, 0))
	  || REGNO (XVECEXP (SET_SRC (elt), 0, 0)) != GET_SSE_REGNO (i))
	return false;
    }
  return true;
})