gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
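/* As a quick illustration of the size_* entry points (a sketch, not code
   taken from this file): for a host integer I and an array element type
   TYPE, the byte offset of element I could be computed as

     tree off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (type), size_int (i));

   which folds to a single INTEGER_CST whenever TYPE_SIZE_UNIT (type) is
   constant and otherwise builds the (possibly simplified) MULT_EXPR.  */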
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85 #include "asan.h"
86
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
90
91 /* The following constants represent a bit-based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
111 };
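/* The values above form a simple bitmask: bit 0 stands for LT, bit 1 for
   EQ, bit 2 for GT and bit 3 for UNORD, so e.g. COMPCODE_LE == 3 is
   LT|EQ and COMPCODE_NE == 13 is UNORD|LT|GT.  With this encoding the
   logical AND or OR of two comparisons of the same operands is just the
   bitwise AND or OR of their codes, for instance

     (a < b) || (a == b)    maps to   COMPCODE_LT | COMPCODE_EQ, i.e. COMPCODE_LE
     (a <= b) && (a >= b)   maps to   COMPCODE_LE & COMPCODE_GE, i.e. COMPCODE_EQ

   which is what the comparison_to_compcode / compcode_to_comparison
   helpers below make convenient.  */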
112
113 static bool negate_expr_p (tree);
114 static tree negate_expr (tree);
115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
116 static enum comparison_code comparison_to_compcode (enum tree_code);
117 static enum tree_code compcode_to_comparison (enum comparison_code);
118 static bool twoval_comparison_p (tree, tree *, tree *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree optimize_bit_field_compare (location_t, enum tree_code,
121 tree, tree, tree);
122 static bool simple_operand_p (const_tree);
123 static bool simple_operand_p_2 (tree);
124 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
125 static tree range_predecessor (tree);
126 static tree range_successor (tree);
127 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (location_t,
133 enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140 static tree fold_view_convert_expr (tree, tree);
141 static tree fold_negate_expr (location_t, tree);
142
143
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145 Otherwise, return LOC. */
146
147 static location_t
148 expr_location_or (tree t, location_t loc)
149 {
150 location_t tloc = EXPR_LOCATION (t);
151 return tloc == UNKNOWN_LOCATION ? loc : tloc;
152 }
153
154 /* Similar to protected_set_expr_location, but never modify X in place;
155    if the location can and needs to be set, unshare X instead.  */
156
157 static inline tree
158 protected_set_expr_location_unshare (tree x, location_t loc)
159 {
160 if (CAN_HAVE_LOCATION_P (x)
161 && EXPR_LOCATION (x) != loc
162 && !(TREE_CODE (x) == SAVE_EXPR
163 || TREE_CODE (x) == TARGET_EXPR
164 || TREE_CODE (x) == BIND_EXPR))
165 {
166 x = copy_node (x);
167 SET_EXPR_LOCATION (x, loc);
168 }
169 return x;
170 }
171 \f
172 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
173 division and returns the quotient. Otherwise returns
174 NULL_TREE. */
175
176 tree
177 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 {
179 widest_int quo;
180
181 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
182 SIGNED, &quo))
183 return wide_int_to_tree (TREE_TYPE (arg1), quo);
184
185 return NULL_TREE;
186 }
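/* For example, for INTEGER_CST arguments 12 and 4 the function above
   returns the constant 3, while for 13 and 4 it returns NULL_TREE
   because the division leaves a remainder.  */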
187 \f
188 /* This is nonzero if we should defer warnings about undefined
189 overflow. This facility exists because these warnings are a
190 special case. The code to estimate loop iterations does not want
191 to issue any warnings, since it works with expressions which do not
192 occur in user code. Various bits of cleanup code call fold(), but
193 only use the result if it has certain characteristics (e.g., is a
194 constant); that code only wants to issue a warning if the result is
195 used. */
196
197 static int fold_deferring_overflow_warnings;
198
199 /* If a warning about undefined overflow is deferred, this is the
200 warning. Note that this may cause us to turn two warnings into
201 one, but that is fine since it is sufficient to only give one
202 warning per expression. */
203
204 static const char* fold_deferred_overflow_warning;
205
206 /* If a warning about undefined overflow is deferred, this is the
207 level at which the warning should be emitted. */
208
209 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210
211 /* Start deferring overflow warnings. We could use a stack here to
212 permit nested calls, but at present it is not necessary. */
213
214 void
215 fold_defer_overflow_warnings (void)
216 {
217 ++fold_deferring_overflow_warnings;
218 }
219
220 /* Stop deferring overflow warnings. If there is a pending warning,
221 and ISSUE is true, then issue the warning if appropriate. STMT is
222 the statement with which the warning should be associated (used for
223 location information); STMT may be NULL. CODE is the level of the
224 warning--a warn_strict_overflow_code value. This function will use
225 the smaller of CODE and the deferred code when deciding whether to
226 issue the warning. CODE may be zero to mean to always use the
227 deferred code. */
228
229 void
230 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 {
232 const char *warnmsg;
233 location_t locus;
234
235 gcc_assert (fold_deferring_overflow_warnings > 0);
236 --fold_deferring_overflow_warnings;
237 if (fold_deferring_overflow_warnings > 0)
238 {
239 if (fold_deferred_overflow_warning != NULL
240 && code != 0
241 && code < (int) fold_deferred_overflow_code)
242 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
243 return;
244 }
245
246 warnmsg = fold_deferred_overflow_warning;
247 fold_deferred_overflow_warning = NULL;
248
249 if (!issue || warnmsg == NULL)
250 return;
251
252 if (gimple_no_warning_p (stmt))
253 return;
254
255 /* Use the smallest code level when deciding to issue the
256 warning. */
257 if (code == 0 || code > (int) fold_deferred_overflow_code)
258 code = fold_deferred_overflow_code;
259
260 if (!issue_strict_overflow_warning (code))
261 return;
262
263 if (stmt == NULL)
264 locus = input_location;
265 else
266 locus = gimple_location (stmt);
267 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
268 }
269
270 /* Stop deferring overflow warnings, ignoring any deferred
271 warnings. */
272
273 void
274 fold_undefer_and_ignore_overflow_warnings (void)
275 {
276 fold_undefer_overflow_warnings (false, NULL, 0);
277 }
278
279 /* Whether we are deferring overflow warnings. */
280
281 bool
282 fold_deferring_overflow_warnings_p (void)
283 {
284 return fold_deferring_overflow_warnings > 0;
285 }
286
287 /* This is called when we fold something based on the fact that signed
288 overflow is undefined. */
289
290 void
291 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 {
293 if (fold_deferring_overflow_warnings > 0)
294 {
295 if (fold_deferred_overflow_warning == NULL
296 || wc < fold_deferred_overflow_code)
297 {
298 fold_deferred_overflow_warning = gmsgid;
299 fold_deferred_overflow_code = wc;
300 }
301 }
302 else if (issue_strict_overflow_warning (wc))
303 warning (OPT_Wstrict_overflow, gmsgid);
304 }
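/* A typical use of the deferral machinery above, sketched after the way
   the loop-iteration estimators call into fold (the real call sites live
   elsewhere and may differ in detail):

     fold_defer_overflow_warnings ();
     tree res = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   so that the -Wstrict-overflow diagnostic is only emitted when the
   folded result is actually used.  */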
305 \f
306 /* Return true if the built-in mathematical function specified by FN
307    is odd, i.e. -f(x) == f(-x).  */
308
309 bool
310 negate_mathfn_p (combined_fn fn)
311 {
312 switch (fn)
313 {
314 CASE_CFN_ASIN:
315 CASE_CFN_ASINH:
316 CASE_CFN_ATAN:
317 CASE_CFN_ATANH:
318 CASE_CFN_CASIN:
319 CASE_CFN_CASINH:
320 CASE_CFN_CATAN:
321 CASE_CFN_CATANH:
322 CASE_CFN_CBRT:
323 CASE_CFN_CPROJ:
324 CASE_CFN_CSIN:
325 CASE_CFN_CSINH:
326 CASE_CFN_CTAN:
327 CASE_CFN_CTANH:
328 CASE_CFN_ERF:
329 CASE_CFN_LLROUND:
330 CASE_CFN_LROUND:
331 CASE_CFN_ROUND:
332 CASE_CFN_ROUNDEVEN:
333 CASE_CFN_ROUNDEVEN_FN:
334 CASE_CFN_SIN:
335 CASE_CFN_SINH:
336 CASE_CFN_TAN:
337 CASE_CFN_TANH:
338 CASE_CFN_TRUNC:
339 return true;
340
341 CASE_CFN_LLRINT:
342 CASE_CFN_LRINT:
343 CASE_CFN_NEARBYINT:
344 CASE_CFN_RINT:
345 return !flag_rounding_math;
346
347 default:
348 break;
349 }
350 return false;
351 }
352
353 /* Check whether we may negate an integer constant T without causing
354 overflow. */
355
356 bool
357 may_negate_without_overflow_p (const_tree t)
358 {
359 tree type;
360
361 gcc_assert (TREE_CODE (t) == INTEGER_CST);
362
363 type = TREE_TYPE (t);
364 if (TYPE_UNSIGNED (type))
365 return false;
366
367 return !wi::only_sign_bit_p (wi::to_wide (t));
368 }
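/* In other words, for a signed integer constant the only value rejected
   by the function above is the minimum of its type (e.g. INT_MIN for a
   32-bit int), since its negation is not representable.  */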
369
370 /* Determine whether an expression T can be cheaply negated using
371 the function negate_expr without introducing undefined overflow. */
372
373 static bool
374 negate_expr_p (tree t)
375 {
376 tree type;
377
378 if (t == 0)
379 return false;
380
381 type = TREE_TYPE (t);
382
383 STRIP_SIGN_NOPS (t);
384 switch (TREE_CODE (t))
385 {
386 case INTEGER_CST:
387 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
388 return true;
389
390 /* Check that -CST will not overflow type. */
391 return may_negate_without_overflow_p (t);
392 case BIT_NOT_EXPR:
393 return (INTEGRAL_TYPE_P (type)
394 && TYPE_OVERFLOW_WRAPS (type));
395
396 case FIXED_CST:
397 return true;
398
399 case NEGATE_EXPR:
400 return !TYPE_OVERFLOW_SANITIZED (type);
401
402 case REAL_CST:
403 /* We want to canonicalize to positive real constants. Pretend
404 that only negative ones can be easily negated. */
405 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
406
407 case COMPLEX_CST:
408 return negate_expr_p (TREE_REALPART (t))
409 && negate_expr_p (TREE_IMAGPART (t));
410
411 case VECTOR_CST:
412 {
413 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
414 return true;
415
416 /* Steps don't prevent negation. */
417 unsigned int count = vector_cst_encoded_nelts (t);
418 for (unsigned int i = 0; i < count; ++i)
419 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
420 return false;
421
422 return true;
423 }
424
425 case COMPLEX_EXPR:
426 return negate_expr_p (TREE_OPERAND (t, 0))
427 && negate_expr_p (TREE_OPERAND (t, 1));
428
429 case CONJ_EXPR:
430 return negate_expr_p (TREE_OPERAND (t, 0));
431
432 case PLUS_EXPR:
433 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
434 || HONOR_SIGNED_ZEROS (element_mode (type))
435 || (ANY_INTEGRAL_TYPE_P (type)
436 && ! TYPE_OVERFLOW_WRAPS (type)))
437 return false;
438 /* -(A + B) -> (-B) - A. */
439 if (negate_expr_p (TREE_OPERAND (t, 1)))
440 return true;
441 /* -(A + B) -> (-A) - B. */
442 return negate_expr_p (TREE_OPERAND (t, 0));
443
444 case MINUS_EXPR:
445 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
446 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
447 && !HONOR_SIGNED_ZEROS (element_mode (type))
448 && (! ANY_INTEGRAL_TYPE_P (type)
449 || TYPE_OVERFLOW_WRAPS (type));
450
451 case MULT_EXPR:
452 if (TYPE_UNSIGNED (type))
453 break;
454 /* INT_MIN/n * n doesn't overflow, but after negating one operand the
455    product does if n is a (negative) power of two.  */
456 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
457 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
458 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
459 && (wi::popcount
460 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
461 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
462 && (wi::popcount
463 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
464 break;
465
466 /* Fall through. */
467
468 case RDIV_EXPR:
469 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
470 return negate_expr_p (TREE_OPERAND (t, 1))
471 || negate_expr_p (TREE_OPERAND (t, 0));
472 break;
473
474 case TRUNC_DIV_EXPR:
475 case ROUND_DIV_EXPR:
476 case EXACT_DIV_EXPR:
477 if (TYPE_UNSIGNED (type))
478 break;
479 /* In general we can't negate A in A / B, because if A is INT_MIN and
480 B is not 1 we change the sign of the result. */
481 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
482 && negate_expr_p (TREE_OPERAND (t, 0)))
483 return true;
484 /* In general we can't negate B in A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. */
487 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
488 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
489 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
490 && ! integer_onep (TREE_OPERAND (t, 1))))
491 return negate_expr_p (TREE_OPERAND (t, 1));
492 break;
493
494 case NOP_EXPR:
495 /* Negate -((double)float) as (double)(-float). */
496 if (TREE_CODE (type) == REAL_TYPE)
497 {
498 tree tem = strip_float_extensions (t);
499 if (tem != t)
500 return negate_expr_p (tem);
501 }
502 break;
503
504 case CALL_EXPR:
505 /* Negate -f(x) as f(-x). */
506 if (negate_mathfn_p (get_call_combined_fn (t)))
507 return negate_expr_p (CALL_EXPR_ARG (t, 0));
508 break;
509
510 case RSHIFT_EXPR:
511 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
512 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
513 {
514 tree op1 = TREE_OPERAND (t, 1);
515 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
516 return true;
517 }
518 break;
519
520 default:
521 break;
522 }
523 return false;
524 }
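/* For example, with -fwrapv negate_expr_p returns true for T == a - b,
   because -(a - b) can be rewritten cheaply as b - a, while for a plain
   signed a + b under the default undefined-overflow semantics it returns
   false and the negation is left alone.  */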
525
526 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
527 simplification is possible.
528 If negate_expr_p would return true for T, NULL_TREE will never be
529 returned. */
530
531 static tree
532 fold_negate_expr_1 (location_t loc, tree t)
533 {
534 tree type = TREE_TYPE (t);
535 tree tem;
536
537 switch (TREE_CODE (t))
538 {
539 /* Convert - (~A) to A + 1. */
540 case BIT_NOT_EXPR:
541 if (INTEGRAL_TYPE_P (type))
542 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
543 build_one_cst (type));
544 break;
545
546 case INTEGER_CST:
547 tem = fold_negate_const (t, type);
548 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
549 || (ANY_INTEGRAL_TYPE_P (type)
550 && !TYPE_OVERFLOW_TRAPS (type)
551 && TYPE_OVERFLOW_WRAPS (type))
552 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
553 return tem;
554 break;
555
556 case POLY_INT_CST:
557 case REAL_CST:
558 case FIXED_CST:
559 tem = fold_negate_const (t, type);
560 return tem;
561
562 case COMPLEX_CST:
563 {
564 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
565 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
566 if (rpart && ipart)
567 return build_complex (type, rpart, ipart);
568 }
569 break;
570
571 case VECTOR_CST:
572 {
573 tree_vector_builder elts;
574 elts.new_unary_operation (type, t, true);
575 unsigned int count = elts.encoded_nelts ();
576 for (unsigned int i = 0; i < count; ++i)
577 {
578 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
579 if (elt == NULL_TREE)
580 return NULL_TREE;
581 elts.quick_push (elt);
582 }
583
584 return elts.build ();
585 }
586
587 case COMPLEX_EXPR:
588 if (negate_expr_p (t))
589 return fold_build2_loc (loc, COMPLEX_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
591 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
592 break;
593
594 case CONJ_EXPR:
595 if (negate_expr_p (t))
596 return fold_build1_loc (loc, CONJ_EXPR, type,
597 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
598 break;
599
600 case NEGATE_EXPR:
601 if (!TYPE_OVERFLOW_SANITIZED (type))
602 return TREE_OPERAND (t, 0);
603 break;
604
605 case PLUS_EXPR:
606 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
607 && !HONOR_SIGNED_ZEROS (element_mode (type)))
608 {
609 /* -(A + B) -> (-B) - A. */
610 if (negate_expr_p (TREE_OPERAND (t, 1)))
611 {
612 tem = negate_expr (TREE_OPERAND (t, 1));
613 return fold_build2_loc (loc, MINUS_EXPR, type,
614 tem, TREE_OPERAND (t, 0));
615 }
616
617 /* -(A + B) -> (-A) - B. */
618 if (negate_expr_p (TREE_OPERAND (t, 0)))
619 {
620 tem = negate_expr (TREE_OPERAND (t, 0));
621 return fold_build2_loc (loc, MINUS_EXPR, type,
622 tem, TREE_OPERAND (t, 1));
623 }
624 }
625 break;
626
627 case MINUS_EXPR:
628 /* - (A - B) -> B - A */
629 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
630 && !HONOR_SIGNED_ZEROS (element_mode (type)))
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
633 break;
634
635 case MULT_EXPR:
636 if (TYPE_UNSIGNED (type))
637 break;
638
639 /* Fall through. */
640
641 case RDIV_EXPR:
642 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
643 {
644 tem = TREE_OPERAND (t, 1);
645 if (negate_expr_p (tem))
646 return fold_build2_loc (loc, TREE_CODE (t), type,
647 TREE_OPERAND (t, 0), negate_expr (tem));
648 tem = TREE_OPERAND (t, 0);
649 if (negate_expr_p (tem))
650 return fold_build2_loc (loc, TREE_CODE (t), type,
651 negate_expr (tem), TREE_OPERAND (t, 1));
652 }
653 break;
654
655 case TRUNC_DIV_EXPR:
656 case ROUND_DIV_EXPR:
657 case EXACT_DIV_EXPR:
658 if (TYPE_UNSIGNED (type))
659 break;
660 /* In general we can't negate A in A / B, because if A is INT_MIN and
661 B is not 1 we change the sign of the result. */
662 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
663 && negate_expr_p (TREE_OPERAND (t, 0)))
664 return fold_build2_loc (loc, TREE_CODE (t), type,
665 negate_expr (TREE_OPERAND (t, 0)),
666 TREE_OPERAND (t, 1));
667 /* In general we can't negate B in A / B, because if A is INT_MIN and
668 B is 1, we may turn this into INT_MIN / -1 which is undefined
669 and actually traps on some architectures. */
670 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
671 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
672 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
673 && ! integer_onep (TREE_OPERAND (t, 1))))
674 && negate_expr_p (TREE_OPERAND (t, 1)))
675 return fold_build2_loc (loc, TREE_CODE (t), type,
676 TREE_OPERAND (t, 0),
677 negate_expr (TREE_OPERAND (t, 1)));
678 break;
679
680 case NOP_EXPR:
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type) == REAL_TYPE)
683 {
684 tem = strip_float_extensions (t);
685 if (tem != t && negate_expr_p (tem))
686 return fold_convert_loc (loc, type, negate_expr (tem));
687 }
688 break;
689
690 case CALL_EXPR:
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (get_call_combined_fn (t))
693 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
694 {
695 tree fndecl, arg;
696
697 fndecl = get_callee_fndecl (t);
698 arg = negate_expr (CALL_EXPR_ARG (t, 0));
699 return build_call_expr_loc (loc, fndecl, 1, arg);
700 }
701 break;
702
703 case RSHIFT_EXPR:
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
706 {
707 tree op1 = TREE_OPERAND (t, 1);
708 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
709 {
710 tree ntype = TYPE_UNSIGNED (type)
711 ? signed_type_for (type)
712 : unsigned_type_for (type);
713 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
714 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
715 return fold_convert_loc (loc, type, temp);
716 }
717 }
718 break;
719
720 default:
721 break;
722 }
723
724 return NULL_TREE;
725 }
726
727 /* A wrapper for fold_negate_expr_1. */
728
729 static tree
730 fold_negate_expr (location_t loc, tree t)
731 {
732 tree type = TREE_TYPE (t);
733 STRIP_SIGN_NOPS (t);
734 tree tem = fold_negate_expr_1 (loc, t);
735 if (tem == NULL_TREE)
736 return NULL_TREE;
737 return fold_convert_loc (loc, type, tem);
738 }
739
740 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
741    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
742    NULL_TREE is returned.  */
743
744 static tree
745 negate_expr (tree t)
746 {
747 tree type, tem;
748 location_t loc;
749
750 if (t == NULL_TREE)
751 return NULL_TREE;
752
753 loc = EXPR_LOCATION (t);
754 type = TREE_TYPE (t);
755 STRIP_SIGN_NOPS (t);
756
757 tem = fold_negate_expr (loc, t);
758 if (!tem)
759 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
760 return fold_convert_loc (loc, type, tem);
761 }
762 \f
763 /* Split a tree IN into constant, literal and variable parts that could be
764 combined with CODE to make IN. "constant" means an expression with
765 TREE_CONSTANT but that isn't an actual constant. CODE must be a
766 commutative arithmetic operation. Store the constant part into *CONP,
767 the literal in *LITP and return the variable part. If a part isn't
768 present, set it to null. If the tree does not decompose in this way,
769 return the entire tree as the variable part and the other parts as null.
770
771 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
772 case, we negate an operand that was subtracted. Except if it is a
773 literal for which we use *MINUS_LITP instead.
774
775 If NEGATE_P is true, we are negating all of IN, again except a literal
776 for which we use *MINUS_LITP instead. If a variable part is of pointer
777 type, it is negated after converting to TYPE. This prevents us from
778    generating an illegal MINUS pointer expression.  LOC is the location of
779 the converted variable part.
780
781 If IN is itself a literal or constant, return it as appropriate.
782
783 Note that we do not guarantee that any of the three values will be the
784 same type as IN, but they will have the same signedness and mode. */
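/* As a worked example: with CODE == PLUS_EXPR and IN == (x - 10),
   split_tree leaves *LITP, *CONP, *MINUS_CONP and *MINUS_VARP null,
   stores the constant 10 in *MINUS_LITP and returns x as the variable
   part, so the caller can reassociate the pieces as x + (-10).  */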
785
786 static tree
787 split_tree (tree in, tree type, enum tree_code code,
788 tree *minus_varp, tree *conp, tree *minus_conp,
789 tree *litp, tree *minus_litp, int negate_p)
790 {
791 tree var = 0;
792 *minus_varp = 0;
793 *conp = 0;
794 *minus_conp = 0;
795 *litp = 0;
796 *minus_litp = 0;
797
798 /* Strip any conversions that don't change the machine mode or signedness. */
799 STRIP_SIGN_NOPS (in);
800
801 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
802 || TREE_CODE (in) == FIXED_CST)
803 *litp = in;
804 else if (TREE_CODE (in) == code
805 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
806 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
807 /* We can associate addition and subtraction together (even
808 though the C standard doesn't say so) for integers because
809 the value is not affected. For reals, the value might be
810 affected, so we can't. */
811 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
812 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
813 || (code == MINUS_EXPR
814 && (TREE_CODE (in) == PLUS_EXPR
815 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
816 {
817 tree op0 = TREE_OPERAND (in, 0);
818 tree op1 = TREE_OPERAND (in, 1);
819 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
820 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
821
822 /* First see if either of the operands is a literal, then a constant. */
823 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
824 || TREE_CODE (op0) == FIXED_CST)
825 *litp = op0, op0 = 0;
826 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
827 || TREE_CODE (op1) == FIXED_CST)
828 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
829
830 if (op0 != 0 && TREE_CONSTANT (op0))
831 *conp = op0, op0 = 0;
832 else if (op1 != 0 && TREE_CONSTANT (op1))
833 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
834
835 /* If we haven't dealt with either operand, this is not a case we can
836 decompose. Otherwise, VAR is either of the ones remaining, if any. */
837 if (op0 != 0 && op1 != 0)
838 var = in;
839 else if (op0 != 0)
840 var = op0;
841 else
842 var = op1, neg_var_p = neg1_p;
843
844 /* Now do any needed negations. */
845 if (neg_litp_p)
846 *minus_litp = *litp, *litp = 0;
847 if (neg_conp_p && *conp)
848 *minus_conp = *conp, *conp = 0;
849 if (neg_var_p && var)
850 *minus_varp = var, var = 0;
851 }
852 else if (TREE_CONSTANT (in))
853 *conp = in;
854 else if (TREE_CODE (in) == BIT_NOT_EXPR
855 && code == PLUS_EXPR)
856 {
857 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
858 when IN is constant. */
859 *litp = build_minus_one_cst (type);
860 *minus_varp = TREE_OPERAND (in, 0);
861 }
862 else
863 var = in;
864
865 if (negate_p)
866 {
867 if (*litp)
868 *minus_litp = *litp, *litp = 0;
869 else if (*minus_litp)
870 *litp = *minus_litp, *minus_litp = 0;
871 if (*conp)
872 *minus_conp = *conp, *conp = 0;
873 else if (*minus_conp)
874 *conp = *minus_conp, *minus_conp = 0;
875 if (var)
876 *minus_varp = var, var = 0;
877 else if (*minus_varp)
878 var = *minus_varp, *minus_varp = 0;
879 }
880
881 if (*litp
882 && TREE_OVERFLOW_P (*litp))
883 *litp = drop_tree_overflow (*litp);
884 if (*minus_litp
885 && TREE_OVERFLOW_P (*minus_litp))
886 *minus_litp = drop_tree_overflow (*minus_litp);
887
888 return var;
889 }
890
891 /* Re-associate trees split by the above function. T1 and T2 are
892 either expressions to associate or null. Return the new
893 expression, if any. LOC is the location of the new expression. If
894 we build an operation, do it in TYPE and with CODE. */
895
896 static tree
897 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
898 {
899 if (t1 == 0)
900 {
901 gcc_assert (t2 == 0 || code != MINUS_EXPR);
902 return t2;
903 }
904 else if (t2 == 0)
905 return t1;
906
907 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
908 try to fold this since we will have infinite recursion. But do
909 deal with any NEGATE_EXPRs. */
910 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
911 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
912 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
913 {
914 if (code == PLUS_EXPR)
915 {
916 if (TREE_CODE (t1) == NEGATE_EXPR)
917 return build2_loc (loc, MINUS_EXPR, type,
918 fold_convert_loc (loc, type, t2),
919 fold_convert_loc (loc, type,
920 TREE_OPERAND (t1, 0)));
921 else if (TREE_CODE (t2) == NEGATE_EXPR)
922 return build2_loc (loc, MINUS_EXPR, type,
923 fold_convert_loc (loc, type, t1),
924 fold_convert_loc (loc, type,
925 TREE_OPERAND (t2, 0)));
926 else if (integer_zerop (t2))
927 return fold_convert_loc (loc, type, t1);
928 }
929 else if (code == MINUS_EXPR)
930 {
931 if (integer_zerop (t2))
932 return fold_convert_loc (loc, type, t1);
933 }
934
935 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
936 fold_convert_loc (loc, type, t2));
937 }
938
939 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
940 fold_convert_loc (loc, type, t2));
941 }
942 \f
943 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
944 for use in int_const_binop, size_binop and size_diffop. */
945
946 static bool
947 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
948 {
949 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
950 return false;
951 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
952 return false;
953
954 switch (code)
955 {
956 case LSHIFT_EXPR:
957 case RSHIFT_EXPR:
958 case LROTATE_EXPR:
959 case RROTATE_EXPR:
960 return true;
961
962 default:
963 break;
964 }
965
966 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
967 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
968 && TYPE_MODE (type1) == TYPE_MODE (type2);
969 }
970
971 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
972 a new constant in RES. Return FALSE if we don't know how to
973 evaluate CODE at compile-time. */
974
975 bool
976 wide_int_binop (wide_int &res,
977 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
978 signop sign, wi::overflow_type *overflow)
979 {
980 wide_int tmp;
981 *overflow = wi::OVF_NONE;
982 switch (code)
983 {
984 case BIT_IOR_EXPR:
985 res = wi::bit_or (arg1, arg2);
986 break;
987
988 case BIT_XOR_EXPR:
989 res = wi::bit_xor (arg1, arg2);
990 break;
991
992 case BIT_AND_EXPR:
993 res = wi::bit_and (arg1, arg2);
994 break;
995
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 return false;
999 res = wi::lshift (arg1, arg2);
1000 break;
1001
1002 case RSHIFT_EXPR:
1003 if (wi::neg_p (arg2))
1004 return false;
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res = wi::rshift (arg1, arg2, sign);
1009 break;
1010
1011 case RROTATE_EXPR:
1012 case LROTATE_EXPR:
1013 if (wi::neg_p (arg2))
1014 {
1015 tmp = -arg2;
1016 if (code == RROTATE_EXPR)
1017 code = LROTATE_EXPR;
1018 else
1019 code = RROTATE_EXPR;
1020 }
1021 else
1022 tmp = arg2;
1023
1024 if (code == RROTATE_EXPR)
1025 res = wi::rrotate (arg1, tmp);
1026 else
1027 res = wi::lrotate (arg1, tmp);
1028 break;
1029
1030 case PLUS_EXPR:
1031 res = wi::add (arg1, arg2, sign, overflow);
1032 break;
1033
1034 case MINUS_EXPR:
1035 res = wi::sub (arg1, arg2, sign, overflow);
1036 break;
1037
1038 case MULT_EXPR:
1039 res = wi::mul (arg1, arg2, sign, overflow);
1040 break;
1041
1042 case MULT_HIGHPART_EXPR:
1043 res = wi::mul_high (arg1, arg2, sign);
1044 break;
1045
1046 case TRUNC_DIV_EXPR:
1047 case EXACT_DIV_EXPR:
1048 if (arg2 == 0)
1049 return false;
1050 res = wi::div_trunc (arg1, arg2, sign, overflow);
1051 break;
1052
1053 case FLOOR_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_floor (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case CEIL_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_ceil (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case ROUND_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_round (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case TRUNC_MOD_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case FLOOR_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_floor (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case CEIL_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case ROUND_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_round (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case MIN_EXPR:
1096 res = wi::min (arg1, arg2, sign);
1097 break;
1098
1099 case MAX_EXPR:
1100 res = wi::max (arg1, arg2, sign);
1101 break;
1102
1103 default:
1104 return false;
1105 }
1106 return true;
1107 }
1108
1109 /* Combine two poly_int constants ARG1 and ARG2 under operation CODE to
1110 produce a new constant in RES. Return FALSE if we don't know how
1111 to evaluate CODE at compile-time. */
1112
1113 static bool
1114 poly_int_binop (poly_wide_int &res, enum tree_code code,
1115 const_tree arg1, const_tree arg2,
1116 signop sign, wi::overflow_type *overflow)
1117 {
1118 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1119 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1120 switch (code)
1121 {
1122 case PLUS_EXPR:
1123 res = wi::add (wi::to_poly_wide (arg1),
1124 wi::to_poly_wide (arg2), sign, overflow);
1125 break;
1126
1127 case MINUS_EXPR:
1128 res = wi::sub (wi::to_poly_wide (arg1),
1129 wi::to_poly_wide (arg2), sign, overflow);
1130 break;
1131
1132 case MULT_EXPR:
1133 if (TREE_CODE (arg2) == INTEGER_CST)
1134 res = wi::mul (wi::to_poly_wide (arg1),
1135 wi::to_wide (arg2), sign, overflow);
1136 else if (TREE_CODE (arg1) == INTEGER_CST)
1137 res = wi::mul (wi::to_poly_wide (arg2),
1138 wi::to_wide (arg1), sign, overflow);
1139 else
1140 return false;
1141 break;
1142
1143 case LSHIFT_EXPR:
1144 if (TREE_CODE (arg2) == INTEGER_CST)
1145 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1146 else
1147 return false;
1148 break;
1149
1150 case BIT_IOR_EXPR:
1151 if (TREE_CODE (arg2) != INTEGER_CST
1152 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1153 &res))
1154 return false;
1155 break;
1156
1157 default:
1158 return false;
1159 }
1160 return true;
1161 }
1162
1163 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1164 produce a new constant. Return NULL_TREE if we don't know how to
1165 evaluate CODE at compile-time. */
1166
1167 tree
1168 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1169 int overflowable)
1170 {
1171 poly_wide_int poly_res;
1172 tree type = TREE_TYPE (arg1);
1173 signop sign = TYPE_SIGN (type);
1174 wi::overflow_type overflow = wi::OVF_NONE;
1175
1176 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1177 {
1178 wide_int warg1 = wi::to_wide (arg1), res;
1179 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1180 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1181 return NULL_TREE;
1182 poly_res = res;
1183 }
1184 else if (!poly_int_tree_p (arg1)
1185 || !poly_int_tree_p (arg2)
1186 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1187 return NULL_TREE;
1188 return force_fit_type (type, poly_res, overflowable,
1189 (((sign == SIGNED || overflowable == -1)
1190 && overflow)
1191 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1192 }
1193
1194 /* Return true if binary operation OP distributes over addition in operand
1195 OPNO, with the other operand being held constant. OPNO counts from 1. */
1196
1197 static bool
1198 distributes_over_addition_p (tree_code op, int opno)
1199 {
1200 switch (op)
1201 {
1202 case PLUS_EXPR:
1203 case MINUS_EXPR:
1204 case MULT_EXPR:
1205 return true;
1206
1207 case LSHIFT_EXPR:
1208 return opno == 1;
1209
1210 default:
1211 return false;
1212 }
1213 }
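/* The OPNO == 1 special case for LSHIFT_EXPR above reflects that
   (a + b) << c equals (a << c) + (b << c) (ignoring overflow), whereas
   shifting by a sum, c << (a + b), does not distribute.  */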
1214
1215 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1216 constant. We assume ARG1 and ARG2 have the same data type, or at least
1217 are the same kind of constant and the same machine mode. Return zero if
1218 combining the constants is not allowed in the current operating mode. */
1219
1220 static tree
1221 const_binop (enum tree_code code, tree arg1, tree arg2)
1222 {
1223 /* Sanity check for the recursive cases. */
1224 if (!arg1 || !arg2)
1225 return NULL_TREE;
1226
1227 STRIP_NOPS (arg1);
1228 STRIP_NOPS (arg2);
1229
1230 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1231 {
1232 if (code == POINTER_PLUS_EXPR)
1233 return int_const_binop (PLUS_EXPR,
1234 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1235
1236 return int_const_binop (code, arg1, arg2);
1237 }
1238
1239 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1240 {
1241 machine_mode mode;
1242 REAL_VALUE_TYPE d1;
1243 REAL_VALUE_TYPE d2;
1244 REAL_VALUE_TYPE value;
1245 REAL_VALUE_TYPE result;
1246 bool inexact;
1247 tree t, type;
1248
1249 /* The following codes are handled by real_arithmetic. */
1250 switch (code)
1251 {
1252 case PLUS_EXPR:
1253 case MINUS_EXPR:
1254 case MULT_EXPR:
1255 case RDIV_EXPR:
1256 case MIN_EXPR:
1257 case MAX_EXPR:
1258 break;
1259
1260 default:
1261 return NULL_TREE;
1262 }
1263
1264 d1 = TREE_REAL_CST (arg1);
1265 d2 = TREE_REAL_CST (arg2);
1266
1267 type = TREE_TYPE (arg1);
1268 mode = TYPE_MODE (type);
1269
1270 /* Don't perform operation if we honor signaling NaNs and
1271 either operand is a signaling NaN. */
1272 if (HONOR_SNANS (mode)
1273 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1274 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1275 return NULL_TREE;
1276
1277 /* Don't perform operation if it would raise a division
1278 by zero exception. */
1279 if (code == RDIV_EXPR
1280 && real_equal (&d2, &dconst0)
1281 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1282 return NULL_TREE;
1283
1284 /* If either operand is a NaN, just return it. Otherwise, set up
1285 for floating-point trap; we return an overflow. */
1286 if (REAL_VALUE_ISNAN (d1))
1287 {
1288 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1289    is off.  */
1290 d1.signalling = 0;
1291 t = build_real (type, d1);
1292 return t;
1293 }
1294 else if (REAL_VALUE_ISNAN (d2))
1295 {
1296 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1297    is off.  */
1298 d2.signalling = 0;
1299 t = build_real (type, d2);
1300 return t;
1301 }
1302
1303 inexact = real_arithmetic (&value, code, &d1, &d2);
1304 real_convert (&result, mode, &value);
1305
1306 /* Don't constant fold this floating point operation if
1307 the result has overflowed and flag_trapping_math is set.  */
1308 if (flag_trapping_math
1309 && MODE_HAS_INFINITIES (mode)
1310 && REAL_VALUE_ISINF (result)
1311 && !REAL_VALUE_ISINF (d1)
1312 && !REAL_VALUE_ISINF (d2))
1313 return NULL_TREE;
1314
1315 /* Don't constant fold this floating point operation if the
1316 result may depend upon the run-time rounding mode and
1317 flag_rounding_math is set, or if GCC's software emulation
1318 is unable to accurately represent the result. */
1319 if ((flag_rounding_math
1320 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1321 && (inexact || !real_identical (&result, &value)))
1322 return NULL_TREE;
1323
1324 t = build_real (type, result);
1325
1326 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1327 return t;
1328 }
1329
1330 if (TREE_CODE (arg1) == FIXED_CST)
1331 {
1332 FIXED_VALUE_TYPE f1;
1333 FIXED_VALUE_TYPE f2;
1334 FIXED_VALUE_TYPE result;
1335 tree t, type;
1336 int sat_p;
1337 bool overflow_p;
1338
1339 /* The following codes are handled by fixed_arithmetic. */
1340 switch (code)
1341 {
1342 case PLUS_EXPR:
1343 case MINUS_EXPR:
1344 case MULT_EXPR:
1345 case TRUNC_DIV_EXPR:
1346 if (TREE_CODE (arg2) != FIXED_CST)
1347 return NULL_TREE;
1348 f2 = TREE_FIXED_CST (arg2);
1349 break;
1350
1351 case LSHIFT_EXPR:
1352 case RSHIFT_EXPR:
1353 {
1354 if (TREE_CODE (arg2) != INTEGER_CST)
1355 return NULL_TREE;
1356 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1357 f2.data.high = w2.elt (1);
1358 f2.data.low = w2.ulow ();
1359 f2.mode = SImode;
1360 }
1361 break;
1362
1363 default:
1364 return NULL_TREE;
1365 }
1366
1367 f1 = TREE_FIXED_CST (arg1);
1368 type = TREE_TYPE (arg1);
1369 sat_p = TYPE_SATURATING (type);
1370 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1371 t = build_fixed (type, result);
1372 /* Propagate overflow flags. */
1373 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1374 TREE_OVERFLOW (t) = 1;
1375 return t;
1376 }
1377
1378 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1379 {
1380 tree type = TREE_TYPE (arg1);
1381 tree r1 = TREE_REALPART (arg1);
1382 tree i1 = TREE_IMAGPART (arg1);
1383 tree r2 = TREE_REALPART (arg2);
1384 tree i2 = TREE_IMAGPART (arg2);
1385 tree real, imag;
1386
1387 switch (code)
1388 {
1389 case PLUS_EXPR:
1390 case MINUS_EXPR:
1391 real = const_binop (code, r1, r2);
1392 imag = const_binop (code, i1, i2);
1393 break;
1394
1395 case MULT_EXPR:
1396 if (COMPLEX_FLOAT_TYPE_P (type))
1397 return do_mpc_arg2 (arg1, arg2, type,
1398 /* do_nonfinite= */ folding_initializer,
1399 mpc_mul);
1400
1401 real = const_binop (MINUS_EXPR,
1402 const_binop (MULT_EXPR, r1, r2),
1403 const_binop (MULT_EXPR, i1, i2));
1404 imag = const_binop (PLUS_EXPR,
1405 const_binop (MULT_EXPR, r1, i2),
1406 const_binop (MULT_EXPR, i1, r2));
1407 break;
1408
1409 case RDIV_EXPR:
1410 if (COMPLEX_FLOAT_TYPE_P (type))
1411 return do_mpc_arg2 (arg1, arg2, type,
1412 /* do_nonfinite= */ folding_initializer,
1413 mpc_div);
1414 /* Fallthru. */
1415 case TRUNC_DIV_EXPR:
1416 case CEIL_DIV_EXPR:
1417 case FLOOR_DIV_EXPR:
1418 case ROUND_DIV_EXPR:
1419 if (flag_complex_method == 0)
1420 {
1421 /* Keep this algorithm in sync with
1422 tree-complex.c:expand_complex_div_straight().
1423
1424 Expand complex division to scalars, straightforward algorithm.
1425 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1426 t = br*br + bi*bi
1427 */
1428 tree magsquared
1429 = const_binop (PLUS_EXPR,
1430 const_binop (MULT_EXPR, r2, r2),
1431 const_binop (MULT_EXPR, i2, i2));
1432 tree t1
1433 = const_binop (PLUS_EXPR,
1434 const_binop (MULT_EXPR, r1, r2),
1435 const_binop (MULT_EXPR, i1, i2));
1436 tree t2
1437 = const_binop (MINUS_EXPR,
1438 const_binop (MULT_EXPR, i1, r2),
1439 const_binop (MULT_EXPR, r1, i2));
1440
1441 real = const_binop (code, t1, magsquared);
1442 imag = const_binop (code, t2, magsquared);
1443 }
1444 else
1445 {
1446 /* Keep this algorithm in sync with
1447 tree-complex.c:expand_complex_div_wide().
1448
1449 Expand complex division to scalars, modified algorithm to minimize
1450 overflow with wide input ranges. */
1451 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1452 fold_abs_const (r2, TREE_TYPE (type)),
1453 fold_abs_const (i2, TREE_TYPE (type)));
1454
1455 if (integer_nonzerop (compare))
1456 {
1457 /* In the TRUE branch, we compute
1458 ratio = br/bi;
1459 div = (br * ratio) + bi;
1460 tr = (ar * ratio) + ai;
1461 ti = (ai * ratio) - ar;
1462 tr = tr / div;
1463 ti = ti / div; */
1464 tree ratio = const_binop (code, r2, i2);
1465 tree div = const_binop (PLUS_EXPR, i2,
1466 const_binop (MULT_EXPR, r2, ratio));
1467 real = const_binop (MULT_EXPR, r1, ratio);
1468 real = const_binop (PLUS_EXPR, real, i1);
1469 real = const_binop (code, real, div);
1470
1471 imag = const_binop (MULT_EXPR, i1, ratio);
1472 imag = const_binop (MINUS_EXPR, imag, r1);
1473 imag = const_binop (code, imag, div);
1474 }
1475 else
1476 {
1477 /* In the FALSE branch, we compute
1478 ratio = d/c;
1479 divisor = (d * ratio) + c;
1480 tr = (b * ratio) + a;
1481 ti = b - (a * ratio);
1482 tr = tr / div;
1483 ti = ti / div; */
1484 tree ratio = const_binop (code, i2, r2);
1485 tree div = const_binop (PLUS_EXPR, r2,
1486 const_binop (MULT_EXPR, i2, ratio));
1487
1488 real = const_binop (MULT_EXPR, i1, ratio);
1489 real = const_binop (PLUS_EXPR, real, r1);
1490 real = const_binop (code, real, div);
1491
1492 imag = const_binop (MULT_EXPR, r1, ratio);
1493 imag = const_binop (MINUS_EXPR, i1, imag);
1494 imag = const_binop (code, imag, div);
1495 }
1496 }
1497 break;
1498
1499 default:
1500 return NULL_TREE;
1501 }
1502
1503 if (real && imag)
1504 return build_complex (type, real, imag);
1505 }
1506
1507 if (TREE_CODE (arg1) == VECTOR_CST
1508 && TREE_CODE (arg2) == VECTOR_CST
1509 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1510 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1511 {
1512 tree type = TREE_TYPE (arg1);
1513 bool step_ok_p;
1514 if (VECTOR_CST_STEPPED_P (arg1)
1515 && VECTOR_CST_STEPPED_P (arg2))
1516 /* We can operate directly on the encoding if:
1517
1518 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1519 implies
1520 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1521
1522 Addition and subtraction are the supported operators
1523 for which this is true. */
1524 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1525 else if (VECTOR_CST_STEPPED_P (arg1))
1526 /* We can operate directly on stepped encodings if:
1527
1528 a3 - a2 == a2 - a1
1529 implies:
1530 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1531
1532 which is true if (x -> x op c) distributes over addition. */
1533 step_ok_p = distributes_over_addition_p (code, 1);
1534 else
1535 /* Similarly in reverse. */
1536 step_ok_p = distributes_over_addition_p (code, 2);
1537 tree_vector_builder elts;
1538 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1539 return NULL_TREE;
1540 unsigned int count = elts.encoded_nelts ();
1541 for (unsigned int i = 0; i < count; ++i)
1542 {
1543 tree elem1 = VECTOR_CST_ELT (arg1, i);
1544 tree elem2 = VECTOR_CST_ELT (arg2, i);
1545
1546 tree elt = const_binop (code, elem1, elem2);
1547
1548 /* It is possible that const_binop cannot handle the given
1549    code and will return NULL_TREE.  */
1550 if (elt == NULL_TREE)
1551 return NULL_TREE;
1552 elts.quick_push (elt);
1553 }
1554
1555 return elts.build ();
1556 }
1557
1558 /* Shifts allow a scalar offset for a vector. */
1559 if (TREE_CODE (arg1) == VECTOR_CST
1560 && TREE_CODE (arg2) == INTEGER_CST)
1561 {
1562 tree type = TREE_TYPE (arg1);
1563 bool step_ok_p = distributes_over_addition_p (code, 1);
1564 tree_vector_builder elts;
1565 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1566 return NULL_TREE;
1567 unsigned int count = elts.encoded_nelts ();
1568 for (unsigned int i = 0; i < count; ++i)
1569 {
1570 tree elem1 = VECTOR_CST_ELT (arg1, i);
1571
1572 tree elt = const_binop (code, elem1, arg2);
1573
1574 /* It is possible that const_binop cannot handle the given
1575 code and will return NULL_TREE.  */
1576 if (elt == NULL_TREE)
1577 return NULL_TREE;
1578 elts.quick_push (elt);
1579 }
1580
1581 return elts.build ();
1582 }
1583 return NULL_TREE;
1584 }
1585
1586 /* Overload that adds a TYPE parameter to be able to dispatch
1587 to fold_relational_const. */
1588
1589 tree
1590 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1591 {
1592 if (TREE_CODE_CLASS (code) == tcc_comparison)
1593 return fold_relational_const (code, type, arg1, arg2);
1594
1595 /* ??? Until we make the const_binop worker take the type of the
1596 result as an argument, put those cases that need it here.  */
1597 switch (code)
1598 {
1599 case VEC_SERIES_EXPR:
1600 if (CONSTANT_CLASS_P (arg1)
1601 && CONSTANT_CLASS_P (arg2))
1602 return build_vec_series (type, arg1, arg2);
1603 return NULL_TREE;
1604
1605 case COMPLEX_EXPR:
1606 if ((TREE_CODE (arg1) == REAL_CST
1607 && TREE_CODE (arg2) == REAL_CST)
1608 || (TREE_CODE (arg1) == INTEGER_CST
1609 && TREE_CODE (arg2) == INTEGER_CST))
1610 return build_complex (type, arg1, arg2);
1611 return NULL_TREE;
1612
1613 case POINTER_DIFF_EXPR:
1614 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1615 {
1616 poly_offset_int res = (wi::to_poly_offset (arg1)
1617 - wi::to_poly_offset (arg2));
1618 return force_fit_type (type, res, 1,
1619 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1620 }
1621 return NULL_TREE;
1622
1623 case VEC_PACK_TRUNC_EXPR:
1624 case VEC_PACK_FIX_TRUNC_EXPR:
1625 case VEC_PACK_FLOAT_EXPR:
1626 {
1627 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1628
1629 if (TREE_CODE (arg1) != VECTOR_CST
1630 || TREE_CODE (arg2) != VECTOR_CST)
1631 return NULL_TREE;
1632
1633 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1634 return NULL_TREE;
1635
1636 out_nelts = in_nelts * 2;
1637 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1638 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1639
1640 tree_vector_builder elts (type, out_nelts, 1);
1641 for (i = 0; i < out_nelts; i++)
1642 {
1643 tree elt = (i < in_nelts
1644 ? VECTOR_CST_ELT (arg1, i)
1645 : VECTOR_CST_ELT (arg2, i - in_nelts));
1646 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1647 ? NOP_EXPR
1648 : code == VEC_PACK_FLOAT_EXPR
1649 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1650 TREE_TYPE (type), elt);
1651 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1652 return NULL_TREE;
1653 elts.quick_push (elt);
1654 }
1655
1656 return elts.build ();
1657 }
1658
1659 case VEC_WIDEN_MULT_LO_EXPR:
1660 case VEC_WIDEN_MULT_HI_EXPR:
1661 case VEC_WIDEN_MULT_EVEN_EXPR:
1662 case VEC_WIDEN_MULT_ODD_EXPR:
1663 {
1664 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1665
1666 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1667 return NULL_TREE;
1668
1669 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1670 return NULL_TREE;
1671 out_nelts = in_nelts / 2;
1672 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1673 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1674
1675 if (code == VEC_WIDEN_MULT_LO_EXPR)
1676 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1677 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1678 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1679 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1680 scale = 1, ofs = 0;
1681 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1682 scale = 1, ofs = 1;
1683
1684 tree_vector_builder elts (type, out_nelts, 1);
1685 for (out = 0; out < out_nelts; out++)
1686 {
1687 unsigned int in = (out << scale) + ofs;
1688 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1689 VECTOR_CST_ELT (arg1, in));
1690 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1691 VECTOR_CST_ELT (arg2, in));
1692
1693 if (t1 == NULL_TREE || t2 == NULL_TREE)
1694 return NULL_TREE;
1695 tree elt = const_binop (MULT_EXPR, t1, t2);
1696 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1697 return NULL_TREE;
1698 elts.quick_push (elt);
1699 }
1700
1701 return elts.build ();
1702 }
1703
1704 default:;
1705 }
1706
1707 if (TREE_CODE_CLASS (code) != tcc_binary)
1708 return NULL_TREE;
1709
1710 /* Make sure type and arg1 have the same saturating flag.  */
1711 gcc_checking_assert (TYPE_SATURATING (type)
1712 == TYPE_SATURATING (TREE_TYPE (arg1)));
1713
1714 return const_binop (code, arg1, arg2);
1715 }
1716
1717 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1718    Return NULL_TREE if computing the constant is not possible.  */
1719
1720 tree
1721 const_unop (enum tree_code code, tree type, tree arg0)
1722 {
1723 /* Don't perform the operation, other than NEGATE and ABS, if
1724 flag_signaling_nans is on and the operand is a signaling NaN. */
1725 if (TREE_CODE (arg0) == REAL_CST
1726 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1727 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1728 && code != NEGATE_EXPR
1729 && code != ABS_EXPR
1730 && code != ABSU_EXPR)
1731 return NULL_TREE;
1732
1733 switch (code)
1734 {
1735 CASE_CONVERT:
1736 case FLOAT_EXPR:
1737 case FIX_TRUNC_EXPR:
1738 case FIXED_CONVERT_EXPR:
1739 return fold_convert_const (code, type, arg0);
1740
1741 case ADDR_SPACE_CONVERT_EXPR:
1742 /* If the source address is 0, and the source address space
1743 cannot have a valid object at 0, fold to dest type null. */
1744 if (integer_zerop (arg0)
1745 && !(targetm.addr_space.zero_address_valid
1746 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1747 return fold_convert_const (code, type, arg0);
1748 break;
1749
1750 case VIEW_CONVERT_EXPR:
1751 return fold_view_convert_expr (type, arg0);
1752
1753 case NEGATE_EXPR:
1754 {
1755 /* Can't call fold_negate_const directly here as that doesn't
1756 handle all cases and we might not be able to negate some
1757 constants. */
1758 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1759 if (tem && CONSTANT_CLASS_P (tem))
1760 return tem;
1761 break;
1762 }
1763
1764 case ABS_EXPR:
1765 case ABSU_EXPR:
1766 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1767 return fold_abs_const (arg0, type);
1768 break;
1769
1770 case CONJ_EXPR:
1771 if (TREE_CODE (arg0) == COMPLEX_CST)
1772 {
1773 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1774 TREE_TYPE (type));
1775 return build_complex (type, TREE_REALPART (arg0), ipart);
1776 }
1777 break;
1778
1779 case BIT_NOT_EXPR:
1780 if (TREE_CODE (arg0) == INTEGER_CST)
1781 return fold_not_const (arg0, type);
1782 else if (POLY_INT_CST_P (arg0))
1783 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1784 /* Perform BIT_NOT_EXPR on each element individually. */
1785 else if (TREE_CODE (arg0) == VECTOR_CST)
1786 {
1787 tree elem;
1788
1789 /* This can cope with stepped encodings because ~x == -1 - x. */
1790 tree_vector_builder elements;
1791 elements.new_unary_operation (type, arg0, true);
1792 unsigned int i, count = elements.encoded_nelts ();
1793 for (i = 0; i < count; ++i)
1794 {
1795 elem = VECTOR_CST_ELT (arg0, i);
1796 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1797 if (elem == NULL_TREE)
1798 break;
1799 elements.quick_push (elem);
1800 }
1801 if (i == count)
1802 return elements.build ();
1803 }
1804 break;
1805
1806 case TRUTH_NOT_EXPR:
1807 if (TREE_CODE (arg0) == INTEGER_CST)
1808 return constant_boolean_node (integer_zerop (arg0), type);
1809 break;
1810
1811 case REALPART_EXPR:
1812 if (TREE_CODE (arg0) == COMPLEX_CST)
1813 return fold_convert (type, TREE_REALPART (arg0));
1814 break;
1815
1816 case IMAGPART_EXPR:
1817 if (TREE_CODE (arg0) == COMPLEX_CST)
1818 return fold_convert (type, TREE_IMAGPART (arg0));
1819 break;
1820
1821 case VEC_UNPACK_LO_EXPR:
1822 case VEC_UNPACK_HI_EXPR:
1823 case VEC_UNPACK_FLOAT_LO_EXPR:
1824 case VEC_UNPACK_FLOAT_HI_EXPR:
1825 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1826 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1827 {
1828 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1829 enum tree_code subcode;
1830
1831 if (TREE_CODE (arg0) != VECTOR_CST)
1832 return NULL_TREE;
1833
1834 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1835 return NULL_TREE;
1836 out_nelts = in_nelts / 2;
1837 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1838
1839 unsigned int offset = 0;
1840 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1841 || code == VEC_UNPACK_FLOAT_LO_EXPR
1842 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1843 offset = out_nelts;
1844
1845 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1846 subcode = NOP_EXPR;
1847 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1849 subcode = FLOAT_EXPR;
1850 else
1851 subcode = FIX_TRUNC_EXPR;
1852
1853 tree_vector_builder elts (type, out_nelts, 1);
1854 for (i = 0; i < out_nelts; i++)
1855 {
1856 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1857 VECTOR_CST_ELT (arg0, i + offset));
1858 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1859 return NULL_TREE;
1860 elts.quick_push (elt);
1861 }
1862
1863 return elts.build ();
1864 }
1865
1866 case VEC_DUPLICATE_EXPR:
1867 if (CONSTANT_CLASS_P (arg0))
1868 return build_vector_from_val (type, arg0);
1869 return NULL_TREE;
1870
1871 default:
1872 break;
1873 }
1874
1875 return NULL_TREE;
1876 }
1877
1878 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1879 indicates which particular sizetype to create. */
1880
1881 tree
1882 size_int_kind (poly_int64 number, enum size_type_kind kind)
1883 {
1884 return build_int_cst (sizetype_tab[(int) kind], number);
1885 }
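/* The size_int, ssize_int, bitsize_int and sbitsize_int macros defined
   in tree.h are thin wrappers around this function, each passing the
   corresponding size_type_kind value.  */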
1886 \f
1887 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1888 is a tree code. The type of the result is taken from the operands.
1889 Both must be equivalent integer types, ala int_binop_types_match_p.
1890 If the operands are constant, so is the result. */
1891
1892 tree
1893 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1894 {
1895 tree type = TREE_TYPE (arg0);
1896
1897 if (arg0 == error_mark_node || arg1 == error_mark_node)
1898 return error_mark_node;
1899
1900 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1901 TREE_TYPE (arg1)));
1902
1903 /* Handle the special case of two poly_int constants faster. */
1904 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1905 {
1906 /* And some specific cases even faster than that. */
1907 if (code == PLUS_EXPR)
1908 {
1909 if (integer_zerop (arg0)
1910 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1911 return arg1;
1912 if (integer_zerop (arg1)
1913 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1914 return arg0;
1915 }
1916 else if (code == MINUS_EXPR)
1917 {
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1921 }
1922 else if (code == MULT_EXPR)
1923 {
1924 if (integer_onep (arg0)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1926 return arg1;
1927 }
1928
1929 /* Handle general case of two integer constants. For sizetype
1930 constant calculations we always want to know about overflow,
1931 even in the unsigned case. */
1932 tree res = int_const_binop (code, arg0, arg1, -1);
1933 if (res != NULL_TREE)
1934 return res;
1935 }
1936
1937 return fold_build2_loc (loc, code, type, arg0, arg1);
1938 }
1939
1940 /* Given two values, either both of sizetype or both of bitsizetype,
1941 compute the difference between the two values. Return the value
1942 in the signed type corresponding to the type of the operands. */
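/* Informal example: for sizetype operands the result type is ssizetype,
   so size_diffop_loc (loc, size_int (2), size_int (5)) produces the
   ssizetype constant -3 instead of a huge wrapped-around unsigned
   value.  */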
1943
1944 tree
1945 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1946 {
1947 tree type = TREE_TYPE (arg0);
1948 tree ctype;
1949
1950 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1951 TREE_TYPE (arg1)));
1952
1953 /* If the type is already signed, just do the simple thing. */
1954 if (!TYPE_UNSIGNED (type))
1955 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1956
1957 if (type == sizetype)
1958 ctype = ssizetype;
1959 else if (type == bitsizetype)
1960 ctype = sbitsizetype;
1961 else
1962 ctype = signed_type_for (type);
1963
1964 /* If either operand is not a constant, do the conversions to the signed
1965 type and subtract. The hardware will do the right thing with any
1966 overflow in the subtraction. */
1967 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1968 return size_binop_loc (loc, MINUS_EXPR,
1969 fold_convert_loc (loc, ctype, arg0),
1970 fold_convert_loc (loc, ctype, arg1));
1971
1972 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1973 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1974 overflow) and negate (which can't either). Special-case a result
1975 of zero while we're here. */
1976 if (tree_int_cst_equal (arg0, arg1))
1977 return build_int_cst (ctype, 0);
1978 else if (tree_int_cst_lt (arg1, arg0))
1979 return fold_convert_loc (loc, ctype,
1980 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1981 else
1982 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1983 fold_convert_loc (loc, ctype,
1984 size_binop_loc (loc,
1985 MINUS_EXPR,
1986 arg1, arg0)));
1987 }
1988 \f
1989 /* A subroutine of fold_convert_const handling conversions of an
1990 INTEGER_CST to another integer type. */
1991
1992 static tree
1993 fold_convert_const_int_from_int (tree type, const_tree arg1)
1994 {
1995 /* Given an integer constant, make new constant with new type,
1996 appropriately sign-extended or truncated. Use widest_int
1997 so that any extension is done according to ARG1's type. */
1998 return force_fit_type (type, wi::to_widest (arg1),
1999 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2000 TREE_OVERFLOW (arg1));
2001 }
2002
2003 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2004 to an integer type. */
2005
2006 static tree
2007 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2008 {
2009 bool overflow = false;
2010 tree t;
2011
2012 /* The following code implements the floating point to integer
2013 conversion rules required by the Java Language Specification, namely
2014 that IEEE NaNs are mapped to zero and values that overflow
2015 the target precision saturate, i.e. values greater than
2016 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2017 are mapped to INT_MIN. These semantics are allowed by the
2018 C and C++ standards that simply state that the behavior of
2019 FP-to-integer conversion is unspecified upon overflow. */
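/* A rough worked example of the saturating behaviour implemented below,
   assuming a 32-bit int target type: folding 1.0e30 yields INT_MAX
   (2147483647), folding -1.0e30 yields INT_MIN, and folding a NaN yields
   0; in each case TREE_OVERFLOW is set on the result.  */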
2020
2021 wide_int val;
2022 REAL_VALUE_TYPE r;
2023 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2024
2025 switch (code)
2026 {
2027 case FIX_TRUNC_EXPR:
2028 real_trunc (&r, VOIDmode, &x);
2029 break;
2030
2031 default:
2032 gcc_unreachable ();
2033 }
2034
2035 /* If R is NaN, return zero and show we have an overflow. */
2036 if (REAL_VALUE_ISNAN (r))
2037 {
2038 overflow = true;
2039 val = wi::zero (TYPE_PRECISION (type));
2040 }
2041
2042 /* See if R is less than the lower bound or greater than the
2043 upper bound. */
2044
2045 if (! overflow)
2046 {
2047 tree lt = TYPE_MIN_VALUE (type);
2048 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2049 if (real_less (&r, &l))
2050 {
2051 overflow = true;
2052 val = wi::to_wide (lt);
2053 }
2054 }
2055
2056 if (! overflow)
2057 {
2058 tree ut = TYPE_MAX_VALUE (type);
2059 if (ut)
2060 {
2061 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2062 if (real_less (&u, &r))
2063 {
2064 overflow = true;
2065 val = wi::to_wide (ut);
2066 }
2067 }
2068 }
2069
2070 if (! overflow)
2071 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2072
2073 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2074 return t;
2075 }
2076
2077 /* A subroutine of fold_convert_const handling conversions of a
2078 FIXED_CST to an integer type. */
2079
2080 static tree
2081 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2082 {
2083 tree t;
2084 double_int temp, temp_trunc;
2085 scalar_mode mode;
2086
2087 /* Right shift FIXED_CST to temp by fbit. */
2088 temp = TREE_FIXED_CST (arg1).data;
2089 mode = TREE_FIXED_CST (arg1).mode;
2090 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2091 {
2092 temp = temp.rshift (GET_MODE_FBIT (mode),
2093 HOST_BITS_PER_DOUBLE_INT,
2094 SIGNED_FIXED_POINT_MODE_P (mode));
2095
2096 /* Left shift temp to temp_trunc by fbit. */
2097 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2098 HOST_BITS_PER_DOUBLE_INT,
2099 SIGNED_FIXED_POINT_MODE_P (mode));
2100 }
2101 else
2102 {
2103 temp = double_int_zero;
2104 temp_trunc = double_int_zero;
2105 }
2106
2107 /* If FIXED_CST is negative, we need to round the value toward 0.
2108 We do this by adding 1 to temp if the fractional bits are not zero. */
2109 if (SIGNED_FIXED_POINT_MODE_P (mode)
2110 && temp_trunc.is_negative ()
2111 && TREE_FIXED_CST (arg1).data != temp_trunc)
2112 temp += double_int_one;
2113
2114 /* Given a fixed-point constant, make new constant with new type,
2115 appropriately sign-extended or truncated. */
2116 t = force_fit_type (type, temp, -1,
2117 (temp.is_negative ()
2118 && (TYPE_UNSIGNED (type)
2119 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2120 | TREE_OVERFLOW (arg1));
2121
2122 return t;
2123 }
2124
2125 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2126 to another floating point type. */
2127
2128 static tree
2129 fold_convert_const_real_from_real (tree type, const_tree arg1)
2130 {
2131 REAL_VALUE_TYPE value;
2132 tree t;
2133
2134 /* Don't perform the operation if flag_signaling_nans is on
2135 and the operand is a signaling NaN. */
2136 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2137 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2138 return NULL_TREE;
2139
2140 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2141 t = build_real (type, value);
2142
2143 /* If converting an infinity or NAN to a representation that doesn't
2144 have one, set the overflow bit so that we can produce some kind of
2145 error message at the appropriate point if necessary. It's not the
2146 most user-friendly message, but it's better than nothing. */
2147 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2148 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2149 TREE_OVERFLOW (t) = 1;
2150 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2151 && !MODE_HAS_NANS (TYPE_MODE (type)))
2152 TREE_OVERFLOW (t) = 1;
2153 /* Regular overflow, conversion produced an infinity in a mode that
2154 can't represent them. */
2155 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2156 && REAL_VALUE_ISINF (value)
2157 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2158 TREE_OVERFLOW (t) = 1;
2159 else
2160 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2161 return t;
2162 }
2163
2164 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2165 to a floating point type. */
2166
2167 static tree
2168 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2169 {
2170 REAL_VALUE_TYPE value;
2171 tree t;
2172
2173 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2174 &TREE_FIXED_CST (arg1));
2175 t = build_real (type, value);
2176
2177 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2178 return t;
2179 }
2180
2181 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2182 to another fixed-point type. */
2183
2184 static tree
2185 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2186 {
2187 FIXED_VALUE_TYPE value;
2188 tree t;
2189 bool overflow_p;
2190
2191 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2192 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2193 t = build_fixed (type, value);
2194
2195 /* Propagate overflow flags. */
2196 if (overflow_p | TREE_OVERFLOW (arg1))
2197 TREE_OVERFLOW (t) = 1;
2198 return t;
2199 }
2200
2201 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2202 to a fixed-point type. */
2203
2204 static tree
2205 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2206 {
2207 FIXED_VALUE_TYPE value;
2208 tree t;
2209 bool overflow_p;
2210 double_int di;
2211
2212 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2213
2214 di.low = TREE_INT_CST_ELT (arg1, 0);
2215 if (TREE_INT_CST_NUNITS (arg1) == 1)
2216 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2217 else
2218 di.high = TREE_INT_CST_ELT (arg1, 1);
2219
2220 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2221 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2222 TYPE_SATURATING (type));
2223 t = build_fixed (type, value);
2224
2225 /* Propagate overflow flags. */
2226 if (overflow_p | TREE_OVERFLOW (arg1))
2227 TREE_OVERFLOW (t) = 1;
2228 return t;
2229 }
2230
2231 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2232 to a fixed-point type. */
2233
2234 static tree
2235 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2236 {
2237 FIXED_VALUE_TYPE value;
2238 tree t;
2239 bool overflow_p;
2240
2241 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2242 &TREE_REAL_CST (arg1),
2243 TYPE_SATURATING (type));
2244 t = build_fixed (type, value);
2245
2246 /* Propagate overflow flags. */
2247 if (overflow_p | TREE_OVERFLOW (arg1))
2248 TREE_OVERFLOW (t) = 1;
2249 return t;
2250 }
2251
2252 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2253 type TYPE. If no simplification can be done return NULL_TREE. */
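/* Informally, this is the constant-only companion of fold_convert_loc:
   for instance an INTEGER_CST converted to a REAL_TYPE becomes a REAL_CST
   via build_real_from_int_cst, while a non-constant ARG1 simply yields
   NULL_TREE and the caller keeps the conversion expression.  */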
2254
2255 static tree
2256 fold_convert_const (enum tree_code code, tree type, tree arg1)
2257 {
2258 tree arg_type = TREE_TYPE (arg1);
2259 if (arg_type == type)
2260 return arg1;
2261
2262 /* We can't widen types, since the runtime value could overflow the
2263 original type before being extended to the new type. */
2264 if (POLY_INT_CST_P (arg1)
2265 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2266 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2267 return build_poly_int_cst (type,
2268 poly_wide_int::from (poly_int_cst_value (arg1),
2269 TYPE_PRECISION (type),
2270 TYPE_SIGN (arg_type)));
2271
2272 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2273 || TREE_CODE (type) == OFFSET_TYPE)
2274 {
2275 if (TREE_CODE (arg1) == INTEGER_CST)
2276 return fold_convert_const_int_from_int (type, arg1);
2277 else if (TREE_CODE (arg1) == REAL_CST)
2278 return fold_convert_const_int_from_real (code, type, arg1);
2279 else if (TREE_CODE (arg1) == FIXED_CST)
2280 return fold_convert_const_int_from_fixed (type, arg1);
2281 }
2282 else if (TREE_CODE (type) == REAL_TYPE)
2283 {
2284 if (TREE_CODE (arg1) == INTEGER_CST)
2285 return build_real_from_int_cst (type, arg1);
2286 else if (TREE_CODE (arg1) == REAL_CST)
2287 return fold_convert_const_real_from_real (type, arg1);
2288 else if (TREE_CODE (arg1) == FIXED_CST)
2289 return fold_convert_const_real_from_fixed (type, arg1);
2290 }
2291 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2292 {
2293 if (TREE_CODE (arg1) == FIXED_CST)
2294 return fold_convert_const_fixed_from_fixed (type, arg1);
2295 else if (TREE_CODE (arg1) == INTEGER_CST)
2296 return fold_convert_const_fixed_from_int (type, arg1);
2297 else if (TREE_CODE (arg1) == REAL_CST)
2298 return fold_convert_const_fixed_from_real (type, arg1);
2299 }
2300 else if (TREE_CODE (type) == VECTOR_TYPE)
2301 {
2302 if (TREE_CODE (arg1) == VECTOR_CST
2303 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2304 {
2305 tree elttype = TREE_TYPE (type);
2306 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2307 /* We can't handle steps directly when extending, since the
2308 values need to wrap at the original precision first. */
2309 bool step_ok_p
2310 = (INTEGRAL_TYPE_P (elttype)
2311 && INTEGRAL_TYPE_P (arg1_elttype)
2312 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2313 tree_vector_builder v;
2314 if (!v.new_unary_operation (type, arg1, step_ok_p))
2315 return NULL_TREE;
2316 unsigned int len = v.encoded_nelts ();
2317 for (unsigned int i = 0; i < len; ++i)
2318 {
2319 tree elt = VECTOR_CST_ELT (arg1, i);
2320 tree cvt = fold_convert_const (code, elttype, elt);
2321 if (cvt == NULL_TREE)
2322 return NULL_TREE;
2323 v.quick_push (cvt);
2324 }
2325 return v.build ();
2326 }
2327 }
2328 return NULL_TREE;
2329 }
2330
2331 /* Construct a vector of zero elements of vector type TYPE. */
2332
2333 static tree
2334 build_zero_vector (tree type)
2335 {
2336 tree t;
2337
2338 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2339 return build_vector_from_val (type, t);
2340 }
2341
2342 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2343
2344 bool
2345 fold_convertible_p (const_tree type, const_tree arg)
2346 {
2347 tree orig = TREE_TYPE (arg);
2348
2349 if (type == orig)
2350 return true;
2351
2352 if (TREE_CODE (arg) == ERROR_MARK
2353 || TREE_CODE (type) == ERROR_MARK
2354 || TREE_CODE (orig) == ERROR_MARK)
2355 return false;
2356
2357 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2358 return true;
2359
2360 switch (TREE_CODE (type))
2361 {
2362 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2363 case POINTER_TYPE: case REFERENCE_TYPE:
2364 case OFFSET_TYPE:
2365 return (INTEGRAL_TYPE_P (orig)
2366 || (POINTER_TYPE_P (orig)
2367 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2368 || TREE_CODE (orig) == OFFSET_TYPE);
2369
2370 case REAL_TYPE:
2371 case FIXED_POINT_TYPE:
2372 case VOID_TYPE:
2373 return TREE_CODE (type) == TREE_CODE (orig);
2374
2375 case VECTOR_TYPE:
2376 return (VECTOR_TYPE_P (orig)
2377 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2378 TYPE_VECTOR_SUBPARTS (orig))
2379 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2380
2381 default:
2382 return false;
2383 }
2384 }
2385
2386 /* Convert expression ARG to type TYPE. Used by the middle-end for
2387 simple conversions in preference to calling the front-end's convert. */
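/* Sketch of typical behaviour (illustrative only): an INTEGER_CST
   converted to a REAL_TYPE folds immediately through
   fold_convert_const (FLOAT_EXPR, ...); a COMPLEX_TYPE source converted
   to a scalar type keeps only its real part via REALPART_EXPR; and a
   conversion to VOID_TYPE wraps the (ignored) result in a NOP_EXPR.  */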
2388
2389 tree
2390 fold_convert_loc (location_t loc, tree type, tree arg)
2391 {
2392 tree orig = TREE_TYPE (arg);
2393 tree tem;
2394
2395 if (type == orig)
2396 return arg;
2397
2398 if (TREE_CODE (arg) == ERROR_MARK
2399 || TREE_CODE (type) == ERROR_MARK
2400 || TREE_CODE (orig) == ERROR_MARK)
2401 return error_mark_node;
2402
2403 switch (TREE_CODE (type))
2404 {
2405 case POINTER_TYPE:
2406 case REFERENCE_TYPE:
2407 /* Handle conversions between pointers to different address spaces. */
2408 if (POINTER_TYPE_P (orig)
2409 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2410 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2411 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2412 /* fall through */
2413
2414 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2415 case OFFSET_TYPE:
2416 if (TREE_CODE (arg) == INTEGER_CST)
2417 {
2418 tem = fold_convert_const (NOP_EXPR, type, arg);
2419 if (tem != NULL_TREE)
2420 return tem;
2421 }
2422 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2423 || TREE_CODE (orig) == OFFSET_TYPE)
2424 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2425 if (TREE_CODE (orig) == COMPLEX_TYPE)
2426 return fold_convert_loc (loc, type,
2427 fold_build1_loc (loc, REALPART_EXPR,
2428 TREE_TYPE (orig), arg));
2429 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2430 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2431 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2432
2433 case REAL_TYPE:
2434 if (TREE_CODE (arg) == INTEGER_CST)
2435 {
2436 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2437 if (tem != NULL_TREE)
2438 return tem;
2439 }
2440 else if (TREE_CODE (arg) == REAL_CST)
2441 {
2442 tem = fold_convert_const (NOP_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2445 }
2446 else if (TREE_CODE (arg) == FIXED_CST)
2447 {
2448 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2451 }
2452
2453 switch (TREE_CODE (orig))
2454 {
2455 case INTEGER_TYPE:
2456 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2457 case POINTER_TYPE: case REFERENCE_TYPE:
2458 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2459
2460 case REAL_TYPE:
2461 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2462
2463 case FIXED_POINT_TYPE:
2464 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2465
2466 case COMPLEX_TYPE:
2467 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2468 return fold_convert_loc (loc, type, tem);
2469
2470 default:
2471 gcc_unreachable ();
2472 }
2473
2474 case FIXED_POINT_TYPE:
2475 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2476 || TREE_CODE (arg) == REAL_CST)
2477 {
2478 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2479 if (tem != NULL_TREE)
2480 goto fold_convert_exit;
2481 }
2482
2483 switch (TREE_CODE (orig))
2484 {
2485 case FIXED_POINT_TYPE:
2486 case INTEGER_TYPE:
2487 case ENUMERAL_TYPE:
2488 case BOOLEAN_TYPE:
2489 case REAL_TYPE:
2490 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2491
2492 case COMPLEX_TYPE:
2493 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2494 return fold_convert_loc (loc, type, tem);
2495
2496 default:
2497 gcc_unreachable ();
2498 }
2499
2500 case COMPLEX_TYPE:
2501 switch (TREE_CODE (orig))
2502 {
2503 case INTEGER_TYPE:
2504 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2505 case POINTER_TYPE: case REFERENCE_TYPE:
2506 case REAL_TYPE:
2507 case FIXED_POINT_TYPE:
2508 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2509 fold_convert_loc (loc, TREE_TYPE (type), arg),
2510 fold_convert_loc (loc, TREE_TYPE (type),
2511 integer_zero_node));
2512 case COMPLEX_TYPE:
2513 {
2514 tree rpart, ipart;
2515
2516 if (TREE_CODE (arg) == COMPLEX_EXPR)
2517 {
2518 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2519 TREE_OPERAND (arg, 0));
2520 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2521 TREE_OPERAND (arg, 1));
2522 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2523 }
2524
2525 arg = save_expr (arg);
2526 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2527 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2528 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2529 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2530 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2531 }
2532
2533 default:
2534 gcc_unreachable ();
2535 }
2536
2537 case VECTOR_TYPE:
2538 if (integer_zerop (arg))
2539 return build_zero_vector (type);
2540 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2541 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2542 || TREE_CODE (orig) == VECTOR_TYPE);
2543 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2544
2545 case VOID_TYPE:
2546 tem = fold_ignored_result (arg);
2547 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2548
2549 default:
2550 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2551 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2552 gcc_unreachable ();
2553 }
2554 fold_convert_exit:
2555 protected_set_expr_location_unshare (tem, loc);
2556 return tem;
2557 }
2558 \f
2559 /* Return false if expr can be assumed not to be an lvalue, true
2560 otherwise. */
2561
2562 static bool
2563 maybe_lvalue_p (const_tree x)
2564 {
2565 /* We only need to wrap lvalue tree codes. */
2566 switch (TREE_CODE (x))
2567 {
2568 case VAR_DECL:
2569 case PARM_DECL:
2570 case RESULT_DECL:
2571 case LABEL_DECL:
2572 case FUNCTION_DECL:
2573 case SSA_NAME:
2574
2575 case COMPONENT_REF:
2576 case MEM_REF:
2577 case INDIRECT_REF:
2578 case ARRAY_REF:
2579 case ARRAY_RANGE_REF:
2580 case BIT_FIELD_REF:
2581 case OBJ_TYPE_REF:
2582
2583 case REALPART_EXPR:
2584 case IMAGPART_EXPR:
2585 case PREINCREMENT_EXPR:
2586 case PREDECREMENT_EXPR:
2587 case SAVE_EXPR:
2588 case TRY_CATCH_EXPR:
2589 case WITH_CLEANUP_EXPR:
2590 case COMPOUND_EXPR:
2591 case MODIFY_EXPR:
2592 case TARGET_EXPR:
2593 case COND_EXPR:
2594 case BIND_EXPR:
2595 case VIEW_CONVERT_EXPR:
2596 break;
2597
2598 default:
2599 /* Assume the worst for front-end tree codes. */
2600 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2601 break;
2602 return false;
2603 }
2604
2605 return true;
2606 }
2607
2608 /* Return an expr equal to X but certainly not valid as an lvalue. */
2609
2610 tree
2611 non_lvalue_loc (location_t loc, tree x)
2612 {
2613 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2614 us. */
2615 if (in_gimple_form)
2616 return x;
2617
2618 if (! maybe_lvalue_p (x))
2619 return x;
2620 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2621 }
2622
2623 /* When pedantic, return an expr equal to X but certainly not valid as a
2624 pedantic lvalue. Otherwise, return X. */
2625
2626 static tree
2627 pedantic_non_lvalue_loc (location_t loc, tree x)
2628 {
2629 return protected_set_expr_location_unshare (x, loc);
2630 }
2631 \f
2632 /* Given a tree comparison code, return the code that is the logical inverse.
2633 It is generally not safe to do this for floating-point comparisons, except
2634 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2635 ERROR_MARK in this case. */
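/* For example, the inverse of LT_EXPR is GE_EXPR when NaNs need not be
   honored, but UNGE_EXPR when they must be, since !(x < y) also has to be
   true when either operand is a NaN; and when NaNs are honored under
   -ftrapping-math, inverting an ordered comparison such as LT_EXPR is
   refused by returning ERROR_MARK.  */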
2636
2637 enum tree_code
2638 invert_tree_comparison (enum tree_code code, bool honor_nans)
2639 {
2640 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2641 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2642 return ERROR_MARK;
2643
2644 switch (code)
2645 {
2646 case EQ_EXPR:
2647 return NE_EXPR;
2648 case NE_EXPR:
2649 return EQ_EXPR;
2650 case GT_EXPR:
2651 return honor_nans ? UNLE_EXPR : LE_EXPR;
2652 case GE_EXPR:
2653 return honor_nans ? UNLT_EXPR : LT_EXPR;
2654 case LT_EXPR:
2655 return honor_nans ? UNGE_EXPR : GE_EXPR;
2656 case LE_EXPR:
2657 return honor_nans ? UNGT_EXPR : GT_EXPR;
2658 case LTGT_EXPR:
2659 return UNEQ_EXPR;
2660 case UNEQ_EXPR:
2661 return LTGT_EXPR;
2662 case UNGT_EXPR:
2663 return LE_EXPR;
2664 case UNGE_EXPR:
2665 return LT_EXPR;
2666 case UNLT_EXPR:
2667 return GE_EXPR;
2668 case UNLE_EXPR:
2669 return GT_EXPR;
2670 case ORDERED_EXPR:
2671 return UNORDERED_EXPR;
2672 case UNORDERED_EXPR:
2673 return ORDERED_EXPR;
2674 default:
2675 gcc_unreachable ();
2676 }
2677 }
2678
2679 /* Similar, but return the comparison that results if the operands are
2680 swapped. This is safe for floating-point. */
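/* E.g. "a < b" and "b > a" test the same condition, so swapping the
   operands of LT_EXPR gives GT_EXPR, while symmetric codes such as
   EQ_EXPR and UNORDERED_EXPR are returned unchanged.  */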
2681
2682 enum tree_code
2683 swap_tree_comparison (enum tree_code code)
2684 {
2685 switch (code)
2686 {
2687 case EQ_EXPR:
2688 case NE_EXPR:
2689 case ORDERED_EXPR:
2690 case UNORDERED_EXPR:
2691 case LTGT_EXPR:
2692 case UNEQ_EXPR:
2693 return code;
2694 case GT_EXPR:
2695 return LT_EXPR;
2696 case GE_EXPR:
2697 return LE_EXPR;
2698 case LT_EXPR:
2699 return GT_EXPR;
2700 case LE_EXPR:
2701 return GE_EXPR;
2702 case UNGT_EXPR:
2703 return UNLT_EXPR;
2704 case UNGE_EXPR:
2705 return UNLE_EXPR;
2706 case UNLT_EXPR:
2707 return UNGT_EXPR;
2708 case UNLE_EXPR:
2709 return UNGE_EXPR;
2710 default:
2711 gcc_unreachable ();
2712 }
2713 }
2714
2715
2716 /* Convert a comparison tree code from an enum tree_code representation
2717 into a compcode bit-based encoding. This function is the inverse of
2718 compcode_to_comparison. */
2719
2720 static enum comparison_code
2721 comparison_to_compcode (enum tree_code code)
2722 {
2723 switch (code)
2724 {
2725 case LT_EXPR:
2726 return COMPCODE_LT;
2727 case EQ_EXPR:
2728 return COMPCODE_EQ;
2729 case LE_EXPR:
2730 return COMPCODE_LE;
2731 case GT_EXPR:
2732 return COMPCODE_GT;
2733 case NE_EXPR:
2734 return COMPCODE_NE;
2735 case GE_EXPR:
2736 return COMPCODE_GE;
2737 case ORDERED_EXPR:
2738 return COMPCODE_ORD;
2739 case UNORDERED_EXPR:
2740 return COMPCODE_UNORD;
2741 case UNLT_EXPR:
2742 return COMPCODE_UNLT;
2743 case UNEQ_EXPR:
2744 return COMPCODE_UNEQ;
2745 case UNLE_EXPR:
2746 return COMPCODE_UNLE;
2747 case UNGT_EXPR:
2748 return COMPCODE_UNGT;
2749 case LTGT_EXPR:
2750 return COMPCODE_LTGT;
2751 case UNGE_EXPR:
2752 return COMPCODE_UNGE;
2753 default:
2754 gcc_unreachable ();
2755 }
2756 }
2757
2758 /* Convert a compcode bit-based encoding of a comparison operator back
2759 to GCC's enum tree_code representation. This function is the
2760 inverse of comparison_to_compcode. */
2761
2762 static enum tree_code
2763 compcode_to_comparison (enum comparison_code code)
2764 {
2765 switch (code)
2766 {
2767 case COMPCODE_LT:
2768 return LT_EXPR;
2769 case COMPCODE_EQ:
2770 return EQ_EXPR;
2771 case COMPCODE_LE:
2772 return LE_EXPR;
2773 case COMPCODE_GT:
2774 return GT_EXPR;
2775 case COMPCODE_NE:
2776 return NE_EXPR;
2777 case COMPCODE_GE:
2778 return GE_EXPR;
2779 case COMPCODE_ORD:
2780 return ORDERED_EXPR;
2781 case COMPCODE_UNORD:
2782 return UNORDERED_EXPR;
2783 case COMPCODE_UNLT:
2784 return UNLT_EXPR;
2785 case COMPCODE_UNEQ:
2786 return UNEQ_EXPR;
2787 case COMPCODE_UNLE:
2788 return UNLE_EXPR;
2789 case COMPCODE_UNGT:
2790 return UNGT_EXPR;
2791 case COMPCODE_LTGT:
2792 return LTGT_EXPR;
2793 case COMPCODE_UNGE:
2794 return UNGE_EXPR;
2795 default:
2796 gcc_unreachable ();
2797 }
2798 }
2799
2800 /* Return true if COND1 tests the opposite condition of COND2. */
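/* For example, x < y and x >= y on the same integral operands are
   inverse conditions; for floating-point operands HONOR_NANS is taken
   into account, so those two comparisons are not treated as inverses
   when NaNs are possible.  */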
2801
2802 bool
2803 inverse_conditions_p (const_tree cond1, const_tree cond2)
2804 {
2805 return (COMPARISON_CLASS_P (cond1)
2806 && COMPARISON_CLASS_P (cond2)
2807 && (invert_tree_comparison
2808 (TREE_CODE (cond1),
2809 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2810 && operand_equal_p (TREE_OPERAND (cond1, 0),
2811 TREE_OPERAND (cond2, 0), 0)
2812 && operand_equal_p (TREE_OPERAND (cond1, 1),
2813 TREE_OPERAND (cond2, 1), 0));
2814 }
2815
2816 /* Return a tree for the comparison which is the combination of
2817 doing the AND or OR (depending on CODE) of the two operations LCODE
2818 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2819 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2820 if this makes the transformation invalid. */
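/* Rough example of the bit-encoded combination used below: for integral
   operands, (x < y) || (x == y) combines as COMPCODE_LT | COMPCODE_EQ,
   which equals COMPCODE_LE, so the whole expression folds to x <= y.  */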
2821
2822 tree
2823 combine_comparisons (location_t loc,
2824 enum tree_code code, enum tree_code lcode,
2825 enum tree_code rcode, tree truth_type,
2826 tree ll_arg, tree lr_arg)
2827 {
2828 bool honor_nans = HONOR_NANS (ll_arg);
2829 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2830 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2831 int compcode;
2832
2833 switch (code)
2834 {
2835 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2836 compcode = lcompcode & rcompcode;
2837 break;
2838
2839 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2840 compcode = lcompcode | rcompcode;
2841 break;
2842
2843 default:
2844 return NULL_TREE;
2845 }
2846
2847 if (!honor_nans)
2848 {
2849 /* Eliminate unordered comparisons, as well as LTGT and ORD
2850 which are not used unless the mode has NaNs. */
2851 compcode &= ~COMPCODE_UNORD;
2852 if (compcode == COMPCODE_LTGT)
2853 compcode = COMPCODE_NE;
2854 else if (compcode == COMPCODE_ORD)
2855 compcode = COMPCODE_TRUE;
2856 }
2857 else if (flag_trapping_math)
2858 {
2859 /* Check that the original operation and the optimized ones will trap
2860 under the same condition. */
2861 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2862 && (lcompcode != COMPCODE_EQ)
2863 && (lcompcode != COMPCODE_ORD);
2864 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2865 && (rcompcode != COMPCODE_EQ)
2866 && (rcompcode != COMPCODE_ORD);
2867 bool trap = (compcode & COMPCODE_UNORD) == 0
2868 && (compcode != COMPCODE_EQ)
2869 && (compcode != COMPCODE_ORD);
2870
2871 /* In a short-circuited boolean expression the LHS might be
2872 such that the RHS, if evaluated, will never trap. For
2873 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2874 if neither x nor y is NaN. (This is a mixed blessing: for
2875 example, the expression above will never trap, hence
2876 optimizing it to x < y would be invalid). */
2877 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2878 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2879 rtrap = false;
2880
2881 /* If the comparison was short-circuited, and only the RHS
2882 trapped, we may now generate a spurious trap. */
2883 if (rtrap && !ltrap
2884 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2885 return NULL_TREE;
2886
2887 /* If we changed the conditions that cause a trap, we lose. */
2888 if ((ltrap || rtrap) != trap)
2889 return NULL_TREE;
2890 }
2891
2892 if (compcode == COMPCODE_TRUE)
2893 return constant_boolean_node (true, truth_type);
2894 else if (compcode == COMPCODE_FALSE)
2895 return constant_boolean_node (false, truth_type);
2896 else
2897 {
2898 enum tree_code tcode;
2899
2900 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2901 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2902 }
2903 }
2904 \f
2905 /* Return nonzero if two operands (typically of the same tree node)
2906 are necessarily equal. FLAGS modifies behavior as follows:
2907
2908 If OEP_ONLY_CONST is set, only return nonzero for constants.
2909 This function tests whether the operands are indistinguishable;
2910 it does not test whether they are equal using C's == operation.
2911 The distinction is important for IEEE floating point, because
2912 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2913 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2914
2915 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2916 even though it may hold multiple values during a function.
2917 This is because a GCC tree node guarantees that nothing else is
2918 executed between the evaluation of its "operands" (which may often
2919 be evaluated in arbitrary order). Hence if the operands themselves
2920 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2921 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2922 unset means assuming isochronic (or instantaneous) tree equivalence.
2923 Unless comparing arbitrary expression trees, such as from different
2924 statements, this flag can usually be left unset.
2925
2926 If OEP_PURE_SAME is set, then pure functions with identical arguments
2927 are considered the same. It is used when the caller has other ways
2928 to ensure that global memory is unchanged in between.
2929
2930 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2931 not values of expressions.
2932
2933 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2934 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2935
2936 If OEP_BITWISE is set, then require the values to be bitwise identical
2937 rather than simply numerically equal. Do not take advantage of things
2938 like math-related flags or undefined behavior; only return true for
2939 values that are provably bitwise identical in all circumstances.
2940
2941 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2942 any operand with side effects. This is unnecessarily conservative in the
2943 case where we know that arg0 and arg1 are in disjoint code paths (such as in
2944 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2945 addresses with TREE_CONSTANT flag set so we know that &var == &var
2946 even if var is volatile. */
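/* A small illustrative contrast (not exhaustive): two separately built
   INTEGER_CST nodes of value 42 compare equal here even though they are
   distinct tree nodes, whereas the REAL_CSTs -0.0 and 0.0 compare equal
   only when signed zeros are not honored and OEP_BITWISE is not set.  */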
2947
2948 bool
2949 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2950 unsigned int flags)
2951 {
2952 bool r;
2953 if (verify_hash_value (arg0, arg1, flags, &r))
2954 return r;
2955
2956 STRIP_ANY_LOCATION_WRAPPER (arg0);
2957 STRIP_ANY_LOCATION_WRAPPER (arg1);
2958
2959 /* If either is ERROR_MARK, they aren't equal. */
2960 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2961 || TREE_TYPE (arg0) == error_mark_node
2962 || TREE_TYPE (arg1) == error_mark_node)
2963 return false;
2964
2965 /* Similar, if either does not have a type (like a template id),
2966 they aren't equal. */
2967 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2968 return false;
2969
2970 /* Bitwise identity makes no sense if the values have different layouts. */
2971 if ((flags & OEP_BITWISE)
2972 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2973 return false;
2974
2975 /* We cannot consider pointers to different address space equal. */
2976 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2977 && POINTER_TYPE_P (TREE_TYPE (arg1))
2978 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2979 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2980 return false;
2981
2982 /* Check equality of integer constants before bailing out due to
2983 precision differences. */
2984 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2985 {
2986 /* Address of INTEGER_CST is not defined; check that we did not forget
2987 to drop the OEP_ADDRESS_OF flags. */
2988 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2989 return tree_int_cst_equal (arg0, arg1);
2990 }
2991
2992 if (!(flags & OEP_ADDRESS_OF))
2993 {
2994 /* If both types don't have the same signedness, then we can't consider
2995 them equal. We must check this before the STRIP_NOPS calls
2996 because they may change the signedness of the arguments. As pointers
2997 strictly don't have a signedness, require either two pointers or
2998 two non-pointers as well. */
2999 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3000 || POINTER_TYPE_P (TREE_TYPE (arg0))
3001 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3002 return false;
3003
3004 /* If both types don't have the same precision, then it is not safe
3005 to strip NOPs. */
3006 if (element_precision (TREE_TYPE (arg0))
3007 != element_precision (TREE_TYPE (arg1)))
3008 return false;
3009
3010 STRIP_NOPS (arg0);
3011 STRIP_NOPS (arg1);
3012 }
3013 #if 0
3014 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3015 sanity check once the issue is solved. */
3016 else
3017 /* Addresses of conversions and SSA_NAMEs (and many other things)
3018 are not defined. Check that we did not forget to drop the
3019 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3020 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3021 && TREE_CODE (arg0) != SSA_NAME);
3022 #endif
3023
3024 /* In case both args are comparisons but with different comparison
3025 code, try to swap the comparison operands of one arg to produce
3026 a match and compare that variant. */
3027 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3028 && COMPARISON_CLASS_P (arg0)
3029 && COMPARISON_CLASS_P (arg1))
3030 {
3031 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3032
3033 if (TREE_CODE (arg0) == swap_code)
3034 return operand_equal_p (TREE_OPERAND (arg0, 0),
3035 TREE_OPERAND (arg1, 1), flags)
3036 && operand_equal_p (TREE_OPERAND (arg0, 1),
3037 TREE_OPERAND (arg1, 0), flags);
3038 }
3039
3040 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3041 {
3042 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3043 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3044 ;
3045 else if (flags & OEP_ADDRESS_OF)
3046 {
3047 /* If we are interested in comparing addresses ignore
3048 MEM_REF wrappings of the base that can appear just for
3049 TBAA reasons. */
3050 if (TREE_CODE (arg0) == MEM_REF
3051 && DECL_P (arg1)
3052 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3053 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3054 && integer_zerop (TREE_OPERAND (arg0, 1)))
3055 return true;
3056 else if (TREE_CODE (arg1) == MEM_REF
3057 && DECL_P (arg0)
3058 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3060 && integer_zerop (TREE_OPERAND (arg1, 1)))
3061 return true;
3062 return false;
3063 }
3064 else
3065 return false;
3066 }
3067
3068 /* When not checking addresses, this is needed for conversions and for
3069 COMPONENT_REF. Might as well play it safe and always test this. */
3070 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3073 && !(flags & OEP_ADDRESS_OF)))
3074 return false;
3075
3076 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3077 We don't care about side effects in that case because the SAVE_EXPR
3078 takes care of that for us. In all other cases, two expressions are
3079 equal if they have no side effects. If we have two identical
3080 expressions with side effects that should be treated the same due
3081 to the only side effects being identical SAVE_EXPR's, that will
3082 be detected in the recursive calls below.
3083 If we are taking an invariant address of two identical objects
3084 they are necessarily equal as well. */
3085 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3086 && (TREE_CODE (arg0) == SAVE_EXPR
3087 || (flags & OEP_MATCH_SIDE_EFFECTS)
3088 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3089 return true;
3090
3091 /* Next handle constant cases, those for which we can return 1 even
3092 if ONLY_CONST is set. */
3093 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3094 switch (TREE_CODE (arg0))
3095 {
3096 case INTEGER_CST:
3097 return tree_int_cst_equal (arg0, arg1);
3098
3099 case FIXED_CST:
3100 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3101 TREE_FIXED_CST (arg1));
3102
3103 case REAL_CST:
3104 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3105 return true;
3106
3107 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3108 {
3109 /* If we do not distinguish between signed and unsigned zero,
3110 consider them equal. */
3111 if (real_zerop (arg0) && real_zerop (arg1))
3112 return true;
3113 }
3114 return false;
3115
3116 case VECTOR_CST:
3117 {
3118 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3119 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3120 return false;
3121
3122 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3123 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3124 return false;
3125
3126 unsigned int count = vector_cst_encoded_nelts (arg0);
3127 for (unsigned int i = 0; i < count; ++i)
3128 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3129 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3130 return false;
3131 return true;
3132 }
3133
3134 case COMPLEX_CST:
3135 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3136 flags)
3137 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3138 flags));
3139
3140 case STRING_CST:
3141 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3142 && ! memcmp (TREE_STRING_POINTER (arg0),
3143 TREE_STRING_POINTER (arg1),
3144 TREE_STRING_LENGTH (arg0)));
3145
3146 case ADDR_EXPR:
3147 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3148 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3149 flags | OEP_ADDRESS_OF
3150 | OEP_MATCH_SIDE_EFFECTS);
3151 case CONSTRUCTOR:
3152 /* In GIMPLE empty constructors are allowed in initializers of
3153 aggregates. */
3154 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3155 default:
3156 break;
3157 }
3158
3159 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3160 two instances of undefined behavior will give identical results. */
3161 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3162 return false;
3163
3164 /* Define macros to test an operand from arg0 and arg1 for equality and a
3165 variant that allows null and views null as being different from any
3166 non-null value. In the latter case, if either is null, both
3167 must be; otherwise, do the normal comparison. */
3168 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3169 TREE_OPERAND (arg1, N), flags)
3170
3171 #define OP_SAME_WITH_NULL(N) \
3172 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3173 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3174
3175 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3176 {
3177 case tcc_unary:
3178 /* Two conversions are equal only if signedness and modes match. */
3179 switch (TREE_CODE (arg0))
3180 {
3181 CASE_CONVERT:
3182 case FIX_TRUNC_EXPR:
3183 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3184 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3185 return false;
3186 break;
3187 default:
3188 break;
3189 }
3190
3191 return OP_SAME (0);
3192
3193
3194 case tcc_comparison:
3195 case tcc_binary:
3196 if (OP_SAME (0) && OP_SAME (1))
3197 return true;
3198
3199 /* For commutative ops, allow the other order. */
3200 return (commutative_tree_code (TREE_CODE (arg0))
3201 && operand_equal_p (TREE_OPERAND (arg0, 0),
3202 TREE_OPERAND (arg1, 1), flags)
3203 && operand_equal_p (TREE_OPERAND (arg0, 1),
3204 TREE_OPERAND (arg1, 0), flags));
3205
3206 case tcc_reference:
3207 /* If either of the pointer (or reference) expressions we are
3208 dereferencing contain a side effect, these cannot be equal,
3209 but their addresses can be. */
3210 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3211 && (TREE_SIDE_EFFECTS (arg0)
3212 || TREE_SIDE_EFFECTS (arg1)))
3213 return false;
3214
3215 switch (TREE_CODE (arg0))
3216 {
3217 case INDIRECT_REF:
3218 if (!(flags & OEP_ADDRESS_OF))
3219 {
3220 if (TYPE_ALIGN (TREE_TYPE (arg0))
3221 != TYPE_ALIGN (TREE_TYPE (arg1)))
3222 return false;
3223 /* Verify that the access types are compatible. */
3224 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3225 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3226 return false;
3227 }
3228 flags &= ~OEP_ADDRESS_OF;
3229 return OP_SAME (0);
3230
3231 case IMAGPART_EXPR:
3232 /* Require the same offset. */
3233 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3234 TYPE_SIZE (TREE_TYPE (arg1)),
3235 flags & ~OEP_ADDRESS_OF))
3236 return false;
3237
3238 /* Fallthru. */
3239 case REALPART_EXPR:
3240 case VIEW_CONVERT_EXPR:
3241 return OP_SAME (0);
3242
3243 case TARGET_MEM_REF:
3244 case MEM_REF:
3245 if (!(flags & OEP_ADDRESS_OF))
3246 {
3247 /* Require equal access sizes */
3248 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3249 && (!TYPE_SIZE (TREE_TYPE (arg0))
3250 || !TYPE_SIZE (TREE_TYPE (arg1))
3251 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3252 TYPE_SIZE (TREE_TYPE (arg1)),
3253 flags)))
3254 return false;
3255 /* Verify that access happens in similar types. */
3256 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3257 return false;
3258 /* Verify that accesses are TBAA compatible. */
3259 if (!alias_ptr_types_compatible_p
3260 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3261 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3262 || (MR_DEPENDENCE_CLIQUE (arg0)
3263 != MR_DEPENDENCE_CLIQUE (arg1))
3264 || (MR_DEPENDENCE_BASE (arg0)
3265 != MR_DEPENDENCE_BASE (arg1)))
3266 return false;
3267 /* Verify that alignment is compatible. */
3268 if (TYPE_ALIGN (TREE_TYPE (arg0))
3269 != TYPE_ALIGN (TREE_TYPE (arg1)))
3270 return false;
3271 }
3272 flags &= ~OEP_ADDRESS_OF;
3273 return (OP_SAME (0) && OP_SAME (1)
3274 /* TARGET_MEM_REF require equal extra operands. */
3275 && (TREE_CODE (arg0) != TARGET_MEM_REF
3276 || (OP_SAME_WITH_NULL (2)
3277 && OP_SAME_WITH_NULL (3)
3278 && OP_SAME_WITH_NULL (4))));
3279
3280 case ARRAY_REF:
3281 case ARRAY_RANGE_REF:
3282 if (!OP_SAME (0))
3283 return false;
3284 flags &= ~OEP_ADDRESS_OF;
3285 /* Compare the array index by value if it is constant first as we
3286 may have different types but same value here. */
3287 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3288 TREE_OPERAND (arg1, 1))
3289 || OP_SAME (1))
3290 && OP_SAME_WITH_NULL (2)
3291 && OP_SAME_WITH_NULL (3)
3292 /* Compare low bound and element size as with OEP_ADDRESS_OF
3293 we have to account for the offset of the ref. */
3294 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3295 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3296 || (operand_equal_p (array_ref_low_bound
3297 (CONST_CAST_TREE (arg0)),
3298 array_ref_low_bound
3299 (CONST_CAST_TREE (arg1)), flags)
3300 && operand_equal_p (array_ref_element_size
3301 (CONST_CAST_TREE (arg0)),
3302 array_ref_element_size
3303 (CONST_CAST_TREE (arg1)),
3304 flags))));
3305
3306 case COMPONENT_REF:
3307 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3308 may be NULL when we're called to compare MEM_EXPRs. */
3309 if (!OP_SAME_WITH_NULL (0))
3310 return false;
3311 {
3312 bool compare_address = flags & OEP_ADDRESS_OF;
3313
3314 /* Most of the time we only need to compare FIELD_DECLs for equality.
3315 However, when determining the address, look into the actual offsets.
3316 These may match for unions and unshared record types. */
3317 flags &= ~OEP_ADDRESS_OF;
3318 if (!OP_SAME (1))
3319 {
3320 if (compare_address)
3321 {
3322 if (TREE_OPERAND (arg0, 2)
3323 || TREE_OPERAND (arg1, 2))
3324 return OP_SAME_WITH_NULL (2);
3325 tree field0 = TREE_OPERAND (arg0, 1);
3326 tree field1 = TREE_OPERAND (arg1, 1);
3327
3328 if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3329 DECL_FIELD_OFFSET (field1), flags)
3330 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3331 DECL_FIELD_BIT_OFFSET (field1),
3332 flags))
3333 return false;
3334 }
3335 else
3336 return false;
3337 }
3338 }
3339 return OP_SAME_WITH_NULL (2);
3340
3341 case BIT_FIELD_REF:
3342 if (!OP_SAME (0))
3343 return false;
3344 flags &= ~OEP_ADDRESS_OF;
3345 return OP_SAME (1) && OP_SAME (2);
3346
3347 default:
3348 return false;
3349 }
3350
3351 case tcc_expression:
3352 switch (TREE_CODE (arg0))
3353 {
3354 case ADDR_EXPR:
3355 /* Be sure we pass right ADDRESS_OF flag. */
3356 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3357 return operand_equal_p (TREE_OPERAND (arg0, 0),
3358 TREE_OPERAND (arg1, 0),
3359 flags | OEP_ADDRESS_OF);
3360
3361 case TRUTH_NOT_EXPR:
3362 return OP_SAME (0);
3363
3364 case TRUTH_ANDIF_EXPR:
3365 case TRUTH_ORIF_EXPR:
3366 return OP_SAME (0) && OP_SAME (1);
3367
3368 case WIDEN_MULT_PLUS_EXPR:
3369 case WIDEN_MULT_MINUS_EXPR:
3370 if (!OP_SAME (2))
3371 return false;
3372 /* The multiplication operands are commutative. */
3373 /* FALLTHRU */
3374
3375 case TRUTH_AND_EXPR:
3376 case TRUTH_OR_EXPR:
3377 case TRUTH_XOR_EXPR:
3378 if (OP_SAME (0) && OP_SAME (1))
3379 return true;
3380
3381 /* Otherwise take into account this is a commutative operation. */
3382 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3383 TREE_OPERAND (arg1, 1), flags)
3384 && operand_equal_p (TREE_OPERAND (arg0, 1),
3385 TREE_OPERAND (arg1, 0), flags));
3386
3387 case COND_EXPR:
3388 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3389 return false;
3390 flags &= ~OEP_ADDRESS_OF;
3391 return OP_SAME (0);
3392
3393 case BIT_INSERT_EXPR:
3394 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3395 of op1. Need to check to make sure they are the same. */
3396 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3397 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3398 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3399 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3400 return false;
3401 /* FALLTHRU */
3402
3403 case VEC_COND_EXPR:
3404 case DOT_PROD_EXPR:
3405 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3406
3407 case MODIFY_EXPR:
3408 case INIT_EXPR:
3409 case COMPOUND_EXPR:
3410 case PREDECREMENT_EXPR:
3411 case PREINCREMENT_EXPR:
3412 case POSTDECREMENT_EXPR:
3413 case POSTINCREMENT_EXPR:
3414 if (flags & OEP_LEXICOGRAPHIC)
3415 return OP_SAME (0) && OP_SAME (1);
3416 return false;
3417
3418 case CLEANUP_POINT_EXPR:
3419 case EXPR_STMT:
3420 case SAVE_EXPR:
3421 if (flags & OEP_LEXICOGRAPHIC)
3422 return OP_SAME (0);
3423 return false;
3424
3425 case OBJ_TYPE_REF:
3426 /* Virtual table reference. */
3427 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3428 OBJ_TYPE_REF_EXPR (arg1), flags))
3429 return false;
3430 flags &= ~OEP_ADDRESS_OF;
3431 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3432 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3433 return false;
3434 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3435 OBJ_TYPE_REF_OBJECT (arg1), flags))
3436 return false;
3437 if (virtual_method_call_p (arg0))
3438 {
3439 if (!virtual_method_call_p (arg1))
3440 return false;
3441 return types_same_for_odr (obj_type_ref_class (arg0),
3442 obj_type_ref_class (arg1));
3443 }
3444 return false;
3445
3446 default:
3447 return false;
3448 }
3449
3450 case tcc_vl_exp:
3451 switch (TREE_CODE (arg0))
3452 {
3453 case CALL_EXPR:
3454 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3455 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3456 /* If the two CALL_EXPRs are not both internal calls or both normal
3457 function calls, then they are not equal. */
3458 return false;
3459 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3460 {
3461 /* If the CALL_EXPRs call different internal functions, then they
3462 are not equal. */
3463 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3464 return false;
3465 }
3466 else
3467 {
3468 /* If the CALL_EXPRs call different functions, then they are not
3469 equal. */
3470 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3471 flags))
3472 return false;
3473 }
3474
3475 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3476 {
3477 unsigned int cef = call_expr_flags (arg0);
3478 if (flags & OEP_PURE_SAME)
3479 cef &= ECF_CONST | ECF_PURE;
3480 else
3481 cef &= ECF_CONST;
3482 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3483 return false;
3484 }
3485
3486 /* Now see if all the arguments are the same. */
3487 {
3488 const_call_expr_arg_iterator iter0, iter1;
3489 const_tree a0, a1;
3490 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3491 a1 = first_const_call_expr_arg (arg1, &iter1);
3492 a0 && a1;
3493 a0 = next_const_call_expr_arg (&iter0),
3494 a1 = next_const_call_expr_arg (&iter1))
3495 if (! operand_equal_p (a0, a1, flags))
3496 return false;
3497
3498 /* If we get here and both argument lists are exhausted
3499 then the CALL_EXPRs are equal. */
3500 return ! (a0 || a1);
3501 }
3502 default:
3503 return false;
3504 }
3505
3506 case tcc_declaration:
3507 /* Consider __builtin_sqrt equal to sqrt. */
3508 return (TREE_CODE (arg0) == FUNCTION_DECL
3509 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3510 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3511 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3512 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3513
3514 case tcc_exceptional:
3515 if (TREE_CODE (arg0) == CONSTRUCTOR)
3516 {
3517 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3518 return false;
3519
3520 /* In GIMPLE constructors are used only to build vectors from
3521 elements. Individual elements in the constructor must be
3522 indexed in increasing order and form an initial sequence.
3523
3524 We make no effort to compare constructors in generic.
3525 (see sem_variable::equals in ipa-icf which can do so for
3526 constants). */
3527 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3528 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3529 return false;
3530
3531 /* Be sure that vectors constructed have the same representation.
3532 We have only checked that element precision and modes match.
3533 Vectors may be BLKmode, so also check that the numbers of
3534 parts match. */
3535 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3536 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3537 return false;
3538
3539 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3540 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3541 unsigned int len = vec_safe_length (v0);
3542
3543 if (len != vec_safe_length (v1))
3544 return false;
3545
3546 for (unsigned int i = 0; i < len; i++)
3547 {
3548 constructor_elt *c0 = &(*v0)[i];
3549 constructor_elt *c1 = &(*v1)[i];
3550
3551 if (!operand_equal_p (c0->value, c1->value, flags)
3552 /* In GIMPLE the indexes can be either NULL or matching i.
3553 Double check this so we won't get false
3554 positives for GENERIC. */
3555 || (c0->index
3556 && (TREE_CODE (c0->index) != INTEGER_CST
3557 || compare_tree_int (c0->index, i)))
3558 || (c1->index
3559 && (TREE_CODE (c1->index) != INTEGER_CST
3560 || compare_tree_int (c1->index, i))))
3561 return false;
3562 }
3563 return true;
3564 }
3565 else if (TREE_CODE (arg0) == STATEMENT_LIST
3566 && (flags & OEP_LEXICOGRAPHIC))
3567 {
3568 /* Compare the STATEMENT_LISTs. */
3569 tree_stmt_iterator tsi1, tsi2;
3570 tree body1 = CONST_CAST_TREE (arg0);
3571 tree body2 = CONST_CAST_TREE (arg1);
3572 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3573 tsi_next (&tsi1), tsi_next (&tsi2))
3574 {
3575 /* The lists don't have the same number of statements. */
3576 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3577 return false;
3578 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3579 return true;
3580 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3581 flags & (OEP_LEXICOGRAPHIC
3582 | OEP_NO_HASH_CHECK)))
3583 return false;
3584 }
3585 }
3586 return false;
3587
3588 case tcc_statement:
3589 switch (TREE_CODE (arg0))
3590 {
3591 case RETURN_EXPR:
3592 if (flags & OEP_LEXICOGRAPHIC)
3593 return OP_SAME_WITH_NULL (0);
3594 return false;
3595 case DEBUG_BEGIN_STMT:
3596 if (flags & OEP_LEXICOGRAPHIC)
3597 return true;
3598 return false;
3599 default:
3600 return false;
3601 }
3602
3603 default:
3604 return false;
3605 }
3606
3607 #undef OP_SAME
3608 #undef OP_SAME_WITH_NULL
3609 }
3610
3611 /* Generate a hash value for an expression. This can be used iteratively
3612 by passing a previous result as the HSTATE argument. */
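/* Minimal usage sketch from within operand_compare, assuming the inchash
   API used elsewhere in this file:

     inchash::hash hstate;
     hash_operand (op0, hstate, 0);
     hash_operand (op1, hstate, 0);
     hashval_t h = hstate.end ();

   The intent is that trees which operand_equal_p considers equal receive
   the same hash value.  */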
3613
3614 void
3615 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3616 unsigned int flags)
3617 {
3618 int i;
3619 enum tree_code code;
3620 enum tree_code_class tclass;
3621
3622 if (t == NULL_TREE || t == error_mark_node)
3623 {
3624 hstate.merge_hash (0);
3625 return;
3626 }
3627
3628 STRIP_ANY_LOCATION_WRAPPER (t);
3629
3630 if (!(flags & OEP_ADDRESS_OF))
3631 STRIP_NOPS (t);
3632
3633 code = TREE_CODE (t);
3634
3635 switch (code)
3636 {
3637 /* Alas, constants aren't shared, so we can't rely on pointer
3638 identity. */
3639 case VOID_CST:
3640 hstate.merge_hash (0);
3641 return;
3642 case INTEGER_CST:
3643 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3644 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3645 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3646 return;
3647 case REAL_CST:
3648 {
3649 unsigned int val2;
3650 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3651 val2 = rvc_zero;
3652 else
3653 val2 = real_hash (TREE_REAL_CST_PTR (t));
3654 hstate.merge_hash (val2);
3655 return;
3656 }
3657 case FIXED_CST:
3658 {
3659 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3660 hstate.merge_hash (val2);
3661 return;
3662 }
3663 case STRING_CST:
3664 hstate.add ((const void *) TREE_STRING_POINTER (t),
3665 TREE_STRING_LENGTH (t));
3666 return;
3667 case COMPLEX_CST:
3668 hash_operand (TREE_REALPART (t), hstate, flags);
3669 hash_operand (TREE_IMAGPART (t), hstate, flags);
3670 return;
3671 case VECTOR_CST:
3672 {
3673 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3674 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3675 unsigned int count = vector_cst_encoded_nelts (t);
3676 for (unsigned int i = 0; i < count; ++i)
3677 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3678 return;
3679 }
3680 case SSA_NAME:
3681 /* We can just compare by pointer. */
3682 hstate.add_hwi (SSA_NAME_VERSION (t));
3683 return;
3684 case PLACEHOLDER_EXPR:
3685 /* The node itself doesn't matter. */
3686 return;
3687 case BLOCK:
3688 case OMP_CLAUSE:
3689 /* Ignore. */
3690 return;
3691 case TREE_LIST:
3692 /* A list of expressions, for a CALL_EXPR or as the elements of a
3693 VECTOR_CST. */
3694 for (; t; t = TREE_CHAIN (t))
3695 hash_operand (TREE_VALUE (t), hstate, flags);
3696 return;
3697 case CONSTRUCTOR:
3698 {
3699 unsigned HOST_WIDE_INT idx;
3700 tree field, value;
3701 flags &= ~OEP_ADDRESS_OF;
3702 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3703 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3704 {
3705 /* In GIMPLE the indexes can be either NULL or matching i. */
3706 if (field == NULL_TREE)
3707 field = bitsize_int (idx);
3708 hash_operand (field, hstate, flags);
3709 hash_operand (value, hstate, flags);
3710 }
3711 return;
3712 }
3713 case STATEMENT_LIST:
3714 {
3715 tree_stmt_iterator i;
3716 for (i = tsi_start (CONST_CAST_TREE (t));
3717 !tsi_end_p (i); tsi_next (&i))
3718 hash_operand (tsi_stmt (i), hstate, flags);
3719 return;
3720 }
3721 case TREE_VEC:
3722 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3723 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3724 return;
3725 case IDENTIFIER_NODE:
3726 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3727 return;
3728 case FUNCTION_DECL:
3729 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3730 Otherwise nodes that compare equal according to operand_equal_p might
3731 get different hash codes. However, don't do this for machine specific
3732 or front end builtins, since the function code is overloaded in those
3733 cases. */
3734 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3735 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3736 {
3737 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3738 code = TREE_CODE (t);
3739 }
3740 /* FALL THROUGH */
3741 default:
3742 if (POLY_INT_CST_P (t))
3743 {
3744 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3745 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3746 return;
3747 }
3748 tclass = TREE_CODE_CLASS (code);
3749
3750 if (tclass == tcc_declaration)
3751 {
3752 /* DECLs have a unique ID. */
3753 hstate.add_hwi (DECL_UID (t));
3754 }
3755 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3756 {
3757 /* For comparisons that can be swapped, use the lower
3758 tree code. */
3759 enum tree_code ccode = swap_tree_comparison (code);
3760 if (code < ccode)
3761 ccode = code;
3762 hstate.add_object (ccode);
3763 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3764 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3765 }
3766 else if (CONVERT_EXPR_CODE_P (code))
3767 {
3768 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3769 operand_equal_p. */
3770 enum tree_code ccode = NOP_EXPR;
3771 hstate.add_object (ccode);
3772
3773 /* Don't hash the type, that can lead to having nodes which
3774 compare equal according to operand_equal_p, but which
3775 have different hash codes. Make sure to include signedness
3776 in the hash computation. */
3777 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3778 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3779 }
3780 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3781 else if (code == MEM_REF
3782 && (flags & OEP_ADDRESS_OF) != 0
3783 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3784 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3785 && integer_zerop (TREE_OPERAND (t, 1)))
3786 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3787 hstate, flags);
3788 /* Don't ICE on FE specific trees, or their arguments etc.
3789 during operand_equal_p hash verification. */
3790 else if (!IS_EXPR_CODE_CLASS (tclass))
3791 gcc_assert (flags & OEP_HASH_CHECK);
3792 else
3793 {
3794 unsigned int sflags = flags;
3795
3796 hstate.add_object (code);
3797
3798 switch (code)
3799 {
3800 case ADDR_EXPR:
3801 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3802 flags |= OEP_ADDRESS_OF;
3803 sflags = flags;
3804 break;
3805
3806 case INDIRECT_REF:
3807 case MEM_REF:
3808 case TARGET_MEM_REF:
3809 flags &= ~OEP_ADDRESS_OF;
3810 sflags = flags;
3811 break;
3812
3813 case COMPONENT_REF:
3814 if (sflags & OEP_ADDRESS_OF)
3815 {
3816 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3817 if (TREE_OPERAND (t, 2))
3818 hash_operand (TREE_OPERAND (t, 2), hstate,
3819 flags & ~OEP_ADDRESS_OF);
3820 else
3821 {
3822 tree field = TREE_OPERAND (t, 1);
3823 hash_operand (DECL_FIELD_OFFSET (field),
3824 hstate, flags & ~OEP_ADDRESS_OF);
3825 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3826 hstate, flags & ~OEP_ADDRESS_OF);
3827 }
3828 return;
3829 }
3830 break;
3831 case ARRAY_REF:
3832 case ARRAY_RANGE_REF:
3833 case BIT_FIELD_REF:
3834 sflags &= ~OEP_ADDRESS_OF;
3835 break;
3836
3837 case COND_EXPR:
3838 flags &= ~OEP_ADDRESS_OF;
3839 break;
3840
3841 case WIDEN_MULT_PLUS_EXPR:
3842 case WIDEN_MULT_MINUS_EXPR:
3843 {
3844 /* The multiplication operands are commutative. */
3845 inchash::hash one, two;
3846 hash_operand (TREE_OPERAND (t, 0), one, flags);
3847 hash_operand (TREE_OPERAND (t, 1), two, flags);
3848 hstate.add_commutative (one, two);
3849 hash_operand (TREE_OPERAND (t, 2), two, flags);
3850 return;
3851 }
3852
3853 case CALL_EXPR:
3854 if (CALL_EXPR_FN (t) == NULL_TREE)
3855 hstate.add_int (CALL_EXPR_IFN (t));
3856 break;
3857
3858 case TARGET_EXPR:
3859 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3860 Usually different TARGET_EXPRs should just use
3861 different temporaries in their slots. */
3862 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3863 return;
3864
3865 case OBJ_TYPE_REF:
3866 /* Virtual table reference. */
3867 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3868 flags &= ~OEP_ADDRESS_OF;
3869 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3870 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3871 if (!virtual_method_call_p (t))
3872 return;
3873 if (tree c = obj_type_ref_class (t))
3874 {
3875 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3876 /* We compute mangled names only when free_lang_data is run.
3877 In that case we can hash precisely. */
3878 if (TREE_CODE (c) == TYPE_DECL
3879 && DECL_ASSEMBLER_NAME_SET_P (c))
3880 hstate.add_object
3881 (IDENTIFIER_HASH_VALUE
3882 (DECL_ASSEMBLER_NAME (c)));
3883 }
3884 return;
3885 default:
3886 break;
3887 }
3888
3889 /* Don't hash the type, that can lead to having nodes which
3890 compare equal according to operand_equal_p, but which
3891 have different hash codes. */
3892 if (code == NON_LVALUE_EXPR)
3893 {
3894 /* Make sure to include signedness in the hash computation. */
3895 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3896 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3897 }
3898
3899 else if (commutative_tree_code (code))
3900 {
3901 /* It's a commutative expression. We want to hash it the same
3902 however it appears. We do this by first hashing both operands
3903 and then rehashing based on the order of their independent
3904 hashes. */
3905 inchash::hash one, two;
3906 hash_operand (TREE_OPERAND (t, 0), one, flags);
3907 hash_operand (TREE_OPERAND (t, 1), two, flags);
3908 hstate.add_commutative (one, two);
3909 }
3910 else
3911 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3912 hash_operand (TREE_OPERAND (t, i), hstate,
3913 i == 0 ? flags : sflags);
3914 }
3915 return;
3916 }
3917 }
3918
3919 bool
3920 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3921 unsigned int flags, bool *ret)
3922 {
3923 /* When checking, verify at the outermost operand_equal_p call that
3924 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3925 hash value. */
3926 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3927 {
3928 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3929 {
3930 if (arg0 != arg1)
3931 {
3932 inchash::hash hstate0 (0), hstate1 (0);
3933 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3934 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3935 hashval_t h0 = hstate0.end ();
3936 hashval_t h1 = hstate1.end ();
3937 gcc_assert (h0 == h1);
3938 }
3939 *ret = true;
3940 }
3941 else
3942 *ret = false;
3943
3944 return true;
3945 }
3946
3947 return false;
3948 }
3949
3950
3951 static operand_compare default_compare_instance;
3952
3953 /* Convenience wrapper around the operand_compare class, since we usually
3954 do not need to play with the valueizer. */
3955
3956 bool
3957 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3958 {
3959 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3960 }
3961
3962 namespace inchash
3963 {
3964
3965 /* Generate a hash value for an expression. This can be used iteratively
3966 by passing a previous result as the HSTATE argument.
3967
3968 This function is intended to produce the same hash for expressions which
3969 would compare equal using operand_equal_p. */
3970 void
3971 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3972 {
3973 default_compare_instance.hash_operand (t, hstate, flags);
3974 }
3975
3976 }
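/* A hypothetical usage sketch (the names e0 and e1 are illustrative, not
   from this file): equal operands are meant to hash equally, e.g.

     inchash::hash h0 (0), h1 (0);
     inchash::add_expr (e0, h0, 0);
     inchash::add_expr (e1, h1, 0);
     if (operand_equal_p (e0, e1, 0))
       gcc_checking_assert (h0.end () == h1.end ());

   which mirrors the check done by verify_hash_value above.  */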
3977 \f
3978 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3979 with a different signedness or a narrower precision. */
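/* As a hypothetical illustration: with an int variable i, ARG0 = i and
   ARG1 = (long) i satisfy this test, since stripping the single widening
   conversion from ARG1 leaves an operand equal to ARG0.  */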
3980
3981 static bool
3982 operand_equal_for_comparison_p (tree arg0, tree arg1)
3983 {
3984 if (operand_equal_p (arg0, arg1, 0))
3985 return true;
3986
3987 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3988 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3989 return false;
3990
3991 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3992 and see if the inner values are the same. This removes any
3993 signedness comparison, which doesn't matter here. */
3994 tree op0 = arg0;
3995 tree op1 = arg1;
3996 STRIP_NOPS (op0);
3997 STRIP_NOPS (op1);
3998 if (operand_equal_p (op0, op1, 0))
3999 return true;
4000
4001 /* Discard a single widening conversion from ARG1 and see if the inner
4002 value is the same as ARG0. */
4003 if (CONVERT_EXPR_P (arg1)
4004 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4005 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4006 < TYPE_PRECISION (TREE_TYPE (arg1))
4007 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4008 return true;
4009
4010 return false;
4011 }
4012 \f
4013 /* See if ARG is an expression that is either a comparison or is performing
4014 arithmetic on comparisons. The comparisons must only be comparing
4015 two different values, which will be stored in *CVAL1 and *CVAL2; if
4016 they are nonzero it means that some operands have already been found.
4017 No variables may be used anywhere else in the expression except in the
4018 comparisons.
4019
4020 If this is true, return true. Otherwise, return false. */
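/* As a hypothetical illustration: for ARG = (x < y) | (x == y) the only
   values used in comparisons are x and y, so *CVAL1 and *CVAL2 end up as
   x and y and the function succeeds, whereas (x < y) | (x == z) uses
   three distinct values and fails.  */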
4021
4022 static bool
4023 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4024 {
4025 enum tree_code code = TREE_CODE (arg);
4026 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4027
4028 /* We can handle some of the tcc_expression cases here. */
4029 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4030 tclass = tcc_unary;
4031 else if (tclass == tcc_expression
4032 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4033 || code == COMPOUND_EXPR))
4034 tclass = tcc_binary;
4035
4036 switch (tclass)
4037 {
4038 case tcc_unary:
4039 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4040
4041 case tcc_binary:
4042 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4043 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4044
4045 case tcc_constant:
4046 return true;
4047
4048 case tcc_expression:
4049 if (code == COND_EXPR)
4050 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4051 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4052 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4053 return false;
4054
4055 case tcc_comparison:
4056 /* First see if we can handle the first operand, then the second. For
4057 the second operand, we know *CVAL1 can't be zero. It must be that
4058 one side of the comparison is each of the values; test for the
4059 case where this isn't true by failing if the two operands
4060 are the same. */
4061
4062 if (operand_equal_p (TREE_OPERAND (arg, 0),
4063 TREE_OPERAND (arg, 1), 0))
4064 return false;
4065
4066 if (*cval1 == 0)
4067 *cval1 = TREE_OPERAND (arg, 0);
4068 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4069 ;
4070 else if (*cval2 == 0)
4071 *cval2 = TREE_OPERAND (arg, 0);
4072 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4073 ;
4074 else
4075 return false;
4076
4077 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4078 ;
4079 else if (*cval2 == 0)
4080 *cval2 = TREE_OPERAND (arg, 1);
4081 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4082 ;
4083 else
4084 return false;
4085
4086 return true;
4087
4088 default:
4089 return false;
4090 }
4091 }
4092 \f
4093 /* ARG is a tree that is known to contain just arithmetic operations and
4094 comparisons. Evaluate the operations in the tree substituting NEW0 for
4095 any occurrence of OLD0 as an operand of a comparison and likewise for
4096 NEW1 and OLD1. */
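/* As a hypothetical illustration: applied to (a < b) && (b > c) with
   OLD0 = a, NEW0 = x, OLD1 = b, NEW1 = y, this rebuilds the expression
   as (x < y) && (y > c).  */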
4097
4098 static tree
4099 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4100 tree old1, tree new1)
4101 {
4102 tree type = TREE_TYPE (arg);
4103 enum tree_code code = TREE_CODE (arg);
4104 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4105
4106 /* We can handle some of the tcc_expression cases here. */
4107 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4108 tclass = tcc_unary;
4109 else if (tclass == tcc_expression
4110 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4111 tclass = tcc_binary;
4112
4113 switch (tclass)
4114 {
4115 case tcc_unary:
4116 return fold_build1_loc (loc, code, type,
4117 eval_subst (loc, TREE_OPERAND (arg, 0),
4118 old0, new0, old1, new1));
4119
4120 case tcc_binary:
4121 return fold_build2_loc (loc, code, type,
4122 eval_subst (loc, TREE_OPERAND (arg, 0),
4123 old0, new0, old1, new1),
4124 eval_subst (loc, TREE_OPERAND (arg, 1),
4125 old0, new0, old1, new1));
4126
4127 case tcc_expression:
4128 switch (code)
4129 {
4130 case SAVE_EXPR:
4131 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4132 old1, new1);
4133
4134 case COMPOUND_EXPR:
4135 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4136 old1, new1);
4137
4138 case COND_EXPR:
4139 return fold_build3_loc (loc, code, type,
4140 eval_subst (loc, TREE_OPERAND (arg, 0),
4141 old0, new0, old1, new1),
4142 eval_subst (loc, TREE_OPERAND (arg, 1),
4143 old0, new0, old1, new1),
4144 eval_subst (loc, TREE_OPERAND (arg, 2),
4145 old0, new0, old1, new1));
4146 default:
4147 break;
4148 }
4149 /* Fall through - ??? */
4150
4151 case tcc_comparison:
4152 {
4153 tree arg0 = TREE_OPERAND (arg, 0);
4154 tree arg1 = TREE_OPERAND (arg, 1);
4155
4156 /* We need to check both for exact equality and tree equality. The
4157 former will be true if the operand has a side-effect. In that
4158 case, we know the operand occurred exactly once. */
4159
4160 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4161 arg0 = new0;
4162 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4163 arg0 = new1;
4164
4165 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4166 arg1 = new0;
4167 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4168 arg1 = new1;
4169
4170 return fold_build2_loc (loc, code, type, arg0, arg1);
4171 }
4172
4173 default:
4174 return arg;
4175 }
4176 }
4177 \f
4178 /* Return a tree for the case when the result of an expression is RESULT
4179 converted to TYPE and OMITTED was previously an operand of the expression
4180 but is now not needed (e.g., we folded OMITTED * 0).
4181
4182 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4183 the conversion of RESULT to TYPE. */
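/* As a hypothetical illustration: when f () * 0 is folded to 0, the call
   still has side effects, so the result becomes the COMPOUND_EXPR
   (f (), 0) rather than a bare 0.  */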
4184
4185 tree
4186 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4187 {
4188 tree t = fold_convert_loc (loc, type, result);
4189
4190 /* If the resulting operand is an empty statement, just return the omitted
4191 statement casted to void. */
4192 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4193 return build1_loc (loc, NOP_EXPR, void_type_node,
4194 fold_ignored_result (omitted));
4195
4196 if (TREE_SIDE_EFFECTS (omitted))
4197 return build2_loc (loc, COMPOUND_EXPR, type,
4198 fold_ignored_result (omitted), t);
4199
4200 return non_lvalue_loc (loc, t);
4201 }
4202
4203 /* Return a tree for the case when the result of an expression is RESULT
4204 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4205 of the expression but are now not needed.
4206
4207 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4208 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4209 evaluated before OMITTED2. Otherwise, if neither has side effects,
4210 just do the conversion of RESULT to TYPE. */
4211
4212 tree
4213 omit_two_operands_loc (location_t loc, tree type, tree result,
4214 tree omitted1, tree omitted2)
4215 {
4216 tree t = fold_convert_loc (loc, type, result);
4217
4218 if (TREE_SIDE_EFFECTS (omitted2))
4219 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4220 if (TREE_SIDE_EFFECTS (omitted1))
4221 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4222
4223 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4224 }
4225
4226 \f
4227 /* Return a simplified tree node for the truth-negation of ARG. This
4228 never alters ARG itself. We assume that ARG is an operation that
4229 returns a truth value (0 or 1).
4230
4231 FIXME: one would think we would fold the result, but it causes
4232 problems with the dominator optimizer. */
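/* As a hypothetical illustration: for integer operands the negation of
   x < y is simply x >= y, while for floating-point operands under
   -ftrapping-math comparisons other than ==, !=, ORDERED and UNORDERED
   are not inverted and NULL_TREE is returned instead.  */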
4233
4234 static tree
4235 fold_truth_not_expr (location_t loc, tree arg)
4236 {
4237 tree type = TREE_TYPE (arg);
4238 enum tree_code code = TREE_CODE (arg);
4239 location_t loc1, loc2;
4240
4241 /* If this is a comparison, we can simply invert it, except for
4242 floating-point non-equality comparisons, in which case we just
4243 enclose a TRUTH_NOT_EXPR around what we have. */
4244
4245 if (TREE_CODE_CLASS (code) == tcc_comparison)
4246 {
4247 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4248 if (FLOAT_TYPE_P (op_type)
4249 && flag_trapping_math
4250 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4251 && code != NE_EXPR && code != EQ_EXPR)
4252 return NULL_TREE;
4253
4254 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4255 if (code == ERROR_MARK)
4256 return NULL_TREE;
4257
4258 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4259 TREE_OPERAND (arg, 1));
4260 if (TREE_NO_WARNING (arg))
4261 TREE_NO_WARNING (ret) = 1;
4262 return ret;
4263 }
4264
4265 switch (code)
4266 {
4267 case INTEGER_CST:
4268 return constant_boolean_node (integer_zerop (arg), type);
4269
4270 case TRUTH_AND_EXPR:
4271 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4272 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4273 return build2_loc (loc, TRUTH_OR_EXPR, type,
4274 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4275 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4276
4277 case TRUTH_OR_EXPR:
4278 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4279 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4280 return build2_loc (loc, TRUTH_AND_EXPR, type,
4281 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4282 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4283
4284 case TRUTH_XOR_EXPR:
4285 /* Here we can invert either operand. We invert the first operand
4286 unless the second operand is a TRUTH_NOT_EXPR in which case our
4287 result is the XOR of the first operand with the inside of the
4288 negation of the second operand. */
4289
4290 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4291 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4292 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4293 else
4294 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4295 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4296 TREE_OPERAND (arg, 1));
4297
4298 case TRUTH_ANDIF_EXPR:
4299 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4300 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4301 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4302 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4303 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4304
4305 case TRUTH_ORIF_EXPR:
4306 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4307 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4308 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4309 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4310 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4311
4312 case TRUTH_NOT_EXPR:
4313 return TREE_OPERAND (arg, 0);
4314
4315 case COND_EXPR:
4316 {
4317 tree arg1 = TREE_OPERAND (arg, 1);
4318 tree arg2 = TREE_OPERAND (arg, 2);
4319
4320 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4321 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4322
4323 /* A COND_EXPR may have a throw as one operand, which
4324 then has void type. Just leave void operands
4325 as they are. */
4326 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4327 VOID_TYPE_P (TREE_TYPE (arg1))
4328 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4329 VOID_TYPE_P (TREE_TYPE (arg2))
4330 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4331 }
4332
4333 case COMPOUND_EXPR:
4334 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4335 return build2_loc (loc, COMPOUND_EXPR, type,
4336 TREE_OPERAND (arg, 0),
4337 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4338
4339 case NON_LVALUE_EXPR:
4340 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4341 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4342
4343 CASE_CONVERT:
4344 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4345 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4346
4347 /* fall through */
4348
4349 case FLOAT_EXPR:
4350 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4351 return build1_loc (loc, TREE_CODE (arg), type,
4352 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4353
4354 case BIT_AND_EXPR:
4355 if (!integer_onep (TREE_OPERAND (arg, 1)))
4356 return NULL_TREE;
4357 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4358
4359 case SAVE_EXPR:
4360 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4361
4362 case CLEANUP_POINT_EXPR:
4363 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4364 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4365 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4366
4367 default:
4368 return NULL_TREE;
4369 }
4370 }
4371
4372 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4373 assume that ARG is an operation that returns a truth value (0 or 1
4374 for scalars, 0 or -1 for vectors). Return the folded expression if
4375 folding is successful. Otherwise, return NULL_TREE. */
4376
4377 static tree
4378 fold_invert_truthvalue (location_t loc, tree arg)
4379 {
4380 tree type = TREE_TYPE (arg);
4381 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4382 ? BIT_NOT_EXPR
4383 : TRUTH_NOT_EXPR,
4384 type, arg);
4385 }
4386
4387 /* Return a simplified tree node for the truth-negation of ARG. This
4388 never alters ARG itself. We assume that ARG is an operation that
4389 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4390
4391 tree
4392 invert_truthvalue_loc (location_t loc, tree arg)
4393 {
4394 if (TREE_CODE (arg) == ERROR_MARK)
4395 return arg;
4396
4397 tree type = TREE_TYPE (arg);
4398 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4399 ? BIT_NOT_EXPR
4400 : TRUTH_NOT_EXPR,
4401 type, arg);
4402 }
4403 \f
4404 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4405 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4406 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4407 is the original memory reference used to preserve the alias set of
4408 the access. */
4409
4410 static tree
4411 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4412 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4413 int unsignedp, int reversep)
4414 {
4415 tree result, bftype;
4416
4417 /* Attempt not to lose the access path if possible. */
4418 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4419 {
4420 tree ninner = TREE_OPERAND (orig_inner, 0);
4421 machine_mode nmode;
4422 poly_int64 nbitsize, nbitpos;
4423 tree noffset;
4424 int nunsignedp, nreversep, nvolatilep = 0;
4425 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4426 &noffset, &nmode, &nunsignedp,
4427 &nreversep, &nvolatilep);
4428 if (base == inner
4429 && noffset == NULL_TREE
4430 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4431 && !reversep
4432 && !nreversep
4433 && !nvolatilep)
4434 {
4435 inner = ninner;
4436 bitpos -= nbitpos;
4437 }
4438 }
4439
4440 alias_set_type iset = get_alias_set (orig_inner);
4441 if (iset == 0 && get_alias_set (inner) != iset)
4442 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4443 build_fold_addr_expr (inner),
4444 build_int_cst (ptr_type_node, 0));
4445
4446 if (known_eq (bitpos, 0) && !reversep)
4447 {
4448 tree size = TYPE_SIZE (TREE_TYPE (inner));
4449 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4450 || POINTER_TYPE_P (TREE_TYPE (inner)))
4451 && tree_fits_shwi_p (size)
4452 && tree_to_shwi (size) == bitsize)
4453 return fold_convert_loc (loc, type, inner);
4454 }
4455
4456 bftype = type;
4457 if (TYPE_PRECISION (bftype) != bitsize
4458 || TYPE_UNSIGNED (bftype) == !unsignedp)
4459 bftype = build_nonstandard_integer_type (bitsize, 0);
4460
4461 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4462 bitsize_int (bitsize), bitsize_int (bitpos));
4463 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4464
4465 if (bftype != type)
4466 result = fold_convert_loc (loc, type, result);
4467
4468 return result;
4469 }
4470
4471 /* Optimize a bit-field compare.
4472
4473 There are two cases: First is a compare against a constant and the
4474 second is a comparison of two items where the fields are at the same
4475 bit position relative to the start of a chunk (byte, halfword, word)
4476 large enough to contain it. In these cases we can avoid the shift
4477 implicit in bitfield extractions.
4478
4479 For constants, we emit a compare of the shifted constant with the
4480 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4481 compared. For two fields at the same position, we do the ANDs with the
4482 similar mask and compare the result of the ANDs.
4483
4484 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4485 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4486 are the left and right operands of the comparison, respectively.
4487
4488 If the optimization described above can be done, we return the resulting
4489 tree. Otherwise we return zero. */
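/* As a hypothetical illustration: given
     struct S { unsigned a : 3; unsigned b : 5; } s, t;
   a test such as s.a == 5 can be done by masking a word containing the
   field and comparing it with the constant shifted into place, and
   s.a == t.a by ANDing both words with the same mask and comparing the
   two results, avoiding the shifts implicit in bit-field extraction.  */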
4490
4491 static tree
4492 optimize_bit_field_compare (location_t loc, enum tree_code code,
4493 tree compare_type, tree lhs, tree rhs)
4494 {
4495 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4496 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4497 tree type = TREE_TYPE (lhs);
4498 tree unsigned_type;
4499 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4500 machine_mode lmode, rmode;
4501 scalar_int_mode nmode;
4502 int lunsignedp, runsignedp;
4503 int lreversep, rreversep;
4504 int lvolatilep = 0, rvolatilep = 0;
4505 tree linner, rinner = NULL_TREE;
4506 tree mask;
4507 tree offset;
4508
4509 /* Get all the information about the extractions being done. If the bit size
4510 is the same as the size of the underlying object, we aren't doing an
4511 extraction at all and so can do nothing. We also don't want to
4512 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4513 then will no longer be able to replace it. */
4514 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4515 &lunsignedp, &lreversep, &lvolatilep);
4516 if (linner == lhs
4517 || !known_size_p (plbitsize)
4518 || !plbitsize.is_constant (&lbitsize)
4519 || !plbitpos.is_constant (&lbitpos)
4520 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4521 || offset != 0
4522 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4523 || lvolatilep)
4524 return 0;
4525
4526 if (const_p)
4527 rreversep = lreversep;
4528 else
4529 {
4530 /* If this is not a constant, we can only do something if bit positions,
4531 sizes, signedness and storage order are the same. */
4532 rinner
4533 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4534 &runsignedp, &rreversep, &rvolatilep);
4535
4536 if (rinner == rhs
4537 || maybe_ne (lbitpos, rbitpos)
4538 || maybe_ne (lbitsize, rbitsize)
4539 || lunsignedp != runsignedp
4540 || lreversep != rreversep
4541 || offset != 0
4542 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4543 || rvolatilep)
4544 return 0;
4545 }
4546
4547 /* Honor the C++ memory model and mimic what RTL expansion does. */
4548 poly_uint64 bitstart = 0;
4549 poly_uint64 bitend = 0;
4550 if (TREE_CODE (lhs) == COMPONENT_REF)
4551 {
4552 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4553 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4554 return 0;
4555 }
4556
4557 /* See if we can find a mode to refer to this field. We should be able to,
4558 but fail if we can't. */
4559 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4560 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4561 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4562 TYPE_ALIGN (TREE_TYPE (rinner))),
4563 BITS_PER_WORD, false, &nmode))
4564 return 0;
4565
4566 /* Set signed and unsigned types of the precision of this mode for the
4567 shifts below. */
4568 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4569
4570 /* Compute the bit position and size for the new reference and our offset
4571 within it. If the new reference is the same size as the original, we
4572 won't optimize anything, so return zero. */
4573 nbitsize = GET_MODE_BITSIZE (nmode);
4574 nbitpos = lbitpos & ~ (nbitsize - 1);
4575 lbitpos -= nbitpos;
4576 if (nbitsize == lbitsize)
4577 return 0;
4578
4579 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4580 lbitpos = nbitsize - lbitsize - lbitpos;
4581
4582 /* Make the mask to be used against the extracted field. */
4583 mask = build_int_cst_type (unsigned_type, -1);
4584 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4585 mask = const_binop (RSHIFT_EXPR, mask,
4586 size_int (nbitsize - lbitsize - lbitpos));
4587
4588 if (! const_p)
4589 {
4590 if (nbitpos < 0)
4591 return 0;
4592
4593 /* If not comparing with constant, just rework the comparison
4594 and return. */
4595 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4596 nbitsize, nbitpos, 1, lreversep);
4597 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4598 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4599 nbitsize, nbitpos, 1, rreversep);
4600 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4601 return fold_build2_loc (loc, code, compare_type, t1, t2);
4602 }
4603
4604 /* Otherwise, we are handling the constant case. See if the constant is too
4605 big for the field. Warn and return a tree for 0 (false) if so. We do
4606 this not only for its own sake, but to avoid having to test for this
4607 error case below. If we didn't, we might generate wrong code.
4608
4609 For unsigned fields, the constant shifted right by the field length should
4610 be all zero. For signed fields, the high-order bits should agree with
4611 the sign bit. */
4612
4613 if (lunsignedp)
4614 {
4615 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4616 {
4617 warning (0, "comparison is always %d due to width of bit-field",
4618 code == NE_EXPR);
4619 return constant_boolean_node (code == NE_EXPR, compare_type);
4620 }
4621 }
4622 else
4623 {
4624 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4625 if (tem != 0 && tem != -1)
4626 {
4627 warning (0, "comparison is always %d due to width of bit-field",
4628 code == NE_EXPR);
4629 return constant_boolean_node (code == NE_EXPR, compare_type);
4630 }
4631 }
4632
4633 if (nbitpos < 0)
4634 return 0;
4635
4636 /* Single-bit compares should always be against zero. */
4637 if (lbitsize == 1 && ! integer_zerop (rhs))
4638 {
4639 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4640 rhs = build_int_cst (type, 0);
4641 }
4642
4643 /* Make a new bitfield reference, shift the constant over the
4644 appropriate number of bits and mask it with the computed mask
4645 (in case this was a signed field). If we changed it, make a new one. */
4646 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4647 nbitsize, nbitpos, 1, lreversep);
4648
4649 rhs = const_binop (BIT_AND_EXPR,
4650 const_binop (LSHIFT_EXPR,
4651 fold_convert_loc (loc, unsigned_type, rhs),
4652 size_int (lbitpos)),
4653 mask);
4654
4655 lhs = build2_loc (loc, code, compare_type,
4656 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4657 return lhs;
4658 }
4659 \f
4660 /* Subroutine for fold_truth_andor_1: decode a field reference.
4661
4662 If EXP is a comparison reference, we return the innermost reference.
4663
4664 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4665 set to the starting bit number.
4666
4667 If the innermost field can be completely contained in a mode-sized
4668 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4669
4670 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4671 otherwise it is not changed.
4672
4673 *PUNSIGNEDP is set to the signedness of the field.
4674
4675 *PREVERSEP is set to the storage order of the field.
4676
4677 *PMASK is set to the mask used. This is either contained in a
4678 BIT_AND_EXPR or derived from the width of the field.
4679
4680 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4681
4682 Return 0 if this is not a component reference or is one that we can't
4683 do anything with. */
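/* As a hypothetical illustration: for EXP of the form
   (unsigned char) (s.f & 0x7), the BIT_AND_EXPR constant 0x7 is returned
   in *PAND_MASK, the position and width of the bit-field s.f go to
   *PBITPOS and *PBITSIZE, and *PMASK is the field-width mask combined
   with 0x7.  */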
4684
4685 static tree
4686 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4687 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4688 int *punsignedp, int *preversep, int *pvolatilep,
4689 tree *pmask, tree *pand_mask)
4690 {
4691 tree exp = *exp_;
4692 tree outer_type = 0;
4693 tree and_mask = 0;
4694 tree mask, inner, offset;
4695 tree unsigned_type;
4696 unsigned int precision;
4697
4698 /* All the optimizations using this function assume integer fields.
4699 There are problems with FP fields since the type_for_size call
4700 below can fail for, e.g., XFmode. */
4701 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4702 return NULL_TREE;
4703
4704 /* We are interested in the bare arrangement of bits, so strip everything
4705 that doesn't affect the machine mode. However, record the type of the
4706 outermost expression if it may matter below. */
4707 if (CONVERT_EXPR_P (exp)
4708 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4709 outer_type = TREE_TYPE (exp);
4710 STRIP_NOPS (exp);
4711
4712 if (TREE_CODE (exp) == BIT_AND_EXPR)
4713 {
4714 and_mask = TREE_OPERAND (exp, 1);
4715 exp = TREE_OPERAND (exp, 0);
4716 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4717 if (TREE_CODE (and_mask) != INTEGER_CST)
4718 return NULL_TREE;
4719 }
4720
4721 poly_int64 poly_bitsize, poly_bitpos;
4722 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4723 pmode, punsignedp, preversep, pvolatilep);
4724 if ((inner == exp && and_mask == 0)
4725 || !poly_bitsize.is_constant (pbitsize)
4726 || !poly_bitpos.is_constant (pbitpos)
4727 || *pbitsize < 0
4728 || offset != 0
4729 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4730 /* Reject out-of-bound accesses (PR79731). */
4731 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4732 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4733 *pbitpos + *pbitsize) < 0))
4734 return NULL_TREE;
4735
4736 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4737 if (unsigned_type == NULL_TREE)
4738 return NULL_TREE;
4739
4740 *exp_ = exp;
4741
4742 /* If the number of bits in the reference is the same as the bitsize of
4743 the outer type, then the outer type gives the signedness. Otherwise
4744 (in case of a small bitfield) the signedness is unchanged. */
4745 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4746 *punsignedp = TYPE_UNSIGNED (outer_type);
4747
4748 /* Compute the mask to access the bitfield. */
4749 precision = TYPE_PRECISION (unsigned_type);
4750
4751 mask = build_int_cst_type (unsigned_type, -1);
4752
4753 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4754 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4755
4756 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4757 if (and_mask != 0)
4758 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4759 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4760
4761 *pmask = mask;
4762 *pand_mask = and_mask;
4763 return inner;
4764 }
4765
4766 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4767 bit positions and the type of MASK is signed. */
4768
4769 static bool
4770 all_ones_mask_p (const_tree mask, unsigned int size)
4771 {
4772 tree type = TREE_TYPE (mask);
4773 unsigned int precision = TYPE_PRECISION (type);
4774
4775 /* If this function returns true when the type of the mask is
4776 UNSIGNED, then there will be errors. In particular see
4777 gcc.c-torture/execute/990326-1.c. There does not appear to be
4778 any documentation paper trail as to why this is so. But the pre
4779 wide-int worked with that restriction and it has been preserved
4780 here. */
4781 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4782 return false;
4783
4784 return wi::mask (size, false, precision) == wi::to_wide (mask);
4785 }
4786
4787 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4788 represents the sign bit of EXP's type. If EXP represents a sign
4789 or zero extension, also test VAL against the unextended type.
4790 The return value is the (sub)expression whose sign bit is VAL,
4791 or NULL_TREE otherwise. */
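/* As a hypothetical illustration: for a 32-bit int expression x,
   sign_bit_p (x, VAL) returns x when VAL is the constant 0x80000000,
   the value with only the sign bit of the type set.  */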
4792
4793 tree
4794 sign_bit_p (tree exp, const_tree val)
4795 {
4796 int width;
4797 tree t;
4798
4799 /* Tree EXP must have an integral type. */
4800 t = TREE_TYPE (exp);
4801 if (! INTEGRAL_TYPE_P (t))
4802 return NULL_TREE;
4803
4804 /* Tree VAL must be an integer constant. */
4805 if (TREE_CODE (val) != INTEGER_CST
4806 || TREE_OVERFLOW (val))
4807 return NULL_TREE;
4808
4809 width = TYPE_PRECISION (t);
4810 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4811 return exp;
4812
4813 /* Handle extension from a narrower type. */
4814 if (TREE_CODE (exp) == NOP_EXPR
4815 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4816 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4817
4818 return NULL_TREE;
4819 }
4820
4821 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4822 to be evaluated unconditionally. */
4823
4824 static bool
4825 simple_operand_p (const_tree exp)
4826 {
4827 /* Strip any conversions that don't change the machine mode. */
4828 STRIP_NOPS (exp);
4829
4830 return (CONSTANT_CLASS_P (exp)
4831 || TREE_CODE (exp) == SSA_NAME
4832 || (DECL_P (exp)
4833 && ! TREE_ADDRESSABLE (exp)
4834 && ! TREE_THIS_VOLATILE (exp)
4835 && ! DECL_NONLOCAL (exp)
4836 /* Don't regard global variables as simple. They may be
4837 allocated in ways unknown to the compiler (shared memory,
4838 #pragma weak, etc). */
4839 && ! TREE_PUBLIC (exp)
4840 && ! DECL_EXTERNAL (exp)
4841 /* Weakrefs are not safe to be read, since they can be NULL.
4842 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4843 have DECL_WEAK flag set. */
4844 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4845 /* Loading a static variable is unduly expensive, but global
4846 registers aren't expensive. */
4847 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4848 }
4849
4850 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4851 to be evaluated unconditionally.
4852 In addition to simple_operand_p, we assume that comparisons, conversions,
4853 and logic-not operations are simple, if their operands are simple, too. */
4854
4855 static bool
4856 simple_operand_p_2 (tree exp)
4857 {
4858 enum tree_code code;
4859
4860 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4861 return false;
4862
4863 while (CONVERT_EXPR_P (exp))
4864 exp = TREE_OPERAND (exp, 0);
4865
4866 code = TREE_CODE (exp);
4867
4868 if (TREE_CODE_CLASS (code) == tcc_comparison)
4869 return (simple_operand_p (TREE_OPERAND (exp, 0))
4870 && simple_operand_p (TREE_OPERAND (exp, 1)));
4871
4872 if (code == TRUTH_NOT_EXPR)
4873 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4874
4875 return simple_operand_p (exp);
4876 }
4877
4878 \f
4879 /* The following functions are subroutines to fold_range_test and allow it to
4880 try to change a logical combination of comparisons into a range test.
4881
4882 For example, both
4883 X == 2 || X == 3 || X == 4 || X == 5
4884 and
4885 X >= 2 && X <= 5
4886 are converted to
4887 (unsigned) (X - 2) <= 3
4888
4889 We describe each set of comparisons as being either inside or outside
4890 a range, using a variable named like IN_P, and then describe the
4891 range with a lower and upper bound. If one of the bounds is omitted,
4892 it represents either the highest or lowest value of the type.
4893
4894 In the comments below, we represent a range by two numbers in brackets
4895 preceded by a "+" to designate being inside that range, or a "-" to
4896 designate being outside that range, so the condition can be inverted by
4897 flipping the prefix. An omitted bound is represented by a "-". For
4898 example, "- [-, 10]" means being outside the range starting at the lowest
4899 possible value and ending at 10, in other words, being greater than 10.
4900 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4901 always false.
4902
4903 We set up things so that the missing bounds are handled in a consistent
4904 manner so neither a missing bound nor "true" and "false" need to be
4905 handled using a special case. */
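/* As a hypothetical illustration: for an unsigned X, the test
   X == 2 || X == 3 || X == 4 || X == 5 is described as the range
   "+ [2, 5]" and X < 2 || X > 5 as "- [2, 5]"; the former is then
   emitted as the single comparison (unsigned) (X - 2) <= 3.  */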
4906
4907 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4908 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4909 and UPPER1_P are nonzero if the respective argument is an upper bound
4910 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4911 must be specified for a comparison. ARG1 will be converted to ARG0's
4912 type if both are specified. */
4913
4914 static tree
4915 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4916 tree arg1, int upper1_p)
4917 {
4918 tree tem;
4919 int result;
4920 int sgn0, sgn1;
4921
4922 /* If neither arg represents infinity, do the normal operation.
4923 Else, if not a comparison, return infinity. Else handle the special
4924 comparison rules. Note that most of the cases below won't occur, but
4925 are handled for consistency. */
4926
4927 if (arg0 != 0 && arg1 != 0)
4928 {
4929 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4930 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4931 STRIP_NOPS (tem);
4932 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4933 }
4934
4935 if (TREE_CODE_CLASS (code) != tcc_comparison)
4936 return 0;
4937
4938 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4939 for neither. In real maths, we cannot assume open ended ranges are
4940 the same. But, this is computer arithmetic, where numbers are finite.
4941 We can therefore replace any unbounded bound with a value Z greater
4942 than any representable number. This permits us to treat unbounded
4943 ranges as equal. */
4944 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4945 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4946 switch (code)
4947 {
4948 case EQ_EXPR:
4949 result = sgn0 == sgn1;
4950 break;
4951 case NE_EXPR:
4952 result = sgn0 != sgn1;
4953 break;
4954 case LT_EXPR:
4955 result = sgn0 < sgn1;
4956 break;
4957 case LE_EXPR:
4958 result = sgn0 <= sgn1;
4959 break;
4960 case GT_EXPR:
4961 result = sgn0 > sgn1;
4962 break;
4963 case GE_EXPR:
4964 result = sgn0 >= sgn1;
4965 break;
4966 default:
4967 gcc_unreachable ();
4968 }
4969
4970 return constant_boolean_node (result, type);
4971 }
4972 \f
4973 /* Helper routine for make_range. Perform one step for it, return
4974 new expression if the loop should continue or NULL_TREE if it should
4975 stop. */
4976
4977 tree
4978 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4979 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4980 bool *strict_overflow_p)
4981 {
4982 tree arg0_type = TREE_TYPE (arg0);
4983 tree n_low, n_high, low = *p_low, high = *p_high;
4984 int in_p = *p_in_p, n_in_p;
4985
4986 switch (code)
4987 {
4988 case TRUTH_NOT_EXPR:
4989 /* We can only do something if the range is testing for zero. */
4990 if (low == NULL_TREE || high == NULL_TREE
4991 || ! integer_zerop (low) || ! integer_zerop (high))
4992 return NULL_TREE;
4993 *p_in_p = ! in_p;
4994 return arg0;
4995
4996 case EQ_EXPR: case NE_EXPR:
4997 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4998 /* We can only do something if the range is testing for zero
4999 and if the second operand is an integer constant. Note that
5000 saying something is "in" the range we make is done by
5001 complementing IN_P, since it is initially set for the case of
5002 being not equal to zero; "out" is leaving it alone. */
5003 if (low == NULL_TREE || high == NULL_TREE
5004 || ! integer_zerop (low) || ! integer_zerop (high)
5005 || TREE_CODE (arg1) != INTEGER_CST)
5006 return NULL_TREE;
5007
5008 switch (code)
5009 {
5010 case NE_EXPR: /* - [c, c] */
5011 low = high = arg1;
5012 break;
5013 case EQ_EXPR: /* + [c, c] */
5014 in_p = ! in_p, low = high = arg1;
5015 break;
5016 case GT_EXPR: /* - [-, c] */
5017 low = 0, high = arg1;
5018 break;
5019 case GE_EXPR: /* + [c, -] */
5020 in_p = ! in_p, low = arg1, high = 0;
5021 break;
5022 case LT_EXPR: /* - [c, -] */
5023 low = arg1, high = 0;
5024 break;
5025 case LE_EXPR: /* + [-, c] */
5026 in_p = ! in_p, low = 0, high = arg1;
5027 break;
5028 default:
5029 gcc_unreachable ();
5030 }
5031
5032 /* If this is an unsigned comparison, we also know that EXP is
5033 greater than or equal to zero. We base the range tests we make
5034 on that fact, so we record it here so we can parse existing
5035 range tests. We test arg0_type since often the return type
5036 of, e.g. EQ_EXPR, is boolean. */
5037 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5038 {
5039 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5040 in_p, low, high, 1,
5041 build_int_cst (arg0_type, 0),
5042 NULL_TREE))
5043 return NULL_TREE;
5044
5045 in_p = n_in_p, low = n_low, high = n_high;
5046
5047 /* If the high bound is missing, but we have a nonzero low
5048 bound, reverse the range so it goes from zero to the low bound
5049 minus 1. */
5050 if (high == 0 && low && ! integer_zerop (low))
5051 {
5052 in_p = ! in_p;
5053 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5054 build_int_cst (TREE_TYPE (low), 1), 0);
5055 low = build_int_cst (arg0_type, 0);
5056 }
5057 }
5058
5059 *p_low = low;
5060 *p_high = high;
5061 *p_in_p = in_p;
5062 return arg0;
5063
5064 case NEGATE_EXPR:
5065 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5066 low and high are non-NULL, then normalize will DTRT. */
5067 if (!TYPE_UNSIGNED (arg0_type)
5068 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5069 {
5070 if (low == NULL_TREE)
5071 low = TYPE_MIN_VALUE (arg0_type);
5072 if (high == NULL_TREE)
5073 high = TYPE_MAX_VALUE (arg0_type);
5074 }
5075
5076 /* (-x) IN [a,b] -> x in [-b, -a] */
5077 n_low = range_binop (MINUS_EXPR, exp_type,
5078 build_int_cst (exp_type, 0),
5079 0, high, 1);
5080 n_high = range_binop (MINUS_EXPR, exp_type,
5081 build_int_cst (exp_type, 0),
5082 0, low, 0);
5083 if (n_high != 0 && TREE_OVERFLOW (n_high))
5084 return NULL_TREE;
5085 goto normalize;
5086
5087 case BIT_NOT_EXPR:
5088 /* ~ X -> -X - 1 */
5089 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5090 build_int_cst (exp_type, 1));
5091
5092 case PLUS_EXPR:
5093 case MINUS_EXPR:
5094 if (TREE_CODE (arg1) != INTEGER_CST)
5095 return NULL_TREE;
5096
5097 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5098 move a constant to the other side. */
5099 if (!TYPE_UNSIGNED (arg0_type)
5100 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5101 return NULL_TREE;
5102
5103 /* If EXP is signed, any overflow in the computation is undefined,
5104 so we don't worry about it so long as our computations on
5105 the bounds don't overflow. For unsigned, overflow is defined
5106 and this is exactly the right thing. */
5107 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5108 arg0_type, low, 0, arg1, 0);
5109 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5110 arg0_type, high, 1, arg1, 0);
5111 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5112 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5113 return NULL_TREE;
5114
5115 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5116 *strict_overflow_p = true;
5117
5118 normalize:
5119 /* Check for an unsigned range which has wrapped around the maximum
5120 value thus making n_high < n_low, and normalize it. */
5121 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5122 {
5123 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5124 build_int_cst (TREE_TYPE (n_high), 1), 0);
5125 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5126 build_int_cst (TREE_TYPE (n_low), 1), 0);
5127
5128 /* If the range is of the form +/- [ x+1, x ], we won't
5129 be able to normalize it. But then, it represents the
5130 whole range or the empty set, so make it
5131 +/- [ -, - ]. */
5132 if (tree_int_cst_equal (n_low, low)
5133 && tree_int_cst_equal (n_high, high))
5134 low = high = 0;
5135 else
5136 in_p = ! in_p;
5137 }
5138 else
5139 low = n_low, high = n_high;
5140
5141 *p_low = low;
5142 *p_high = high;
5143 *p_in_p = in_p;
5144 return arg0;
5145
5146 CASE_CONVERT:
5147 case NON_LVALUE_EXPR:
5148 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5149 return NULL_TREE;
5150
5151 if (! INTEGRAL_TYPE_P (arg0_type)
5152 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5153 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5154 return NULL_TREE;
5155
5156 n_low = low, n_high = high;
5157
5158 if (n_low != 0)
5159 n_low = fold_convert_loc (loc, arg0_type, n_low);
5160
5161 if (n_high != 0)
5162 n_high = fold_convert_loc (loc, arg0_type, n_high);
5163
5164 /* If we're converting arg0 from an unsigned type, to exp,
5165 a signed type, we will be doing the comparison as unsigned.
5166 The tests above have already verified that LOW and HIGH
5167 are both positive.
5168
5169 So we have to ensure that we will handle large unsigned
5170 values the same way that the current signed bounds treat
5171 negative values. */
5172
5173 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5174 {
5175 tree high_positive;
5176 tree equiv_type;
5177 /* For fixed-point modes, we need to pass the saturating flag
5178 as the 2nd parameter. */
5179 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5180 equiv_type
5181 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5182 TYPE_SATURATING (arg0_type));
5183 else
5184 equiv_type
5185 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5186
5187 /* A range without an upper bound is, naturally, unbounded.
5188 Since convert would have cropped a very large value, use
5189 the max value for the destination type. */
5190 high_positive
5191 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5192 : TYPE_MAX_VALUE (arg0_type);
5193
5194 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5195 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5196 fold_convert_loc (loc, arg0_type,
5197 high_positive),
5198 build_int_cst (arg0_type, 1));
5199
5200 /* If the low bound is specified, "and" the range with the
5201 range for which the original unsigned value will be
5202 positive. */
5203 if (low != 0)
5204 {
5205 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5206 1, fold_convert_loc (loc, arg0_type,
5207 integer_zero_node),
5208 high_positive))
5209 return NULL_TREE;
5210
5211 in_p = (n_in_p == in_p);
5212 }
5213 else
5214 {
5215 /* Otherwise, "or" the range with the range of the input
5216 that will be interpreted as negative. */
5217 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5218 1, fold_convert_loc (loc, arg0_type,
5219 integer_zero_node),
5220 high_positive))
5221 return NULL_TREE;
5222
5223 in_p = (in_p != n_in_p);
5224 }
5225 }
5226
5227 *p_low = n_low;
5228 *p_high = n_high;
5229 *p_in_p = in_p;
5230 return arg0;
5231
5232 default:
5233 return NULL_TREE;
5234 }
5235 }
5236
5237 /* Given EXP, a logical expression, set the range it is testing into
5238 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5239 actually being tested. *PLOW and *PHIGH will be made of the same
5240 type as the returned expression. If EXP is not a comparison, we
5241 will most likely not be returning a useful value and range. Set
5242 *STRICT_OVERFLOW_P to true if the return value is only valid
5243 because signed overflow is undefined; otherwise, do not change
5244 *STRICT_OVERFLOW_P. */
5245
5246 tree
5247 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5248 bool *strict_overflow_p)
5249 {
5250 enum tree_code code;
5251 tree arg0, arg1 = NULL_TREE;
5252 tree exp_type, nexp;
5253 int in_p;
5254 tree low, high;
5255 location_t loc = EXPR_LOCATION (exp);
5256
5257 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5258 and see if we can refine the range. Some of the cases below may not
5259 happen, but it doesn't seem worth worrying about this. We "continue"
5260 the outer loop when we've changed something; otherwise we "break"
5261 the switch, which will "break" the while. */
5262
5263 in_p = 0;
5264 low = high = build_int_cst (TREE_TYPE (exp), 0);
5265
5266 while (1)
5267 {
5268 code = TREE_CODE (exp);
5269 exp_type = TREE_TYPE (exp);
5270 arg0 = NULL_TREE;
5271
5272 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5273 {
5274 if (TREE_OPERAND_LENGTH (exp) > 0)
5275 arg0 = TREE_OPERAND (exp, 0);
5276 if (TREE_CODE_CLASS (code) == tcc_binary
5277 || TREE_CODE_CLASS (code) == tcc_comparison
5278 || (TREE_CODE_CLASS (code) == tcc_expression
5279 && TREE_OPERAND_LENGTH (exp) > 1))
5280 arg1 = TREE_OPERAND (exp, 1);
5281 }
5282 if (arg0 == NULL_TREE)
5283 break;
5284
5285 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5286 &high, &in_p, strict_overflow_p);
5287 if (nexp == NULL_TREE)
5288 break;
5289 exp = nexp;
5290 }
5291
5292 /* If EXP is a constant, we can evaluate whether this is true or false. */
5293 if (TREE_CODE (exp) == INTEGER_CST)
5294 {
5295 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5296 exp, 0, low, 0))
5297 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5298 exp, 1, high, 1)));
5299 low = high = 0;
5300 exp = 0;
5301 }
5302
5303 *pin_p = in_p, *plow = low, *phigh = high;
5304 return exp;
5305 }
5306
5307 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5308 a bitwise check i.e. when
5309 LOW == 0xXX...X00...0
5310 HIGH == 0xXX...X11...1
5311 Return corresponding mask in MASK and stem in VALUE. */
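/* As a hypothetical illustration: with LOW == 0x1200 and HIGH == 0x12ff
   only the low eight bits vary, so the range check can be done as
   (X & 0xff00) == 0x1200, i.e. *MASK == 0xff00 and *VALUE == 0x1200.  */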
5312
5313 static bool
5314 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5315 tree *value)
5316 {
5317 if (TREE_CODE (low) != INTEGER_CST
5318 || TREE_CODE (high) != INTEGER_CST)
5319 return false;
5320
5321 unsigned prec = TYPE_PRECISION (type);
5322 wide_int lo = wi::to_wide (low, prec);
5323 wide_int hi = wi::to_wide (high, prec);
5324
5325 wide_int end_mask = lo ^ hi;
5326 if ((end_mask & (end_mask + 1)) != 0
5327 || (lo & end_mask) != 0)
5328 return false;
5329
5330 wide_int stem_mask = ~end_mask;
5331 wide_int stem = lo & stem_mask;
5332 if (stem != (hi & stem_mask))
5333 return false;
5334
5335 *mask = wide_int_to_tree (type, stem_mask);
5336 *value = wide_int_to_tree (type, stem);
5337
5338 return true;
5339 }
5340 \f
5341 /* Helper routine for build_range_check and match.pd. Return the type to
5342 perform the check or NULL if it shouldn't be optimized. */
5343
5344 tree
5345 range_check_type (tree etype)
5346 {
5347 /* First make sure that arithmetic in this type is valid, then make sure
5348 that it wraps around. */
5349 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5350 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5351
5352 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5353 {
5354 tree utype, minv, maxv;
5355
5356 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5357 for the type in question, as we rely on this here. */
5358 utype = unsigned_type_for (etype);
5359 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5360 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5361 build_int_cst (TREE_TYPE (maxv), 1), 1);
5362 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5363
5364 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5365 minv, 1, maxv, 1)))
5366 etype = utype;
5367 else
5368 return NULL_TREE;
5369 }
5370 else if (POINTER_TYPE_P (etype))
5371 etype = unsigned_type_for (etype);
5372 return etype;
5373 }
5374
5375 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5376 type, TYPE, return an expression to test if EXP is in (or out of, depending
5377 on IN_P) the range. Return 0 if the test couldn't be created. */
5378
5379 tree
5380 build_range_check (location_t loc, tree type, tree exp, int in_p,
5381 tree low, tree high)
5382 {
5383 tree etype = TREE_TYPE (exp), mask, value;
5384
5385 /* Disable this optimization for function pointer expressions
5386 on targets that require function pointer canonicalization. */
5387 if (targetm.have_canonicalize_funcptr_for_compare ()
5388 && POINTER_TYPE_P (etype)
5389 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5390 return NULL_TREE;
5391
5392 if (! in_p)
5393 {
5394 value = build_range_check (loc, type, exp, 1, low, high);
5395 if (value != 0)
5396 return invert_truthvalue_loc (loc, value);
5397
5398 return 0;
5399 }
5400
5401 if (low == 0 && high == 0)
5402 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5403
5404 if (low == 0)
5405 return fold_build2_loc (loc, LE_EXPR, type, exp,
5406 fold_convert_loc (loc, etype, high));
5407
5408 if (high == 0)
5409 return fold_build2_loc (loc, GE_EXPR, type, exp,
5410 fold_convert_loc (loc, etype, low));
5411
5412 if (operand_equal_p (low, high, 0))
5413 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5414 fold_convert_loc (loc, etype, low));
5415
5416 if (TREE_CODE (exp) == BIT_AND_EXPR
5417 && maskable_range_p (low, high, etype, &mask, &value))
5418 return fold_build2_loc (loc, EQ_EXPR, type,
5419 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5420 exp, mask),
5421 value);
5422
5423 if (integer_zerop (low))
5424 {
5425 if (! TYPE_UNSIGNED (etype))
5426 {
5427 etype = unsigned_type_for (etype);
5428 high = fold_convert_loc (loc, etype, high);
5429 exp = fold_convert_loc (loc, etype, exp);
5430 }
5431 return build_range_check (loc, type, exp, 1, 0, high);
5432 }
5433
5434 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5435 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5436 {
5437 int prec = TYPE_PRECISION (etype);
5438
5439 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5440 {
5441 if (TYPE_UNSIGNED (etype))
5442 {
5443 tree signed_etype = signed_type_for (etype);
5444 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5445 etype
5446 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5447 else
5448 etype = signed_etype;
5449 exp = fold_convert_loc (loc, etype, exp);
5450 }
5451 return fold_build2_loc (loc, GT_EXPR, type, exp,
5452 build_int_cst (etype, 0));
5453 }
5454 }
5455
5456 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5457 This requires wrap-around arithmetic for the type of the expression. */
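/* Editorial illustration (hedged, not part of the original source; the
   variable name is made up): the classic test ch >= '0' && ch <= '9' comes
   out of this path roughly as (unsigned) (ch - '0') <= 9, a single unsigned
   comparison against high - low once the range is shifted to start at 0.  */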
5458 etype = range_check_type (etype);
5459 if (etype == NULL_TREE)
5460 return NULL_TREE;
5461
5462 high = fold_convert_loc (loc, etype, high);
5463 low = fold_convert_loc (loc, etype, low);
5464 exp = fold_convert_loc (loc, etype, exp);
5465
5466 value = const_binop (MINUS_EXPR, high, low);
5467
5468 if (value != 0 && !TREE_OVERFLOW (value))
5469 return build_range_check (loc, type,
5470 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5471 1, build_int_cst (etype, 0), value);
5472
5473 return 0;
5474 }
5475 \f
5476 /* Return the predecessor of VAL in its type, handling the infinite case. */
5477
5478 static tree
5479 range_predecessor (tree val)
5480 {
5481 tree type = TREE_TYPE (val);
5482
5483 if (INTEGRAL_TYPE_P (type)
5484 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5485 return 0;
5486 else
5487 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5488 build_int_cst (TREE_TYPE (val), 1), 0);
5489 }
5490
5491 /* Return the successor of VAL in its type, handling the infinite case. */
5492
5493 static tree
5494 range_successor (tree val)
5495 {
5496 tree type = TREE_TYPE (val);
5497
5498 if (INTEGRAL_TYPE_P (type)
5499 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5500 return 0;
5501 else
5502 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5503 build_int_cst (TREE_TYPE (val), 1), 0);
5504 }
5505
5506 /* Given two ranges, see if we can merge them into one. Return 1 if we
5507 can, 0 if we can't. Set the output range into the specified parameters. */
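/* Editorial illustration (not part of the original source): merging the two
   "in" ranges [0, 9] and [5, 20] yields the "in" range [5, 9]; merging the
   "in" range [0, 9] with the "out" range [5, 20] yields the "in" range
   [0, 4].  When no single contiguous range describes the combination, the
   function returns 0 and the caller keeps the original tests.  */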
5508
5509 bool
5510 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5511 tree high0, int in1_p, tree low1, tree high1)
5512 {
5513 int no_overlap;
5514 int subset;
5515 int temp;
5516 tree tem;
5517 int in_p;
5518 tree low, high;
5519 int lowequal = ((low0 == 0 && low1 == 0)
5520 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5521 low0, 0, low1, 0)));
5522 int highequal = ((high0 == 0 && high1 == 0)
5523 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5524 high0, 1, high1, 1)));
5525
5526 /* Make range 0 be the range that starts first, or ends last if they
5527 start at the same value. Swap them if it isn't. */
5528 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5529 low0, 0, low1, 0))
5530 || (lowequal
5531 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5532 high1, 1, high0, 1))))
5533 {
5534 temp = in0_p, in0_p = in1_p, in1_p = temp;
5535 tem = low0, low0 = low1, low1 = tem;
5536 tem = high0, high0 = high1, high1 = tem;
5537 }
5538
5539 /* If the second range is != high1 where high1 is the maximum value of
5540 its type, try first merging with the < high1 range. */
5541 if (low1
5542 && high1
5543 && TREE_CODE (low1) == INTEGER_CST
5544 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5545 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5546 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5547 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5548 && operand_equal_p (low1, high1, 0))
5549 {
5550 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5551 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5552 !in1_p, NULL_TREE, range_predecessor (low1)))
5553 return true;
5554 /* Similarly, if the second range is != low1 where low1 is the minimum
5555 value of its type, try first merging with the > low1 range. */
5556 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5557 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5558 !in1_p, range_successor (low1), NULL_TREE))
5559 return true;
5560 }
5561
5562 /* Now flag two cases, whether the ranges are disjoint or whether the
5563 second range is totally subsumed in the first. Note that the tests
5564 below are simplified by the ones above. */
5565 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5566 high0, 1, low1, 0));
5567 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5568 high1, 1, high0, 1));
5569
5570 /* We now have four cases, depending on whether we are including or
5571 excluding the two ranges. */
5572 if (in0_p && in1_p)
5573 {
5574 /* If they don't overlap, the result is false. If the second range
5575 is a subset it is the result. Otherwise, the range is from the start
5576 of the second to the end of the first. */
5577 if (no_overlap)
5578 in_p = 0, low = high = 0;
5579 else if (subset)
5580 in_p = 1, low = low1, high = high1;
5581 else
5582 in_p = 1, low = low1, high = high0;
5583 }
5584
5585 else if (in0_p && ! in1_p)
5586 {
5587 /* If they don't overlap, the result is the first range. If they are
5588 equal, the result is false. If the second range is a subset of the
5589 first, and the ranges begin at the same place, we go from just after
5590 the end of the second range to the end of the first. If the second
5591 range is not a subset of the first, or if it is a subset and both
5592 ranges end at the same place, the range starts at the start of the
5593 first range and ends just before the second range.
5594 Otherwise, we can't describe this as a single range. */
5595 if (no_overlap)
5596 in_p = 1, low = low0, high = high0;
5597 else if (lowequal && highequal)
5598 in_p = 0, low = high = 0;
5599 else if (subset && lowequal)
5600 {
5601 low = range_successor (high1);
5602 high = high0;
5603 in_p = 1;
5604 if (low == 0)
5605 {
5606 /* We are in the weird situation where high0 > high1 but
5607 high1 has no successor. Punt. */
5608 return 0;
5609 }
5610 }
5611 else if (! subset || highequal)
5612 {
5613 low = low0;
5614 high = range_predecessor (low1);
5615 in_p = 1;
5616 if (high == 0)
5617 {
5618 /* low0 < low1 but low1 has no predecessor. Punt. */
5619 return 0;
5620 }
5621 }
5622 else
5623 return 0;
5624 }
5625
5626 else if (! in0_p && in1_p)
5627 {
5628 /* If they don't overlap, the result is the second range. If the second
5629 is a subset of the first, the result is false. Otherwise,
5630 the range starts just after the first range and ends at the
5631 end of the second. */
5632 if (no_overlap)
5633 in_p = 1, low = low1, high = high1;
5634 else if (subset || highequal)
5635 in_p = 0, low = high = 0;
5636 else
5637 {
5638 low = range_successor (high0);
5639 high = high1;
5640 in_p = 1;
5641 if (low == 0)
5642 {
5643 /* high1 > high0 but high0 has no successor. Punt. */
5644 return 0;
5645 }
5646 }
5647 }
5648
5649 else
5650 {
5651 /* The case where we are excluding both ranges. Here the complex case
5652 is if they don't overlap. In that case, the only time we have a
5653 range is if they are adjacent. If the second is a subset of the
5654 first, the result is the first. Otherwise, the range to exclude
5655 starts at the beginning of the first range and ends at the end of the
5656 second. */
5657 if (no_overlap)
5658 {
5659 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5660 range_successor (high0),
5661 1, low1, 0)))
5662 in_p = 0, low = low0, high = high1;
5663 else
5664 {
5665 /* Canonicalize - [min, x] into - [-, x]. */
5666 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5667 switch (TREE_CODE (TREE_TYPE (low0)))
5668 {
5669 case ENUMERAL_TYPE:
5670 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5671 GET_MODE_BITSIZE
5672 (TYPE_MODE (TREE_TYPE (low0)))))
5673 break;
5674 /* FALLTHROUGH */
5675 case INTEGER_TYPE:
5676 if (tree_int_cst_equal (low0,
5677 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5678 low0 = 0;
5679 break;
5680 case POINTER_TYPE:
5681 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5682 && integer_zerop (low0))
5683 low0 = 0;
5684 break;
5685 default:
5686 break;
5687 }
5688
5689 /* Canonicalize - [x, max] into - [x, -]. */
5690 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5691 switch (TREE_CODE (TREE_TYPE (high1)))
5692 {
5693 case ENUMERAL_TYPE:
5694 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5695 GET_MODE_BITSIZE
5696 (TYPE_MODE (TREE_TYPE (high1)))))
5697 break;
5698 /* FALLTHROUGH */
5699 case INTEGER_TYPE:
5700 if (tree_int_cst_equal (high1,
5701 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5702 high1 = 0;
5703 break;
5704 case POINTER_TYPE:
5705 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5706 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5707 high1, 1,
5708 build_int_cst (TREE_TYPE (high1), 1),
5709 1)))
5710 high1 = 0;
5711 break;
5712 default:
5713 break;
5714 }
5715
5716 /* The ranges might be also adjacent between the maximum and
5717 minimum values of the given type. For
5718 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5719 return + [x + 1, y - 1]. */
5720 if (low0 == 0 && high1 == 0)
5721 {
5722 low = range_successor (high0);
5723 high = range_predecessor (low1);
5724 if (low == 0 || high == 0)
5725 return 0;
5726
5727 in_p = 1;
5728 }
5729 else
5730 return 0;
5731 }
5732 }
5733 else if (subset)
5734 in_p = 0, low = low0, high = high0;
5735 else
5736 in_p = 0, low = low0, high = high1;
5737 }
5738
5739 *pin_p = in_p, *plow = low, *phigh = high;
5740 return 1;
5741 }
5742 \f
5743
5744 /* Subroutine of fold, looking inside expressions of the form
5745 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5746 of the COND_EXPR. This function is being used also to optimize
5747 A op B ? C : A, by reversing the comparison first.
5748
5749 Return a folded expression whose code is not a COND_EXPR
5750 anymore, or NULL_TREE if no folding opportunity is found. */
5751
5752 static tree
5753 fold_cond_expr_with_comparison (location_t loc, tree type,
5754 tree arg0, tree arg1, tree arg2)
5755 {
5756 enum tree_code comp_code = TREE_CODE (arg0);
5757 tree arg00 = TREE_OPERAND (arg0, 0);
5758 tree arg01 = TREE_OPERAND (arg0, 1);
5759 tree arg1_type = TREE_TYPE (arg1);
5760 tree tem;
5761
5762 STRIP_NOPS (arg1);
5763 STRIP_NOPS (arg2);
5764
5765 /* If we have A op 0 ? A : -A, consider applying the following
5766 transformations:
5767
5768 A == 0? A : -A same as -A
5769 A != 0? A : -A same as A
5770 A >= 0? A : -A same as abs (A)
5771 A > 0? A : -A same as abs (A)
5772 A <= 0? A : -A same as -abs (A)
5773 A < 0? A : -A same as -abs (A)
5774
5775 None of these transformations work for modes with signed
5776 zeros. If A is +/-0, the first two transformations will
5777 change the sign of the result (from +0 to -0, or vice
5778 versa). The last four will fix the sign of the result,
5779 even though the original expressions could be positive or
5780 negative, depending on the sign of A.
5781
5782 Note that all these transformations are correct if A is
5783 NaN, since the two alternatives (A and -A) are also NaNs. */
5784 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5785 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5786 ? real_zerop (arg01)
5787 : integer_zerop (arg01))
5788 && ((TREE_CODE (arg2) == NEGATE_EXPR
5789 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5790 /* In the case that A is of the form X-Y, '-A' (arg2) may
5791 have already been folded to Y-X, check for that. */
5792 || (TREE_CODE (arg1) == MINUS_EXPR
5793 && TREE_CODE (arg2) == MINUS_EXPR
5794 && operand_equal_p (TREE_OPERAND (arg1, 0),
5795 TREE_OPERAND (arg2, 1), 0)
5796 && operand_equal_p (TREE_OPERAND (arg1, 1),
5797 TREE_OPERAND (arg2, 0), 0))))
5798 switch (comp_code)
5799 {
5800 case EQ_EXPR:
5801 case UNEQ_EXPR:
5802 tem = fold_convert_loc (loc, arg1_type, arg1);
5803 return fold_convert_loc (loc, type, negate_expr (tem));
5804 case NE_EXPR:
5805 case LTGT_EXPR:
5806 return fold_convert_loc (loc, type, arg1);
5807 case UNGE_EXPR:
5808 case UNGT_EXPR:
5809 if (flag_trapping_math)
5810 break;
5811 /* Fall through. */
5812 case GE_EXPR:
5813 case GT_EXPR:
5814 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5815 break;
5816 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5817 return fold_convert_loc (loc, type, tem);
5818 case UNLE_EXPR:
5819 case UNLT_EXPR:
5820 if (flag_trapping_math)
5821 break;
5822 /* FALLTHRU */
5823 case LE_EXPR:
5824 case LT_EXPR:
5825 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5826 break;
5827 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5828 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5829 {
5830 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5831 is not: it invokes UB both in abs and in the negation of it.
5832 So, use ABSU_EXPR instead. */
5833 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5834 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5835 tem = negate_expr (tem);
5836 return fold_convert_loc (loc, type, tem);
5837 }
5838 else
5839 {
5840 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5841 return negate_expr (fold_convert_loc (loc, type, tem));
5842 }
5843 default:
5844 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5845 break;
5846 }
5847
5848 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5849 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5850 both transformations are correct when A is NaN: A != 0
5851 is then true, and A == 0 is false. */
5852
5853 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5854 && integer_zerop (arg01) && integer_zerop (arg2))
5855 {
5856 if (comp_code == NE_EXPR)
5857 return fold_convert_loc (loc, type, arg1);
5858 else if (comp_code == EQ_EXPR)
5859 return build_zero_cst (type);
5860 }
5861
5862 /* Try some transformations of A op B ? A : B.
5863
5864 A == B? A : B same as B
5865 A != B? A : B same as A
5866 A >= B? A : B same as max (A, B)
5867 A > B? A : B same as max (B, A)
5868 A <= B? A : B same as min (A, B)
5869 A < B? A : B same as min (B, A)
5870
5871 As above, these transformations don't work in the presence
5872 of signed zeros. For example, if A and B are zeros of
5873 opposite sign, the first two transformations will change
5874 the sign of the result. In the last four, the original
5875 expressions give different results for (A=+0, B=-0) and
5876 (A=-0, B=+0), but the transformed expressions do not.
5877
5878 The first two transformations are correct if either A or B
5879 is a NaN. In the first transformation, the condition will
5880 be false, and B will indeed be chosen. In the case of the
5881 second transformation, the condition A != B will be true,
5882 and A will be chosen.
5883
5884 The conversions to max() and min() are not correct if B is
5885 a number and A is not. The conditions in the original
5886 expressions will be false, so all four give B. The min()
5887 and max() versions would give a NaN instead. */
5888 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5889 && operand_equal_for_comparison_p (arg01, arg2)
5890 /* Avoid these transformations if the COND_EXPR may be used
5891 as an lvalue in the C++ front-end. PR c++/19199. */
5892 && (in_gimple_form
5893 || VECTOR_TYPE_P (type)
5894 || (! lang_GNU_CXX ()
5895 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5896 || ! maybe_lvalue_p (arg1)
5897 || ! maybe_lvalue_p (arg2)))
5898 {
5899 tree comp_op0 = arg00;
5900 tree comp_op1 = arg01;
5901 tree comp_type = TREE_TYPE (comp_op0);
5902
5903 switch (comp_code)
5904 {
5905 case EQ_EXPR:
5906 return fold_convert_loc (loc, type, arg2);
5907 case NE_EXPR:
5908 return fold_convert_loc (loc, type, arg1);
5909 case LE_EXPR:
5910 case LT_EXPR:
5911 case UNLE_EXPR:
5912 case UNLT_EXPR:
5913 /* In C++ a ?: expression can be an lvalue, so put the
5914 operand which will be used if they are equal first
5915 so that we can convert this back to the
5916 corresponding COND_EXPR. */
5917 if (!HONOR_NANS (arg1))
5918 {
5919 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5920 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5921 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5922 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5923 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5924 comp_op1, comp_op0);
5925 return fold_convert_loc (loc, type, tem);
5926 }
5927 break;
5928 case GE_EXPR:
5929 case GT_EXPR:
5930 case UNGE_EXPR:
5931 case UNGT_EXPR:
5932 if (!HONOR_NANS (arg1))
5933 {
5934 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5935 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5936 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5937 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5938 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5939 comp_op1, comp_op0);
5940 return fold_convert_loc (loc, type, tem);
5941 }
5942 break;
5943 case UNEQ_EXPR:
5944 if (!HONOR_NANS (arg1))
5945 return fold_convert_loc (loc, type, arg2);
5946 break;
5947 case LTGT_EXPR:
5948 if (!HONOR_NANS (arg1))
5949 return fold_convert_loc (loc, type, arg1);
5950 break;
5951 default:
5952 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5953 break;
5954 }
5955 }
5956
5957 return NULL_TREE;
5958 }
5959
5960
5961 \f
5962 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5963 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5964 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5965 false) >= 2)
5966 #endif
5967
5968 /* EXP is some logical combination of boolean tests. See if we can
5969 merge it into some range test. Return the new tree if so. */
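/* Editorial illustration (hedged, not part of the original source; the
   variable name is made up): for c == 4 || c == 5, make_range turns each
   operand into a range, merge_ranges combines them into the single range
   [4, 5], and build_range_check emits roughly (unsigned) (c - 4) <= 1.  */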
5970
5971 static tree
5972 fold_range_test (location_t loc, enum tree_code code, tree type,
5973 tree op0, tree op1)
5974 {
5975 int or_op = (code == TRUTH_ORIF_EXPR
5976 || code == TRUTH_OR_EXPR);
5977 int in0_p, in1_p, in_p;
5978 tree low0, low1, low, high0, high1, high;
5979 bool strict_overflow_p = false;
5980 tree tem, lhs, rhs;
5981 const char * const warnmsg = G_("assuming signed overflow does not occur "
5982 "when simplifying range test");
5983
5984 if (!INTEGRAL_TYPE_P (type))
5985 return 0;
5986
5987 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5988 /* If op0 is known true or false and this is a short-circuiting
5989 operation we must not merge with op1 since that makes side-effects
5990 unconditional. So special-case this. */
5991 if (!lhs
5992 && ((code == TRUTH_ORIF_EXPR && in0_p)
5993 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
5994 return op0;
5995 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5996
5997 /* If this is an OR operation, invert both sides; we will invert
5998 again at the end. */
5999 if (or_op)
6000 in0_p = ! in0_p, in1_p = ! in1_p;
6001
6002 /* If both expressions are the same, if we can merge the ranges, and we
6003 can build the range test, return it or its inversion. If one of the
6004 ranges is always true or always false, consider it to be the same
6005 expression as the other. */
6006 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6007 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6008 in1_p, low1, high1)
6009 && (tem = (build_range_check (loc, type,
6010 lhs != 0 ? lhs
6011 : rhs != 0 ? rhs : integer_zero_node,
6012 in_p, low, high))) != 0)
6013 {
6014 if (strict_overflow_p)
6015 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6016 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6017 }
6018
6019 /* On machines where branches are expensive, if this is a
6020 short-circuited branch and the underlying object on both sides
6021 is the same, make a non-short-circuit operation. */
6022 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6023 if (param_logical_op_non_short_circuit != -1)
6024 logical_op_non_short_circuit
6025 = param_logical_op_non_short_circuit;
6026 if (logical_op_non_short_circuit
6027 && !flag_sanitize_coverage
6028 && lhs != 0 && rhs != 0
6029 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6030 && operand_equal_p (lhs, rhs, 0))
6031 {
6032 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6033 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6034 which cases we can't do this. */
6035 if (simple_operand_p (lhs))
6036 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6037 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6038 type, op0, op1);
6039
6040 else if (!lang_hooks.decls.global_bindings_p ()
6041 && !CONTAINS_PLACEHOLDER_P (lhs))
6042 {
6043 tree common = save_expr (lhs);
6044
6045 if ((lhs = build_range_check (loc, type, common,
6046 or_op ? ! in0_p : in0_p,
6047 low0, high0)) != 0
6048 && (rhs = build_range_check (loc, type, common,
6049 or_op ? ! in1_p : in1_p,
6050 low1, high1)) != 0)
6051 {
6052 if (strict_overflow_p)
6053 fold_overflow_warning (warnmsg,
6054 WARN_STRICT_OVERFLOW_COMPARISON);
6055 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6056 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6057 type, lhs, rhs);
6058 }
6059 }
6060 }
6061
6062 return 0;
6063 }
6064 \f
6065 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6066 bit value. Arrange things so the extra bits will be set to zero if and
6067 only if C is sign-extended to its full width. If MASK is nonzero,
6068 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6069
6070 static tree
6071 unextend (tree c, int p, int unsignedp, tree mask)
6072 {
6073 tree type = TREE_TYPE (c);
6074 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6075 tree temp;
6076
6077 if (p == modesize || unsignedp)
6078 return c;
6079
6080 /* We work by getting just the sign bit into the low-order bit, then
6081 into the high-order bit, then sign-extend. We then XOR that value
6082 with C. */
6083 temp = build_int_cst (TREE_TYPE (c),
6084 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6085
6086 /* We must use a signed type in order to get an arithmetic right shift.
6087 However, we must also avoid introducing accidental overflows, so that
6088 a subsequent call to integer_zerop will work. Hence we must
6089 do the type conversion here. At this point, the constant is either
6090 zero or one, and the conversion to a signed type can never overflow.
6091 We could get an overflow if this conversion is done anywhere else. */
6092 if (TYPE_UNSIGNED (type))
6093 temp = fold_convert (signed_type_for (type), temp);
6094
6095 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6096 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6097 if (mask != 0)
6098 temp = const_binop (BIT_AND_EXPR, temp,
6099 fold_convert (TREE_TYPE (c), mask));
6100 /* If necessary, convert the type back to match the type of C. */
6101 if (TYPE_UNSIGNED (type))
6102 temp = fold_convert (type, temp);
6103
6104 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6105 }
6106 \f
6107 /* For an expression that has the form
6108 (A && B) || ~B
6109 or
6110 (A || B) && ~B,
6111 we can drop one of the inner expressions and simplify to
6112 A || ~B
6113 or
6114 A && ~B
6115 LOC is the location of the resulting expression. OP is the inner
6116 logical operation; the left-hand side in the examples above, while CMPOP
6117 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6118 removing a condition that guards another, as in
6119 (A != NULL && A->...) || A == NULL
6120 which we must not transform. If RHS_ONLY is true, only eliminate the
6121 right-most operand of the inner logical operation. */
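/* Editorial illustration (not part of the original source; p and q are made
   up): with OP = (p != 0 && q != 0) and CMPOP = (q == 0), the inner test
   q != 0 is the inverse of CMPOP and can be dropped, so the enclosing
   (p != 0 && q != 0) || q == 0 simplifies to (p != 0) || q == 0.  */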
6122
6123 static tree
6124 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6125 bool rhs_only)
6126 {
6127 tree type = TREE_TYPE (cmpop);
6128 enum tree_code code = TREE_CODE (cmpop);
6129 enum tree_code truthop_code = TREE_CODE (op);
6130 tree lhs = TREE_OPERAND (op, 0);
6131 tree rhs = TREE_OPERAND (op, 1);
6132 tree orig_lhs = lhs, orig_rhs = rhs;
6133 enum tree_code rhs_code = TREE_CODE (rhs);
6134 enum tree_code lhs_code = TREE_CODE (lhs);
6135 enum tree_code inv_code;
6136
6137 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6138 return NULL_TREE;
6139
6140 if (TREE_CODE_CLASS (code) != tcc_comparison)
6141 return NULL_TREE;
6142
6143 if (rhs_code == truthop_code)
6144 {
6145 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6146 if (newrhs != NULL_TREE)
6147 {
6148 rhs = newrhs;
6149 rhs_code = TREE_CODE (rhs);
6150 }
6151 }
6152 if (lhs_code == truthop_code && !rhs_only)
6153 {
6154 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6155 if (newlhs != NULL_TREE)
6156 {
6157 lhs = newlhs;
6158 lhs_code = TREE_CODE (lhs);
6159 }
6160 }
6161
6162 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6163 if (inv_code == rhs_code
6164 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6165 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6166 return lhs;
6167 if (!rhs_only && inv_code == lhs_code
6168 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6169 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6170 return rhs;
6171 if (rhs != orig_rhs || lhs != orig_lhs)
6172 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6173 lhs, rhs);
6174 return NULL_TREE;
6175 }
6176
6177 /* Find ways of folding logical expressions of LHS and RHS:
6178 Try to merge two comparisons to the same innermost item.
6179 Look for range tests like "ch >= '0' && ch <= '9'".
6180 Look for combinations of simple terms on machines with expensive branches
6181 and evaluate the RHS unconditionally.
6182
6183 For example, if we have p->a == 2 && p->b == 4 and we can make an
6184 object large enough to span both A and B, we can do this with a comparison
6185 against the object ANDed with the a mask.
6186
6187 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6188 operations to do this with one comparison.
6189
6190 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6191 function and the one above.
6192
6193 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6194 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6195
6196 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6197 two operands.
6198
6199 We return the simplified tree or 0 if no optimization is possible. */
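/* Editorial illustration (hedged assumption, not part of the original
   source; the struct is made up): given
     struct S { unsigned a : 4; unsigned b : 4; } *p;
   the test p->a == 2 && p->b == 4 can, on a typical little-endian target, be
   folded into one comparison of the byte containing both fields against
   0x42, i.e. a single bit-field load, an optional mask, and one EQ_EXPR.  */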
6200
6201 static tree
6202 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6203 tree lhs, tree rhs)
6204 {
6205 /* If this is the "or" of two comparisons, we can do something if
6206 the comparisons are NE_EXPR. If this is the "and", we can do something
6207 if the comparisons are EQ_EXPR. I.e.,
6208 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6209
6210 WANTED_CODE is this operation code. For single bit fields, we can
6211 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6212 comparison for one-bit fields. */
6213
6214 enum tree_code wanted_code;
6215 enum tree_code lcode, rcode;
6216 tree ll_arg, lr_arg, rl_arg, rr_arg;
6217 tree ll_inner, lr_inner, rl_inner, rr_inner;
6218 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6219 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6220 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6221 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6222 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6223 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6224 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6225 scalar_int_mode lnmode, rnmode;
6226 tree ll_mask, lr_mask, rl_mask, rr_mask;
6227 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6228 tree l_const, r_const;
6229 tree lntype, rntype, result;
6230 HOST_WIDE_INT first_bit, end_bit;
6231 int volatilep;
6232
6233 /* Start by getting the comparison codes. Fail if anything is volatile.
6234 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6235 it were surrounded with a NE_EXPR. */
6236
6237 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6238 return 0;
6239
6240 lcode = TREE_CODE (lhs);
6241 rcode = TREE_CODE (rhs);
6242
6243 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6244 {
6245 lhs = build2 (NE_EXPR, truth_type, lhs,
6246 build_int_cst (TREE_TYPE (lhs), 0));
6247 lcode = NE_EXPR;
6248 }
6249
6250 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6251 {
6252 rhs = build2 (NE_EXPR, truth_type, rhs,
6253 build_int_cst (TREE_TYPE (rhs), 0));
6254 rcode = NE_EXPR;
6255 }
6256
6257 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6258 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6259 return 0;
6260
6261 ll_arg = TREE_OPERAND (lhs, 0);
6262 lr_arg = TREE_OPERAND (lhs, 1);
6263 rl_arg = TREE_OPERAND (rhs, 0);
6264 rr_arg = TREE_OPERAND (rhs, 1);
6265
6266 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
6267 if (simple_operand_p (ll_arg)
6268 && simple_operand_p (lr_arg))
6269 {
6270 if (operand_equal_p (ll_arg, rl_arg, 0)
6271 && operand_equal_p (lr_arg, rr_arg, 0))
6272 {
6273 result = combine_comparisons (loc, code, lcode, rcode,
6274 truth_type, ll_arg, lr_arg);
6275 if (result)
6276 return result;
6277 }
6278 else if (operand_equal_p (ll_arg, rr_arg, 0)
6279 && operand_equal_p (lr_arg, rl_arg, 0))
6280 {
6281 result = combine_comparisons (loc, code, lcode,
6282 swap_tree_comparison (rcode),
6283 truth_type, ll_arg, lr_arg);
6284 if (result)
6285 return result;
6286 }
6287 }
6288
6289 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6290 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6291
6292 /* If the RHS can be evaluated unconditionally and its operands are
6293 simple, it wins to evaluate the RHS unconditionally on machines
6294 with expensive branches. In this case, this isn't a comparison
6295 that can be merged. */
6296
6297 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6298 false) >= 2
6299 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6300 && simple_operand_p (rl_arg)
6301 && simple_operand_p (rr_arg))
6302 {
6303 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6304 if (code == TRUTH_OR_EXPR
6305 && lcode == NE_EXPR && integer_zerop (lr_arg)
6306 && rcode == NE_EXPR && integer_zerop (rr_arg)
6307 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6308 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6309 return build2_loc (loc, NE_EXPR, truth_type,
6310 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6311 ll_arg, rl_arg),
6312 build_int_cst (TREE_TYPE (ll_arg), 0));
6313
6314 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6315 if (code == TRUTH_AND_EXPR
6316 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6317 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6318 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6319 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6320 return build2_loc (loc, EQ_EXPR, truth_type,
6321 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6322 ll_arg, rl_arg),
6323 build_int_cst (TREE_TYPE (ll_arg), 0));
6324 }
6325
6326 /* See if the comparisons can be merged. Then get all the parameters for
6327 each side. */
6328
6329 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6330 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6331 return 0;
6332
6333 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6334 volatilep = 0;
6335 ll_inner = decode_field_reference (loc, &ll_arg,
6336 &ll_bitsize, &ll_bitpos, &ll_mode,
6337 &ll_unsignedp, &ll_reversep, &volatilep,
6338 &ll_mask, &ll_and_mask);
6339 lr_inner = decode_field_reference (loc, &lr_arg,
6340 &lr_bitsize, &lr_bitpos, &lr_mode,
6341 &lr_unsignedp, &lr_reversep, &volatilep,
6342 &lr_mask, &lr_and_mask);
6343 rl_inner = decode_field_reference (loc, &rl_arg,
6344 &rl_bitsize, &rl_bitpos, &rl_mode,
6345 &rl_unsignedp, &rl_reversep, &volatilep,
6346 &rl_mask, &rl_and_mask);
6347 rr_inner = decode_field_reference (loc, &rr_arg,
6348 &rr_bitsize, &rr_bitpos, &rr_mode,
6349 &rr_unsignedp, &rr_reversep, &volatilep,
6350 &rr_mask, &rr_and_mask);
6351
6352 /* The inner operation on the lhs of each comparison must be the
6353 same if we are to be able to do anything.
6354 Then see if we have constants. If not, the same must be true for
6355 the rhs's. */
6356 if (volatilep
6357 || ll_reversep != rl_reversep
6358 || ll_inner == 0 || rl_inner == 0
6359 || ! operand_equal_p (ll_inner, rl_inner, 0))
6360 return 0;
6361
6362 if (TREE_CODE (lr_arg) == INTEGER_CST
6363 && TREE_CODE (rr_arg) == INTEGER_CST)
6364 {
6365 l_const = lr_arg, r_const = rr_arg;
6366 lr_reversep = ll_reversep;
6367 }
6368 else if (lr_reversep != rr_reversep
6369 || lr_inner == 0 || rr_inner == 0
6370 || ! operand_equal_p (lr_inner, rr_inner, 0))
6371 return 0;
6372 else
6373 l_const = r_const = 0;
6374
6375 /* If either comparison code is not correct for our logical operation,
6376 fail. However, we can convert a one-bit comparison against zero into
6377 the opposite comparison against that bit being set in the field. */
6378
6379 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6380 if (lcode != wanted_code)
6381 {
6382 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6383 {
6384 /* Make the left operand unsigned, since we are only interested
6385 in the value of one bit. Otherwise we are doing the wrong
6386 thing below. */
6387 ll_unsignedp = 1;
6388 l_const = ll_mask;
6389 }
6390 else
6391 return 0;
6392 }
6393
6394 /* This is analogous to the code for l_const above. */
6395 if (rcode != wanted_code)
6396 {
6397 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6398 {
6399 rl_unsignedp = 1;
6400 r_const = rl_mask;
6401 }
6402 else
6403 return 0;
6404 }
6405
6406 /* See if we can find a mode that contains both fields being compared on
6407 the left. If we can't, fail. Otherwise, update all constants and masks
6408 to be relative to a field of that size. */
6409 first_bit = MIN (ll_bitpos, rl_bitpos);
6410 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6411 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6412 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6413 volatilep, &lnmode))
6414 return 0;
6415
6416 lnbitsize = GET_MODE_BITSIZE (lnmode);
6417 lnbitpos = first_bit & ~ (lnbitsize - 1);
6418 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6419 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6420
6421 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6422 {
6423 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6424 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6425 }
6426
6427 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6428 size_int (xll_bitpos));
6429 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6430 size_int (xrl_bitpos));
6431
6432 if (l_const)
6433 {
6434 l_const = fold_convert_loc (loc, lntype, l_const);
6435 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6436 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6437 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6438 fold_build1_loc (loc, BIT_NOT_EXPR,
6439 lntype, ll_mask))))
6440 {
6441 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6442
6443 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6444 }
6445 }
6446 if (r_const)
6447 {
6448 r_const = fold_convert_loc (loc, lntype, r_const);
6449 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6450 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6451 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6452 fold_build1_loc (loc, BIT_NOT_EXPR,
6453 lntype, rl_mask))))
6454 {
6455 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6456
6457 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6458 }
6459 }
6460
6461 /* If the right sides are not constant, do the same for them. Also,
6462 disallow this optimization if a size, signedness or storage order
6463 mismatch occurs between the left and right sides. */
6464 if (l_const == 0)
6465 {
6466 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6467 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6468 || ll_reversep != lr_reversep
6469 /* Make sure the two fields on the right
6470 correspond to the left without being swapped. */
6471 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6472 return 0;
6473
6474 first_bit = MIN (lr_bitpos, rr_bitpos);
6475 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6476 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6477 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6478 volatilep, &rnmode))
6479 return 0;
6480
6481 rnbitsize = GET_MODE_BITSIZE (rnmode);
6482 rnbitpos = first_bit & ~ (rnbitsize - 1);
6483 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6484 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6485
6486 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6487 {
6488 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6489 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6490 }
6491
6492 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6493 rntype, lr_mask),
6494 size_int (xlr_bitpos));
6495 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6496 rntype, rr_mask),
6497 size_int (xrr_bitpos));
6498
6499 /* Make a mask that corresponds to both fields being compared.
6500 Do this for both items being compared. If the operands are the
6501 same size and the bits being compared are in the same position
6502 then we can do this by masking both and comparing the masked
6503 results. */
6504 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6505 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6506 if (lnbitsize == rnbitsize
6507 && xll_bitpos == xlr_bitpos
6508 && lnbitpos >= 0
6509 && rnbitpos >= 0)
6510 {
6511 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6512 lntype, lnbitsize, lnbitpos,
6513 ll_unsignedp || rl_unsignedp, ll_reversep);
6514 if (! all_ones_mask_p (ll_mask, lnbitsize))
6515 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6516
6517 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6518 rntype, rnbitsize, rnbitpos,
6519 lr_unsignedp || rr_unsignedp, lr_reversep);
6520 if (! all_ones_mask_p (lr_mask, rnbitsize))
6521 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6522
6523 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6524 }
6525
6526 /* There is still another way we can do something: If both pairs of
6527 fields being compared are adjacent, we may be able to make a wider
6528 field containing them both.
6529
6530 Note that we still must mask the lhs/rhs expressions. Furthermore,
6531 the mask must be shifted to account for the shift done by
6532 make_bit_field_ref. */
6533 if (((ll_bitsize + ll_bitpos == rl_bitpos
6534 && lr_bitsize + lr_bitpos == rr_bitpos)
6535 || (ll_bitpos == rl_bitpos + rl_bitsize
6536 && lr_bitpos == rr_bitpos + rr_bitsize))
6537 && ll_bitpos >= 0
6538 && rl_bitpos >= 0
6539 && lr_bitpos >= 0
6540 && rr_bitpos >= 0)
6541 {
6542 tree type;
6543
6544 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6545 ll_bitsize + rl_bitsize,
6546 MIN (ll_bitpos, rl_bitpos),
6547 ll_unsignedp, ll_reversep);
6548 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6549 lr_bitsize + rr_bitsize,
6550 MIN (lr_bitpos, rr_bitpos),
6551 lr_unsignedp, lr_reversep);
6552
6553 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6554 size_int (MIN (xll_bitpos, xrl_bitpos)));
6555 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6556 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6557
6558 /* Convert to the smaller type before masking out unwanted bits. */
6559 type = lntype;
6560 if (lntype != rntype)
6561 {
6562 if (lnbitsize > rnbitsize)
6563 {
6564 lhs = fold_convert_loc (loc, rntype, lhs);
6565 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6566 type = rntype;
6567 }
6568 else if (lnbitsize < rnbitsize)
6569 {
6570 rhs = fold_convert_loc (loc, lntype, rhs);
6571 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6572 type = lntype;
6573 }
6574 }
6575
6576 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6577 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6578
6579 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6580 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6581
6582 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6583 }
6584
6585 return 0;
6586 }
6587
6588 /* Handle the case of comparisons with constants. If there is something in
6589 common between the masks, those bits of the constants must be the same.
6590 If not, the condition is always false. Test for this to avoid generating
6591 incorrect code below. */
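/* Editorial illustration (not part of the original source; the field is made
   up): p->a == 1 && p->a == 2 reaches this point with identical masks but
   different constant bits, so it folds to 0 (with the warning below), and
   the dual p->a != 1 || p->a != 2 folds to 1.  */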
6592 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6593 if (! integer_zerop (result)
6594 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6595 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6596 {
6597 if (wanted_code == NE_EXPR)
6598 {
6599 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6600 return constant_boolean_node (true, truth_type);
6601 }
6602 else
6603 {
6604 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6605 return constant_boolean_node (false, truth_type);
6606 }
6607 }
6608
6609 if (lnbitpos < 0)
6610 return 0;
6611
6612 /* Construct the expression we will return. First get the component
6613 reference we will make. Unless the mask is all ones the width of
6614 that field, perform the mask operation. Then compare with the
6615 merged constant. */
6616 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6617 lntype, lnbitsize, lnbitpos,
6618 ll_unsignedp || rl_unsignedp, ll_reversep);
6619
6620 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6621 if (! all_ones_mask_p (ll_mask, lnbitsize))
6622 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6623
6624 return build2_loc (loc, wanted_code, truth_type, result,
6625 const_binop (BIT_IOR_EXPR, l_const, r_const));
6626 }
6627 \f
6628 /* T is an integer expression that is being multiplied, divided, or taken a
6629 modulus (CODE says which and what kind of divide or modulus) by a
6630 constant C. See if we can eliminate that operation by folding it with
6631 other operations already in T. WIDE_TYPE, if non-null, is a type that
6632 should be used for the computation if wider than our type.
6633
6634 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6635 (X * 2) + (Y * 4). We must, however, be assured that either the original
6636 expression would not overflow or that overflow is undefined for the type
6637 in the language in question.
6638
6639 If we return a non-null expression, it is an equivalent form of the
6640 original computation, but need not be in the original type.
6641
6642 We set *STRICT_OVERFLOW_P to true if the return value depends on
6643 signed overflow being undefined. Otherwise we do not change
6644 *STRICT_OVERFLOW_P. */
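/* Editorial illustration (hedged, not part of the original source): folding
   (x * 8) / 4 into x * 2 for signed x is only valid because signed overflow
   is taken to be undefined (if x * 8 could wrap, the two forms could
   differ); that reliance is what *STRICT_OVERFLOW_P reports to the caller.  */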
6645
6646 static tree
6647 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6648 bool *strict_overflow_p)
6649 {
6650 /* To avoid exponential search depth, refuse to allow recursion past
6651 three levels. Beyond that (1) it's highly unlikely that we'll find
6652 something interesting and (2) we've probably processed it before
6653 when we built the inner expression. */
6654
6655 static int depth;
6656 tree ret;
6657
6658 if (depth > 3)
6659 return NULL;
6660
6661 depth++;
6662 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6663 depth--;
6664
6665 return ret;
6666 }
6667
6668 static tree
6669 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6670 bool *strict_overflow_p)
6671 {
6672 tree type = TREE_TYPE (t);
6673 enum tree_code tcode = TREE_CODE (t);
6674 tree ctype = (wide_type != 0
6675 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6676 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6677 ? wide_type : type);
6678 tree t1, t2;
6679 int same_p = tcode == code;
6680 tree op0 = NULL_TREE, op1 = NULL_TREE;
6681 bool sub_strict_overflow_p;
6682
6683 /* Don't deal with constants of zero here; they confuse the code below. */
6684 if (integer_zerop (c))
6685 return NULL_TREE;
6686
6687 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6688 op0 = TREE_OPERAND (t, 0);
6689
6690 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6691 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6692
6693 /* Note that we need not handle conditional operations here since fold
6694 already handles those cases. So just do arithmetic here. */
6695 switch (tcode)
6696 {
6697 case INTEGER_CST:
6698 /* For a constant, we can always simplify if we are a multiply
6699 or (for divide and modulus) if it is a multiple of our constant. */
6700 if (code == MULT_EXPR
6701 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6702 TYPE_SIGN (type)))
6703 {
6704 tree tem = const_binop (code, fold_convert (ctype, t),
6705 fold_convert (ctype, c));
6706 /* If the multiplication overflowed, we lost information on it.
6707 See PR68142 and PR69845. */
6708 if (TREE_OVERFLOW (tem))
6709 return NULL_TREE;
6710 return tem;
6711 }
6712 break;
6713
6714 CASE_CONVERT: case NON_LVALUE_EXPR:
6715 /* If op0 is an expression ... */
6716 if ((COMPARISON_CLASS_P (op0)
6717 || UNARY_CLASS_P (op0)
6718 || BINARY_CLASS_P (op0)
6719 || VL_EXP_CLASS_P (op0)
6720 || EXPRESSION_CLASS_P (op0))
6721 /* ... and has wrapping overflow, and its type is smaller
6722 than ctype, then we cannot pass through as widening. */
6723 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6724 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6725 && (TYPE_PRECISION (ctype)
6726 > TYPE_PRECISION (TREE_TYPE (op0))))
6727 /* ... or this is a truncation (t is narrower than op0),
6728 then we cannot pass through this narrowing. */
6729 || (TYPE_PRECISION (type)
6730 < TYPE_PRECISION (TREE_TYPE (op0)))
6731 /* ... or signedness changes for division or modulus,
6732 then we cannot pass through this conversion. */
6733 || (code != MULT_EXPR
6734 && (TYPE_UNSIGNED (ctype)
6735 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6736 /* ... or has undefined overflow while the converted to
6737 type has not, we cannot do the operation in the inner type
6738 as that would introduce undefined overflow. */
6739 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6740 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6741 && !TYPE_OVERFLOW_UNDEFINED (type))))
6742 break;
6743
6744 /* Pass the constant down and see if we can make a simplification. If
6745 we can, replace this expression with the inner simplification for
6746 possible later conversion to our or some other type. */
6747 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6748 && TREE_CODE (t2) == INTEGER_CST
6749 && !TREE_OVERFLOW (t2)
6750 && (t1 = extract_muldiv (op0, t2, code,
6751 code == MULT_EXPR ? ctype : NULL_TREE,
6752 strict_overflow_p)) != 0)
6753 return t1;
6754 break;
6755
6756 case ABS_EXPR:
6757 /* If widening the type changes it from signed to unsigned, then we
6758 must avoid building ABS_EXPR itself as unsigned. */
6759 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6760 {
6761 tree cstype = (*signed_type_for) (ctype);
6762 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6763 != 0)
6764 {
6765 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6766 return fold_convert (ctype, t1);
6767 }
6768 break;
6769 }
6770 /* If the constant is negative, we cannot simplify this. */
6771 if (tree_int_cst_sgn (c) == -1)
6772 break;
6773 /* FALLTHROUGH */
6774 case NEGATE_EXPR:
6775 /* For division and modulus, type can't be unsigned, as e.g.
6776 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6777 For signed types, even with wrapping overflow, this is fine. */
6778 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6779 break;
6780 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6781 != 0)
6782 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6783 break;
6784
6785 case MIN_EXPR: case MAX_EXPR:
6786 /* If widening the type changes the signedness, then we can't perform
6787 this optimization as that changes the result. */
6788 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6789 break;
6790
6791 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6792 sub_strict_overflow_p = false;
6793 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6794 &sub_strict_overflow_p)) != 0
6795 && (t2 = extract_muldiv (op1, c, code, wide_type,
6796 &sub_strict_overflow_p)) != 0)
6797 {
6798 if (tree_int_cst_sgn (c) < 0)
6799 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6800 if (sub_strict_overflow_p)
6801 *strict_overflow_p = true;
6802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6803 fold_convert (ctype, t2));
6804 }
6805 break;
6806
6807 case LSHIFT_EXPR: case RSHIFT_EXPR:
6808 /* If the second operand is constant, this is a multiplication
6809 or floor division, by a power of two, so we can treat it that
6810 way unless the multiplier or divisor overflows. Signed
6811 left-shift overflow is implementation-defined rather than
6812 undefined in C90, so do not convert signed left shift into
6813 multiplication. */
6814 if (TREE_CODE (op1) == INTEGER_CST
6815 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6816 /* const_binop may not detect overflow correctly,
6817 so check for it explicitly here. */
6818 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6819 wi::to_wide (op1))
6820 && (t1 = fold_convert (ctype,
6821 const_binop (LSHIFT_EXPR, size_one_node,
6822 op1))) != 0
6823 && !TREE_OVERFLOW (t1))
6824 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6825 ? MULT_EXPR : FLOOR_DIV_EXPR,
6826 ctype,
6827 fold_convert (ctype, op0),
6828 t1),
6829 c, code, wide_type, strict_overflow_p);
6830 break;
6831
6832 case PLUS_EXPR: case MINUS_EXPR:
6833 /* See if we can eliminate the operation on both sides. If we can, we
6834 can return a new PLUS or MINUS. If we can't, the only remaining
6835 cases where we can do anything are if the second operand is a
6836 constant. */
6837 sub_strict_overflow_p = false;
6838 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6839 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6840 if (t1 != 0 && t2 != 0
6841 && TYPE_OVERFLOW_WRAPS (ctype)
6842 && (code == MULT_EXPR
6843 /* If not multiplication, we can only do this if both operands
6844 are divisible by c. */
6845 || (multiple_of_p (ctype, op0, c)
6846 && multiple_of_p (ctype, op1, c))))
6847 {
6848 if (sub_strict_overflow_p)
6849 *strict_overflow_p = true;
6850 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6851 fold_convert (ctype, t2));
6852 }
6853
6854 /* If this was a subtraction, negate OP1 and set it to be an addition.
6855 This simplifies the logic below. */
6856 if (tcode == MINUS_EXPR)
6857 {
6858 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6859 /* If OP1 was not easily negatable, the constant may be OP0. */
6860 if (TREE_CODE (op0) == INTEGER_CST)
6861 {
6862 std::swap (op0, op1);
6863 std::swap (t1, t2);
6864 }
6865 }
6866
6867 if (TREE_CODE (op1) != INTEGER_CST)
6868 break;
6869
6870 /* If either OP1 or C is negative, this optimization is not safe for
6871 some of the division and remainder types while for others we need
6872 to change the code. */
6873 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6874 {
6875 if (code == CEIL_DIV_EXPR)
6876 code = FLOOR_DIV_EXPR;
6877 else if (code == FLOOR_DIV_EXPR)
6878 code = CEIL_DIV_EXPR;
6879 else if (code != MULT_EXPR
6880 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6881 break;
6882 }
6883
6884 /* If it's a multiply or a division/modulus operation of a multiple
6885 of our constant, do the operation and verify it doesn't overflow. */
6886 if (code == MULT_EXPR
6887 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6888 TYPE_SIGN (type)))
6889 {
6890 op1 = const_binop (code, fold_convert (ctype, op1),
6891 fold_convert (ctype, c));
6892 /* We allow the constant to overflow with wrapping semantics. */
6893 if (op1 == 0
6894 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6895 break;
6896 }
6897 else
6898 break;
6899
6900 /* If we have an unsigned type, we cannot widen the operation since it
6901 will change the result if the original computation overflowed. */
6902 if (TYPE_UNSIGNED (ctype) && ctype != type)
6903 break;
6904
6905 /* The last case is if we are a multiply. In that case, we can
6906 apply the distributive law to commute the multiply and addition
6907 if the multiplication of the constants doesn't overflow
6908 and overflow is defined. With undefined overflow
6909 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6910 But fold_plusminus_mult_expr would factor back any power-of-two
6911 value so do not distribute in the first place in this case. */
6912 if (code == MULT_EXPR
6913 && TYPE_OVERFLOW_WRAPS (ctype)
6914 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6915 return fold_build2 (tcode, ctype,
6916 fold_build2 (code, ctype,
6917 fold_convert (ctype, op0),
6918 fold_convert (ctype, c)),
6919 op1);
6920
6921 break;
6922
6923 case MULT_EXPR:
6924 /* We have a special case here if we are doing something like
6925 (C * 8) % 4 since we know that's zero. */
6926 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6927 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6928 /* If the multiplication can overflow we cannot optimize this. */
6929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6930 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6931 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6932 TYPE_SIGN (type)))
6933 {
6934 *strict_overflow_p = true;
6935 return omit_one_operand (type, integer_zero_node, op0);
6936 }
6937
6938 /* ... fall through ... */
6939
6940 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6941 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6942 /* If we can extract our operation from the LHS, do so and return a
6943 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6944 do something only if the second operand is a constant. */
6945 if (same_p
6946 && TYPE_OVERFLOW_WRAPS (ctype)
6947 && (t1 = extract_muldiv (op0, c, code, wide_type,
6948 strict_overflow_p)) != 0)
6949 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6950 fold_convert (ctype, op1));
6951 else if (tcode == MULT_EXPR && code == MULT_EXPR
6952 && TYPE_OVERFLOW_WRAPS (ctype)
6953 && (t1 = extract_muldiv (op1, c, code, wide_type,
6954 strict_overflow_p)) != 0)
6955 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6956 fold_convert (ctype, t1));
6957 else if (TREE_CODE (op1) != INTEGER_CST)
6958 return 0;
6959
6960 /* If these are the same operation types, we can associate them
6961 assuming no overflow. */
6962 if (tcode == code)
6963 {
6964 bool overflow_p = false;
6965 wi::overflow_type overflow_mul;
6966 signop sign = TYPE_SIGN (ctype);
6967 unsigned prec = TYPE_PRECISION (ctype);
6968 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6969 wi::to_wide (c, prec),
6970 sign, &overflow_mul);
6971 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6972 if (overflow_mul
6973 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6974 overflow_p = true;
6975 if (!overflow_p)
6976 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6977 wide_int_to_tree (ctype, mul));
6978 }
6979
 6980       /* If these operations "cancel" each other, we have the main
 6981 	 optimizations of this pass, which occur when either constant is a
 6982 	 multiple of the other, in which case we replace this with a single
 6983 	 operation of either CODE or TCODE.
6984
6985 If we have an unsigned type, we cannot do this since it will change
6986 the result if the original computation overflowed. */
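      /* For example (illustrative only), with undefined signed overflow
	 dividing the expression X * 8 by C == 4 through this path yields
	 X * 2 and sets *STRICT_OVERFLOW_P, while the converse multiple,
	 dividing X * 4 by 8, instead uses CODE and produces X / 2.  */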
6987 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6988 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6989 || (tcode == MULT_EXPR
6990 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6991 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6992 && code != MULT_EXPR)))
6993 {
6994 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6995 TYPE_SIGN (type)))
6996 {
6997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6998 *strict_overflow_p = true;
6999 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7000 fold_convert (ctype,
7001 const_binop (TRUNC_DIV_EXPR,
7002 op1, c)));
7003 }
7004 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7005 TYPE_SIGN (type)))
7006 {
7007 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7008 *strict_overflow_p = true;
7009 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7010 fold_convert (ctype,
7011 const_binop (TRUNC_DIV_EXPR,
7012 c, op1)));
7013 }
7014 }
7015 break;
7016
7017 default:
7018 break;
7019 }
7020
7021 return 0;
7022 }
7023 \f
7024 /* Return a node which has the indicated constant VALUE (either 0 or
7025 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7026 and is of the indicated TYPE. */
7027
7028 tree
7029 constant_boolean_node (bool value, tree type)
7030 {
7031 if (type == integer_type_node)
7032 return value ? integer_one_node : integer_zero_node;
7033 else if (type == boolean_type_node)
7034 return value ? boolean_true_node : boolean_false_node;
7035 else if (TREE_CODE (type) == VECTOR_TYPE)
7036 return build_vector_from_val (type,
7037 build_int_cst (TREE_TYPE (type),
7038 value ? -1 : 0));
7039 else
7040 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7041 }
7042
7043
7044 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
 7045    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
7046 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7047 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7048 COND is the first argument to CODE; otherwise (as in the example
7049 given here), it is the second argument. TYPE is the type of the
7050 original expression. Return NULL_TREE if no simplification is
7051 possible. */
7052
7053 static tree
7054 fold_binary_op_with_conditional_arg (location_t loc,
7055 enum tree_code code,
7056 tree type, tree op0, tree op1,
7057 tree cond, tree arg, int cond_first_p)
7058 {
7059 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7060 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7061 tree test, true_value, false_value;
7062 tree lhs = NULL_TREE;
7063 tree rhs = NULL_TREE;
7064 enum tree_code cond_code = COND_EXPR;
7065
7066 /* Do not move possibly trapping operations into the conditional as this
7067 pessimizes code and causes gimplification issues when applied late. */
7068 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7069 ANY_INTEGRAL_TYPE_P (type)
7070 && TYPE_OVERFLOW_TRAPS (type), op1))
7071 return NULL_TREE;
7072
7073 if (TREE_CODE (cond) == COND_EXPR
7074 || TREE_CODE (cond) == VEC_COND_EXPR)
7075 {
7076 test = TREE_OPERAND (cond, 0);
7077 true_value = TREE_OPERAND (cond, 1);
7078 false_value = TREE_OPERAND (cond, 2);
 7079       /* If this operand is an expression that throws, then it does not
 7080 	 make sense to try to perform a logical or arithmetic operation
 7081 	 involving it.  */
7082 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7083 lhs = true_value;
7084 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7085 rhs = false_value;
7086 }
7087 else if (!(TREE_CODE (type) != VECTOR_TYPE
7088 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7089 {
7090 tree testtype = TREE_TYPE (cond);
7091 test = cond;
7092 true_value = constant_boolean_node (true, testtype);
7093 false_value = constant_boolean_node (false, testtype);
7094 }
7095 else
7096 /* Detect the case of mixing vector and scalar types - bail out. */
7097 return NULL_TREE;
7098
7099 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7100 cond_code = VEC_COND_EXPR;
7101
7102 /* This transformation is only worthwhile if we don't have to wrap ARG
7103 in a SAVE_EXPR and the operation can be simplified without recursing
 7104      on at least one of the branches once it's pushed inside the COND_EXPR.  */
7105 if (!TREE_CONSTANT (arg)
7106 && (TREE_SIDE_EFFECTS (arg)
7107 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7108 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7109 return NULL_TREE;
7110
7111 arg = fold_convert_loc (loc, arg_type, arg);
7112 if (lhs == 0)
7113 {
7114 true_value = fold_convert_loc (loc, cond_type, true_value);
7115 if (cond_first_p)
7116 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7117 else
7118 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7119 }
7120 if (rhs == 0)
7121 {
7122 false_value = fold_convert_loc (loc, cond_type, false_value);
7123 if (cond_first_p)
7124 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7125 else
7126 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7127 }
7128
7129 /* Check that we have simplified at least one of the branches. */
7130 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7131 return NULL_TREE;
7132
7133 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7134 }
7135
7136 \f
7137 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7138
7139 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7140 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7141 ADDEND is the same as X.
7142
7143 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7144 and finite. The problematic cases are when X is zero, and its mode
7145 has signed zeros. In the case of rounding towards -infinity,
7146 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7147 modes, X + 0 is not the same as X because -0 + 0 is 0. */
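/* Concretely, with signed zeros honored and default round-to-nearest
   rounding: (-0.0) + 0.0 yields +0.0, so X + 0.0 cannot be folded to X,
   whereas (-0.0) - 0.0 yields -0.0, so X - 0.0 is safe; this is why only
   the NEGATE case can return true at the end of this function.  */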
7148
7149 bool
7150 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7151 {
7152 if (!real_zerop (addend))
7153 return false;
7154
7155 /* Don't allow the fold with -fsignaling-nans. */
7156 if (HONOR_SNANS (type))
7157 return false;
7158
7159 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7160 if (!HONOR_SIGNED_ZEROS (type))
7161 return true;
7162
7163 /* There is no case that is safe for all rounding modes. */
7164 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7165 return false;
7166
7167 /* In a vector or complex, we would need to check the sign of all zeros. */
7168 if (TREE_CODE (addend) == VECTOR_CST)
7169 addend = uniform_vector_p (addend);
7170 if (!addend || TREE_CODE (addend) != REAL_CST)
7171 return false;
7172
7173 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7174 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7175 negate = !negate;
7176
7177 /* The mode has signed zeros, and we have to honor their sign.
7178 In this situation, there is only one case we can return true for.
7179 X - 0 is the same as X with default rounding. */
7180 return negate;
7181 }
7182
7183 /* Subroutine of match.pd that optimizes comparisons of a division by
7184 a nonzero integer constant against an integer constant, i.e.
7185 X/C1 op C2.
7186
7187 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
 7188    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
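/* As a worked example, for unsigned X the comparison X / 4 == 3 holds
   exactly when X is in [12, 15]; with C1 == 4 and C2 == 3 this sets
   *LO = 12 and *HI = 15 and returns EQ_EXPR unchanged, so the
   match.pd caller can emit the corresponding range test.  */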
7189
7190 enum tree_code
7191 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7192 tree *hi, bool *neg_overflow)
7193 {
7194 tree prod, tmp, type = TREE_TYPE (c1);
7195 signop sign = TYPE_SIGN (type);
7196 wi::overflow_type overflow;
7197
7198 /* We have to do this the hard way to detect unsigned overflow.
7199 prod = int_const_binop (MULT_EXPR, c1, c2); */
7200 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7201 prod = force_fit_type (type, val, -1, overflow);
7202 *neg_overflow = false;
7203
7204 if (sign == UNSIGNED)
7205 {
7206 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7207 *lo = prod;
7208
7209 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7210 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7211 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7212 }
7213 else if (tree_int_cst_sgn (c1) >= 0)
7214 {
7215 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7216 switch (tree_int_cst_sgn (c2))
7217 {
7218 case -1:
7219 *neg_overflow = true;
7220 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7221 *hi = prod;
7222 break;
7223
7224 case 0:
7225 *lo = fold_negate_const (tmp, type);
7226 *hi = tmp;
7227 break;
7228
7229 case 1:
7230 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7231 *lo = prod;
7232 break;
7233
7234 default:
7235 gcc_unreachable ();
7236 }
7237 }
7238 else
7239 {
7240 /* A negative divisor reverses the relational operators. */
7241 code = swap_tree_comparison (code);
7242
7243 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7244 switch (tree_int_cst_sgn (c2))
7245 {
7246 case -1:
7247 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7248 *lo = prod;
7249 break;
7250
7251 case 0:
7252 *hi = fold_negate_const (tmp, type);
7253 *lo = tmp;
7254 break;
7255
7256 case 1:
7257 *neg_overflow = true;
7258 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7259 *hi = prod;
7260 break;
7261
7262 default:
7263 gcc_unreachable ();
7264 }
7265 }
7266
7267 if (code != EQ_EXPR && code != NE_EXPR)
7268 return code;
7269
7270 if (TREE_OVERFLOW (*lo)
7271 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7272 *lo = NULL_TREE;
7273 if (TREE_OVERFLOW (*hi)
7274 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7275 *hi = NULL_TREE;
7276
7277 return code;
7278 }
7279
7280
7281 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7282 equality/inequality test, then return a simplified form of the test
 7283    using a sign test.  Otherwise return NULL.  TYPE is the desired
7284 result type. */
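/* For instance, assuming a 32-bit unsigned int X, the test
   (X & 0x80000000u) != 0 masks exactly the sign bit and is rewritten
   here as (int) X < 0, and (X & 0x80000000u) == 0 as (int) X >= 0.  */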
7285
7286 static tree
7287 fold_single_bit_test_into_sign_test (location_t loc,
7288 enum tree_code code, tree arg0, tree arg1,
7289 tree result_type)
7290 {
7291 /* If this is testing a single bit, we can optimize the test. */
7292 if ((code == NE_EXPR || code == EQ_EXPR)
7293 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7294 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7295 {
7296 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7297 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7298 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7299
7300 if (arg00 != NULL_TREE
7301 /* This is only a win if casting to a signed type is cheap,
7302 i.e. when arg00's type is not a partial mode. */
7303 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7304 {
7305 tree stype = signed_type_for (TREE_TYPE (arg00));
7306 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7307 result_type,
7308 fold_convert_loc (loc, stype, arg00),
7309 build_int_cst (stype, 0));
7310 }
7311 }
7312
7313 return NULL_TREE;
7314 }
7315
7316 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7317 equality/inequality test, then return a simplified form of
7318 the test using shifts and logical operations. Otherwise return
7319 NULL. TYPE is the desired result type. */
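/* For example, with the mask constant 8 (bit number 3) the test
   (X & 8) != 0 becomes ((X >> 3) & 1) converted to the result type,
   and (X & 8) == 0 becomes (((X >> 3) ^ 1) & 1), unless the sign-bit
   shortcut above applies first.  */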
7320
7321 tree
7322 fold_single_bit_test (location_t loc, enum tree_code code,
7323 tree arg0, tree arg1, tree result_type)
7324 {
7325 /* If this is testing a single bit, we can optimize the test. */
7326 if ((code == NE_EXPR || code == EQ_EXPR)
7327 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7328 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7329 {
7330 tree inner = TREE_OPERAND (arg0, 0);
7331 tree type = TREE_TYPE (arg0);
7332 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7333 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7334 int ops_unsigned;
7335 tree signed_type, unsigned_type, intermediate_type;
7336 tree tem, one;
7337
7338 /* First, see if we can fold the single bit test into a sign-bit
7339 test. */
7340 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7341 result_type);
7342 if (tem)
7343 return tem;
7344
7345 /* Otherwise we have (A & C) != 0 where C is a single bit,
7346 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7347 Similarly for (A & C) == 0. */
7348
7349 /* If INNER is a right shift of a constant and it plus BITNUM does
7350 not overflow, adjust BITNUM and INNER. */
7351 if (TREE_CODE (inner) == RSHIFT_EXPR
7352 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7353 && bitnum < TYPE_PRECISION (type)
7354 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7355 TYPE_PRECISION (type) - bitnum))
7356 {
7357 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7358 inner = TREE_OPERAND (inner, 0);
7359 }
7360
7361 /* If we are going to be able to omit the AND below, we must do our
7362 operations as unsigned. If we must use the AND, we have a choice.
7363 Normally unsigned is faster, but for some machines signed is. */
7364 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7365 && !flag_syntax_only) ? 0 : 1;
7366
7367 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7368 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7369 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7370 inner = fold_convert_loc (loc, intermediate_type, inner);
7371
7372 if (bitnum != 0)
7373 inner = build2 (RSHIFT_EXPR, intermediate_type,
7374 inner, size_int (bitnum));
7375
7376 one = build_int_cst (intermediate_type, 1);
7377
7378 if (code == EQ_EXPR)
7379 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7380
7381 /* Put the AND last so it can combine with more things. */
7382 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7383
7384 /* Make sure to return the proper type. */
7385 inner = fold_convert_loc (loc, result_type, inner);
7386
7387 return inner;
7388 }
7389 return NULL_TREE;
7390 }
7391
 7392 /* Test whether it is preferable to swap two operands, ARG0 and
7393 ARG1, for example because ARG0 is an integer constant and ARG1
7394 isn't. */
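/* E.g. this predicate is true for (5, x) with a non-constant X, which
   lets callers handling commutative operators canonicalize 5 + x as
   x + 5, so constants consistently end up as the second operand.  */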
7395
7396 bool
7397 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7398 {
7399 if (CONSTANT_CLASS_P (arg1))
7400 return 0;
7401 if (CONSTANT_CLASS_P (arg0))
7402 return 1;
7403
7404 STRIP_NOPS (arg0);
7405 STRIP_NOPS (arg1);
7406
7407 if (TREE_CONSTANT (arg1))
7408 return 0;
7409 if (TREE_CONSTANT (arg0))
7410 return 1;
7411
7412 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7413 for commutative and comparison operators. Ensuring a canonical
7414 form allows the optimizers to find additional redundancies without
7415 having to explicitly check for both orderings. */
7416 if (TREE_CODE (arg0) == SSA_NAME
7417 && TREE_CODE (arg1) == SSA_NAME
7418 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7419 return 1;
7420
7421 /* Put SSA_NAMEs last. */
7422 if (TREE_CODE (arg1) == SSA_NAME)
7423 return 0;
7424 if (TREE_CODE (arg0) == SSA_NAME)
7425 return 1;
7426
7427 /* Put variables last. */
7428 if (DECL_P (arg1))
7429 return 0;
7430 if (DECL_P (arg0))
7431 return 1;
7432
7433 return 0;
7434 }
7435
7436
7437 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7438 means A >= Y && A != MAX, but in this case we know that
7439 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7440
7441 static tree
7442 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7443 {
7444 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7445
7446 if (TREE_CODE (bound) == LT_EXPR)
7447 a = TREE_OPERAND (bound, 0);
7448 else if (TREE_CODE (bound) == GT_EXPR)
7449 a = TREE_OPERAND (bound, 1);
7450 else
7451 return NULL_TREE;
7452
7453 typea = TREE_TYPE (a);
7454 if (!INTEGRAL_TYPE_P (typea)
7455 && !POINTER_TYPE_P (typea))
7456 return NULL_TREE;
7457
7458 if (TREE_CODE (ineq) == LT_EXPR)
7459 {
7460 a1 = TREE_OPERAND (ineq, 1);
7461 y = TREE_OPERAND (ineq, 0);
7462 }
7463 else if (TREE_CODE (ineq) == GT_EXPR)
7464 {
7465 a1 = TREE_OPERAND (ineq, 0);
7466 y = TREE_OPERAND (ineq, 1);
7467 }
7468 else
7469 return NULL_TREE;
7470
7471 if (TREE_TYPE (a1) != typea)
7472 return NULL_TREE;
7473
7474 if (POINTER_TYPE_P (typea))
7475 {
 7476       /* Convert the pointer types to integers before taking the difference.  */
7477 tree ta = fold_convert_loc (loc, ssizetype, a);
7478 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7479 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7480 }
7481 else
7482 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7483
7484 if (!diff || !integer_onep (diff))
7485 return NULL_TREE;
7486
7487 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7488 }
7489
7490 /* Fold a sum or difference of at least one multiplication.
7491 Returns the folded tree or NULL if no simplification could be made. */
7492
7493 static tree
7494 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7495 tree arg0, tree arg1)
7496 {
7497 tree arg00, arg01, arg10, arg11;
7498 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7499
7500 /* (A * C) +- (B * C) -> (A+-B) * C.
7501 (A * C) +- A -> A * (C+-1).
7502 We are most concerned about the case where C is a constant,
7503 but other combinations show up during loop reduction. Since
7504 it is not difficult, try all four possibilities. */
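  /* For instance, x*3 + y*3 becomes (x + y) * 3 via the common-operand
     checks below, and i*4 + j*2 becomes (i*2 + j) * 2 via the
     power-of-two factoring further down (illustrative cases only).  */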
7505
7506 if (TREE_CODE (arg0) == MULT_EXPR)
7507 {
7508 arg00 = TREE_OPERAND (arg0, 0);
7509 arg01 = TREE_OPERAND (arg0, 1);
7510 }
7511 else if (TREE_CODE (arg0) == INTEGER_CST)
7512 {
7513 arg00 = build_one_cst (type);
7514 arg01 = arg0;
7515 }
7516 else
7517 {
7518 /* We cannot generate constant 1 for fract. */
7519 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7520 return NULL_TREE;
7521 arg00 = arg0;
7522 arg01 = build_one_cst (type);
7523 }
7524 if (TREE_CODE (arg1) == MULT_EXPR)
7525 {
7526 arg10 = TREE_OPERAND (arg1, 0);
7527 arg11 = TREE_OPERAND (arg1, 1);
7528 }
7529 else if (TREE_CODE (arg1) == INTEGER_CST)
7530 {
7531 arg10 = build_one_cst (type);
 7532       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7533 the purpose of this canonicalization. */
7534 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7535 && negate_expr_p (arg1)
7536 && code == PLUS_EXPR)
7537 {
7538 arg11 = negate_expr (arg1);
7539 code = MINUS_EXPR;
7540 }
7541 else
7542 arg11 = arg1;
7543 }
7544 else
7545 {
7546 /* We cannot generate constant 1 for fract. */
7547 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7548 return NULL_TREE;
7549 arg10 = arg1;
7550 arg11 = build_one_cst (type);
7551 }
7552 same = NULL_TREE;
7553
7554 /* Prefer factoring a common non-constant. */
7555 if (operand_equal_p (arg00, arg10, 0))
7556 same = arg00, alt0 = arg01, alt1 = arg11;
7557 else if (operand_equal_p (arg01, arg11, 0))
7558 same = arg01, alt0 = arg00, alt1 = arg10;
7559 else if (operand_equal_p (arg00, arg11, 0))
7560 same = arg00, alt0 = arg01, alt1 = arg10;
7561 else if (operand_equal_p (arg01, arg10, 0))
7562 same = arg01, alt0 = arg00, alt1 = arg11;
7563
7564 /* No identical multiplicands; see if we can find a common
7565 power-of-two factor in non-power-of-two multiplies. This
7566 can help in multi-dimensional array access. */
7567 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7568 {
7569 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7570 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7571 HOST_WIDE_INT tmp;
7572 bool swap = false;
7573 tree maybe_same;
7574
7575 /* Move min of absolute values to int11. */
7576 if (absu_hwi (int01) < absu_hwi (int11))
7577 {
7578 tmp = int01, int01 = int11, int11 = tmp;
7579 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7580 maybe_same = arg01;
7581 swap = true;
7582 }
7583 else
7584 maybe_same = arg11;
7585
7586 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7587 if (factor > 1
7588 && pow2p_hwi (factor)
7589 && (int01 & (factor - 1)) == 0
7590 /* The remainder should not be a constant, otherwise we
7591 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7592 increased the number of multiplications necessary. */
7593 && TREE_CODE (arg10) != INTEGER_CST)
7594 {
7595 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7596 build_int_cst (TREE_TYPE (arg00),
7597 int01 / int11));
7598 alt1 = arg10;
7599 same = maybe_same;
7600 if (swap)
7601 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7602 }
7603 }
7604
7605 if (!same)
7606 return NULL_TREE;
7607
7608 if (! ANY_INTEGRAL_TYPE_P (type)
7609 || TYPE_OVERFLOW_WRAPS (type)
7610 /* We are neither factoring zero nor minus one. */
7611 || TREE_CODE (same) == INTEGER_CST)
7612 return fold_build2_loc (loc, MULT_EXPR, type,
7613 fold_build2_loc (loc, code, type,
7614 fold_convert_loc (loc, type, alt0),
7615 fold_convert_loc (loc, type, alt1)),
7616 fold_convert_loc (loc, type, same));
7617
7618 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7619 same may be minus one and thus the multiplication may overflow. Perform
7620 the sum operation in an unsigned type. */
7621 tree utype = unsigned_type_for (type);
7622 tree tem = fold_build2_loc (loc, code, utype,
7623 fold_convert_loc (loc, utype, alt0),
7624 fold_convert_loc (loc, utype, alt1));
7625 /* If the sum evaluated to a constant that is not -INF the multiplication
7626 cannot overflow. */
7627 if (TREE_CODE (tem) == INTEGER_CST
7628 && (wi::to_wide (tem)
7629 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7630 return fold_build2_loc (loc, MULT_EXPR, type,
7631 fold_convert (type, tem), same);
7632
7633 /* Do not resort to unsigned multiplication because
7634 we lose the no-overflow property of the expression. */
7635 return NULL_TREE;
7636 }
7637
7638 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7639 specified by EXPR into the buffer PTR of length LEN bytes.
7640 Return the number of bytes placed in the buffer, or zero
7641 upon failure. */
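/* For instance, on a 32-bit little-endian target the INTEGER_CST
   0x01020304 is written out as the bytes 04 03 02 01, and on a
   big-endian target as 01 02 03 04 (ignoring, for this sketch, the
   per-word shuffling below for types wider than UNITS_PER_WORD).  */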
7642
7643 static int
7644 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7645 {
7646 tree type = TREE_TYPE (expr);
7647 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7648 int byte, offset, word, words;
7649 unsigned char value;
7650
7651 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7652 return 0;
7653 if (off == -1)
7654 off = 0;
7655
7656 if (ptr == NULL)
7657 /* Dry run. */
7658 return MIN (len, total_bytes - off);
7659
7660 words = total_bytes / UNITS_PER_WORD;
7661
7662 for (byte = 0; byte < total_bytes; byte++)
7663 {
7664 int bitpos = byte * BITS_PER_UNIT;
7665 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7666 number of bytes. */
7667 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7668
7669 if (total_bytes > UNITS_PER_WORD)
7670 {
7671 word = byte / UNITS_PER_WORD;
7672 if (WORDS_BIG_ENDIAN)
7673 word = (words - 1) - word;
7674 offset = word * UNITS_PER_WORD;
7675 if (BYTES_BIG_ENDIAN)
7676 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7677 else
7678 offset += byte % UNITS_PER_WORD;
7679 }
7680 else
7681 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7682 if (offset >= off && offset - off < len)
7683 ptr[offset - off] = value;
7684 }
7685 return MIN (len, total_bytes - off);
7686 }
7687
7688
7689 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7690 specified by EXPR into the buffer PTR of length LEN bytes.
7691 Return the number of bytes placed in the buffer, or zero
7692 upon failure. */
7693
7694 static int
7695 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7696 {
7697 tree type = TREE_TYPE (expr);
7698 scalar_mode mode = SCALAR_TYPE_MODE (type);
7699 int total_bytes = GET_MODE_SIZE (mode);
7700 FIXED_VALUE_TYPE value;
7701 tree i_value, i_type;
7702
7703 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7704 return 0;
7705
7706 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7707
 7708   if (NULL_TREE == i_type
 7709       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7709 return 0;
7710
7711 value = TREE_FIXED_CST (expr);
7712 i_value = double_int_to_tree (i_type, value.data);
7713
7714 return native_encode_int (i_value, ptr, len, off);
7715 }
7716
7717
7718 /* Subroutine of native_encode_expr. Encode the REAL_CST
7719 specified by EXPR into the buffer PTR of length LEN bytes.
7720 Return the number of bytes placed in the buffer, or zero
7721 upon failure. */
7722
7723 static int
7724 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7725 {
7726 tree type = TREE_TYPE (expr);
7727 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7728 int byte, offset, word, words, bitpos;
7729 unsigned char value;
7730
7731 /* There are always 32 bits in each long, no matter the size of
 7732      the host's long.  We handle floating point representations with
7733 up to 192 bits. */
7734 long tmp[6];
7735
7736 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7737 return 0;
7738 if (off == -1)
7739 off = 0;
7740
7741 if (ptr == NULL)
7742 /* Dry run. */
7743 return MIN (len, total_bytes - off);
7744
7745 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7746
7747 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7748
7749 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7750 bitpos += BITS_PER_UNIT)
7751 {
7752 byte = (bitpos / BITS_PER_UNIT) & 3;
7753 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7754
7755 if (UNITS_PER_WORD < 4)
7756 {
7757 word = byte / UNITS_PER_WORD;
7758 if (WORDS_BIG_ENDIAN)
7759 word = (words - 1) - word;
7760 offset = word * UNITS_PER_WORD;
7761 if (BYTES_BIG_ENDIAN)
7762 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7763 else
7764 offset += byte % UNITS_PER_WORD;
7765 }
7766 else
7767 {
7768 offset = byte;
7769 if (BYTES_BIG_ENDIAN)
7770 {
7771 /* Reverse bytes within each long, or within the entire float
7772 if it's smaller than a long (for HFmode). */
7773 offset = MIN (3, total_bytes - 1) - offset;
7774 gcc_assert (offset >= 0);
7775 }
7776 }
7777 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7778 if (offset >= off
7779 && offset - off < len)
7780 ptr[offset - off] = value;
7781 }
7782 return MIN (len, total_bytes - off);
7783 }
7784
7785 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7786 specified by EXPR into the buffer PTR of length LEN bytes.
7787 Return the number of bytes placed in the buffer, or zero
7788 upon failure. */
7789
7790 static int
7791 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7792 {
7793 int rsize, isize;
7794 tree part;
7795
7796 part = TREE_REALPART (expr);
7797 rsize = native_encode_expr (part, ptr, len, off);
7798 if (off == -1 && rsize == 0)
7799 return 0;
7800 part = TREE_IMAGPART (expr);
7801 if (off != -1)
7802 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7803 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7804 len - rsize, off);
7805 if (off == -1 && isize != rsize)
7806 return 0;
7807 return rsize + isize;
7808 }
7809
7810 /* Like native_encode_vector, but only encode the first COUNT elements.
7811 The other arguments are as for native_encode_vector. */
7812
7813 static int
7814 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7815 int off, unsigned HOST_WIDE_INT count)
7816 {
7817 tree itype = TREE_TYPE (TREE_TYPE (expr));
7818 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7819 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7820 {
7821 /* This is the only case in which elements can be smaller than a byte.
7822 Element 0 is always in the lsb of the containing byte. */
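      /* For example, an 8-element boolean vector with 1-bit elements
	 { 1, 0, 1, 1, 0, 0, 0, 0 } is encoded as the single byte 0x0d:
	 element I lands in bit (I * elt_bits) % BITS_PER_UNIT of byte
	 (I * elt_bits) / BITS_PER_UNIT.  */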
7823 unsigned int elt_bits = TYPE_PRECISION (itype);
7824 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7825 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7826 return 0;
7827
7828 if (off == -1)
7829 off = 0;
7830
7831 /* Zero the buffer and then set bits later where necessary. */
7832 int extract_bytes = MIN (len, total_bytes - off);
7833 if (ptr)
7834 memset (ptr, 0, extract_bytes);
7835
7836 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7837 unsigned int first_elt = off * elts_per_byte;
7838 unsigned int extract_elts = extract_bytes * elts_per_byte;
7839 for (unsigned int i = 0; i < extract_elts; ++i)
7840 {
7841 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7842 if (TREE_CODE (elt) != INTEGER_CST)
7843 return 0;
7844
7845 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7846 {
7847 unsigned int bit = i * elt_bits;
7848 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7849 }
7850 }
7851 return extract_bytes;
7852 }
7853
7854 int offset = 0;
7855 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7856 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7857 {
7858 if (off >= size)
7859 {
7860 off -= size;
7861 continue;
7862 }
7863 tree elem = VECTOR_CST_ELT (expr, i);
7864 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7865 len - offset, off);
7866 if ((off == -1 && res != size) || res == 0)
7867 return 0;
7868 offset += res;
7869 if (offset >= len)
7870 return (off == -1 && i < count - 1) ? 0 : offset;
7871 if (off != -1)
7872 off = 0;
7873 }
7874 return offset;
7875 }
7876
7877 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7878 specified by EXPR into the buffer PTR of length LEN bytes.
7879 Return the number of bytes placed in the buffer, or zero
7880 upon failure. */
7881
7882 static int
7883 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7884 {
7885 unsigned HOST_WIDE_INT count;
7886 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7887 return 0;
7888 return native_encode_vector_part (expr, ptr, len, off, count);
7889 }
7890
7891
7892 /* Subroutine of native_encode_expr. Encode the STRING_CST
7893 specified by EXPR into the buffer PTR of length LEN bytes.
7894 Return the number of bytes placed in the buffer, or zero
7895 upon failure. */
7896
7897 static int
7898 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7899 {
7900 tree type = TREE_TYPE (expr);
7901
7902 /* Wide-char strings are encoded in target byte-order so native
7903 encoding them is trivial. */
7904 if (BITS_PER_UNIT != CHAR_BIT
7905 || TREE_CODE (type) != ARRAY_TYPE
7906 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7907 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7908 return 0;
7909
7910 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7911 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7912 return 0;
7913 if (off == -1)
7914 off = 0;
7915 len = MIN (total_bytes - off, len);
7916 if (ptr == NULL)
7917 /* Dry run. */;
7918 else
7919 {
7920 int written = 0;
7921 if (off < TREE_STRING_LENGTH (expr))
7922 {
7923 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7924 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7925 }
7926 memset (ptr + written, 0, len - written);
7927 }
7928 return len;
7929 }
7930
7931
7932 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7933 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7934 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7935 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7936 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7937 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7938 Return the number of bytes placed in the buffer, or zero upon failure. */
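/* For instance, encoding a 4-byte INTEGER_CST with OFF == -1 and
   LEN == 2 fails (returns 0) because the whole value does not fit,
   while OFF == 2, LEN == 2 succeeds and returns 2, producing the two
   bytes at offsets 2 and 3 of the value's target memory image.  */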
7939
7940 int
7941 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7942 {
7943 /* We don't support starting at negative offset and -1 is special. */
7944 if (off < -1)
7945 return 0;
7946
7947 switch (TREE_CODE (expr))
7948 {
7949 case INTEGER_CST:
7950 return native_encode_int (expr, ptr, len, off);
7951
7952 case REAL_CST:
7953 return native_encode_real (expr, ptr, len, off);
7954
7955 case FIXED_CST:
7956 return native_encode_fixed (expr, ptr, len, off);
7957
7958 case COMPLEX_CST:
7959 return native_encode_complex (expr, ptr, len, off);
7960
7961 case VECTOR_CST:
7962 return native_encode_vector (expr, ptr, len, off);
7963
7964 case STRING_CST:
7965 return native_encode_string (expr, ptr, len, off);
7966
7967 default:
7968 return 0;
7969 }
7970 }
7971
 7972 /* Try to find a type whose byte size is at least FIELDSIZE bytes and at
 7973    most LEN bytes, with underlying mode precision/size a multiple
 7974    of BITS_PER_UNIT.  As native_{interpret,encode}_int work in terms of
 7975    machine modes, we can't just use build_nonstandard_integer_type.  */
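/* E.g. a call with FIELDSIZE == 3 and LEN == 8 would, on a typical
   target where QImode/HImode/SImode are 1/2/4 bytes, return the
   4-byte unsigned type for SImode: the narrowest integral mode that
   covers the field and still fits within LEN.  */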
7976
7977 tree
7978 find_bitfield_repr_type (int fieldsize, int len)
7979 {
7980 machine_mode mode;
7981 for (int pass = 0; pass < 2; pass++)
7982 {
7983 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
7984 FOR_EACH_MODE_IN_CLASS (mode, mclass)
7985 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7986 && known_eq (GET_MODE_PRECISION (mode),
7987 GET_MODE_BITSIZE (mode))
7988 && known_le (GET_MODE_SIZE (mode), len))
7989 {
7990 tree ret = lang_hooks.types.type_for_mode (mode, 1);
7991 if (ret && TYPE_MODE (ret) == mode)
7992 return ret;
7993 }
7994 }
7995
7996 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
7997 if (int_n_enabled_p[i]
7998 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
7999 && int_n_trees[i].unsigned_type)
8000 {
8001 tree ret = int_n_trees[i].unsigned_type;
8002 mode = TYPE_MODE (ret);
8003 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8004 && known_eq (GET_MODE_PRECISION (mode),
8005 GET_MODE_BITSIZE (mode))
8006 && known_le (GET_MODE_SIZE (mode), len))
8007 return ret;
8008 }
8009
8010 return NULL_TREE;
8011 }
8012
 8013 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
 8014    NON_LVALUE_EXPRs and nops.  If MASK is non-NULL (in which case PTR has
 8015    to be non-NULL and OFF zero), then in addition to filling the
 8016    bytes pointed to by PTR with the value, also clear any bits pointed
 8017    to by MASK that are known to be initialized; bits covering e.g.
 8018    uninitialized padding or uninitialized fields are kept as they are.  */
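/* So, assuming MASK starts out all-ones and the usual layout with
   three bytes of padding after A, encoding { .a = 1, .b = 2 } for
   struct { char a; int b; } clears the mask bytes covering A and B,
   while the three padding bytes of MASK stay set, marking them as
   not initialized.  */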
8019
8020 int
8021 native_encode_initializer (tree init, unsigned char *ptr, int len,
8022 int off, unsigned char *mask)
8023 {
8024 int r;
8025
8026 /* We don't support starting at negative offset and -1 is special. */
8027 if (off < -1 || init == NULL_TREE)
8028 return 0;
8029
8030 gcc_assert (mask == NULL || (off == 0 && ptr));
8031
8032 STRIP_NOPS (init);
8033 switch (TREE_CODE (init))
8034 {
8035 case VIEW_CONVERT_EXPR:
8036 case NON_LVALUE_EXPR:
8037 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8038 mask);
8039 default:
8040 r = native_encode_expr (init, ptr, len, off);
8041 if (mask)
8042 memset (mask, 0, r);
8043 return r;
8044 case CONSTRUCTOR:
8045 tree type = TREE_TYPE (init);
8046 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8047 if (total_bytes < 0)
8048 return 0;
8049 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8050 return 0;
8051 int o = off == -1 ? 0 : off;
8052 if (TREE_CODE (type) == ARRAY_TYPE)
8053 {
8054 HOST_WIDE_INT min_index;
8055 unsigned HOST_WIDE_INT cnt;
8056 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8057 constructor_elt *ce;
8058
8059 if (TYPE_DOMAIN (type) == NULL_TREE
8060 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
8061 return 0;
8062
8063 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8064 if (fieldsize <= 0)
8065 return 0;
8066
8067 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
8068 if (ptr != NULL)
8069 memset (ptr, '\0', MIN (total_bytes - off, len));
8070
8071 for (cnt = 0; ; cnt++)
8072 {
8073 tree val = NULL_TREE, index = NULL_TREE;
8074 HOST_WIDE_INT pos = curpos, count = 0;
8075 bool full = false;
8076 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8077 {
8078 val = ce->value;
8079 index = ce->index;
8080 }
8081 else if (mask == NULL
8082 || CONSTRUCTOR_NO_CLEARING (init)
8083 || curpos >= total_bytes)
8084 break;
8085 else
8086 pos = total_bytes;
8087 if (index && TREE_CODE (index) == RANGE_EXPR)
8088 {
8089 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
8090 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
8091 return 0;
8092 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
8093 * fieldsize;
8094 count = (tree_to_shwi (TREE_OPERAND (index, 1))
8095 - tree_to_shwi (TREE_OPERAND (index, 0)));
8096 }
8097 else if (index)
8098 {
8099 if (!tree_fits_shwi_p (index))
8100 return 0;
8101 pos = (tree_to_shwi (index) - min_index) * fieldsize;
8102 }
8103
8104 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8105 {
8106 if (valueinit == -1)
8107 {
8108 tree zero = build_zero_cst (TREE_TYPE (type));
8109 r = native_encode_initializer (zero, ptr + curpos,
8110 fieldsize, 0,
8111 mask + curpos);
8112 if (TREE_CODE (zero) == CONSTRUCTOR)
8113 ggc_free (zero);
8114 if (!r)
8115 return 0;
8116 valueinit = curpos;
8117 curpos += fieldsize;
8118 }
8119 while (curpos != pos)
8120 {
8121 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8122 memcpy (mask + curpos, mask + valueinit, fieldsize);
8123 curpos += fieldsize;
8124 }
8125 }
8126
8127 curpos = pos;
8128 if (val)
8129 do
8130 {
8131 if (off == -1
8132 || (curpos >= off
8133 && (curpos + fieldsize
8134 <= (HOST_WIDE_INT) off + len)))
8135 {
8136 if (full)
8137 {
8138 if (ptr)
8139 memcpy (ptr + (curpos - o), ptr + (pos - o),
8140 fieldsize);
8141 if (mask)
8142 memcpy (mask + curpos, mask + pos, fieldsize);
8143 }
8144 else if (!native_encode_initializer (val,
8145 ptr
8146 ? ptr + curpos - o
8147 : NULL,
8148 fieldsize,
8149 off == -1 ? -1
8150 : 0,
8151 mask
8152 ? mask + curpos
8153 : NULL))
8154 return 0;
8155 else
8156 {
8157 full = true;
8158 pos = curpos;
8159 }
8160 }
8161 else if (curpos + fieldsize > off
8162 && curpos < (HOST_WIDE_INT) off + len)
8163 {
8164 /* Partial overlap. */
8165 unsigned char *p = NULL;
8166 int no = 0;
8167 int l;
8168 gcc_assert (mask == NULL);
8169 if (curpos >= off)
8170 {
8171 if (ptr)
8172 p = ptr + curpos - off;
8173 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8174 fieldsize);
8175 }
8176 else
8177 {
8178 p = ptr;
8179 no = off - curpos;
8180 l = len;
8181 }
8182 if (!native_encode_initializer (val, p, l, no, NULL))
8183 return 0;
8184 }
8185 curpos += fieldsize;
8186 }
8187 while (count-- != 0);
8188 }
8189 return MIN (total_bytes - off, len);
8190 }
8191 else if (TREE_CODE (type) == RECORD_TYPE
8192 || TREE_CODE (type) == UNION_TYPE)
8193 {
8194 unsigned HOST_WIDE_INT cnt;
8195 constructor_elt *ce;
8196 tree fld_base = TYPE_FIELDS (type);
8197 tree to_free = NULL_TREE;
8198
8199 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8200 if (ptr != NULL)
8201 memset (ptr, '\0', MIN (total_bytes - o, len));
8202 for (cnt = 0; ; cnt++)
8203 {
8204 tree val = NULL_TREE, field = NULL_TREE;
8205 HOST_WIDE_INT pos = 0, fieldsize;
8206 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8207
8208 if (to_free)
8209 {
8210 ggc_free (to_free);
8211 to_free = NULL_TREE;
8212 }
8213
8214 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8215 {
8216 val = ce->value;
8217 field = ce->index;
8218 if (field == NULL_TREE)
8219 return 0;
8220
8221 pos = int_byte_position (field);
8222 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8223 continue;
8224 }
8225 else if (mask == NULL
8226 || CONSTRUCTOR_NO_CLEARING (init))
8227 break;
8228 else
8229 pos = total_bytes;
8230
8231 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8232 {
8233 tree fld;
8234 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8235 {
8236 if (TREE_CODE (fld) != FIELD_DECL)
8237 continue;
8238 if (fld == field)
8239 break;
8240 if (DECL_PADDING_P (fld))
8241 continue;
8242 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8243 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8244 return 0;
8245 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8246 continue;
8247 break;
8248 }
8249 if (fld == NULL_TREE)
8250 {
8251 if (ce == NULL)
8252 break;
8253 return 0;
8254 }
8255 fld_base = DECL_CHAIN (fld);
8256 if (fld != field)
8257 {
8258 cnt--;
8259 field = fld;
8260 pos = int_byte_position (field);
8261 val = build_zero_cst (TREE_TYPE (fld));
8262 if (TREE_CODE (val) == CONSTRUCTOR)
8263 to_free = val;
8264 }
8265 }
8266
8267 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8268 && TYPE_DOMAIN (TREE_TYPE (field))
8269 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8270 {
8271 if (mask || off != -1)
8272 return 0;
8273 if (val == NULL_TREE)
8274 continue;
8275 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8276 return 0;
8277 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8278 if (fieldsize < 0
8279 || (int) fieldsize != fieldsize
8280 || (pos + fieldsize) > INT_MAX)
8281 return 0;
8282 if (pos + fieldsize > total_bytes)
8283 {
8284 if (ptr != NULL && total_bytes < len)
8285 memset (ptr + total_bytes, '\0',
8286 MIN (pos + fieldsize, len) - total_bytes);
8287 total_bytes = pos + fieldsize;
8288 }
8289 }
8290 else
8291 {
8292 if (DECL_SIZE_UNIT (field) == NULL_TREE
8293 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8294 return 0;
8295 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8296 }
8297 if (fieldsize == 0)
8298 continue;
8299
8300 if (DECL_BIT_FIELD (field))
8301 {
8302 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8303 return 0;
8304 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8305 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8306 if (bpos % BITS_PER_UNIT)
8307 bpos %= BITS_PER_UNIT;
8308 else
8309 bpos = 0;
8310 fieldsize += bpos;
8311 epos = fieldsize % BITS_PER_UNIT;
8312 fieldsize += BITS_PER_UNIT - 1;
8313 fieldsize /= BITS_PER_UNIT;
8314 }
8315
8316 if (off != -1 && pos + fieldsize <= off)
8317 continue;
8318
8319 if (val == NULL_TREE)
8320 continue;
8321
8322 if (DECL_BIT_FIELD (field))
8323 {
8324 /* FIXME: Handle PDP endian. */
8325 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8326 return 0;
8327
8328 if (TREE_CODE (val) != INTEGER_CST)
8329 return 0;
8330
8331 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8332 tree repr_type = NULL_TREE;
8333 HOST_WIDE_INT rpos = 0;
8334 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8335 {
8336 rpos = int_byte_position (repr);
8337 repr_type = TREE_TYPE (repr);
8338 }
8339 else
8340 {
8341 repr_type = find_bitfield_repr_type (fieldsize, len);
8342 if (repr_type == NULL_TREE)
8343 return 0;
8344 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8345 gcc_assert (repr_size > 0 && repr_size <= len);
8346 if (pos + repr_size <= o + len)
8347 rpos = pos;
8348 else
8349 {
8350 rpos = o + len - repr_size;
8351 gcc_assert (rpos <= pos);
8352 }
8353 }
8354
8355 if (rpos > pos)
8356 return 0;
8357 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8358 int diff = (TYPE_PRECISION (repr_type)
8359 - TYPE_PRECISION (TREE_TYPE (field)));
8360 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8361 if (!BYTES_BIG_ENDIAN)
8362 w = wi::lshift (w, bitoff);
8363 else
8364 w = wi::lshift (w, diff - bitoff);
8365 val = wide_int_to_tree (repr_type, w);
8366
8367 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8368 / BITS_PER_UNIT + 1];
8369 int l = native_encode_int (val, buf, sizeof buf, 0);
8370 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8371 return 0;
8372
8373 if (ptr == NULL)
8374 continue;
8375
8376 /* If the bitfield does not start at byte boundary, handle
8377 the partial byte at the start. */
8378 if (bpos
8379 && (off == -1 || (pos >= off && len >= 1)))
8380 {
8381 if (!BYTES_BIG_ENDIAN)
8382 {
8383 int msk = (1 << bpos) - 1;
8384 buf[pos - rpos] &= ~msk;
8385 buf[pos - rpos] |= ptr[pos - o] & msk;
8386 if (mask)
8387 {
8388 if (fieldsize > 1 || epos == 0)
8389 mask[pos] &= msk;
8390 else
8391 mask[pos] &= (msk | ~((1 << epos) - 1));
8392 }
8393 }
8394 else
8395 {
8396 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8397 buf[pos - rpos] &= msk;
8398 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8399 if (mask)
8400 {
8401 if (fieldsize > 1 || epos == 0)
8402 mask[pos] &= ~msk;
8403 else
8404 mask[pos] &= (~msk
8405 | ((1 << (BITS_PER_UNIT - epos))
8406 - 1));
8407 }
8408 }
8409 }
8410 /* If the bitfield does not end at byte boundary, handle
8411 the partial byte at the end. */
8412 if (epos
8413 && (off == -1
8414 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8415 {
8416 if (!BYTES_BIG_ENDIAN)
8417 {
8418 int msk = (1 << epos) - 1;
8419 buf[pos - rpos + fieldsize - 1] &= msk;
8420 buf[pos - rpos + fieldsize - 1]
8421 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8422 if (mask && (fieldsize > 1 || bpos == 0))
8423 mask[pos + fieldsize - 1] &= ~msk;
8424 }
8425 else
8426 {
8427 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8428 buf[pos - rpos + fieldsize - 1] &= ~msk;
8429 buf[pos - rpos + fieldsize - 1]
8430 |= ptr[pos + fieldsize - 1 - o] & msk;
8431 if (mask && (fieldsize > 1 || bpos == 0))
8432 mask[pos + fieldsize - 1] &= msk;
8433 }
8434 }
8435 if (off == -1
8436 || (pos >= off
8437 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8438 {
8439 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8440 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8441 memset (mask + pos + (bpos != 0), 0,
8442 fieldsize - (bpos != 0) - (epos != 0));
8443 }
8444 else
8445 {
8446 /* Partial overlap. */
8447 HOST_WIDE_INT fsz = fieldsize;
8448 gcc_assert (mask == NULL);
8449 if (pos < off)
8450 {
8451 fsz -= (off - pos);
8452 pos = off;
8453 }
8454 if (pos + fsz > (HOST_WIDE_INT) off + len)
8455 fsz = (HOST_WIDE_INT) off + len - pos;
8456 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8457 }
8458 continue;
8459 }
8460
8461 if (off == -1
8462 || (pos >= off
8463 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8464 {
8465 int fldsize = fieldsize;
8466 if (off == -1)
8467 {
8468 tree fld = DECL_CHAIN (field);
8469 while (fld)
8470 {
8471 if (TREE_CODE (fld) == FIELD_DECL)
8472 break;
8473 fld = DECL_CHAIN (fld);
8474 }
8475 if (fld == NULL_TREE)
8476 fldsize = len - pos;
8477 }
8478 r = native_encode_initializer (val, ptr ? ptr + pos - o
8479 : NULL,
8480 fldsize,
8481 off == -1 ? -1 : 0,
8482 mask ? mask + pos : NULL);
8483 if (!r)
8484 return 0;
8485 if (off == -1
8486 && fldsize != fieldsize
8487 && r > fieldsize
8488 && pos + r > total_bytes)
8489 total_bytes = pos + r;
8490 }
8491 else
8492 {
8493 /* Partial overlap. */
8494 unsigned char *p = NULL;
8495 int no = 0;
8496 int l;
8497 gcc_assert (mask == NULL);
8498 if (pos >= off)
8499 {
8500 if (ptr)
8501 p = ptr + pos - off;
8502 l = MIN ((HOST_WIDE_INT) off + len - pos,
8503 fieldsize);
8504 }
8505 else
8506 {
8507 p = ptr;
8508 no = off - pos;
8509 l = len;
8510 }
8511 if (!native_encode_initializer (val, p, l, no, NULL))
8512 return 0;
8513 }
8514 }
8515 return MIN (total_bytes - off, len);
8516 }
8517 return 0;
8518 }
8519 }
8520
8521
8522 /* Subroutine of native_interpret_expr. Interpret the contents of
8523 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8524 If the buffer cannot be interpreted, return NULL_TREE. */
8525
8526 static tree
8527 native_interpret_int (tree type, const unsigned char *ptr, int len)
8528 {
8529 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8530
8531 if (total_bytes > len
8532 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8533 return NULL_TREE;
8534
8535 wide_int result = wi::from_buffer (ptr, total_bytes);
8536
8537 return wide_int_to_tree (type, result);
8538 }
8539
8540
8541 /* Subroutine of native_interpret_expr. Interpret the contents of
8542 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8543 If the buffer cannot be interpreted, return NULL_TREE. */
8544
8545 static tree
8546 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8547 {
8548 scalar_mode mode = SCALAR_TYPE_MODE (type);
8549 int total_bytes = GET_MODE_SIZE (mode);
8550 double_int result;
8551 FIXED_VALUE_TYPE fixed_value;
8552
8553 if (total_bytes > len
8554 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8555 return NULL_TREE;
8556
8557 result = double_int::from_buffer (ptr, total_bytes);
8558 fixed_value = fixed_from_double_int (result, mode);
8559
8560 return build_fixed (type, fixed_value);
8561 }
8562
8563
8564 /* Subroutine of native_interpret_expr. Interpret the contents of
8565 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8566 If the buffer cannot be interpreted, return NULL_TREE. */
8567
8568 static tree
8569 native_interpret_real (tree type, const unsigned char *ptr, int len)
8570 {
8571 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8572 int total_bytes = GET_MODE_SIZE (mode);
8573 unsigned char value;
8574 /* There are always 32 bits in each long, no matter the size of
 8575      the host's long.  We handle floating point representations with
8576 up to 192 bits. */
8577 REAL_VALUE_TYPE r;
8578 long tmp[6];
8579
8580 if (total_bytes > len || total_bytes > 24)
8581 return NULL_TREE;
8582 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8583
8584 memset (tmp, 0, sizeof (tmp));
8585 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8586 bitpos += BITS_PER_UNIT)
8587 {
8588 /* Both OFFSET and BYTE index within a long;
8589 bitpos indexes the whole float. */
8590 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8591 if (UNITS_PER_WORD < 4)
8592 {
8593 int word = byte / UNITS_PER_WORD;
8594 if (WORDS_BIG_ENDIAN)
8595 word = (words - 1) - word;
8596 offset = word * UNITS_PER_WORD;
8597 if (BYTES_BIG_ENDIAN)
8598 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8599 else
8600 offset += byte % UNITS_PER_WORD;
8601 }
8602 else
8603 {
8604 offset = byte;
8605 if (BYTES_BIG_ENDIAN)
8606 {
8607 /* Reverse bytes within each long, or within the entire float
8608 if it's smaller than a long (for HFmode). */
8609 offset = MIN (3, total_bytes - 1) - offset;
8610 gcc_assert (offset >= 0);
8611 }
8612 }
8613 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8614
8615 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8616 }
8617
8618 real_from_target (&r, tmp, mode);
8619 tree ret = build_real (type, r);
8620 if (MODE_COMPOSITE_P (mode))
8621 {
8622 /* For floating point values in composite modes, punt if this folding
8623 doesn't preserve bit representation. As the mode doesn't have fixed
8624 precision while GCC pretends it does, there could be valid values that
8625 GCC can't really represent accurately. See PR95450. */
8626 unsigned char buf[24];
8627 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8628 || memcmp (ptr, buf, total_bytes) != 0)
8629 ret = NULL_TREE;
8630 }
8631 return ret;
8632 }
8633
8634
8635 /* Subroutine of native_interpret_expr. Interpret the contents of
8636 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8637 If the buffer cannot be interpreted, return NULL_TREE. */
8638
8639 static tree
8640 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8641 {
8642 tree etype, rpart, ipart;
8643 int size;
8644
8645 etype = TREE_TYPE (type);
8646 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8647 if (size * 2 > len)
8648 return NULL_TREE;
8649 rpart = native_interpret_expr (etype, ptr, size);
8650 if (!rpart)
8651 return NULL_TREE;
8652 ipart = native_interpret_expr (etype, ptr+size, size);
8653 if (!ipart)
8654 return NULL_TREE;
8655 return build_complex (type, rpart, ipart);
8656 }
8657
8658 /* Read a vector of type TYPE from the target memory image given by BYTES,
8659 which contains LEN bytes. The vector is known to be encodable using
8660 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8661
8662 Return the vector on success, otherwise return null. */
8663
8664 static tree
8665 native_interpret_vector_part (tree type, const unsigned char *bytes,
8666 unsigned int len, unsigned int npatterns,
8667 unsigned int nelts_per_pattern)
8668 {
8669 tree elt_type = TREE_TYPE (type);
8670 if (VECTOR_BOOLEAN_TYPE_P (type)
8671 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8672 {
8673 /* This is the only case in which elements can be smaller than a byte.
8674 Element 0 is always in the lsb of the containing byte. */
8675 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8676 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8677 return NULL_TREE;
8678
8679 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8680 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8681 {
8682 unsigned int bit_index = i * elt_bits;
8683 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8684 unsigned int lsb = bit_index % BITS_PER_UNIT;
8685 builder.quick_push (bytes[byte_index] & (1 << lsb)
8686 ? build_all_ones_cst (elt_type)
8687 : build_zero_cst (elt_type));
8688 }
8689 return builder.build ();
8690 }
8691
8692 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8693 if (elt_bytes * npatterns * nelts_per_pattern > len)
8694 return NULL_TREE;
8695
8696 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8697 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8698 {
8699 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8700 if (!elt)
8701 return NULL_TREE;
8702 builder.quick_push (elt);
8703 bytes += elt_bytes;
8704 }
8705 return builder.build ();
8706 }
8707
8708 /* Subroutine of native_interpret_expr. Interpret the contents of
8709 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8710 If the buffer cannot be interpreted, return NULL_TREE. */
8711
8712 static tree
8713 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8714 {
8715 tree etype;
8716 unsigned int size;
8717 unsigned HOST_WIDE_INT count;
8718
8719 etype = TREE_TYPE (type);
8720 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8721 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8722 || size * count > len)
8723 return NULL_TREE;
8724
8725 return native_interpret_vector_part (type, ptr, len, count, 1);
8726 }
8727
8728
8729 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8730 the buffer PTR of length LEN as a constant of type TYPE. For
8731 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8732 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8733 return NULL_TREE. */
8734
8735 tree
8736 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8737 {
8738 switch (TREE_CODE (type))
8739 {
8740 case INTEGER_TYPE:
8741 case ENUMERAL_TYPE:
8742 case BOOLEAN_TYPE:
8743 case POINTER_TYPE:
8744 case REFERENCE_TYPE:
8745 return native_interpret_int (type, ptr, len);
8746
8747 case REAL_TYPE:
8748 return native_interpret_real (type, ptr, len);
8749
8750 case FIXED_POINT_TYPE:
8751 return native_interpret_fixed (type, ptr, len);
8752
8753 case COMPLEX_TYPE:
8754 return native_interpret_complex (type, ptr, len);
8755
8756 case VECTOR_TYPE:
8757 return native_interpret_vector (type, ptr, len);
8758
8759 default:
8760 return NULL_TREE;
8761 }
8762 }
8763
8764 /* Returns true if we can interpret the contents of a native encoding
8765 as TYPE. */
8766
8767 bool
8768 can_native_interpret_type_p (tree type)
8769 {
8770 switch (TREE_CODE (type))
8771 {
8772 case INTEGER_TYPE:
8773 case ENUMERAL_TYPE:
8774 case BOOLEAN_TYPE:
8775 case POINTER_TYPE:
8776 case REFERENCE_TYPE:
8777 case FIXED_POINT_TYPE:
8778 case REAL_TYPE:
8779 case COMPLEX_TYPE:
8780 case VECTOR_TYPE:
8781 return true;
8782 default:
8783 return false;
8784 }
8785 }
8786
8787 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8788 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8789
8790 tree
8791 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8792 int len)
8793 {
8794 vec<constructor_elt, va_gc> *elts = NULL;
8795 if (TREE_CODE (type) == ARRAY_TYPE)
8796 {
8797 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8798 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8799 return NULL_TREE;
8800
8801 HOST_WIDE_INT cnt = 0;
8802 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8803 {
8804 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8805 return NULL_TREE;
8806 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8807 }
8808 if (eltsz == 0)
8809 cnt = 0;
8810 HOST_WIDE_INT pos = 0;
8811 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8812 {
8813 tree v = NULL_TREE;
8814 if (pos >= len || pos + eltsz > len)
8815 return NULL_TREE;
8816 if (can_native_interpret_type_p (TREE_TYPE (type)))
8817 {
8818 v = native_interpret_expr (TREE_TYPE (type),
8819 ptr + off + pos, eltsz);
8820 if (v == NULL_TREE)
8821 return NULL_TREE;
8822 }
8823 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8824 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8825 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8826 eltsz);
8827 if (v == NULL_TREE)
8828 return NULL_TREE;
8829 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8830 }
8831 return build_constructor (type, elts);
8832 }
8833 if (TREE_CODE (type) != RECORD_TYPE)
8834 return NULL_TREE;
8835 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8836 {
8837 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8838 continue;
8839 tree fld = field;
8840 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8841 int diff = 0;
8842 tree v = NULL_TREE;
8843 if (DECL_BIT_FIELD (field))
8844 {
8845 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8846 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8847 {
8848 poly_int64 bitoffset;
8849 poly_uint64 field_offset, fld_offset;
8850 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8851 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8852 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8853 else
8854 bitoffset = 0;
8855 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8856 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8857 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8858 - TYPE_PRECISION (TREE_TYPE (field)));
8859 if (!bitoffset.is_constant (&bitoff)
8860 || bitoff < 0
8861 || bitoff > diff)
8862 return NULL_TREE;
8863 }
8864 else
8865 {
8866 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8867 return NULL_TREE;
8868 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8869 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8870 bpos %= BITS_PER_UNIT;
8871 fieldsize += bpos;
8872 fieldsize += BITS_PER_UNIT - 1;
8873 fieldsize /= BITS_PER_UNIT;
8874 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8875 if (repr_type == NULL_TREE)
8876 return NULL_TREE;
8877 sz = int_size_in_bytes (repr_type);
8878 if (sz < 0 || sz > len)
8879 return NULL_TREE;
8880 pos = int_byte_position (field);
8881 if (pos < 0 || pos > len || pos + fieldsize > len)
8882 return NULL_TREE;
8883 HOST_WIDE_INT rpos;
8884 if (pos + sz <= len)
8885 rpos = pos;
8886 else
8887 {
8888 rpos = len - sz;
8889 gcc_assert (rpos <= pos);
8890 }
8891 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8892 pos = rpos;
8893 diff = (TYPE_PRECISION (repr_type)
8894 - TYPE_PRECISION (TREE_TYPE (field)));
8895 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8896 if (v == NULL_TREE)
8897 return NULL_TREE;
8898 fld = NULL_TREE;
8899 }
8900 }
8901
8902 if (fld)
8903 {
8904 sz = int_size_in_bytes (TREE_TYPE (fld));
8905 if (sz < 0 || sz > len)
8906 return NULL_TREE;
8907 tree byte_pos = byte_position (fld);
8908 if (!tree_fits_shwi_p (byte_pos))
8909 return NULL_TREE;
8910 pos = tree_to_shwi (byte_pos);
8911 if (pos < 0 || pos > len || pos + sz > len)
8912 return NULL_TREE;
8913 }
8914 if (fld == NULL_TREE)
8915 /* Already handled above. */;
8916 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8917 {
8918 v = native_interpret_expr (TREE_TYPE (fld),
8919 ptr + off + pos, sz);
8920 if (v == NULL_TREE)
8921 return NULL_TREE;
8922 }
8923 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8924 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8925 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8926 if (v == NULL_TREE)
8927 return NULL_TREE;
8928 if (fld != field)
8929 {
8930 if (TREE_CODE (v) != INTEGER_CST)
8931 return NULL_TREE;
8932
8933 /* FIXME: Figure out how to handle PDP endian bitfields. */
8934 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8935 return NULL_TREE;
8936 if (!BYTES_BIG_ENDIAN)
8937 v = wide_int_to_tree (TREE_TYPE (field),
8938 wi::lrshift (wi::to_wide (v), bitoff));
8939 else
8940 v = wide_int_to_tree (TREE_TYPE (field),
8941 wi::lrshift (wi::to_wide (v),
8942 diff - bitoff));
8943 }
8944 CONSTRUCTOR_APPEND_ELT (elts, field, v);
8945 }
8946 return build_constructor (type, elts);
8947 }
8948
8949 /* Routines for manipulating native_encode_expr encoded data when the encoded
8950    or extracted constant positions and/or sizes aren't byte aligned.  */
8951
8952 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8953 bits between adjacent elements. AMNT should be within
8954 [0, BITS_PER_UNIT).
8955 Example, AMNT = 2:
8956 00011111|11100000 << 2 = 01111111|10000000
8957 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8958
8959 void
8960 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8961 unsigned int amnt)
8962 {
8963 if (amnt == 0)
8964 return;
8965
8966 unsigned char carry_over = 0U;
8967 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8968 unsigned char clear_mask = (~0U) << amnt;
8969
8970 for (unsigned int i = 0; i < sz; i++)
8971 {
8972 unsigned prev_carry_over = carry_over;
8973 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8974
8975 ptr[i] <<= amnt;
8976 if (i != 0)
8977 {
8978 ptr[i] &= clear_mask;
8979 ptr[i] |= prev_carry_over;
8980 }
8981 }
8982 }
8983
8984 /* Like shift_bytes_in_array_left but for big-endian.
8985 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8986 bits between adjacent elements. AMNT should be within
8987 [0, BITS_PER_UNIT).
8988 Example, AMNT = 2:
8989 00011111|11100000 >> 2 = 00000111|11111000
8990 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8991
8992 void
8993 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8994 unsigned int amnt)
8995 {
8996 if (amnt == 0)
8997 return;
8998
8999 unsigned char carry_over = 0U;
9000 unsigned char carry_mask = ~(~0U << amnt);
9001
9002 for (unsigned int i = 0; i < sz; i++)
9003 {
9004 unsigned prev_carry_over = carry_over;
9005 carry_over = ptr[i] & carry_mask;
9006
9007 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9008 ptr[i] >>= amnt;
9009 ptr[i] |= prev_carry_over;
9010 }
9011 }
9012
9013 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9014 directly on the VECTOR_CST encoding, in a way that works for variable-
9015 length vectors. Return the resulting VECTOR_CST on success or null
9016 on failure. */
9017
9018 static tree
9019 fold_view_convert_vector_encoding (tree type, tree expr)
9020 {
9021 tree expr_type = TREE_TYPE (expr);
9022 poly_uint64 type_bits, expr_bits;
9023 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9024 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9025 return NULL_TREE;
9026
9027 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9028 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9029 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9030 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9031
9032 /* We can only preserve the semantics of a stepped pattern if the new
9033 vector element is an integer of the same size. */
9034 if (VECTOR_CST_STEPPED_P (expr)
9035 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9036 return NULL_TREE;
9037
9038 /* The number of bits needed to encode one element from every pattern
9039 of the original vector. */
9040 unsigned int expr_sequence_bits
9041 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9042
9043 /* The number of bits needed to encode one element from every pattern
9044 of the result. */
9045 unsigned int type_sequence_bits
9046 = least_common_multiple (expr_sequence_bits, type_elt_bits);
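  /* E.g. if EXPR has 2 patterns of 16-bit elements, EXPR_SEQUENCE_BITS
     is 32; reinterpreting as 8-bit elements gives TYPE_SEQUENCE_BITS
     = lcm (32, 8) = 32 and hence 4 patterns in the result.  */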
9047
9048 /* Don't try to read more bytes than are available, which can happen
9049 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9050 The general VIEW_CONVERT handling can cope with that case, so there's
9051 no point complicating things here. */
9052 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9053 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9054 BITS_PER_UNIT);
9055 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9056 if (known_gt (buffer_bits, expr_bits))
9057 return NULL_TREE;
9058
9059 /* Get enough bytes of EXPR to form the new encoding. */
9060 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9061 buffer.quick_grow (buffer_bytes);
9062 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9063 buffer_bits / expr_elt_bits)
9064 != (int) buffer_bytes)
9065 return NULL_TREE;
9066
9067 /* Reencode the bytes as TYPE. */
9068 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9069 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9070 type_npatterns, nelts_per_pattern);
9071 }
9072
9073 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9074 TYPE at compile-time. If we're unable to perform the conversion
9075 return NULL_TREE. */
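/* E.g. a VIEW_CONVERT_EXPR of the float constant 1.0f to a 32-bit
   integer type yields the INTEGER_CST 0x3f800000 (assuming IEEE
   single precision): the value is encoded into its target byte
   representation and then reinterpreted in the new type.  */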
9076
9077 static tree
9078 fold_view_convert_expr (tree type, tree expr)
9079 {
9080 /* We support up to 512-bit values (for V8DFmode). */
9081 unsigned char buffer[64];
9082 int len;
9083
9084 /* Check that the host and target are sane. */
9085 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9086 return NULL_TREE;
9087
9088 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9089 if (tree res = fold_view_convert_vector_encoding (type, expr))
9090 return res;
9091
9092 len = native_encode_expr (expr, buffer, sizeof (buffer));
9093 if (len == 0)
9094 return NULL_TREE;
9095
9096 return native_interpret_expr (type, buffer, len);
9097 }
9098
9099 /* Build an expression for the address of T. Folds away INDIRECT_REF
9100 to avoid confusing the gimplify process. */
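/* E.g. the address of *P folds back to P (converted to PTRTYPE if
   needed), and the address of a zero-offset MEM_REF likewise folds
   to its pointer operand instead of building a new ADDR_EXPR.  */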
9101
9102 tree
9103 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9104 {
9105 /* The size of the object is not relevant when talking about its address. */
9106 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9107 t = TREE_OPERAND (t, 0);
9108
9109 if (TREE_CODE (t) == INDIRECT_REF)
9110 {
9111 t = TREE_OPERAND (t, 0);
9112
9113 if (TREE_TYPE (t) != ptrtype)
9114 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9115 }
9116 else if (TREE_CODE (t) == MEM_REF
9117 && integer_zerop (TREE_OPERAND (t, 1)))
9118 {
9119 t = TREE_OPERAND (t, 0);
9120
9121 if (TREE_TYPE (t) != ptrtype)
9122 t = fold_convert_loc (loc, ptrtype, t);
9123 }
9124 else if (TREE_CODE (t) == MEM_REF
9125 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9126 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9127 TREE_OPERAND (t, 0),
9128 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9129 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9130 {
9131 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9132
9133 if (TREE_TYPE (t) != ptrtype)
9134 t = fold_convert_loc (loc, ptrtype, t);
9135 }
9136 else
9137 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9138
9139 return t;
9140 }
9141
9142 /* Build an expression for the address of T. */
9143
9144 tree
9145 build_fold_addr_expr_loc (location_t loc, tree t)
9146 {
9147 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9148
9149 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9150 }
9151
9152 /* Fold a unary expression of code CODE and type TYPE with operand
9153 OP0. Return the folded expression if folding is successful.
9154 Otherwise, return NULL_TREE. */
9155
9156 tree
9157 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9158 {
9159 tree tem;
9160 tree arg0;
9161 enum tree_code_class kind = TREE_CODE_CLASS (code);
9162
9163 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9164 && TREE_CODE_LENGTH (code) == 1);
9165
9166 arg0 = op0;
9167 if (arg0)
9168 {
9169 if (CONVERT_EXPR_CODE_P (code)
9170 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9171 {
9172 /* Don't use STRIP_NOPS, because signedness of argument type
9173 matters. */
9174 STRIP_SIGN_NOPS (arg0);
9175 }
9176 else
9177 {
9178 /* Strip any conversions that don't change the mode. This
9179 is safe for every expression, except for a comparison
9180 expression because its signedness is derived from its
9181 operands.
9182
9183 Note that this is done as an internal manipulation within
9184 the constant folder, in order to find the simplest
9185 representation of the arguments so that their form can be
9186	     studied.  In any case, the appropriate type conversions
9187 should be put back in the tree that will get out of the
9188 constant folder. */
9189 STRIP_NOPS (arg0);
9190 }
9191
9192 if (CONSTANT_CLASS_P (arg0))
9193 {
9194 tree tem = const_unop (code, type, arg0);
9195 if (tem)
9196 {
9197 if (TREE_TYPE (tem) != type)
9198 tem = fold_convert_loc (loc, type, tem);
9199 return tem;
9200 }
9201 }
9202 }
9203
9204 tem = generic_simplify (loc, code, type, op0);
9205 if (tem)
9206 return tem;
9207
9208 if (TREE_CODE_CLASS (code) == tcc_unary)
9209 {
9210 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9211 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9212 fold_build1_loc (loc, code, type,
9213 fold_convert_loc (loc, TREE_TYPE (op0),
9214 TREE_OPERAND (arg0, 1))));
9215 else if (TREE_CODE (arg0) == COND_EXPR)
9216 {
9217 tree arg01 = TREE_OPERAND (arg0, 1);
9218 tree arg02 = TREE_OPERAND (arg0, 2);
9219 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9220 arg01 = fold_build1_loc (loc, code, type,
9221 fold_convert_loc (loc,
9222 TREE_TYPE (op0), arg01));
9223 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9224 arg02 = fold_build1_loc (loc, code, type,
9225 fold_convert_loc (loc,
9226 TREE_TYPE (op0), arg02));
9227 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9228 arg01, arg02);
9229
9230	  /* If this was a conversion, and all we did was to move it
9231	     inside the COND_EXPR, bring it back out.  But leave it if
9232 it is a conversion from integer to integer and the
9233 result precision is no wider than a word since such a
9234 conversion is cheap and may be optimized away by combine,
9235 while it couldn't if it were outside the COND_EXPR. Then return
9236 so we don't get into an infinite recursion loop taking the
9237 conversion out and then back in. */
9238
9239 if ((CONVERT_EXPR_CODE_P (code)
9240 || code == NON_LVALUE_EXPR)
9241 && TREE_CODE (tem) == COND_EXPR
9242 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9243 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9244 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9245 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9246 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9247 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9248 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9249 && (INTEGRAL_TYPE_P
9250 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9251 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9252 || flag_syntax_only))
9253 tem = build1_loc (loc, code, type,
9254 build3 (COND_EXPR,
9255 TREE_TYPE (TREE_OPERAND
9256 (TREE_OPERAND (tem, 1), 0)),
9257 TREE_OPERAND (tem, 0),
9258 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9259 TREE_OPERAND (TREE_OPERAND (tem, 2),
9260 0)));
9261 return tem;
9262 }
9263 }
9264
9265 switch (code)
9266 {
9267 case NON_LVALUE_EXPR:
9268 if (!maybe_lvalue_p (op0))
9269 return fold_convert_loc (loc, type, op0);
9270 return NULL_TREE;
9271
9272 CASE_CONVERT:
9273 case FLOAT_EXPR:
9274 case FIX_TRUNC_EXPR:
9275 if (COMPARISON_CLASS_P (op0))
9276 {
9277 /* If we have (type) (a CMP b) and type is an integral type, return
9278 new expression involving the new type. Canonicalize
9279 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9280 non-integral type.
9281	     Do not fold the result as that would not simplify further; also,
9282	     folding again results in recursion. */
9283 if (TREE_CODE (type) == BOOLEAN_TYPE)
9284 return build2_loc (loc, TREE_CODE (op0), type,
9285 TREE_OPERAND (op0, 0),
9286 TREE_OPERAND (op0, 1));
9287 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9288 && TREE_CODE (type) != VECTOR_TYPE)
9289 return build3_loc (loc, COND_EXPR, type, op0,
9290 constant_boolean_node (true, type),
9291 constant_boolean_node (false, type));
9292 }
9293
9294 /* Handle (T *)&A.B.C for A being of type T and B and C
9295 living at offset zero. This occurs frequently in
9296 C++ upcasting and then accessing the base. */
9297 if (TREE_CODE (op0) == ADDR_EXPR
9298 && POINTER_TYPE_P (type)
9299 && handled_component_p (TREE_OPERAND (op0, 0)))
9300 {
9301 poly_int64 bitsize, bitpos;
9302 tree offset;
9303 machine_mode mode;
9304 int unsignedp, reversep, volatilep;
9305 tree base
9306 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9307 &offset, &mode, &unsignedp, &reversep,
9308 &volatilep);
9309 /* If the reference was to a (constant) zero offset, we can use
9310 the address of the base if it has the same base type
9311 as the result type and the pointer type is unqualified. */
9312 if (!offset
9313 && known_eq (bitpos, 0)
9314 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9315 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9316 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9317 return fold_convert_loc (loc, type,
9318 build_fold_addr_expr_loc (loc, base));
9319 }
9320
9321 if (TREE_CODE (op0) == MODIFY_EXPR
9322 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9323 /* Detect assigning a bitfield. */
9324 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9325 && DECL_BIT_FIELD
9326 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9327 {
9328 /* Don't leave an assignment inside a conversion
9329 unless assigning a bitfield. */
9330 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9331 /* First do the assignment, then return converted constant. */
9332 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9333 TREE_NO_WARNING (tem) = 1;
9334 TREE_USED (tem) = 1;
9335 return tem;
9336 }
9337
9338      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9339	 constant (if x has signed type, the sign bit cannot be set
9340 in c). This folds extension into the BIT_AND_EXPR.
9341 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9342 very likely don't have maximal range for their precision and this
9343 transformation effectively doesn't preserve non-maximal ranges. */
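      /* E.g. for int X, (long) (X & 0xff) becomes (long) X & 0xff,
	 provided the constant leaves the sign bit of X clear as
	 required above.  */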
9344 if (TREE_CODE (type) == INTEGER_TYPE
9345 && TREE_CODE (op0) == BIT_AND_EXPR
9346 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9347 {
9348 tree and_expr = op0;
9349 tree and0 = TREE_OPERAND (and_expr, 0);
9350 tree and1 = TREE_OPERAND (and_expr, 1);
9351 int change = 0;
9352
9353 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9354 || (TYPE_PRECISION (type)
9355 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9356 change = 1;
9357 else if (TYPE_PRECISION (TREE_TYPE (and1))
9358 <= HOST_BITS_PER_WIDE_INT
9359 && tree_fits_uhwi_p (and1))
9360 {
9361 unsigned HOST_WIDE_INT cst;
9362
9363 cst = tree_to_uhwi (and1);
9364 cst &= HOST_WIDE_INT_M1U
9365 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9366 change = (cst == 0);
9367 if (change
9368 && !flag_syntax_only
9369 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9370 == ZERO_EXTEND))
9371 {
9372 tree uns = unsigned_type_for (TREE_TYPE (and0));
9373 and0 = fold_convert_loc (loc, uns, and0);
9374 and1 = fold_convert_loc (loc, uns, and1);
9375 }
9376 }
9377 if (change)
9378 {
9379 tem = force_fit_type (type, wi::to_widest (and1), 0,
9380 TREE_OVERFLOW (and1));
9381 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9382 fold_convert_loc (loc, type, and0), tem);
9383 }
9384 }
9385
9386 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9387 cast (T1)X will fold away. We assume that this happens when X itself
9388 is a cast. */
9389 if (POINTER_TYPE_P (type)
9390 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9391 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9392 {
9393 tree arg00 = TREE_OPERAND (arg0, 0);
9394 tree arg01 = TREE_OPERAND (arg0, 1);
9395
9396 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9397 when the pointed type needs higher alignment than
9398 the p+ first operand's pointed type. */
9399 if (!in_gimple_form
9400 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9401 && (min_align_of_type (TREE_TYPE (type))
9402 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9403 return NULL_TREE;
9404
9405 arg00 = fold_convert_loc (loc, type, arg00);
9406 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9407 }
9408
9409 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9410 of the same precision, and X is an integer type not narrower than
9411 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9412 if (INTEGRAL_TYPE_P (type)
9413 && TREE_CODE (op0) == BIT_NOT_EXPR
9414 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9415 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9416 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9417 {
9418 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9419 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9420 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9421 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9422 fold_convert_loc (loc, type, tem));
9423 }
9424
9425 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9426 type of X and Y (integer types only). */
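      /* E.g. for int X and Y, (short) (X * Y) can become
	 (short) ((unsigned short) X * (unsigned short) Y); the unsigned
	 intermediate type is used below when the narrower type does not
	 have wrapping overflow.  */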
9427 if (INTEGRAL_TYPE_P (type)
9428 && TREE_CODE (op0) == MULT_EXPR
9429 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9430 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9431 {
9432 /* Be careful not to introduce new overflows. */
9433 tree mult_type;
9434 if (TYPE_OVERFLOW_WRAPS (type))
9435 mult_type = type;
9436 else
9437 mult_type = unsigned_type_for (type);
9438
9439 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9440 {
9441 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9442 fold_convert_loc (loc, mult_type,
9443 TREE_OPERAND (op0, 0)),
9444 fold_convert_loc (loc, mult_type,
9445 TREE_OPERAND (op0, 1)));
9446 return fold_convert_loc (loc, type, tem);
9447 }
9448 }
9449
9450 return NULL_TREE;
9451
9452 case VIEW_CONVERT_EXPR:
9453 if (TREE_CODE (op0) == MEM_REF)
9454 {
9455 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9456 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9457 tem = fold_build2_loc (loc, MEM_REF, type,
9458 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9459 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9460 return tem;
9461 }
9462
9463 return NULL_TREE;
9464
9465 case NEGATE_EXPR:
9466 tem = fold_negate_expr (loc, arg0);
9467 if (tem)
9468 return fold_convert_loc (loc, type, tem);
9469 return NULL_TREE;
9470
9471 case ABS_EXPR:
9472 /* Convert fabs((double)float) into (double)fabsf(float). */
9473 if (TREE_CODE (arg0) == NOP_EXPR
9474 && TREE_CODE (type) == REAL_TYPE)
9475 {
9476 tree targ0 = strip_float_extensions (arg0);
9477 if (targ0 != arg0)
9478 return fold_convert_loc (loc, type,
9479 fold_build1_loc (loc, ABS_EXPR,
9480 TREE_TYPE (targ0),
9481 targ0));
9482 }
9483 return NULL_TREE;
9484
9485 case BIT_NOT_EXPR:
9486 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9487 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9488 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9489 fold_convert_loc (loc, type,
9490 TREE_OPERAND (arg0, 0)))))
9491 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9492 fold_convert_loc (loc, type,
9493 TREE_OPERAND (arg0, 1)));
9494 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9495 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9496 fold_convert_loc (loc, type,
9497 TREE_OPERAND (arg0, 1)))))
9498 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9499 fold_convert_loc (loc, type,
9500 TREE_OPERAND (arg0, 0)), tem);
9501
9502 return NULL_TREE;
9503
9504 case TRUTH_NOT_EXPR:
9505 /* Note that the operand of this must be an int
9506 and its values must be 0 or 1.
9507 ("true" is a fixed value perhaps depending on the language,
9508 but we don't handle values other than 1 correctly yet.) */
9509 tem = fold_truth_not_expr (loc, arg0);
9510 if (!tem)
9511 return NULL_TREE;
9512 return fold_convert_loc (loc, type, tem);
9513
9514 case INDIRECT_REF:
9515 /* Fold *&X to X if X is an lvalue. */
9516 if (TREE_CODE (op0) == ADDR_EXPR)
9517 {
9518 tree op00 = TREE_OPERAND (op0, 0);
9519 if ((VAR_P (op00)
9520 || TREE_CODE (op00) == PARM_DECL
9521 || TREE_CODE (op00) == RESULT_DECL)
9522 && !TREE_READONLY (op00))
9523 return op00;
9524 }
9525 return NULL_TREE;
9526
9527 default:
9528 return NULL_TREE;
9529 } /* switch (code) */
9530 }
9531
9532
9533 /* If the operation was a conversion do _not_ mark a resulting constant
9534 with TREE_OVERFLOW if the original constant was not. These conversions
9535 have implementation defined behavior and retaining the TREE_OVERFLOW
9536 flag here would confuse later passes such as VRP. */
9537 tree
9538 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9539 tree type, tree op0)
9540 {
9541 tree res = fold_unary_loc (loc, code, type, op0);
9542 if (res
9543 && TREE_CODE (res) == INTEGER_CST
9544 && TREE_CODE (op0) == INTEGER_CST
9545 && CONVERT_EXPR_CODE_P (code))
9546 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9547
9548 return res;
9549 }
9550
9551 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9552 operands OP0 and OP1. LOC is the location of the resulting expression.
9553    ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
9554 Return the folded expression if folding is successful. Otherwise,
9555 return NULL_TREE. */
9556 static tree
9557 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9558 tree arg0, tree arg1, tree op0, tree op1)
9559 {
9560 tree tem;
9561
9562 /* We only do these simplifications if we are optimizing. */
9563 if (!optimize)
9564 return NULL_TREE;
9565
9566 /* Check for things like (A || B) && (A || C). We can convert this
9567 to A || (B && C). Note that either operator can be any of the four
9568 truth and/or operations and the transformation will still be
9569 valid. Also note that we only care about order for the
9570 ANDIF and ORIF operators. If B contains side effects, this
9571 might change the truth-value of A. */
9572 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9573 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9574 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9575 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9576 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9577 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9578 {
9579 tree a00 = TREE_OPERAND (arg0, 0);
9580 tree a01 = TREE_OPERAND (arg0, 1);
9581 tree a10 = TREE_OPERAND (arg1, 0);
9582 tree a11 = TREE_OPERAND (arg1, 1);
9583 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9584 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9585 && (code == TRUTH_AND_EXPR
9586 || code == TRUTH_OR_EXPR));
9587
9588 if (operand_equal_p (a00, a10, 0))
9589 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9590 fold_build2_loc (loc, code, type, a01, a11));
9591 else if (commutative && operand_equal_p (a00, a11, 0))
9592 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9593 fold_build2_loc (loc, code, type, a01, a10));
9594 else if (commutative && operand_equal_p (a01, a10, 0))
9595 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9596 fold_build2_loc (loc, code, type, a00, a11));
9597
9598      /* This case is tricky because we must either have commutative
9599 operators or else A10 must not have side-effects. */
9600
9601 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9602 && operand_equal_p (a01, a11, 0))
9603 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9604 fold_build2_loc (loc, code, type, a00, a10),
9605 a01);
9606 }
9607
9608 /* See if we can build a range comparison. */
9609 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9610 return tem;
9611
9612 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9613 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9614 {
9615 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9616 if (tem)
9617 return fold_build2_loc (loc, code, type, tem, arg1);
9618 }
9619
9620 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9621 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9622 {
9623 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9624 if (tem)
9625 return fold_build2_loc (loc, code, type, arg0, tem);
9626 }
9627
9628 /* Check for the possibility of merging component references. If our
9629 lhs is another similar operation, try to merge its rhs with our
9630 rhs. Then try to merge our lhs and rhs. */
9631 if (TREE_CODE (arg0) == code
9632 && (tem = fold_truth_andor_1 (loc, code, type,
9633 TREE_OPERAND (arg0, 1), arg1)) != 0)
9634 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9635
9636 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9637 return tem;
9638
9639 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9640 if (param_logical_op_non_short_circuit != -1)
9641 logical_op_non_short_circuit
9642 = param_logical_op_non_short_circuit;
9643 if (logical_op_non_short_circuit
9644 && !flag_sanitize_coverage
9645 && (code == TRUTH_AND_EXPR
9646 || code == TRUTH_ANDIF_EXPR
9647 || code == TRUTH_OR_EXPR
9648 || code == TRUTH_ORIF_EXPR))
9649 {
9650 enum tree_code ncode, icode;
9651
9652 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9653 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9654 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9655
9656 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9657	    or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
9658	    We don't want to pack more than two leaves into a non-IF AND/OR
9659 expression.
9660 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9661 equal to IF-CODE, then we don't want to add right-hand operand.
9662 If the inner right-hand side of left-hand operand has
9663 side-effects, or isn't simple, then we can't add to it,
9664	    as otherwise we might destroy the if-sequence.  */
9665 if (TREE_CODE (arg0) == icode
9666 && simple_operand_p_2 (arg1)
9667	      /* Needed for sequence points to handle trapping, and
9668 side-effects. */
9669 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9670 {
9671 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9672 arg1);
9673 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9674 tem);
9675 }
9676 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9677	    or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
9678 else if (TREE_CODE (arg1) == icode
9679 && simple_operand_p_2 (arg0)
9680		 /* Needed for sequence points to handle trapping, and
9681 side-effects. */
9682 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9683 {
9684 tem = fold_build2_loc (loc, ncode, type,
9685 arg0, TREE_OPERAND (arg1, 0));
9686 return fold_build2_loc (loc, icode, type, tem,
9687 TREE_OPERAND (arg1, 1));
9688 }
9689 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9690 into (A OR B).
9691	 For sequence point consistency, we need to check for trapping,
9692 and side-effects. */
9693 else if (code == icode && simple_operand_p_2 (arg0)
9694 && simple_operand_p_2 (arg1))
9695 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9696 }
9697
9698 return NULL_TREE;
9699 }
9700
9701 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9702 by changing CODE to reduce the magnitude of constants involved in
9703 ARG0 of the comparison.
9704 Returns a canonicalized comparison tree if a simplification was
9705 possible, otherwise returns NULL_TREE.
9706 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9707 valid if signed overflow is undefined. */
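/* E.g. for signed X with undefined overflow, X - 3 < Y is
   canonicalized to X - 2 <= Y: the magnitude of the constant shrinks
   by one while LT becomes LE.  */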
9708
9709 static tree
9710 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9711 tree arg0, tree arg1,
9712 bool *strict_overflow_p)
9713 {
9714 enum tree_code code0 = TREE_CODE (arg0);
9715 tree t, cst0 = NULL_TREE;
9716 int sgn0;
9717
9718 /* Match A +- CST code arg1. We can change this only if overflow
9719 is undefined. */
9720 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9721 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9722 /* In principle pointers also have undefined overflow behavior,
9723 but that causes problems elsewhere. */
9724 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9725 && (code0 == MINUS_EXPR
9726 || code0 == PLUS_EXPR)
9727 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9728 return NULL_TREE;
9729
9730 /* Identify the constant in arg0 and its sign. */
9731 cst0 = TREE_OPERAND (arg0, 1);
9732 sgn0 = tree_int_cst_sgn (cst0);
9733
9734 /* Overflowed constants and zero will cause problems. */
9735 if (integer_zerop (cst0)
9736 || TREE_OVERFLOW (cst0))
9737 return NULL_TREE;
9738
9739 /* See if we can reduce the magnitude of the constant in
9740 arg0 by changing the comparison code. */
9741 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9742 if (code == LT_EXPR
9743 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9744 code = LE_EXPR;
9745 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9746 else if (code == GT_EXPR
9747 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9748 code = GE_EXPR;
9749 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9750 else if (code == LE_EXPR
9751 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9752 code = LT_EXPR;
9753 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9754 else if (code == GE_EXPR
9755 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9756 code = GT_EXPR;
9757 else
9758 return NULL_TREE;
9759 *strict_overflow_p = true;
9760
9761 /* Now build the constant reduced in magnitude. But not if that
9762	     would produce one outside of its type's range.  */
9763 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9764 && ((sgn0 == 1
9765 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9766 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9767 || (sgn0 == -1
9768 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9769 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9770 return NULL_TREE;
9771
9772 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9773 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9774 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9775 t = fold_convert (TREE_TYPE (arg1), t);
9776
9777 return fold_build2_loc (loc, code, type, t, arg1);
9778 }
9779
9780 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9781 overflow further. Try to decrease the magnitude of constants involved
9782 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9783 and put sole constants at the second argument position.
9784 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9785
9786 static tree
9787 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9788 tree arg0, tree arg1)
9789 {
9790 tree t;
9791 bool strict_overflow_p;
9792 const char * const warnmsg = G_("assuming signed overflow does not occur "
9793 "when reducing constant in comparison");
9794
9795 /* Try canonicalization by simplifying arg0. */
9796 strict_overflow_p = false;
9797 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9798 &strict_overflow_p);
9799 if (t)
9800 {
9801 if (strict_overflow_p)
9802 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9803 return t;
9804 }
9805
9806 /* Try canonicalization by simplifying arg1 using the swapped
9807 comparison. */
9808 code = swap_tree_comparison (code);
9809 strict_overflow_p = false;
9810 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9811 &strict_overflow_p);
9812 if (t && strict_overflow_p)
9813 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9814 return t;
9815 }
9816
9817 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9818 space. This is used to avoid issuing overflow warnings for
9819 expressions like &p->x which cannot wrap. */
9820
9821 static bool
9822 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9823 {
9824 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9825 return true;
9826
9827 if (maybe_lt (bitpos, 0))
9828 return true;
9829
9830 poly_wide_int wi_offset;
9831 int precision = TYPE_PRECISION (TREE_TYPE (base));
9832 if (offset == NULL_TREE)
9833 wi_offset = wi::zero (precision);
9834 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9835 return true;
9836 else
9837 wi_offset = wi::to_poly_wide (offset);
9838
9839 wi::overflow_type overflow;
9840 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9841 precision);
9842 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9843 if (overflow)
9844 return true;
9845
9846 poly_uint64 total_hwi, size;
9847 if (!total.to_uhwi (&total_hwi)
9848 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9849 &size)
9850 || known_eq (size, 0U))
9851 return true;
9852
9853 if (known_le (total_hwi, size))
9854 return false;
9855
9856 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9857 array. */
9858 if (TREE_CODE (base) == ADDR_EXPR
9859 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9860 &size)
9861 && maybe_ne (size, 0U)
9862 && known_le (total_hwi, size))
9863 return false;
9864
9865 return true;
9866 }
9867
9868 /* Return a positive integer when the symbol DECL is known to have
9869 a nonzero address, zero when it's known not to (e.g., it's a weak
9870 symbol), and a negative integer when the symbol is not yet in the
9871 symbol table and so whether or not its address is zero is unknown.
9872    For function-local objects, always return a positive integer.  */
9873 static int
9874 maybe_nonzero_address (tree decl)
9875 {
9876 if (DECL_P (decl) && decl_in_symtab_p (decl))
9877 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9878 return symbol->nonzero_address ();
9879
9880 /* Function local objects are never NULL. */
9881 if (DECL_P (decl)
9882 && (DECL_CONTEXT (decl)
9883 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9884 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9885 return 1;
9886
9887 return -1;
9888 }
9889
9890 /* Subroutine of fold_binary. This routine performs all of the
9891 transformations that are common to the equality/inequality
9892 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9893 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9894 fold_binary should call fold_binary. Fold a comparison with
9895 tree code CODE and type TYPE with operands OP0 and OP1. Return
9896 the folded comparison or NULL_TREE. */
9897
9898 static tree
9899 fold_comparison (location_t loc, enum tree_code code, tree type,
9900 tree op0, tree op1)
9901 {
9902 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9903 tree arg0, arg1, tem;
9904
9905 arg0 = op0;
9906 arg1 = op1;
9907
9908 STRIP_SIGN_NOPS (arg0);
9909 STRIP_SIGN_NOPS (arg1);
9910
9911 /* For comparisons of pointers we can decompose it to a compile time
9912 comparison of the base objects and the offsets into the object.
9913 This requires at least one operand being an ADDR_EXPR or a
9914 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
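  /* E.g. &a.f == &a.g folds to false when F and G live at different
     offsets within A, and &a[1] > &a[0] folds to true, because the
     bases are identical and only the constant bit positions differ.  */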
9915 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9916 && (TREE_CODE (arg0) == ADDR_EXPR
9917 || TREE_CODE (arg1) == ADDR_EXPR
9918 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9919 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9920 {
9921 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9922 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9923 machine_mode mode;
9924 int volatilep, reversep, unsignedp;
9925 bool indirect_base0 = false, indirect_base1 = false;
9926
9927 /* Get base and offset for the access. Strip ADDR_EXPR for
9928 get_inner_reference, but put it back by stripping INDIRECT_REF
9929 off the base object if possible. indirect_baseN will be true
9930 if baseN is not an address but refers to the object itself. */
9931 base0 = arg0;
9932 if (TREE_CODE (arg0) == ADDR_EXPR)
9933 {
9934 base0
9935 = get_inner_reference (TREE_OPERAND (arg0, 0),
9936 &bitsize, &bitpos0, &offset0, &mode,
9937 &unsignedp, &reversep, &volatilep);
9938 if (TREE_CODE (base0) == INDIRECT_REF)
9939 base0 = TREE_OPERAND (base0, 0);
9940 else
9941 indirect_base0 = true;
9942 }
9943 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9944 {
9945 base0 = TREE_OPERAND (arg0, 0);
9946 STRIP_SIGN_NOPS (base0);
9947 if (TREE_CODE (base0) == ADDR_EXPR)
9948 {
9949 base0
9950 = get_inner_reference (TREE_OPERAND (base0, 0),
9951 &bitsize, &bitpos0, &offset0, &mode,
9952 &unsignedp, &reversep, &volatilep);
9953 if (TREE_CODE (base0) == INDIRECT_REF)
9954 base0 = TREE_OPERAND (base0, 0);
9955 else
9956 indirect_base0 = true;
9957 }
9958 if (offset0 == NULL_TREE || integer_zerop (offset0))
9959 offset0 = TREE_OPERAND (arg0, 1);
9960 else
9961 offset0 = size_binop (PLUS_EXPR, offset0,
9962 TREE_OPERAND (arg0, 1));
9963 if (poly_int_tree_p (offset0))
9964 {
9965 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9966 TYPE_PRECISION (sizetype));
9967 tem <<= LOG2_BITS_PER_UNIT;
9968 tem += bitpos0;
9969 if (tem.to_shwi (&bitpos0))
9970 offset0 = NULL_TREE;
9971 }
9972 }
9973
9974 base1 = arg1;
9975 if (TREE_CODE (arg1) == ADDR_EXPR)
9976 {
9977 base1
9978 = get_inner_reference (TREE_OPERAND (arg1, 0),
9979 &bitsize, &bitpos1, &offset1, &mode,
9980 &unsignedp, &reversep, &volatilep);
9981 if (TREE_CODE (base1) == INDIRECT_REF)
9982 base1 = TREE_OPERAND (base1, 0);
9983 else
9984 indirect_base1 = true;
9985 }
9986 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9987 {
9988 base1 = TREE_OPERAND (arg1, 0);
9989 STRIP_SIGN_NOPS (base1);
9990 if (TREE_CODE (base1) == ADDR_EXPR)
9991 {
9992 base1
9993 = get_inner_reference (TREE_OPERAND (base1, 0),
9994 &bitsize, &bitpos1, &offset1, &mode,
9995 &unsignedp, &reversep, &volatilep);
9996 if (TREE_CODE (base1) == INDIRECT_REF)
9997 base1 = TREE_OPERAND (base1, 0);
9998 else
9999 indirect_base1 = true;
10000 }
10001 if (offset1 == NULL_TREE || integer_zerop (offset1))
10002 offset1 = TREE_OPERAND (arg1, 1);
10003 else
10004 offset1 = size_binop (PLUS_EXPR, offset1,
10005 TREE_OPERAND (arg1, 1));
10006 if (poly_int_tree_p (offset1))
10007 {
10008 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10009 TYPE_PRECISION (sizetype));
10010 tem <<= LOG2_BITS_PER_UNIT;
10011 tem += bitpos1;
10012 if (tem.to_shwi (&bitpos1))
10013 offset1 = NULL_TREE;
10014 }
10015 }
10016
10017 /* If we have equivalent bases we might be able to simplify. */
10018 if (indirect_base0 == indirect_base1
10019 && operand_equal_p (base0, base1,
10020 indirect_base0 ? OEP_ADDRESS_OF : 0))
10021 {
10022 /* We can fold this expression to a constant if the non-constant
10023 offset parts are equal. */
10024 if ((offset0 == offset1
10025 || (offset0 && offset1
10026 && operand_equal_p (offset0, offset1, 0)))
10027 && (equality_code
10028 || (indirect_base0
10029 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10030 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10031 {
10032 if (!equality_code
10033 && maybe_ne (bitpos0, bitpos1)
10034 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10035 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10036 fold_overflow_warning (("assuming pointer wraparound does not "
10037 "occur when comparing P +- C1 with "
10038 "P +- C2"),
10039 WARN_STRICT_OVERFLOW_CONDITIONAL);
10040
10041 switch (code)
10042 {
10043 case EQ_EXPR:
10044 if (known_eq (bitpos0, bitpos1))
10045 return constant_boolean_node (true, type);
10046 if (known_ne (bitpos0, bitpos1))
10047 return constant_boolean_node (false, type);
10048 break;
10049 case NE_EXPR:
10050 if (known_ne (bitpos0, bitpos1))
10051 return constant_boolean_node (true, type);
10052 if (known_eq (bitpos0, bitpos1))
10053 return constant_boolean_node (false, type);
10054 break;
10055 case LT_EXPR:
10056 if (known_lt (bitpos0, bitpos1))
10057 return constant_boolean_node (true, type);
10058 if (known_ge (bitpos0, bitpos1))
10059 return constant_boolean_node (false, type);
10060 break;
10061 case LE_EXPR:
10062 if (known_le (bitpos0, bitpos1))
10063 return constant_boolean_node (true, type);
10064 if (known_gt (bitpos0, bitpos1))
10065 return constant_boolean_node (false, type);
10066 break;
10067 case GE_EXPR:
10068 if (known_ge (bitpos0, bitpos1))
10069 return constant_boolean_node (true, type);
10070 if (known_lt (bitpos0, bitpos1))
10071 return constant_boolean_node (false, type);
10072 break;
10073 case GT_EXPR:
10074 if (known_gt (bitpos0, bitpos1))
10075 return constant_boolean_node (true, type);
10076 if (known_le (bitpos0, bitpos1))
10077 return constant_boolean_node (false, type);
10078 break;
10079 default:;
10080 }
10081 }
10082 /* We can simplify the comparison to a comparison of the variable
10083 offset parts if the constant offset parts are equal.
10084 Be careful to use signed sizetype here because otherwise we
10085 mess with array offsets in the wrong way. This is possible
10086	     because pointer arithmetic is restricted to remain within an
10087 object and overflow on pointer differences is undefined as of
10088 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10089 else if (known_eq (bitpos0, bitpos1)
10090 && (equality_code
10091 || (indirect_base0
10092 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10093 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10094 {
10095 /* By converting to signed sizetype we cover middle-end pointer
10096 arithmetic which operates on unsigned pointer types of size
10097 type size and ARRAY_REF offsets which are properly sign or
10098 zero extended from their type in case it is narrower than
10099 sizetype. */
10100 if (offset0 == NULL_TREE)
10101 offset0 = build_int_cst (ssizetype, 0);
10102 else
10103 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10104 if (offset1 == NULL_TREE)
10105 offset1 = build_int_cst (ssizetype, 0);
10106 else
10107 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10108
10109 if (!equality_code
10110 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10111 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10112 fold_overflow_warning (("assuming pointer wraparound does not "
10113 "occur when comparing P +- C1 with "
10114 "P +- C2"),
10115 WARN_STRICT_OVERFLOW_COMPARISON);
10116
10117 return fold_build2_loc (loc, code, type, offset0, offset1);
10118 }
10119 }
10120 /* For equal offsets we can simplify to a comparison of the
10121 base addresses. */
10122 else if (known_eq (bitpos0, bitpos1)
10123 && (indirect_base0
10124 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10125 && (indirect_base1
10126 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10127 && ((offset0 == offset1)
10128 || (offset0 && offset1
10129 && operand_equal_p (offset0, offset1, 0))))
10130 {
10131 if (indirect_base0)
10132 base0 = build_fold_addr_expr_loc (loc, base0);
10133 if (indirect_base1)
10134 base1 = build_fold_addr_expr_loc (loc, base1);
10135 return fold_build2_loc (loc, code, type, base0, base1);
10136 }
10137 /* Comparison between an ordinary (non-weak) symbol and a null
10138 pointer can be eliminated since such symbols must have a non
10139 null address. In C, relational expressions between pointers
10140 to objects and null pointers are undefined. The results
10141 below follow the C++ rules with the additional property that
10142 every object pointer compares greater than a null pointer.
10143 */
10144 else if (((DECL_P (base0)
10145 && maybe_nonzero_address (base0) > 0
10146 /* Avoid folding references to struct members at offset 0 to
10147 prevent tests like '&ptr->firstmember == 0' from getting
10148 eliminated. When ptr is null, although the -> expression
10149 is strictly speaking invalid, GCC retains it as a matter
10150 of QoI. See PR c/44555. */
10151 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10152 || CONSTANT_CLASS_P (base0))
10153 && indirect_base0
10154 /* The caller guarantees that when one of the arguments is
10155 constant (i.e., null in this case) it is second. */
10156 && integer_zerop (arg1))
10157 {
10158 switch (code)
10159 {
10160 case EQ_EXPR:
10161 case LE_EXPR:
10162 case LT_EXPR:
10163 return constant_boolean_node (false, type);
10164 case GE_EXPR:
10165 case GT_EXPR:
10166 case NE_EXPR:
10167 return constant_boolean_node (true, type);
10168 default:
10169 gcc_unreachable ();
10170 }
10171 }
10172 }
10173
10174 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10175 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10176 the resulting offset is smaller in absolute value than the
10177 original one and has the same sign. */
10178 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10179 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10180 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10181 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10182 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10183 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10184 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10185 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10186 {
10187 tree const1 = TREE_OPERAND (arg0, 1);
10188 tree const2 = TREE_OPERAND (arg1, 1);
10189 tree variable1 = TREE_OPERAND (arg0, 0);
10190 tree variable2 = TREE_OPERAND (arg1, 0);
10191 tree cst;
10192 const char * const warnmsg = G_("assuming signed overflow does not "
10193 "occur when combining constants around "
10194 "a comparison");
10195
10196 /* Put the constant on the side where it doesn't overflow and is
10197	 of lower absolute value and of the same sign as before.  */
10198 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10199 ? MINUS_EXPR : PLUS_EXPR,
10200 const2, const1);
10201 if (!TREE_OVERFLOW (cst)
10202 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10203 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10204 {
10205 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10206 return fold_build2_loc (loc, code, type,
10207 variable1,
10208 fold_build2_loc (loc, TREE_CODE (arg1),
10209 TREE_TYPE (arg1),
10210 variable2, cst));
10211 }
10212
10213 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10214 ? MINUS_EXPR : PLUS_EXPR,
10215 const1, const2);
10216 if (!TREE_OVERFLOW (cst)
10217 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10218 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10219 {
10220 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10221 return fold_build2_loc (loc, code, type,
10222 fold_build2_loc (loc, TREE_CODE (arg0),
10223 TREE_TYPE (arg0),
10224 variable1, cst),
10225 variable2);
10226 }
10227 }
10228
10229 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10230 if (tem)
10231 return tem;
10232
10233 /* If we are comparing an expression that just has comparisons
10234 of two integer values, arithmetic expressions of those comparisons,
10235 and constants, we can simplify it. There are only three cases
10236 to check: the two values can either be equal, the first can be
10237 greater, or the second can be greater. Fold the expression for
10238 those three values. Since each value must be 0 or 1, we have
10239 eight possibilities, each of which corresponds to the constant 0
10240 or 1 or one of the six possible comparisons.
10241
10242 This handles common cases like (a > b) == 0 but also handles
10243 expressions like ((x > y) - (y > x)) > 0, which supposedly
10244 occur in macroized code. */
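  /* E.g. for (a > b) == 0, substituting (max, min), (max, max) and
     (min, max) for (a, b) gives the results 0, 1 and 1, i.e. the mask
     0*4 + 1*2 + 1 = 3, which selects LE_EXPR; the whole expression
     folds to a <= b.  */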
10245
10246 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10247 {
10248 tree cval1 = 0, cval2 = 0;
10249
10250 if (twoval_comparison_p (arg0, &cval1, &cval2)
10251 /* Don't handle degenerate cases here; they should already
10252 have been handled anyway. */
10253 && cval1 != 0 && cval2 != 0
10254 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10255 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10256 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10257 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10258 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10259 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10260 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10261 {
10262 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10263 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10264
10265 /* We can't just pass T to eval_subst in case cval1 or cval2
10266 was the same as ARG1. */
10267
10268 tree high_result
10269 = fold_build2_loc (loc, code, type,
10270 eval_subst (loc, arg0, cval1, maxval,
10271 cval2, minval),
10272 arg1);
10273 tree equal_result
10274 = fold_build2_loc (loc, code, type,
10275 eval_subst (loc, arg0, cval1, maxval,
10276 cval2, maxval),
10277 arg1);
10278 tree low_result
10279 = fold_build2_loc (loc, code, type,
10280 eval_subst (loc, arg0, cval1, minval,
10281 cval2, maxval),
10282 arg1);
10283
10284 /* All three of these results should be 0 or 1. Confirm they are.
10285 Then use those values to select the proper code to use. */
10286
10287 if (TREE_CODE (high_result) == INTEGER_CST
10288 && TREE_CODE (equal_result) == INTEGER_CST
10289 && TREE_CODE (low_result) == INTEGER_CST)
10290 {
10291 /* Make a 3-bit mask with the high-order bit being the
10292 value for `>', the next for '=', and the low for '<'. */
10293 switch ((integer_onep (high_result) * 4)
10294 + (integer_onep (equal_result) * 2)
10295 + integer_onep (low_result))
10296 {
10297 case 0:
10298 /* Always false. */
10299 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10300 case 1:
10301 code = LT_EXPR;
10302 break;
10303 case 2:
10304 code = EQ_EXPR;
10305 break;
10306 case 3:
10307 code = LE_EXPR;
10308 break;
10309 case 4:
10310 code = GT_EXPR;
10311 break;
10312 case 5:
10313 code = NE_EXPR;
10314 break;
10315 case 6:
10316 code = GE_EXPR;
10317 break;
10318 case 7:
10319 /* Always true. */
10320 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10321 }
10322
10323 return fold_build2_loc (loc, code, type, cval1, cval2);
10324 }
10325 }
10326 }
10327
10328 return NULL_TREE;
10329 }
10330
10331
10332 /* Subroutine of fold_binary. Optimize complex multiplications of the
10333 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10334 argument EXPR represents the expression "z" of type TYPE. */
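/* That is, for z = a + b*i the code below uses
   z * conj (z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part.  */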
10335
10336 static tree
10337 fold_mult_zconjz (location_t loc, tree type, tree expr)
10338 {
10339 tree itype = TREE_TYPE (type);
10340 tree rpart, ipart, tem;
10341
10342 if (TREE_CODE (expr) == COMPLEX_EXPR)
10343 {
10344 rpart = TREE_OPERAND (expr, 0);
10345 ipart = TREE_OPERAND (expr, 1);
10346 }
10347 else if (TREE_CODE (expr) == COMPLEX_CST)
10348 {
10349 rpart = TREE_REALPART (expr);
10350 ipart = TREE_IMAGPART (expr);
10351 }
10352 else
10353 {
10354 expr = save_expr (expr);
10355 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10356 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10357 }
10358
10359 rpart = save_expr (rpart);
10360 ipart = save_expr (ipart);
10361 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10362 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10363 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10364 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10365 build_zero_cst (itype));
10366 }
10367
10368
10369 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10370 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10371 true if successful. */
10372
10373 static bool
10374 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10375 {
10376 unsigned HOST_WIDE_INT i, nunits;
10377
10378 if (TREE_CODE (arg) == VECTOR_CST
10379 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10380 {
10381 for (i = 0; i < nunits; ++i)
10382 elts[i] = VECTOR_CST_ELT (arg, i);
10383 }
10384 else if (TREE_CODE (arg) == CONSTRUCTOR)
10385 {
10386 constructor_elt *elt;
10387
10388 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10389 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10390 return false;
10391 else
10392 elts[i] = elt->value;
10393 }
10394 else
10395 return false;
10396 for (; i < nelts; i++)
10397 elts[i]
10398 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10399 return true;
10400 }
10401
10402 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10403 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10404 NULL_TREE otherwise. */
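/* E.g. with ARG0 = { 1, 2, 3, 4 }, ARG1 = { 5, 6, 7, 8 } and
   SEL = { 0, 4, 1, 5 } the result is { 1, 5, 2, 6 }: selector indices
   below NELTS pick from ARG0, the rest from ARG1.  */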
10405
10406 tree
10407 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10408 {
10409 unsigned int i;
10410 unsigned HOST_WIDE_INT nelts;
10411 bool need_ctor = false;
10412
10413 if (!sel.length ().is_constant (&nelts))
10414 return NULL_TREE;
10415 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10416 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10417 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10418 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10419 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10420 return NULL_TREE;
10421
10422 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10423 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10424 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10425 return NULL_TREE;
10426
10427 tree_vector_builder out_elts (type, nelts, 1);
10428 for (i = 0; i < nelts; i++)
10429 {
10430 HOST_WIDE_INT index;
10431 if (!sel[i].is_constant (&index))
10432 return NULL_TREE;
10433 if (!CONSTANT_CLASS_P (in_elts[index]))
10434 need_ctor = true;
10435 out_elts.quick_push (unshare_expr (in_elts[index]));
10436 }
10437
10438 if (need_ctor)
10439 {
10440 vec<constructor_elt, va_gc> *v;
10441 vec_alloc (v, nelts);
10442 for (i = 0; i < nelts; i++)
10443 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10444 return build_constructor (type, v);
10445 }
10446 else
10447 return out_elts.build ();
10448 }
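/* For instance, permuting ARG0 = { 1, 2, 3, 4 } and ARG1 = { 5, 6, 7, 8 }
   with SEL = { 0, 4, 1, 5 } selects from the concatenation of the two
   input vectors and yields { 1, 5, 2, 6 }.  */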
10449
10450 /* Try to fold a pointer difference of type TYPE between two address
10451    expressions of array references AREF0 and AREF1 using location LOC.
10452    Return a simplified expression for the difference or NULL_TREE.  */
10453
10454 static tree
10455 fold_addr_of_array_ref_difference (location_t loc, tree type,
10456 tree aref0, tree aref1,
10457 bool use_pointer_diff)
10458 {
10459 tree base0 = TREE_OPERAND (aref0, 0);
10460 tree base1 = TREE_OPERAND (aref1, 0);
10461 tree base_offset = build_int_cst (type, 0);
10462
10463 /* If the bases are array references as well, recurse. If the bases
10464      are pointer indirections, compute the difference of the pointers.
10465 If the bases are equal, we are set. */
10466 if ((TREE_CODE (base0) == ARRAY_REF
10467 && TREE_CODE (base1) == ARRAY_REF
10468 && (base_offset
10469 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10470 use_pointer_diff)))
10471 || (INDIRECT_REF_P (base0)
10472 && INDIRECT_REF_P (base1)
10473 && (base_offset
10474 = use_pointer_diff
10475 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10476 TREE_OPERAND (base0, 0),
10477 TREE_OPERAND (base1, 0))
10478 : fold_binary_loc (loc, MINUS_EXPR, type,
10479 fold_convert (type,
10480 TREE_OPERAND (base0, 0)),
10481 fold_convert (type,
10482 TREE_OPERAND (base1, 0)))))
10483 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10484 {
10485 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10486 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10487 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10488 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10489 return fold_build2_loc (loc, PLUS_EXPR, type,
10490 base_offset,
10491 fold_build2_loc (loc, MULT_EXPR, type,
10492 diff, esz));
10493 }
10494 return NULL_TREE;
10495 }
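/* A sketch of the result, assuming a 4-byte element type and equal
   bases: for AREF0 = a[5] and AREF1 = a[2] the returned difference is
   (5 - 2) * 4 == 12, i.e. the distance between the two elements in
   bytes.  */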
10496
10497 /* If the real or vector real constant CST of type TYPE has an exact
10498 inverse, return it, else return NULL. */
10499
10500 tree
10501 exact_inverse (tree type, tree cst)
10502 {
10503 REAL_VALUE_TYPE r;
10504 tree unit_type;
10505 machine_mode mode;
10506
10507 switch (TREE_CODE (cst))
10508 {
10509 case REAL_CST:
10510 r = TREE_REAL_CST (cst);
10511
10512 if (exact_real_inverse (TYPE_MODE (type), &r))
10513 return build_real (type, r);
10514
10515 return NULL_TREE;
10516
10517 case VECTOR_CST:
10518 {
10519 unit_type = TREE_TYPE (type);
10520 mode = TYPE_MODE (unit_type);
10521
10522 tree_vector_builder elts;
10523 if (!elts.new_unary_operation (type, cst, false))
10524 return NULL_TREE;
10525 unsigned int count = elts.encoded_nelts ();
10526 for (unsigned int i = 0; i < count; ++i)
10527 {
10528 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10529 if (!exact_real_inverse (mode, &r))
10530 return NULL_TREE;
10531 elts.quick_push (build_real (unit_type, r));
10532 }
10533
10534 return elts.build ();
10535 }
10536
10537 default:
10538 return NULL_TREE;
10539 }
10540 }
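/* For example, exact_inverse returns 0.25 for the REAL_CST 4.0, but
   NULL_TREE for 3.0, because 1/3 has no exact binary floating-point
   representation.  */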
10541
10542 /* Mask out the tz least significant bits of X of type TYPE where
10543 tz is the number of trailing zeroes in Y. */
10544 static wide_int
10545 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10546 {
10547 int tz = wi::ctz (y);
10548 if (tz > 0)
10549 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10550 return x;
10551 }
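/* E.g. for Y == 8 (three trailing zero bits) and X == 0b101101, the
   result is 0b101000: the three least significant bits of X are
   cleared.  */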
10552
10553 /* Return true when T is an expression of integral or pointer type that
10554    is known to be nonzero (floating-point types are not handled here).
10555    Similar logic is present in nonzero_address_p in rtlanal.c.
10556
10557 If the return value is based on the assumption that signed overflow
10558 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10559 change *STRICT_OVERFLOW_P. */
10560
10561 static bool
10562 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10563 {
10564 tree type = TREE_TYPE (t);
10565 enum tree_code code;
10566
10567 /* Doing something useful for floating point would need more work. */
10568 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10569 return false;
10570
10571 code = TREE_CODE (t);
10572 switch (TREE_CODE_CLASS (code))
10573 {
10574 case tcc_unary:
10575 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10576 strict_overflow_p);
10577 case tcc_binary:
10578 case tcc_comparison:
10579 return tree_binary_nonzero_warnv_p (code, type,
10580 TREE_OPERAND (t, 0),
10581 TREE_OPERAND (t, 1),
10582 strict_overflow_p);
10583 case tcc_constant:
10584 case tcc_declaration:
10585 case tcc_reference:
10586 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10587
10588 default:
10589 break;
10590 }
10591
10592 switch (code)
10593 {
10594 case TRUTH_NOT_EXPR:
10595 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10596 strict_overflow_p);
10597
10598 case TRUTH_AND_EXPR:
10599 case TRUTH_OR_EXPR:
10600 case TRUTH_XOR_EXPR:
10601 return tree_binary_nonzero_warnv_p (code, type,
10602 TREE_OPERAND (t, 0),
10603 TREE_OPERAND (t, 1),
10604 strict_overflow_p);
10605
10606 case COND_EXPR:
10607 case CONSTRUCTOR:
10608 case OBJ_TYPE_REF:
10609 case ASSERT_EXPR:
10610 case ADDR_EXPR:
10611 case WITH_SIZE_EXPR:
10612 case SSA_NAME:
10613 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10614
10615 case COMPOUND_EXPR:
10616 case MODIFY_EXPR:
10617 case BIND_EXPR:
10618 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10619 strict_overflow_p);
10620
10621 case SAVE_EXPR:
10622 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10623 strict_overflow_p);
10624
10625 case CALL_EXPR:
10626 {
10627 tree fndecl = get_callee_fndecl (t);
10628 if (!fndecl) return false;
10629 if (flag_delete_null_pointer_checks && !flag_check_new
10630 && DECL_IS_OPERATOR_NEW_P (fndecl)
10631 && !TREE_NOTHROW (fndecl))
10632 return true;
10633 if (flag_delete_null_pointer_checks
10634 && lookup_attribute ("returns_nonnull",
10635 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10636 return true;
10637 return alloca_call_p (t);
10638 }
10639
10640 default:
10641 break;
10642 }
10643 return false;
10644 }
10645
10646 /* Return true when T is an expression known to be nonzero.  Handle
10647    warnings about undefined signed overflow.  */
10648
10649 bool
10650 tree_expr_nonzero_p (tree t)
10651 {
10652 bool ret, strict_overflow_p;
10653
10654 strict_overflow_p = false;
10655 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10656 if (strict_overflow_p)
10657 fold_overflow_warning (("assuming signed overflow does not occur when "
10658 "determining that expression is always "
10659 "non-zero"),
10660 WARN_STRICT_OVERFLOW_MISC);
10661 return ret;
10662 }
10663
10664 /* Return true if T is known not to be equal to an integer W. */
10665
10666 bool
10667 expr_not_equal_to (tree t, const wide_int &w)
10668 {
10669 value_range vr;
10670 switch (TREE_CODE (t))
10671 {
10672 case INTEGER_CST:
10673 return wi::to_wide (t) != w;
10674
10675 case SSA_NAME:
10676 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10677 return false;
10678 get_range_info (t, vr);
10679 if (!vr.undefined_p ()
10680 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10681 return true;
10682 /* If T has some known zero bits and W has any of those bits set,
10683 then T is known not to be equal to W. */
10684 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10685 TYPE_PRECISION (TREE_TYPE (t))), 0))
10686 return true;
10687 return false;
10688
10689 default:
10690 return false;
10691 }
10692 }
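/* A small example of the nonzero-bits test above: if the known nonzero
   bits of T are 0x0f and W is 0x10, then W has a bit set that is always
   clear in T, so T can be concluded to differ from W.  */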
10693
10694 /* Fold a binary expression of code CODE and type TYPE with operands
10695 OP0 and OP1. LOC is the location of the resulting expression.
10696 Return the folded expression if folding is successful. Otherwise,
10697 return NULL_TREE. */
10698
10699 tree
10700 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10701 tree op0, tree op1)
10702 {
10703 enum tree_code_class kind = TREE_CODE_CLASS (code);
10704 tree arg0, arg1, tem;
10705 tree t1 = NULL_TREE;
10706 bool strict_overflow_p;
10707 unsigned int prec;
10708
10709 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10710 && TREE_CODE_LENGTH (code) == 2
10711 && op0 != NULL_TREE
10712 && op1 != NULL_TREE);
10713
10714 arg0 = op0;
10715 arg1 = op1;
10716
10717 /* Strip any conversions that don't change the mode. This is
10718 safe for every expression, except for a comparison expression
10719 because its signedness is derived from its operands. So, in
10720 the latter case, only strip conversions that don't change the
10721 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10722 preserved.
10723
10724 Note that this is done as an internal manipulation within the
10725 constant folder, in order to find the simplest representation
10726      of the arguments so that their form can be studied.  In any
10727      case, the appropriate type conversions should be put back in
10728 the tree that will get out of the constant folder. */
10729
10730 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10731 {
10732 STRIP_SIGN_NOPS (arg0);
10733 STRIP_SIGN_NOPS (arg1);
10734 }
10735 else
10736 {
10737 STRIP_NOPS (arg0);
10738 STRIP_NOPS (arg1);
10739 }
10740
10741 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10742 constant but we can't do arithmetic on them. */
10743 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10744 {
10745 tem = const_binop (code, type, arg0, arg1);
10746 if (tem != NULL_TREE)
10747 {
10748 if (TREE_TYPE (tem) != type)
10749 tem = fold_convert_loc (loc, type, tem);
10750 return tem;
10751 }
10752 }
10753
10754 /* If this is a commutative operation, and ARG0 is a constant, move it
10755 to ARG1 to reduce the number of tests below. */
10756 if (commutative_tree_code (code)
10757 && tree_swap_operands_p (arg0, arg1))
10758 return fold_build2_loc (loc, code, type, op1, op0);
10759
10760 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10761 to ARG1 to reduce the number of tests below. */
10762 if (kind == tcc_comparison
10763 && tree_swap_operands_p (arg0, arg1))
10764 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10765
10766 tem = generic_simplify (loc, code, type, op0, op1);
10767 if (tem)
10768 return tem;
10769
10770 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10771
10772 First check for cases where an arithmetic operation is applied to a
10773 compound, conditional, or comparison operation. Push the arithmetic
10774 operation inside the compound or conditional to see if any folding
10775 can then be done. Convert comparison to conditional for this purpose.
10776      This also optimizes non-constant cases that used to be done in
10777 expand_expr.
10778
10779      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10780      where one of the operands is a comparison and the other is a comparison,
10781      a BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
10782 code below would make the expression more complex. Change it to a
10783 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10784 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10785
10786 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10787 || code == EQ_EXPR || code == NE_EXPR)
10788 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10789 && ((truth_value_p (TREE_CODE (arg0))
10790 && (truth_value_p (TREE_CODE (arg1))
10791 || (TREE_CODE (arg1) == BIT_AND_EXPR
10792 && integer_onep (TREE_OPERAND (arg1, 1)))))
10793 || (truth_value_p (TREE_CODE (arg1))
10794 && (truth_value_p (TREE_CODE (arg0))
10795 || (TREE_CODE (arg0) == BIT_AND_EXPR
10796 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10797 {
10798 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10799 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10800 : TRUTH_XOR_EXPR,
10801 boolean_type_node,
10802 fold_convert_loc (loc, boolean_type_node, arg0),
10803 fold_convert_loc (loc, boolean_type_node, arg1));
10804
10805 if (code == EQ_EXPR)
10806 tem = invert_truthvalue_loc (loc, tem);
10807
10808 return fold_convert_loc (loc, type, tem);
10809 }
10810
10811 if (TREE_CODE_CLASS (code) == tcc_binary
10812 || TREE_CODE_CLASS (code) == tcc_comparison)
10813 {
10814 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10815 {
10816 tem = fold_build2_loc (loc, code, type,
10817 fold_convert_loc (loc, TREE_TYPE (op0),
10818 TREE_OPERAND (arg0, 1)), op1);
10819 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10820 tem);
10821 }
10822 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10823 {
10824 tem = fold_build2_loc (loc, code, type, op0,
10825 fold_convert_loc (loc, TREE_TYPE (op1),
10826 TREE_OPERAND (arg1, 1)));
10827 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10828 tem);
10829 }
10830
10831 if (TREE_CODE (arg0) == COND_EXPR
10832 || TREE_CODE (arg0) == VEC_COND_EXPR
10833 || COMPARISON_CLASS_P (arg0))
10834 {
10835 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10836 arg0, arg1,
10837 /*cond_first_p=*/1);
10838 if (tem != NULL_TREE)
10839 return tem;
10840 }
10841
10842 if (TREE_CODE (arg1) == COND_EXPR
10843 || TREE_CODE (arg1) == VEC_COND_EXPR
10844 || COMPARISON_CLASS_P (arg1))
10845 {
10846 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10847 arg1, arg0,
10848 /*cond_first_p=*/0);
10849 if (tem != NULL_TREE)
10850 return tem;
10851 }
10852 }
10853
10854 switch (code)
10855 {
10856 case MEM_REF:
10857 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10858 if (TREE_CODE (arg0) == ADDR_EXPR
10859 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10860 {
10861 tree iref = TREE_OPERAND (arg0, 0);
10862 return fold_build2 (MEM_REF, type,
10863 TREE_OPERAND (iref, 0),
10864 int_const_binop (PLUS_EXPR, arg1,
10865 TREE_OPERAND (iref, 1)));
10866 }
10867
10868 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10869 if (TREE_CODE (arg0) == ADDR_EXPR
10870 && handled_component_p (TREE_OPERAND (arg0, 0)))
10871 {
10872 tree base;
10873 poly_int64 coffset;
10874 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10875 &coffset);
10876 if (!base)
10877 return NULL_TREE;
10878 return fold_build2 (MEM_REF, type,
10879 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10880 int_const_binop (PLUS_EXPR, arg1,
10881 size_int (coffset)));
10882 }
10883
10884 return NULL_TREE;
10885
10886 case POINTER_PLUS_EXPR:
10887 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10888 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10889 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10890 return fold_convert_loc (loc, type,
10891 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10892 fold_convert_loc (loc, sizetype,
10893 arg1),
10894 fold_convert_loc (loc, sizetype,
10895 arg0)));
10896
10897 return NULL_TREE;
10898
10899 case PLUS_EXPR:
10900 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10901 {
10902 /* X + (X / CST) * -CST is X % CST. */
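	  /* E.g. X + (X / 8) * -8: for X == 29 this is 29 - 24 == 5,
	     which is 29 % 8 under TRUNC_DIV_EXPR/TRUNC_MOD_EXPR semantics.  */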
10903 if (TREE_CODE (arg1) == MULT_EXPR
10904 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10905 && operand_equal_p (arg0,
10906 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10907 {
10908 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10909 tree cst1 = TREE_OPERAND (arg1, 1);
10910 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10911 cst1, cst0);
10912 if (sum && integer_zerop (sum))
10913 return fold_convert_loc (loc, type,
10914 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10915 TREE_TYPE (arg0), arg0,
10916 cst0));
10917 }
10918 }
10919
10920 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10921 one. Make sure the type is not saturating and has the signedness of
10922 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10923 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10924 if ((TREE_CODE (arg0) == MULT_EXPR
10925 || TREE_CODE (arg1) == MULT_EXPR)
10926 && !TYPE_SATURATING (type)
10927 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10928 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10929 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10930 {
10931 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10932 if (tem)
10933 return tem;
10934 }
10935
10936 if (! FLOAT_TYPE_P (type))
10937 {
10938 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10939 (plus (plus (mult) (mult)) (foo)) so that we can
10940 take advantage of the factoring cases below. */
10941 if (ANY_INTEGRAL_TYPE_P (type)
10942 && TYPE_OVERFLOW_WRAPS (type)
10943 && (((TREE_CODE (arg0) == PLUS_EXPR
10944 || TREE_CODE (arg0) == MINUS_EXPR)
10945 && TREE_CODE (arg1) == MULT_EXPR)
10946 || ((TREE_CODE (arg1) == PLUS_EXPR
10947 || TREE_CODE (arg1) == MINUS_EXPR)
10948 && TREE_CODE (arg0) == MULT_EXPR)))
10949 {
10950 tree parg0, parg1, parg, marg;
10951 enum tree_code pcode;
10952
10953 if (TREE_CODE (arg1) == MULT_EXPR)
10954 parg = arg0, marg = arg1;
10955 else
10956 parg = arg1, marg = arg0;
10957 pcode = TREE_CODE (parg);
10958 parg0 = TREE_OPERAND (parg, 0);
10959 parg1 = TREE_OPERAND (parg, 1);
10960 STRIP_NOPS (parg0);
10961 STRIP_NOPS (parg1);
10962
10963 if (TREE_CODE (parg0) == MULT_EXPR
10964 && TREE_CODE (parg1) != MULT_EXPR)
10965 return fold_build2_loc (loc, pcode, type,
10966 fold_build2_loc (loc, PLUS_EXPR, type,
10967 fold_convert_loc (loc, type,
10968 parg0),
10969 fold_convert_loc (loc, type,
10970 marg)),
10971 fold_convert_loc (loc, type, parg1));
10972 if (TREE_CODE (parg0) != MULT_EXPR
10973 && TREE_CODE (parg1) == MULT_EXPR)
10974 return
10975 fold_build2_loc (loc, PLUS_EXPR, type,
10976 fold_convert_loc (loc, type, parg0),
10977 fold_build2_loc (loc, pcode, type,
10978 fold_convert_loc (loc, type, marg),
10979 fold_convert_loc (loc, type,
10980 parg1)));
10981 }
10982 }
10983 else
10984 {
10985 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10986 to __complex__ ( x, y ). This is not the same for SNaNs or
10987 if signed zeros are involved. */
10988 if (!HONOR_SNANS (element_mode (arg0))
10989 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10990 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10991 {
10992 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10993 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10994 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10995 bool arg0rz = false, arg0iz = false;
10996 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10997 || (arg0i && (arg0iz = real_zerop (arg0i))))
10998 {
10999 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11000 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11001 if (arg0rz && arg1i && real_zerop (arg1i))
11002 {
11003 tree rp = arg1r ? arg1r
11004 : build1 (REALPART_EXPR, rtype, arg1);
11005 tree ip = arg0i ? arg0i
11006 : build1 (IMAGPART_EXPR, rtype, arg0);
11007 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11008 }
11009 else if (arg0iz && arg1r && real_zerop (arg1r))
11010 {
11011 tree rp = arg0r ? arg0r
11012 : build1 (REALPART_EXPR, rtype, arg0);
11013 tree ip = arg1i ? arg1i
11014 : build1 (IMAGPART_EXPR, rtype, arg1);
11015 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11016 }
11017 }
11018 }
11019
11020 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11021 We associate floats only if the user has specified
11022 -fassociative-math. */
11023 if (flag_associative_math
11024 && TREE_CODE (arg1) == PLUS_EXPR
11025 && TREE_CODE (arg0) != MULT_EXPR)
11026 {
11027 tree tree10 = TREE_OPERAND (arg1, 0);
11028 tree tree11 = TREE_OPERAND (arg1, 1);
11029 if (TREE_CODE (tree11) == MULT_EXPR
11030 && TREE_CODE (tree10) == MULT_EXPR)
11031 {
11032 tree tree0;
11033 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11034 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11035 }
11036 }
11037 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11038 We associate floats only if the user has specified
11039 -fassociative-math. */
11040 if (flag_associative_math
11041 && TREE_CODE (arg0) == PLUS_EXPR
11042 && TREE_CODE (arg1) != MULT_EXPR)
11043 {
11044 tree tree00 = TREE_OPERAND (arg0, 0);
11045 tree tree01 = TREE_OPERAND (arg0, 1);
11046 if (TREE_CODE (tree01) == MULT_EXPR
11047 && TREE_CODE (tree00) == MULT_EXPR)
11048 {
11049 tree tree0;
11050 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11051 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11052 }
11053 }
11054 }
11055
11056 bit_rotate:
11057 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11058 is a rotate of A by C1 bits. */
11059 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11060 is a rotate of A by B bits.
11061 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11062 though in this case CODE must be | and not + or ^, otherwise
11063 it doesn't return A when B is 0. */
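      /* For instance, assuming a 32-bit unsigned A, (A << 3) + (A >> 29)
	 becomes a left rotate of A by 3, and (A << B) | (A >> (-B & 31))
	 becomes a left rotate of A by B.  */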
11064 {
11065 enum tree_code code0, code1;
11066 tree rtype;
11067 code0 = TREE_CODE (arg0);
11068 code1 = TREE_CODE (arg1);
11069 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11070 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11071 && operand_equal_p (TREE_OPERAND (arg0, 0),
11072 TREE_OPERAND (arg1, 0), 0)
11073 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11074 TYPE_UNSIGNED (rtype))
11075 /* Only create rotates in complete modes. Other cases are not
11076 expanded properly. */
11077 && (element_precision (rtype)
11078 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11079 {
11080 tree tree01, tree11;
11081 tree orig_tree01, orig_tree11;
11082 enum tree_code code01, code11;
11083
11084 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11085 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11086 STRIP_NOPS (tree01);
11087 STRIP_NOPS (tree11);
11088 code01 = TREE_CODE (tree01);
11089 code11 = TREE_CODE (tree11);
11090 if (code11 != MINUS_EXPR
11091 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11092 {
11093 std::swap (code0, code1);
11094 std::swap (code01, code11);
11095 std::swap (tree01, tree11);
11096 std::swap (orig_tree01, orig_tree11);
11097 }
11098 if (code01 == INTEGER_CST
11099 && code11 == INTEGER_CST
11100 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11101 == element_precision (rtype)))
11102 {
11103 tem = build2_loc (loc, LROTATE_EXPR,
11104 rtype, TREE_OPERAND (arg0, 0),
11105 code0 == LSHIFT_EXPR
11106 ? orig_tree01 : orig_tree11);
11107 return fold_convert_loc (loc, type, tem);
11108 }
11109 else if (code11 == MINUS_EXPR)
11110 {
11111 tree tree110, tree111;
11112 tree110 = TREE_OPERAND (tree11, 0);
11113 tree111 = TREE_OPERAND (tree11, 1);
11114 STRIP_NOPS (tree110);
11115 STRIP_NOPS (tree111);
11116 if (TREE_CODE (tree110) == INTEGER_CST
11117 && compare_tree_int (tree110,
11118 element_precision (rtype)) == 0
11119 && operand_equal_p (tree01, tree111, 0))
11120 {
11121 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11122 ? LROTATE_EXPR : RROTATE_EXPR),
11123 rtype, TREE_OPERAND (arg0, 0),
11124 orig_tree01);
11125 return fold_convert_loc (loc, type, tem);
11126 }
11127 }
11128 else if (code == BIT_IOR_EXPR
11129 && code11 == BIT_AND_EXPR
11130 && pow2p_hwi (element_precision (rtype)))
11131 {
11132 tree tree110, tree111;
11133 tree110 = TREE_OPERAND (tree11, 0);
11134 tree111 = TREE_OPERAND (tree11, 1);
11135 STRIP_NOPS (tree110);
11136 STRIP_NOPS (tree111);
11137 if (TREE_CODE (tree110) == NEGATE_EXPR
11138 && TREE_CODE (tree111) == INTEGER_CST
11139 && compare_tree_int (tree111,
11140 element_precision (rtype) - 1) == 0
11141 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11142 {
11143 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11144 ? LROTATE_EXPR : RROTATE_EXPR),
11145 rtype, TREE_OPERAND (arg0, 0),
11146 orig_tree01);
11147 return fold_convert_loc (loc, type, tem);
11148 }
11149 }
11150 }
11151 }
11152
11153 associate:
11154   /* In most languages, we can't associate operations on floats through
11155      parentheses.  Rather than remember where the parentheses were, we
11156      don't associate floats at all, unless the user has specified
11157      -fassociative-math.  We also need to make sure the type is not
11158      saturating.  */
11159
11160 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11161 && !TYPE_SATURATING (type))
11162 {
11163 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11164 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11165 tree atype = type;
11166 bool ok = true;
11167
11168 /* Split both trees into variables, constants, and literals. Then
11169 associate each group together, the constants with literals,
11170 then the result with variables. This increases the chances of
11171 literals being recombined later and of generating relocatable
11172 expressions for the sum of a constant and literal. */
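      /* A sketch of the effect, assuming a wrapping (e.g. unsigned) type:
	 (x + 3) + (y + 5) splits as var0 = x, lit0 = 3, var1 = y, lit1 = 5
	 and recombines below into (x + y) + 8.  */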
11173 var0 = split_tree (arg0, type, code,
11174 &minus_var0, &con0, &minus_con0,
11175 &lit0, &minus_lit0, 0);
11176 var1 = split_tree (arg1, type, code,
11177 &minus_var1, &con1, &minus_con1,
11178 &lit1, &minus_lit1, code == MINUS_EXPR);
11179
11180 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11181 if (code == MINUS_EXPR)
11182 code = PLUS_EXPR;
11183
11184 /* With undefined overflow prefer doing association in a type
11185 which wraps on overflow, if that is one of the operand types. */
11186 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11187 && !TYPE_OVERFLOW_WRAPS (type))
11188 {
11189 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11190 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11191 atype = TREE_TYPE (arg0);
11192 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11193 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11194 atype = TREE_TYPE (arg1);
11195 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11196 }
11197
11198 /* With undefined overflow we can only associate constants with one
11199 variable, and constants whose association doesn't overflow. */
11200 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11201 && !TYPE_OVERFLOW_WRAPS (atype))
11202 {
11203 if ((var0 && var1) || (minus_var0 && minus_var1))
11204 {
11205 /* ??? If split_tree would handle NEGATE_EXPR we could
11206 simply reject these cases and the allowed cases would
11207 be the var0/minus_var1 ones. */
11208 tree tmp0 = var0 ? var0 : minus_var0;
11209 tree tmp1 = var1 ? var1 : minus_var1;
11210 bool one_neg = false;
11211
11212 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11213 {
11214 tmp0 = TREE_OPERAND (tmp0, 0);
11215 one_neg = !one_neg;
11216 }
11217 if (CONVERT_EXPR_P (tmp0)
11218 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11219 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11220 <= TYPE_PRECISION (atype)))
11221 tmp0 = TREE_OPERAND (tmp0, 0);
11222 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11223 {
11224 tmp1 = TREE_OPERAND (tmp1, 0);
11225 one_neg = !one_neg;
11226 }
11227 if (CONVERT_EXPR_P (tmp1)
11228 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11229 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11230 <= TYPE_PRECISION (atype)))
11231 tmp1 = TREE_OPERAND (tmp1, 0);
11232 /* The only case we can still associate with two variables
11233 is if they cancel out. */
11234 if (!one_neg
11235 || !operand_equal_p (tmp0, tmp1, 0))
11236 ok = false;
11237 }
11238 else if ((var0 && minus_var1
11239 && ! operand_equal_p (var0, minus_var1, 0))
11240 || (minus_var0 && var1
11241 && ! operand_equal_p (minus_var0, var1, 0)))
11242 ok = false;
11243 }
11244
11245 /* Only do something if we found more than two objects. Otherwise,
11246 nothing has changed and we risk infinite recursion. */
11247 if (ok
11248 && ((var0 != 0) + (var1 != 0)
11249 + (minus_var0 != 0) + (minus_var1 != 0)
11250 + (con0 != 0) + (con1 != 0)
11251 + (minus_con0 != 0) + (minus_con1 != 0)
11252 + (lit0 != 0) + (lit1 != 0)
11253 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11254 {
11255 var0 = associate_trees (loc, var0, var1, code, atype);
11256 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11257 code, atype);
11258 con0 = associate_trees (loc, con0, con1, code, atype);
11259 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11260 code, atype);
11261 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11262 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11263 code, atype);
11264
11265 if (minus_var0 && var0)
11266 {
11267 var0 = associate_trees (loc, var0, minus_var0,
11268 MINUS_EXPR, atype);
11269 minus_var0 = 0;
11270 }
11271 if (minus_con0 && con0)
11272 {
11273 con0 = associate_trees (loc, con0, minus_con0,
11274 MINUS_EXPR, atype);
11275 minus_con0 = 0;
11276 }
11277
11278 /* Preserve the MINUS_EXPR if the negative part of the literal is
11279 greater than the positive part. Otherwise, the multiplicative
11280 	     folding code (i.e. extract_muldiv) may be fooled when
11281 	     unsigned constants are subtracted, as in the following
11282 	     example: ((X*2 + 4) - 8U)/2.  */
11283 if (minus_lit0 && lit0)
11284 {
11285 if (TREE_CODE (lit0) == INTEGER_CST
11286 && TREE_CODE (minus_lit0) == INTEGER_CST
11287 && tree_int_cst_lt (lit0, minus_lit0)
11288 /* But avoid ending up with only negated parts. */
11289 && (var0 || con0))
11290 {
11291 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11292 MINUS_EXPR, atype);
11293 lit0 = 0;
11294 }
11295 else
11296 {
11297 lit0 = associate_trees (loc, lit0, minus_lit0,
11298 MINUS_EXPR, atype);
11299 minus_lit0 = 0;
11300 }
11301 }
11302
11303 /* Don't introduce overflows through reassociation. */
11304 if ((lit0 && TREE_OVERFLOW_P (lit0))
11305 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11306 return NULL_TREE;
11307
11308 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11309 con0 = associate_trees (loc, con0, lit0, code, atype);
11310 lit0 = 0;
11311 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11312 code, atype);
11313 minus_lit0 = 0;
11314
11315 /* Eliminate minus_con0. */
11316 if (minus_con0)
11317 {
11318 if (con0)
11319 con0 = associate_trees (loc, con0, minus_con0,
11320 MINUS_EXPR, atype);
11321 else if (var0)
11322 var0 = associate_trees (loc, var0, minus_con0,
11323 MINUS_EXPR, atype);
11324 else
11325 gcc_unreachable ();
11326 minus_con0 = 0;
11327 }
11328
11329 /* Eliminate minus_var0. */
11330 if (minus_var0)
11331 {
11332 if (con0)
11333 con0 = associate_trees (loc, con0, minus_var0,
11334 MINUS_EXPR, atype);
11335 else
11336 gcc_unreachable ();
11337 minus_var0 = 0;
11338 }
11339
11340 return
11341 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11342 code, atype));
11343 }
11344 }
11345
11346 return NULL_TREE;
11347
11348 case POINTER_DIFF_EXPR:
11349 case MINUS_EXPR:
11350 /* Fold &a[i] - &a[j] to i-j. */
11351 if (TREE_CODE (arg0) == ADDR_EXPR
11352 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11353 && TREE_CODE (arg1) == ADDR_EXPR
11354 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11355 {
11356 tree tem = fold_addr_of_array_ref_difference (loc, type,
11357 TREE_OPERAND (arg0, 0),
11358 TREE_OPERAND (arg1, 0),
11359 code
11360 == POINTER_DIFF_EXPR);
11361 if (tem)
11362 return tem;
11363 }
11364
11365 /* Further transformations are not for pointers. */
11366 if (code == POINTER_DIFF_EXPR)
11367 return NULL_TREE;
11368
11369 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11370 if (TREE_CODE (arg0) == NEGATE_EXPR
11371 && negate_expr_p (op1)
11372 /* If arg0 is e.g. unsigned int and type is int, then this could
11373 introduce UB, because if A is INT_MIN at runtime, the original
11374 expression can be well defined while the latter is not.
11375 See PR83269. */
11376 && !(ANY_INTEGRAL_TYPE_P (type)
11377 && TYPE_OVERFLOW_UNDEFINED (type)
11378 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11379 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11380 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11381 fold_convert_loc (loc, type,
11382 TREE_OPERAND (arg0, 0)));
11383
11384 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11385 __complex__ ( x, -y ). This is not the same for SNaNs or if
11386 signed zeros are involved. */
11387 if (!HONOR_SNANS (element_mode (arg0))
11388 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11389 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11390 {
11391 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11392 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11393 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11394 bool arg0rz = false, arg0iz = false;
11395 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11396 || (arg0i && (arg0iz = real_zerop (arg0i))))
11397 {
11398 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11399 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11400 if (arg0rz && arg1i && real_zerop (arg1i))
11401 {
11402 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11403 arg1r ? arg1r
11404 : build1 (REALPART_EXPR, rtype, arg1));
11405 tree ip = arg0i ? arg0i
11406 : build1 (IMAGPART_EXPR, rtype, arg0);
11407 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11408 }
11409 else if (arg0iz && arg1r && real_zerop (arg1r))
11410 {
11411 tree rp = arg0r ? arg0r
11412 : build1 (REALPART_EXPR, rtype, arg0);
11413 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11414 arg1i ? arg1i
11415 : build1 (IMAGPART_EXPR, rtype, arg1));
11416 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11417 }
11418 }
11419 }
11420
11421 /* A - B -> A + (-B) if B is easily negatable. */
11422 if (negate_expr_p (op1)
11423 && ! TYPE_OVERFLOW_SANITIZED (type)
11424 && ((FLOAT_TYPE_P (type)
11425 /* Avoid this transformation if B is a positive REAL_CST. */
11426 && (TREE_CODE (op1) != REAL_CST
11427 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11428 || INTEGRAL_TYPE_P (type)))
11429 return fold_build2_loc (loc, PLUS_EXPR, type,
11430 fold_convert_loc (loc, type, arg0),
11431 negate_expr (op1));
11432
11433 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11434 one. Make sure the type is not saturating and has the signedness of
11435 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11436 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11437 if ((TREE_CODE (arg0) == MULT_EXPR
11438 || TREE_CODE (arg1) == MULT_EXPR)
11439 && !TYPE_SATURATING (type)
11440 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11441 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11442 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11443 {
11444 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11445 if (tem)
11446 return tem;
11447 }
11448
11449 goto associate;
11450
11451 case MULT_EXPR:
11452 if (! FLOAT_TYPE_P (type))
11453 {
11454 /* Transform x * -C into -x * C if x is easily negatable. */
11455 if (TREE_CODE (op1) == INTEGER_CST
11456 && tree_int_cst_sgn (op1) == -1
11457 && negate_expr_p (op0)
11458 && negate_expr_p (op1)
11459 && (tem = negate_expr (op1)) != op1
11460 && ! TREE_OVERFLOW (tem))
11461 return fold_build2_loc (loc, MULT_EXPR, type,
11462 fold_convert_loc (loc, type,
11463 negate_expr (op0)), tem);
11464
11465 strict_overflow_p = false;
11466 if (TREE_CODE (arg1) == INTEGER_CST
11467 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11468 &strict_overflow_p)) != 0)
11469 {
11470 if (strict_overflow_p)
11471 fold_overflow_warning (("assuming signed overflow does not "
11472 "occur when simplifying "
11473 "multiplication"),
11474 WARN_STRICT_OVERFLOW_MISC);
11475 return fold_convert_loc (loc, type, tem);
11476 }
11477
11478 /* Optimize z * conj(z) for integer complex numbers. */
11479 if (TREE_CODE (arg0) == CONJ_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11481 return fold_mult_zconjz (loc, type, arg1);
11482 if (TREE_CODE (arg1) == CONJ_EXPR
11483 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11484 return fold_mult_zconjz (loc, type, arg0);
11485 }
11486 else
11487 {
11488 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11489 This is not the same for NaNs or if signed zeros are
11490 involved. */
11491 if (!HONOR_NANS (arg0)
11492 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11493 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11494 && TREE_CODE (arg1) == COMPLEX_CST
11495 && real_zerop (TREE_REALPART (arg1)))
11496 {
11497 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11498 if (real_onep (TREE_IMAGPART (arg1)))
11499 return
11500 fold_build2_loc (loc, COMPLEX_EXPR, type,
11501 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11502 rtype, arg0)),
11503 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11504 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11505 return
11506 fold_build2_loc (loc, COMPLEX_EXPR, type,
11507 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11508 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11509 rtype, arg0)));
11510 }
11511
11512 /* Optimize z * conj(z) for floating point complex numbers.
11513 Guarded by flag_unsafe_math_optimizations as non-finite
11514 imaginary components don't produce scalar results. */
11515 if (flag_unsafe_math_optimizations
11516 && TREE_CODE (arg0) == CONJ_EXPR
11517 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11518 return fold_mult_zconjz (loc, type, arg1);
11519 if (flag_unsafe_math_optimizations
11520 && TREE_CODE (arg1) == CONJ_EXPR
11521 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11522 return fold_mult_zconjz (loc, type, arg0);
11523 }
11524 goto associate;
11525
11526 case BIT_IOR_EXPR:
11527 /* Canonicalize (X & C1) | C2. */
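      /* E.g. for an 8-bit unsigned type, (X & 0xf0) | 0x0f becomes
	 X | 0x0f since the two constants together cover every bit, and
	 (X & 0x3c) | 0x0c drops the bits already supplied by 0x0c,
	 giving (X & 0x30) | 0x0c.  */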
11528 if (TREE_CODE (arg0) == BIT_AND_EXPR
11529 && TREE_CODE (arg1) == INTEGER_CST
11530 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11531 {
11532 int width = TYPE_PRECISION (type), w;
11533 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11534 wide_int c2 = wi::to_wide (arg1);
11535
11536 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11537 if ((c1 & c2) == c1)
11538 return omit_one_operand_loc (loc, type, arg1,
11539 TREE_OPERAND (arg0, 0));
11540
11541 wide_int msk = wi::mask (width, false,
11542 TYPE_PRECISION (TREE_TYPE (arg1)));
11543
11544 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11545 if (wi::bit_and_not (msk, c1 | c2) == 0)
11546 {
11547 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11548 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11549 }
11550
11551 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11552 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11553 mode which allows further optimizations. */
11554 c1 &= msk;
11555 c2 &= msk;
11556 wide_int c3 = wi::bit_and_not (c1, c2);
11557 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11558 {
11559 wide_int mask = wi::mask (w, false,
11560 TYPE_PRECISION (type));
11561 if (((c1 | c2) & mask) == mask
11562 && wi::bit_and_not (c1, mask) == 0)
11563 {
11564 c3 = mask;
11565 break;
11566 }
11567 }
11568
11569 if (c3 != c1)
11570 {
11571 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11572 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11573 wide_int_to_tree (type, c3));
11574 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11575 }
11576 }
11577
11578 /* See if this can be simplified into a rotate first. If that
11579 is unsuccessful continue in the association code. */
11580 goto bit_rotate;
11581
11582 case BIT_XOR_EXPR:
11583 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11584 if (TREE_CODE (arg0) == BIT_AND_EXPR
11585 && INTEGRAL_TYPE_P (type)
11586 && integer_onep (TREE_OPERAND (arg0, 1))
11587 && integer_onep (arg1))
11588 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11589 build_zero_cst (TREE_TYPE (arg0)));
11590
11591 /* See if this can be simplified into a rotate first. If that
11592 is unsuccessful continue in the association code. */
11593 goto bit_rotate;
11594
11595 case BIT_AND_EXPR:
11596 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11597 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11598 && INTEGRAL_TYPE_P (type)
11599 && integer_onep (TREE_OPERAND (arg0, 1))
11600 && integer_onep (arg1))
11601 {
11602 tree tem2;
11603 tem = TREE_OPERAND (arg0, 0);
11604 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11605 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11606 tem, tem2);
11607 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11608 build_zero_cst (TREE_TYPE (tem)));
11609 }
11610 /* Fold ~X & 1 as (X & 1) == 0. */
11611 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11612 && INTEGRAL_TYPE_P (type)
11613 && integer_onep (arg1))
11614 {
11615 tree tem2;
11616 tem = TREE_OPERAND (arg0, 0);
11617 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11618 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11619 tem, tem2);
11620 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11621 build_zero_cst (TREE_TYPE (tem)));
11622 }
11623 /* Fold !X & 1 as X == 0. */
11624 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11625 && integer_onep (arg1))
11626 {
11627 tem = TREE_OPERAND (arg0, 0);
11628 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11629 build_zero_cst (TREE_TYPE (tem)));
11630 }
11631
11632 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11633 multiple of 1 << CST. */
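      /* E.g. (X * 16) & -8 folds to X * 16, because 16 is a multiple of 8
	 and the three low bits of X * 16 are already zero.  */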
11634 if (TREE_CODE (arg1) == INTEGER_CST)
11635 {
11636 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11637 wide_int ncst1 = -cst1;
11638 if ((cst1 & ncst1) == ncst1
11639 && multiple_of_p (type, arg0,
11640 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11641 return fold_convert_loc (loc, type, arg0);
11642 }
11643
11644 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11645 bits from CST2. */
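      /* E.g. (X * 8) & 6 is always zero because X * 8 has its three low
	 bits clear, while (X * 4) & 14 keeps only the reachable bits and
	 becomes (X * 4) & 12.  */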
11646 if (TREE_CODE (arg1) == INTEGER_CST
11647 && TREE_CODE (arg0) == MULT_EXPR
11648 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11649 {
11650 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11651 wide_int masked
11652 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11653
11654 if (masked == 0)
11655 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11656 arg0, arg1);
11657 else if (masked != warg1)
11658 {
11659 /* Avoid the transform if arg1 is a mask of some
11660 mode which allows further optimizations. */
11661 int pop = wi::popcount (warg1);
11662 if (!(pop >= BITS_PER_UNIT
11663 && pow2p_hwi (pop)
11664 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11665 return fold_build2_loc (loc, code, type, op0,
11666 wide_int_to_tree (type, masked));
11667 }
11668 }
11669
11670 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11671 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11672 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11673 {
11674 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11675
11676 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11677 if (mask == -1)
11678 return
11679 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11680 }
11681
11682 goto associate;
11683
11684 case RDIV_EXPR:
11685 /* Don't touch a floating-point divide by zero unless the mode
11686 of the constant can represent infinity. */
11687 if (TREE_CODE (arg1) == REAL_CST
11688 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11689 && real_zerop (arg1))
11690 return NULL_TREE;
11691
11692 /* (-A) / (-B) -> A / B */
11693 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11694 return fold_build2_loc (loc, RDIV_EXPR, type,
11695 TREE_OPERAND (arg0, 0),
11696 negate_expr (arg1));
11697 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11698 return fold_build2_loc (loc, RDIV_EXPR, type,
11699 negate_expr (arg0),
11700 TREE_OPERAND (arg1, 0));
11701 return NULL_TREE;
11702
11703 case TRUNC_DIV_EXPR:
11704 /* Fall through */
11705
11706 case FLOOR_DIV_EXPR:
11707 /* Simplify A / (B << N) where A and B are positive and B is
11708 a power of 2, to A >> (N + log2(B)). */
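      /* E.g. A / (4 << 3) == A / 32 becomes A >> 5, since 5 == 3 + log2 (4).  */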
11709 strict_overflow_p = false;
11710 if (TREE_CODE (arg1) == LSHIFT_EXPR
11711 && (TYPE_UNSIGNED (type)
11712 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11713 {
11714 tree sval = TREE_OPERAND (arg1, 0);
11715 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11716 {
11717 tree sh_cnt = TREE_OPERAND (arg1, 1);
11718 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11719 wi::exact_log2 (wi::to_wide (sval)));
11720
11721 if (strict_overflow_p)
11722 fold_overflow_warning (("assuming signed overflow does not "
11723 "occur when simplifying A / (B << N)"),
11724 WARN_STRICT_OVERFLOW_MISC);
11725
11726 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11727 sh_cnt, pow2);
11728 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11729 fold_convert_loc (loc, type, arg0), sh_cnt);
11730 }
11731 }
11732
11733 /* Fall through */
11734
11735 case ROUND_DIV_EXPR:
11736 case CEIL_DIV_EXPR:
11737 case EXACT_DIV_EXPR:
11738 if (integer_zerop (arg1))
11739 return NULL_TREE;
11740
11741 /* Convert -A / -B to A / B when the type is signed and overflow is
11742 undefined. */
11743 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11744 && TREE_CODE (op0) == NEGATE_EXPR
11745 && negate_expr_p (op1))
11746 {
11747 if (ANY_INTEGRAL_TYPE_P (type))
11748 fold_overflow_warning (("assuming signed overflow does not occur "
11749 "when distributing negation across "
11750 "division"),
11751 WARN_STRICT_OVERFLOW_MISC);
11752 return fold_build2_loc (loc, code, type,
11753 fold_convert_loc (loc, type,
11754 TREE_OPERAND (arg0, 0)),
11755 negate_expr (op1));
11756 }
11757 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11758 && TREE_CODE (arg1) == NEGATE_EXPR
11759 && negate_expr_p (op0))
11760 {
11761 if (ANY_INTEGRAL_TYPE_P (type))
11762 fold_overflow_warning (("assuming signed overflow does not occur "
11763 "when distributing negation across "
11764 "division"),
11765 WARN_STRICT_OVERFLOW_MISC);
11766 return fold_build2_loc (loc, code, type,
11767 negate_expr (op0),
11768 fold_convert_loc (loc, type,
11769 TREE_OPERAND (arg1, 0)));
11770 }
11771
11772 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11773 operation, EXACT_DIV_EXPR.
11774
11775 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11776 	 At one time others generated faster code; it's not clear if they do
11777 	 after the last round of changes to the DIV code in expmed.c.  */
11778 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11779 && multiple_of_p (type, arg0, arg1))
11780 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11781 fold_convert (type, arg0),
11782 fold_convert (type, arg1));
11783
11784 strict_overflow_p = false;
11785 if (TREE_CODE (arg1) == INTEGER_CST
11786 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11787 &strict_overflow_p)) != 0)
11788 {
11789 if (strict_overflow_p)
11790 fold_overflow_warning (("assuming signed overflow does not occur "
11791 "when simplifying division"),
11792 WARN_STRICT_OVERFLOW_MISC);
11793 return fold_convert_loc (loc, type, tem);
11794 }
11795
11796 return NULL_TREE;
11797
11798 case CEIL_MOD_EXPR:
11799 case FLOOR_MOD_EXPR:
11800 case ROUND_MOD_EXPR:
11801 case TRUNC_MOD_EXPR:
11802 strict_overflow_p = false;
11803 if (TREE_CODE (arg1) == INTEGER_CST
11804 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11805 &strict_overflow_p)) != 0)
11806 {
11807 if (strict_overflow_p)
11808 fold_overflow_warning (("assuming signed overflow does not occur "
11809 "when simplifying modulus"),
11810 WARN_STRICT_OVERFLOW_MISC);
11811 return fold_convert_loc (loc, type, tem);
11812 }
11813
11814 return NULL_TREE;
11815
11816 case LROTATE_EXPR:
11817 case RROTATE_EXPR:
11818 case RSHIFT_EXPR:
11819 case LSHIFT_EXPR:
11820 /* Since negative shift count is not well-defined,
11821 don't try to compute it in the compiler. */
11822 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11823 return NULL_TREE;
11824
11825 prec = element_precision (type);
11826
11827 /* If we have a rotate of a bit operation with the rotate count and
11828 the second operand of the bit operation both constant,
11829 permute the two operations. */
11830 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11831 && (TREE_CODE (arg0) == BIT_AND_EXPR
11832 || TREE_CODE (arg0) == BIT_IOR_EXPR
11833 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11835 {
11836 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11837 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11838 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11839 fold_build2_loc (loc, code, type,
11840 arg00, arg1),
11841 fold_build2_loc (loc, code, type,
11842 arg01, arg1));
11843 }
11844
11845       /* Two consecutive rotates adding up to some integer
11846 multiple of the precision of the type can be ignored. */
11847 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11848 && TREE_CODE (arg0) == RROTATE_EXPR
11849 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11850 && wi::umod_trunc (wi::to_wide (arg1)
11851 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11852 prec) == 0)
11853 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11854
11855 return NULL_TREE;
11856
11857 case MIN_EXPR:
11858 case MAX_EXPR:
11859 goto associate;
11860
11861 case TRUTH_ANDIF_EXPR:
11862 /* Note that the operands of this must be ints
11863 and their values must be 0 or 1.
11864 ("true" is a fixed value perhaps depending on the language.) */
11865 /* If first arg is constant zero, return it. */
11866 if (integer_zerop (arg0))
11867 return fold_convert_loc (loc, type, arg0);
11868 /* FALLTHRU */
11869 case TRUTH_AND_EXPR:
11870 /* If either arg is constant true, drop it. */
11871 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11872 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11873 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11874 /* Preserve sequence points. */
11875 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11877 /* If second arg is constant zero, result is zero, but first arg
11878 must be evaluated. */
11879 if (integer_zerop (arg1))
11880 return omit_one_operand_loc (loc, type, arg1, arg0);
11881 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11882 case will be handled here. */
11883 if (integer_zerop (arg0))
11884 return omit_one_operand_loc (loc, type, arg0, arg1);
11885
11886 /* !X && X is always false. */
11887 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11888 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11889 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11890 /* X && !X is always false. */
11891 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11892 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11893 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11894
11895 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11896 means A >= Y && A != MAX, but in this case we know that
11897 A < X <= MAX. */
11898
11899 if (!TREE_SIDE_EFFECTS (arg0)
11900 && !TREE_SIDE_EFFECTS (arg1))
11901 {
11902 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11903 if (tem && !operand_equal_p (tem, arg0, 0))
11904 return fold_build2_loc (loc, code, type, tem, arg1);
11905
11906 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11907 if (tem && !operand_equal_p (tem, arg1, 0))
11908 return fold_build2_loc (loc, code, type, arg0, tem);
11909 }
11910
11911 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11912 != NULL_TREE)
11913 return tem;
11914
11915 return NULL_TREE;
11916
11917 case TRUTH_ORIF_EXPR:
11918 /* Note that the operands of this must be ints
11919 and their values must be 0 or true.
11920 ("true" is a fixed value perhaps depending on the language.) */
11921 /* If first arg is constant true, return it. */
11922 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11923 return fold_convert_loc (loc, type, arg0);
11924 /* FALLTHRU */
11925 case TRUTH_OR_EXPR:
11926 /* If either arg is constant zero, drop it. */
11927 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11929 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11930 /* Preserve sequence points. */
11931 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11933 /* If second arg is constant true, result is true, but we must
11934 evaluate first arg. */
11935 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11936 return omit_one_operand_loc (loc, type, arg1, arg0);
11937 /* Likewise for first arg, but note this only occurs here for
11938 TRUTH_OR_EXPR. */
11939 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11940 return omit_one_operand_loc (loc, type, arg0, arg1);
11941
11942 /* !X || X is always true. */
11943 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11945 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11946 /* X || !X is always true. */
11947 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11949 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11950
11951 /* (X && !Y) || (!X && Y) is X ^ Y */
11952 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11953 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11954 {
11955 tree a0, a1, l0, l1, n0, n1;
11956
11957 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11958 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11959
11960 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11961 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11962
11963 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11964 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11965
11966 if ((operand_equal_p (n0, a0, 0)
11967 && operand_equal_p (n1, a1, 0))
11968 || (operand_equal_p (n0, a1, 0)
11969 && operand_equal_p (n1, a0, 0)))
11970 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11971 }
11972
11973 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11974 != NULL_TREE)
11975 return tem;
11976
11977 return NULL_TREE;
11978
11979 case TRUTH_XOR_EXPR:
11980 /* If the second arg is constant zero, drop it. */
11981 if (integer_zerop (arg1))
11982 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11983 /* If the second arg is constant true, this is a logical inversion. */
11984 if (integer_onep (arg1))
11985 {
11986 tem = invert_truthvalue_loc (loc, arg0);
11987 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11988 }
11989 /* Identical arguments cancel to zero. */
11990 if (operand_equal_p (arg0, arg1, 0))
11991 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11992
11993 /* !X ^ X is always true. */
11994 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11995 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11996 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11997
11998 /* X ^ !X is always true. */
11999 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12000 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12001 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12002
12003 return NULL_TREE;
12004
12005 case EQ_EXPR:
12006 case NE_EXPR:
12007 STRIP_NOPS (arg0);
12008 STRIP_NOPS (arg1);
12009
12010 tem = fold_comparison (loc, code, type, op0, op1);
12011 if (tem != NULL_TREE)
12012 return tem;
12013
12014 /* bool_var != 1 becomes !bool_var. */
12015 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12016 && code == NE_EXPR)
12017 return fold_convert_loc (loc, type,
12018 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12019 TREE_TYPE (arg0), arg0));
12020
12021 /* bool_var == 0 becomes !bool_var. */
12022 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12023 && code == EQ_EXPR)
12024 return fold_convert_loc (loc, type,
12025 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12026 TREE_TYPE (arg0), arg0));
12027
12028 /* !exp != 0 becomes !exp */
12029 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12030 && code == NE_EXPR)
12031 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12032
12033 /* If this is an EQ or NE comparison with zero and ARG0 is
12034 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12035 two operations, but the latter can be done in one less insn
12036 on machines that have only two-operand insns or on which a
12037 constant cannot be the first operand. */
12038 if (TREE_CODE (arg0) == BIT_AND_EXPR
12039 && integer_zerop (arg1))
12040 {
12041 tree arg00 = TREE_OPERAND (arg0, 0);
12042 tree arg01 = TREE_OPERAND (arg0, 1);
12043 if (TREE_CODE (arg00) == LSHIFT_EXPR
12044 && integer_onep (TREE_OPERAND (arg00, 0)))
12045 {
12046 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12047 arg01, TREE_OPERAND (arg00, 1));
12048 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12049 build_int_cst (TREE_TYPE (arg0), 1));
12050 return fold_build2_loc (loc, code, type,
12051 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12052 arg1);
12053 }
12054 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12055 && integer_onep (TREE_OPERAND (arg01, 0)))
12056 {
12057 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12058 arg00, TREE_OPERAND (arg01, 1));
12059 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12060 build_int_cst (TREE_TYPE (arg0), 1));
12061 return fold_build2_loc (loc, code, type,
12062 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12063 arg1);
12064 }
12065 }
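/* A worked example of the rewrite above (illustrative only, assuming
   32-bit ints):
     ((1 << n) & flags) != 0   becomes   ((flags >> n) & 1) != 0
   Both test bit N of FLAGS, but the second form avoids materializing the
   shifted constant at run time.  */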
12066
12067 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12068 C1 is a valid shift constant, and C2 is a power of two, i.e.
12069 a single bit. */
12070 if (TREE_CODE (arg0) == BIT_AND_EXPR
12071 && integer_pow2p (TREE_OPERAND (arg0, 1))
12072 && integer_zerop (arg1))
12073 {
12074 tree arg00 = TREE_OPERAND (arg0, 0);
12075 STRIP_NOPS (arg00);
12076 if (TREE_CODE (arg00) == RSHIFT_EXPR
12077 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12078 {
12079 tree itype = TREE_TYPE (arg00);
12080 tree arg001 = TREE_OPERAND (arg00, 1);
12081 prec = TYPE_PRECISION (itype);
12082
12083 /* Check for a valid shift count. */
12084 if (wi::ltu_p (wi::to_wide (arg001), prec))
12085 {
12086 tree arg01 = TREE_OPERAND (arg0, 1);
12087 tree arg000 = TREE_OPERAND (arg00, 0);
12088 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12089 /* If (C2 << C1) doesn't overflow, then
12090 ((X >> C1) & C2) != 0 can be rewritten as
12091 (X & (C2 << C1)) != 0. */
12092 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12093 {
12094 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12095 arg01, arg001);
12096 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12097 arg000, tem);
12098 return fold_build2_loc (loc, code, type, tem,
12099 fold_convert_loc (loc, itype, arg1));
12100 }
12101 /* Otherwise, for signed (arithmetic) shifts,
12102 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12103 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12104 else if (!TYPE_UNSIGNED (itype))
12105 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12106 : LT_EXPR,
12107 type, arg000,
12108 build_int_cst (itype, 0));
12109 /* Otherwise, for unsigned (logical) shifts,
12110 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12111 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12112 else
12113 return omit_one_operand_loc (loc, type,
12114 code == EQ_EXPR ? integer_one_node
12115 : integer_zero_node,
12116 arg000);
12117 }
12118 }
12119 }
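/* Worked examples for the three cases above (illustrative only, assuming
   a 32-bit int X):
     ((X >> 3) & 4) != 0    ->  (X & 32) != 0    (4 << 3 does not overflow)
     ((X >> 31) & 2) != 0   ->  X < 0            (signed X, 2 << 31 overflows)
     ((X >> 31) & 2) != 0   ->  (X, false)       (unsigned X)  */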
12120
12121 /* If this is a comparison of a field, we may be able to simplify it. */
12122 if ((TREE_CODE (arg0) == COMPONENT_REF
12123 || TREE_CODE (arg0) == BIT_FIELD_REF)
12124 /* Handle the constant case even without -O
12125 to make sure the warnings are given. */
12126 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12127 {
12128 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12129 if (t1)
12130 return t1;
12131 }
12132
12133 /* Optimize comparisons of strlen vs zero to a compare of the
12134 first character of the string vs zero. To wit,
12135 strlen(ptr) == 0 => *ptr == 0
12136 strlen(ptr) != 0 => *ptr != 0
12137 Other cases should reduce to one of these two (or a constant)
12138 due to the return value of strlen being unsigned. */
12139 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12140 {
12141 tree fndecl = get_callee_fndecl (arg0);
12142
12143 if (fndecl
12144 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12145 && call_expr_nargs (arg0) == 1
12146 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12147 == POINTER_TYPE))
12148 {
12149 tree ptrtype
12150 = build_pointer_type (build_qualified_type (char_type_node,
12151 TYPE_QUAL_CONST));
12152 tree ptr = fold_convert_loc (loc, ptrtype,
12153 CALL_EXPR_ARG (arg0, 0));
12154 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12155 return fold_build2_loc (loc, code, type, iref,
12156 build_int_cst (TREE_TYPE (iref), 0));
12157 }
12158 }
12159
12160 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12161 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12162 if (TREE_CODE (arg0) == RSHIFT_EXPR
12163 && integer_zerop (arg1)
12164 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12165 {
12166 tree arg00 = TREE_OPERAND (arg0, 0);
12167 tree arg01 = TREE_OPERAND (arg0, 1);
12168 tree itype = TREE_TYPE (arg00);
12169 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12170 {
12171 if (TYPE_UNSIGNED (itype))
12172 {
12173 itype = signed_type_for (itype);
12174 arg00 = fold_convert_loc (loc, itype, arg00);
12175 }
12176 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12177 type, arg00, build_zero_cst (itype));
12178 }
12179 }
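/* Illustrative example (not part of the original comment): with a 32-bit
   int X,
     (X >> 31) != 0   ->  X < 0
     (X >> 31) == 0   ->  X >= 0
   and for an unsigned X the shift count 31 still selects the sign bit once
   X is converted to the corresponding signed type, as done above.  */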
12180
12181 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12182 (X & C) == 0 when C is a single bit. */
12183 if (TREE_CODE (arg0) == BIT_AND_EXPR
12184 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12185 && integer_zerop (arg1)
12186 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12187 {
12188 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12189 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12190 TREE_OPERAND (arg0, 1));
12191 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12192 type, tem,
12193 fold_convert_loc (loc, TREE_TYPE (arg0),
12194 arg1));
12195 }
12196
12197 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12198 constant C is a power of two, i.e. a single bit. */
12199 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12200 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12201 && integer_zerop (arg1)
12202 && integer_pow2p (TREE_OPERAND (arg0, 1))
12203 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12204 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12205 {
12206 tree arg00 = TREE_OPERAND (arg0, 0);
12207 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12208 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12209 }
12210
12211 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12212 when C is a power of two, i.e. a single bit. */
12213 if (TREE_CODE (arg0) == BIT_AND_EXPR
12214 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12215 && integer_zerop (arg1)
12216 && integer_pow2p (TREE_OPERAND (arg0, 1))
12217 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12218 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12219 {
12220 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12221 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12222 arg000, TREE_OPERAND (arg0, 1));
12223 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12224 tem, build_int_cst (TREE_TYPE (tem), 0));
12225 }
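/* Worked example for the single-bit rewrites above (illustrative only):
   with C == 8,
     ((X ^ 8) & 8) == 0   ->  (X & 8) != 0
   XOR with the bit flips it, so the masked result is zero exactly when the
   bit was originally set.  */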
12226
12227 if (integer_zerop (arg1)
12228 && tree_expr_nonzero_p (arg0))
12229 {
12230 tree res = constant_boolean_node (code == NE_EXPR, type);
12231 return omit_one_operand_loc (loc, type, res, arg0);
12232 }
12233
12234 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12235 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12236 {
12237 tree arg00 = TREE_OPERAND (arg0, 0);
12238 tree arg01 = TREE_OPERAND (arg0, 1);
12239 tree arg10 = TREE_OPERAND (arg1, 0);
12240 tree arg11 = TREE_OPERAND (arg1, 1);
12241 tree itype = TREE_TYPE (arg0);
12242
12243 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12244 operand_equal_p guarantees no side-effects so we don't need
12245 to use omit_one_operand on Z. */
12246 if (operand_equal_p (arg01, arg11, 0))
12247 return fold_build2_loc (loc, code, type, arg00,
12248 fold_convert_loc (loc, TREE_TYPE (arg00),
12249 arg10));
12250 if (operand_equal_p (arg01, arg10, 0))
12251 return fold_build2_loc (loc, code, type, arg00,
12252 fold_convert_loc (loc, TREE_TYPE (arg00),
12253 arg11));
12254 if (operand_equal_p (arg00, arg11, 0))
12255 return fold_build2_loc (loc, code, type, arg01,
12256 fold_convert_loc (loc, TREE_TYPE (arg01),
12257 arg10));
12258 if (operand_equal_p (arg00, arg10, 0))
12259 return fold_build2_loc (loc, code, type, arg01,
12260 fold_convert_loc (loc, TREE_TYPE (arg01),
12261 arg11));
12262
12263 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12264 if (TREE_CODE (arg01) == INTEGER_CST
12265 && TREE_CODE (arg11) == INTEGER_CST)
12266 {
12267 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12268 fold_convert_loc (loc, itype, arg11));
12269 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12270 return fold_build2_loc (loc, code, type, tem,
12271 fold_convert_loc (loc, itype, arg10));
12272 }
12273 }
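/* Illustrative example of the last rewrite (not from the source):
     (X ^ 5) == (Y ^ 3)   ->  (X ^ (5 ^ 3)) == Y   ->  (X ^ 6) == Y
   XOR-ing both sides by 3 cancels it on the right and merges it into the
   constant on the left.  */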
12274
12275 /* Attempt to simplify equality/inequality comparisons of complex
12276 values. Only lower the comparison if the result is known or
12277 can be simplified to a single scalar comparison. */
12278 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12279 || TREE_CODE (arg0) == COMPLEX_CST)
12280 && (TREE_CODE (arg1) == COMPLEX_EXPR
12281 || TREE_CODE (arg1) == COMPLEX_CST))
12282 {
12283 tree real0, imag0, real1, imag1;
12284 tree rcond, icond;
12285
12286 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12287 {
12288 real0 = TREE_OPERAND (arg0, 0);
12289 imag0 = TREE_OPERAND (arg0, 1);
12290 }
12291 else
12292 {
12293 real0 = TREE_REALPART (arg0);
12294 imag0 = TREE_IMAGPART (arg0);
12295 }
12296
12297 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12298 {
12299 real1 = TREE_OPERAND (arg1, 0);
12300 imag1 = TREE_OPERAND (arg1, 1);
12301 }
12302 else
12303 {
12304 real1 = TREE_REALPART (arg1);
12305 imag1 = TREE_IMAGPART (arg1);
12306 }
12307
12308 rcond = fold_binary_loc (loc, code, type, real0, real1);
12309 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12310 {
12311 if (integer_zerop (rcond))
12312 {
12313 if (code == EQ_EXPR)
12314 return omit_two_operands_loc (loc, type, boolean_false_node,
12315 imag0, imag1);
12316 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12317 }
12318 else
12319 {
12320 if (code == NE_EXPR)
12321 return omit_two_operands_loc (loc, type, boolean_true_node,
12322 imag0, imag1);
12323 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12324 }
12325 }
12326
12327 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12328 if (icond && TREE_CODE (icond) == INTEGER_CST)
12329 {
12330 if (integer_zerop (icond))
12331 {
12332 if (code == EQ_EXPR)
12333 return omit_two_operands_loc (loc, type, boolean_false_node,
12334 real0, real1);
12335 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12336 }
12337 else
12338 {
12339 if (code == NE_EXPR)
12340 return omit_two_operands_loc (loc, type, boolean_true_node,
12341 real0, real1);
12342 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12343 }
12344 }
12345 }
12346
12347 return NULL_TREE;
12348
12349 case LT_EXPR:
12350 case GT_EXPR:
12351 case LE_EXPR:
12352 case GE_EXPR:
12353 tem = fold_comparison (loc, code, type, op0, op1);
12354 if (tem != NULL_TREE)
12355 return tem;
12356
12357 /* Transform comparisons of the form X +- C CMP X. */
12358 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12359 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12361 && !HONOR_SNANS (arg0))
12362 {
12363 tree arg01 = TREE_OPERAND (arg0, 1);
12364 enum tree_code code0 = TREE_CODE (arg0);
12365 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12366
12367 /* (X - c) > X becomes false. */
12368 if (code == GT_EXPR
12369 && ((code0 == MINUS_EXPR && is_positive >= 0)
12370 || (code0 == PLUS_EXPR && is_positive <= 0)))
12371 return constant_boolean_node (0, type);
12372
12373 /* Likewise (X + c) < X becomes false. */
12374 if (code == LT_EXPR
12375 && ((code0 == PLUS_EXPR && is_positive >= 0)
12376 || (code0 == MINUS_EXPR && is_positive <= 0)))
12377 return constant_boolean_node (0, type);
12378
12379 /* Convert (X - c) <= X to true. */
12380 if (!HONOR_NANS (arg1)
12381 && code == LE_EXPR
12382 && ((code0 == MINUS_EXPR && is_positive >= 0)
12383 || (code0 == PLUS_EXPR && is_positive <= 0)))
12384 return constant_boolean_node (1, type);
12385
12386 /* Convert (X + c) >= X to true. */
12387 if (!HONOR_NANS (arg1)
12388 && code == GE_EXPR
12389 && ((code0 == PLUS_EXPR && is_positive >= 0)
12390 || (code0 == MINUS_EXPR && is_positive <= 0)))
12391 return constant_boolean_node (1, type);
12392 }
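/* Worked example (illustrative, assuming IEEE double X and a positive
   constant c):
     (X - c) > X    ->  false   (even if X is a NaN, since any ordered
                                 comparison with a NaN is false)
     (X - c) <= X   ->  true    (only when NaNs need not be honored,
                                 as checked above)  */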
12393
12394 /* If we are comparing an ABS_EXPR with a constant, we can
12395 convert all the cases into explicit comparisons, but they may
12396 well not be faster than doing the ABS and one comparison.
12397 But ABS (X) <= C is a range comparison, which becomes a subtraction
12398 and a comparison, and is probably faster. */
12399 if (code == LE_EXPR
12400 && TREE_CODE (arg1) == INTEGER_CST
12401 && TREE_CODE (arg0) == ABS_EXPR
12402 && ! TREE_SIDE_EFFECTS (arg0)
12403 && (tem = negate_expr (arg1)) != 0
12404 && TREE_CODE (tem) == INTEGER_CST
12405 && !TREE_OVERFLOW (tem))
12406 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12407 build2 (GE_EXPR, type,
12408 TREE_OPERAND (arg0, 0), tem),
12409 build2 (LE_EXPR, type,
12410 TREE_OPERAND (arg0, 0), arg1));
12411
12412 /* Convert ABS_EXPR<x> >= 0 to true. */
12413 strict_overflow_p = false;
12414 if (code == GE_EXPR
12415 && (integer_zerop (arg1)
12416 || (! HONOR_NANS (arg0)
12417 && real_zerop (arg1)))
12418 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12419 {
12420 if (strict_overflow_p)
12421 fold_overflow_warning (("assuming signed overflow does not occur "
12422 "when simplifying comparison of "
12423 "absolute value and zero"),
12424 WARN_STRICT_OVERFLOW_CONDITIONAL);
12425 return omit_one_operand_loc (loc, type,
12426 constant_boolean_node (true, type),
12427 arg0);
12428 }
12429
12430 /* Convert ABS_EXPR<x> < 0 to false. */
12431 strict_overflow_p = false;
12432 if (code == LT_EXPR
12433 && (integer_zerop (arg1) || real_zerop (arg1))
12434 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12435 {
12436 if (strict_overflow_p)
12437 fold_overflow_warning (("assuming signed overflow does not occur "
12438 "when simplifying comparison of "
12439 "absolute value and zero"),
12440 WARN_STRICT_OVERFLOW_CONDITIONAL);
12441 return omit_one_operand_loc (loc, type,
12442 constant_boolean_node (false, type),
12443 arg0);
12444 }
12445
12446 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12447 and similarly for >= into !=. */
12448 if ((code == LT_EXPR || code == GE_EXPR)
12449 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12450 && TREE_CODE (arg1) == LSHIFT_EXPR
12451 && integer_onep (TREE_OPERAND (arg1, 0)))
12452 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12453 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12454 TREE_OPERAND (arg1, 1)),
12455 build_zero_cst (TREE_TYPE (arg0)));
12456
12457 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12458 otherwise Y might be >= # of bits in X's type and thus e.g.
12459 (unsigned char) (1 << Y) for Y 15 might be 0.
12460 If the cast is widening, then 1 << Y should have unsigned type,
12461 otherwise if Y is number of bits in the signed shift type minus 1,
12462 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12463 31 might be 0xffffffff80000000. */
12464 if ((code == LT_EXPR || code == GE_EXPR)
12465 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12466 && CONVERT_EXPR_P (arg1)
12467 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12468 && (element_precision (TREE_TYPE (arg1))
12469 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12470 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12471 || (element_precision (TREE_TYPE (arg1))
12472 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12473 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12474 {
12475 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12476 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12477 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12478 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12479 build_zero_cst (TREE_TYPE (arg0)));
12480 }
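/* Illustrative examples of the two rewrites above (not from the source),
   for an unsigned int X:
     X < (1U << Y)                        ->  (X >> Y) == 0
     X < (unsigned long long) (1U << Y)   ->  (X >> Y) == 0
   while X < (unsigned char) (1 << Y) is left alone, because the narrowing
   cast could already have truncated the shifted bit.  */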
12481
12482 return NULL_TREE;
12483
12484 case UNORDERED_EXPR:
12485 case ORDERED_EXPR:
12486 case UNLT_EXPR:
12487 case UNLE_EXPR:
12488 case UNGT_EXPR:
12489 case UNGE_EXPR:
12490 case UNEQ_EXPR:
12491 case LTGT_EXPR:
12492 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12493 {
12494 tree targ0 = strip_float_extensions (arg0);
12495 tree targ1 = strip_float_extensions (arg1);
12496 tree newtype = TREE_TYPE (targ0);
12497
12498 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12499 newtype = TREE_TYPE (targ1);
12500
12501 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12502 return fold_build2_loc (loc, code, type,
12503 fold_convert_loc (loc, newtype, targ0),
12504 fold_convert_loc (loc, newtype, targ1));
12505 }
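/* Illustrative example (not from the source): if f1 and f2 are floats,
     (double) f1 UNLT (double) f2   ->  f1 UNLT f2
   since extending both operands to double cannot change the outcome of
   the comparison.  */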
12506
12507 return NULL_TREE;
12508
12509 case COMPOUND_EXPR:
12510 /* When pedantic, a compound expression can be neither an lvalue
12511 nor an integer constant expression. */
12512 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12513 return NULL_TREE;
12514 /* Don't let (0, 0) be a null pointer constant. */
12515 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12516 : fold_convert_loc (loc, type, arg1);
12517 return pedantic_non_lvalue_loc (loc, tem);
12518
12519 case ASSERT_EXPR:
12520 /* An ASSERT_EXPR should never be passed to fold_binary. */
12521 gcc_unreachable ();
12522
12523 default:
12524 return NULL_TREE;
12525 } /* switch (code) */
12526 }
12527
12528 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12529 ((A & N) + B) & M -> (A + B) & M
12530 Similarly if (N & M) == 0,
12531 ((A | N) + B) & M -> (A + B) & M
12532 and for - instead of + (or unary - instead of +)
12533 and/or ^ instead of |.
12534 If B is constant and (B & M) == 0, fold into A & M.
12535
12536 This function is a helper for match.pd patterns. Return non-NULL
12537 type in which the simplified operation should be performed only
12538 if any optimization is possible.
12539
12540 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12541 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12542 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12543 +/-. */
12544 tree
12545 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12546 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12547 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12548 tree *pmop)
12549 {
12550 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12551 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12552 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12553 if (~cst1 == 0
12554 || (cst1 & (cst1 + 1)) != 0
12555 || !INTEGRAL_TYPE_P (type)
12556 || (!TYPE_OVERFLOW_WRAPS (type)
12557 && TREE_CODE (type) != INTEGER_TYPE)
12558 || (wi::max_value (type) & cst1) != cst1)
12559 return NULL_TREE;
12560
12561 enum tree_code codes[2] = { code00, code01 };
12562 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12563 int which = 0;
12564 wide_int cst0;
12565
12566 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12567 arg1 (M) is == (1LL << cst) - 1.
12568 Store C into PMOP[0] and D into PMOP[1]. */
12569 pmop[0] = arg00;
12570 pmop[1] = arg01;
12571 which = code != NEGATE_EXPR;
12572
12573 for (; which >= 0; which--)
12574 switch (codes[which])
12575 {
12576 case BIT_AND_EXPR:
12577 case BIT_IOR_EXPR:
12578 case BIT_XOR_EXPR:
12579 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12580 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12581 if (codes[which] == BIT_AND_EXPR)
12582 {
12583 if (cst0 != cst1)
12584 break;
12585 }
12586 else if (cst0 != 0)
12587 break;
12588 /* If C or D is of the form (A & N) where
12589 (N & M) == M, or of the form (A | N) or
12590 (A ^ N) where (N & M) == 0, replace it with A. */
12591 pmop[which] = arg0xx[2 * which];
12592 break;
12593 case ERROR_MARK:
12594 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12595 break;
12596 /* If C or D is a constant N where (N & M) == 0, it can be
12597 omitted (replaced with 0). */
12598 if ((code == PLUS_EXPR
12599 || (code == MINUS_EXPR && which == 0))
12600 && (cst1 & wi::to_wide (pmop[which])) == 0)
12601 pmop[which] = build_int_cst (type, 0);
12602 /* Similarly, with C - N where (-N & M) == 0. */
12603 if (code == MINUS_EXPR
12604 && which == 1
12605 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12606 pmop[which] = build_int_cst (type, 0);
12607 break;
12608 default:
12609 gcc_unreachable ();
12610 }
12611
12612 /* Only build anything new if we optimized one or both arguments above. */
12613 if (pmop[0] == arg00 && pmop[1] == arg01)
12614 return NULL_TREE;
12615
12616 if (TYPE_OVERFLOW_WRAPS (type))
12617 return type;
12618 else
12619 return unsigned_type_for (type);
12620 }
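/* A minimal, self-contained sketch (not part of GCC, kept out of the build
   with #if 0) of the source-level identity that fold_bit_and_mask relies on:
   when M == (1 << k) - 1 and (N & M) == M, masking A with N cannot change
   the low k bits of the sum, so ((A & N) + B) & M equals (A + B) & M.  */
#if 0
#include <assert.h>

static void
fold_bit_and_mask_example (unsigned int a, unsigned int b)
{
  /* M = 0xff (low 8 bits), N = 0x1ff, so (N & M) == M.  */
  assert ((((a & 0x1ffu) + b) & 0xffu) == ((a + b) & 0xffu));
  /* With (N & M) == 0, e.g. N = 0x100 for |, the OR-ed bits are masked away.  */
  assert ((((a | 0x100u) + b) & 0xffu) == ((a + b) & 0xffu));
}
#endif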
12621
12622 /* Used by contains_label_p and contains_label_1. */
12623
12624 struct contains_label_data
12625 {
12626 hash_set<tree> *pset;
12627 bool inside_switch_p;
12628 };
12629
12630 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12631 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12632 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12633
12634 static tree
12635 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12636 {
12637 contains_label_data *d = (contains_label_data *) data;
12638 switch (TREE_CODE (*tp))
12639 {
12640 case LABEL_EXPR:
12641 return *tp;
12642
12643 case CASE_LABEL_EXPR:
12644 if (!d->inside_switch_p)
12645 return *tp;
12646 return NULL_TREE;
12647
12648 case SWITCH_EXPR:
12649 if (!d->inside_switch_p)
12650 {
12651 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12652 return *tp;
12653 d->inside_switch_p = true;
12654 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12655 return *tp;
12656 d->inside_switch_p = false;
12657 *walk_subtrees = 0;
12658 }
12659 return NULL_TREE;
12660
12661 case GOTO_EXPR:
12662 *walk_subtrees = 0;
12663 return NULL_TREE;
12664
12665 default:
12666 return NULL_TREE;
12667 }
12668 }
12669
12670 /* Return whether the sub-tree ST contains a label which is accessible from
12671 outside the sub-tree. */
12672
12673 static bool
12674 contains_label_p (tree st)
12675 {
12676 hash_set<tree> pset;
12677 contains_label_data data = { &pset, false };
12678 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12679 }
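/* Illustrative note (not from the source): a dead COND_EXPR arm such as
   the ELSE part of

     1 ? x : ({ lab: y; })

   cannot be discarded outright if some goto elsewhere targets LAB, so
   fold_ternary_loc below keeps an arm with side effects whenever
   contains_label_p reports a label visible from outside it.  */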
12680
12681 /* Fold a ternary expression of code CODE and type TYPE with operands
12682 OP0, OP1, and OP2. Return the folded expression if folding is
12683 successful. Otherwise, return NULL_TREE. */
12684
12685 tree
12686 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12687 tree op0, tree op1, tree op2)
12688 {
12689 tree tem;
12690 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12691 enum tree_code_class kind = TREE_CODE_CLASS (code);
12692
12693 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12694 && TREE_CODE_LENGTH (code) == 3);
12695
12696 /* If this is a commutative operation, and OP0 is a constant, move it
12697 to OP1 to reduce the number of tests below. */
12698 if (commutative_ternary_tree_code (code)
12699 && tree_swap_operands_p (op0, op1))
12700 return fold_build3_loc (loc, code, type, op1, op0, op2);
12701
12702 tem = generic_simplify (loc, code, type, op0, op1, op2);
12703 if (tem)
12704 return tem;
12705
12706 /* Strip any conversions that don't change the mode. This is safe
12707 for every expression, except for a comparison expression because
12708 its signedness is derived from its operands. So, in the latter
12709 case, only strip conversions that don't change the signedness.
12710
12711 Note that this is done as an internal manipulation within the
12712 constant folder, in order to find the simplest representation of
12713 the arguments so that their form can be studied. In any cases,
12714 the appropriate type conversions should be put back in the tree
12715 that will get out of the constant folder. */
12716 if (op0)
12717 {
12718 arg0 = op0;
12719 STRIP_NOPS (arg0);
12720 }
12721
12722 if (op1)
12723 {
12724 arg1 = op1;
12725 STRIP_NOPS (arg1);
12726 }
12727
12728 if (op2)
12729 {
12730 arg2 = op2;
12731 STRIP_NOPS (arg2);
12732 }
12733
12734 switch (code)
12735 {
12736 case COMPONENT_REF:
12737 if (TREE_CODE (arg0) == CONSTRUCTOR
12738 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12739 {
12740 unsigned HOST_WIDE_INT idx;
12741 tree field, value;
12742 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12743 if (field == arg1)
12744 return value;
12745 }
12746 return NULL_TREE;
12747
12748 case COND_EXPR:
12749 case VEC_COND_EXPR:
12750 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12751 so all simple results must be passed through pedantic_non_lvalue. */
12752 if (TREE_CODE (arg0) == INTEGER_CST)
12753 {
12754 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12755 tem = integer_zerop (arg0) ? op2 : op1;
12756 /* Only optimize constant conditions when the selected branch
12757 has the same type as the COND_EXPR. This avoids optimizing
12758 away "c ? x : throw", where the throw has a void type.
12759 Also avoid throwing away an operand that contains a label. */
12760 if ((!TREE_SIDE_EFFECTS (unused_op)
12761 || !contains_label_p (unused_op))
12762 && (! VOID_TYPE_P (TREE_TYPE (tem))
12763 || VOID_TYPE_P (type)))
12764 return pedantic_non_lvalue_loc (loc, tem);
12765 return NULL_TREE;
12766 }
12767 else if (TREE_CODE (arg0) == VECTOR_CST)
12768 {
12769 unsigned HOST_WIDE_INT nelts;
12770 if ((TREE_CODE (arg1) == VECTOR_CST
12771 || TREE_CODE (arg1) == CONSTRUCTOR)
12772 && (TREE_CODE (arg2) == VECTOR_CST
12773 || TREE_CODE (arg2) == CONSTRUCTOR)
12774 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12775 {
12776 vec_perm_builder sel (nelts, nelts, 1);
12777 for (unsigned int i = 0; i < nelts; i++)
12778 {
12779 tree val = VECTOR_CST_ELT (arg0, i);
12780 if (integer_all_onesp (val))
12781 sel.quick_push (i);
12782 else if (integer_zerop (val))
12783 sel.quick_push (nelts + i);
12784 else /* Currently unreachable. */
12785 return NULL_TREE;
12786 }
12787 vec_perm_indices indices (sel, 2, nelts);
12788 tree t = fold_vec_perm (type, arg1, arg2, indices);
12789 if (t != NULL_TREE)
12790 return t;
12791 }
12792 }
12793
12794 /* If we have A op B ? A : C, we may be able to convert this to a
12795 simpler expression, depending on the operation and the values
12796 of B and C. Signed zeros prevent all of these transformations,
12797 for reasons given above each one.
12798
12799 Also try swapping the arguments and inverting the conditional. */
12800 if (COMPARISON_CLASS_P (arg0)
12801 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12802 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12803 {
12804 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12805 if (tem)
12806 return tem;
12807 }
12808
12809 if (COMPARISON_CLASS_P (arg0)
12810 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12811 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12812 {
12813 location_t loc0 = expr_location_or (arg0, loc);
12814 tem = fold_invert_truthvalue (loc0, arg0);
12815 if (tem && COMPARISON_CLASS_P (tem))
12816 {
12817 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12818 if (tem)
12819 return tem;
12820 }
12821 }
12822
12823 /* If the second operand is simpler than the third, swap them
12824 since that produces better jump optimization results. */
12825 if (truth_value_p (TREE_CODE (arg0))
12826 && tree_swap_operands_p (op1, op2))
12827 {
12828 location_t loc0 = expr_location_or (arg0, loc);
12829 /* See if this can be inverted. If it can't, possibly because
12830 it was a floating-point inequality comparison, don't do
12831 anything. */
12832 tem = fold_invert_truthvalue (loc0, arg0);
12833 if (tem)
12834 return fold_build3_loc (loc, code, type, tem, op2, op1);
12835 }
12836
12837 /* Convert A ? 1 : 0 to simply A. */
12838 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12839 : (integer_onep (op1)
12840 && !VECTOR_TYPE_P (type)))
12841 && integer_zerop (op2)
12842 /* If we try to convert OP0 to our type, the
12843 call to fold will try to move the conversion inside
12844 a COND, which will recurse. In that case, the COND_EXPR
12845 is probably the best choice, so leave it alone. */
12846 && type == TREE_TYPE (arg0))
12847 return pedantic_non_lvalue_loc (loc, arg0);
12848
12849 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12850 over COND_EXPR in cases such as floating point comparisons. */
12851 if (integer_zerop (op1)
12852 && code == COND_EXPR
12853 && integer_onep (op2)
12854 && !VECTOR_TYPE_P (type)
12855 && truth_value_p (TREE_CODE (arg0)))
12856 return pedantic_non_lvalue_loc (loc,
12857 fold_convert_loc (loc, type,
12858 invert_truthvalue_loc (loc,
12859 arg0)));
12860
12861 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12862 if (TREE_CODE (arg0) == LT_EXPR
12863 && integer_zerop (TREE_OPERAND (arg0, 1))
12864 && integer_zerop (op2)
12865 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12866 {
12867 /* sign_bit_p looks through both zero and sign extensions,
12868 but for this optimization only sign extensions are
12869 usable. */
12870 tree tem2 = TREE_OPERAND (arg0, 0);
12871 while (tem != tem2)
12872 {
12873 if (TREE_CODE (tem2) != NOP_EXPR
12874 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12875 {
12876 tem = NULL_TREE;
12877 break;
12878 }
12879 tem2 = TREE_OPERAND (tem2, 0);
12880 }
12881 /* sign_bit_p only checks ARG1 bits within A's precision.
12882 If <sign bit of A> has wider type than A, bits outside
12883 of A's precision in <sign bit of A> need to be checked.
12884 If they are all 0, this optimization needs to be done
12885 in unsigned A's type; if they are all 1, in signed A's type;
12886 otherwise this can't be done. */
12887 if (tem
12888 && TYPE_PRECISION (TREE_TYPE (tem))
12889 < TYPE_PRECISION (TREE_TYPE (arg1))
12890 && TYPE_PRECISION (TREE_TYPE (tem))
12891 < TYPE_PRECISION (type))
12892 {
12893 int inner_width, outer_width;
12894 tree tem_type;
12895
12896 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12897 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12898 if (outer_width > TYPE_PRECISION (type))
12899 outer_width = TYPE_PRECISION (type);
12900
12901 wide_int mask = wi::shifted_mask
12902 (inner_width, outer_width - inner_width, false,
12903 TYPE_PRECISION (TREE_TYPE (arg1)));
12904
12905 wide_int common = mask & wi::to_wide (arg1);
12906 if (common == mask)
12907 {
12908 tem_type = signed_type_for (TREE_TYPE (tem));
12909 tem = fold_convert_loc (loc, tem_type, tem);
12910 }
12911 else if (common == 0)
12912 {
12913 tem_type = unsigned_type_for (TREE_TYPE (tem));
12914 tem = fold_convert_loc (loc, tem_type, tem);
12915 }
12916 else
12917 tem = NULL;
12918 }
12919
12920 if (tem)
12921 return
12922 fold_convert_loc (loc, type,
12923 fold_build2_loc (loc, BIT_AND_EXPR,
12924 TREE_TYPE (tem), tem,
12925 fold_convert_loc (loc,
12926 TREE_TYPE (tem),
12927 arg1)));
12928 }
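/* Worked example (illustrative only): for a 32-bit int A,
     A < 0 ? INT_MIN : 0   ->  A & INT_MIN
   because INT_MIN is exactly the sign bit of A, which is set precisely
   when A < 0.  */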
12929
12930 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12931 already handled above. */
12932 if (TREE_CODE (arg0) == BIT_AND_EXPR
12933 && integer_onep (TREE_OPERAND (arg0, 1))
12934 && integer_zerop (op2)
12935 && integer_pow2p (arg1))
12936 {
12937 tree tem = TREE_OPERAND (arg0, 0);
12938 STRIP_NOPS (tem);
12939 if (TREE_CODE (tem) == RSHIFT_EXPR
12940 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12941 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12942 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12943 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12944 fold_convert_loc (loc, type,
12945 TREE_OPERAND (tem, 0)),
12946 op1);
12947 }
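/* Worked example (illustrative only): with N == 3,
     ((A >> 3) & 1) ? 8 : 0   ->  A & 8
   since the condition tests exactly the bit that the selected value would
   contribute.  */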
12948
12949 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12950 is probably obsolete because the first operand should be a
12951 truth value (that's why we have the two cases above), but let's
12952 leave it in until we can confirm this for all front-ends. */
12953 if (integer_zerop (op2)
12954 && TREE_CODE (arg0) == NE_EXPR
12955 && integer_zerop (TREE_OPERAND (arg0, 1))
12956 && integer_pow2p (arg1)
12957 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12958 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12959 arg1, OEP_ONLY_CONST)
12960 /* operand_equal_p compares just value, not precision, so e.g.
12961 arg1 could be 8-bit -128 and be a power of two, but the BIT_AND_EXPR
12962 second operand could be 32-bit -128, which is not a power of two (or
12963 vice versa). */
12964 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12965 return pedantic_non_lvalue_loc (loc,
12966 fold_convert_loc (loc, type,
12967 TREE_OPERAND (arg0,
12968 0)));
12969
12970 /* Disable the transformations below for vectors, since
12971 fold_binary_op_with_conditional_arg may undo them immediately,
12972 yielding an infinite loop. */
12973 if (code == VEC_COND_EXPR)
12974 return NULL_TREE;
12975
12976 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12977 if (integer_zerop (op2)
12978 && truth_value_p (TREE_CODE (arg0))
12979 && truth_value_p (TREE_CODE (arg1))
12980 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12981 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12982 : TRUTH_ANDIF_EXPR,
12983 type, fold_convert_loc (loc, type, arg0), op1);
12984
12985 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12986 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12987 && truth_value_p (TREE_CODE (arg0))
12988 && truth_value_p (TREE_CODE (arg1))
12989 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12990 {
12991 location_t loc0 = expr_location_or (arg0, loc);
12992 /* Only perform transformation if ARG0 is easily inverted. */
12993 tem = fold_invert_truthvalue (loc0, arg0);
12994 if (tem)
12995 return fold_build2_loc (loc, code == VEC_COND_EXPR
12996 ? BIT_IOR_EXPR
12997 : TRUTH_ORIF_EXPR,
12998 type, fold_convert_loc (loc, type, tem),
12999 op1);
13000 }
13001
13002 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13003 if (integer_zerop (arg1)
13004 && truth_value_p (TREE_CODE (arg0))
13005 && truth_value_p (TREE_CODE (op2))
13006 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13007 {
13008 location_t loc0 = expr_location_or (arg0, loc);
13009 /* Only perform transformation if ARG0 is easily inverted. */
13010 tem = fold_invert_truthvalue (loc0, arg0);
13011 if (tem)
13012 return fold_build2_loc (loc, code == VEC_COND_EXPR
13013 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13014 type, fold_convert_loc (loc, type, tem),
13015 op2);
13016 }
13017
13018 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13019 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13020 && truth_value_p (TREE_CODE (arg0))
13021 && truth_value_p (TREE_CODE (op2))
13022 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13023 return fold_build2_loc (loc, code == VEC_COND_EXPR
13024 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13025 type, fold_convert_loc (loc, type, arg0), op2);
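/* Summary of the four rewrites above on truth values A and B
   (illustrative; COND_EXPR forms shown):
     A ? B : 0  ->  A && B
     A ? B : 1  ->  !A || B
     A ? 0 : B  ->  !A && B
     A ? 1 : B  ->  A || B  */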
13026
13027 return NULL_TREE;
13028
13029 case CALL_EXPR:
13030 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13031 of fold_ternary on them. */
13032 gcc_unreachable ();
13033
13034 case BIT_FIELD_REF:
13035 if (TREE_CODE (arg0) == VECTOR_CST
13036 && (type == TREE_TYPE (TREE_TYPE (arg0))
13037 || (VECTOR_TYPE_P (type)
13038 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13039 && tree_fits_uhwi_p (op1)
13040 && tree_fits_uhwi_p (op2))
13041 {
13042 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13043 unsigned HOST_WIDE_INT width
13044 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13045 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13046 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13047 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13048
13049 if (n != 0
13050 && (idx % width) == 0
13051 && (n % width) == 0
13052 && known_le ((idx + n) / width,
13053 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13054 {
13055 idx = idx / width;
13056 n = n / width;
13057
13058 if (TREE_CODE (arg0) == VECTOR_CST)
13059 {
13060 if (n == 1)
13061 {
13062 tem = VECTOR_CST_ELT (arg0, idx);
13063 if (VECTOR_TYPE_P (type))
13064 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13065 return tem;
13066 }
13067
13068 tree_vector_builder vals (type, n, 1);
13069 for (unsigned i = 0; i < n; ++i)
13070 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13071 return vals.build ();
13072 }
13073 }
13074 }
13075
13076 /* On constants we can use native encode/interpret to constant
13077 fold (nearly) all BIT_FIELD_REFs. */
13078 if (CONSTANT_CLASS_P (arg0)
13079 && can_native_interpret_type_p (type)
13080 && BITS_PER_UNIT == 8
13081 && tree_fits_uhwi_p (op1)
13082 && tree_fits_uhwi_p (op2))
13083 {
13084 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13085 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13086 /* Limit us to a reasonable amount of work. To relax the
13087 other limitations we need bit-shifting of the buffer
13088 and rounding up the size. */
13089 if (bitpos % BITS_PER_UNIT == 0
13090 && bitsize % BITS_PER_UNIT == 0
13091 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13092 {
13093 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13094 unsigned HOST_WIDE_INT len
13095 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13096 bitpos / BITS_PER_UNIT);
13097 if (len > 0
13098 && len * BITS_PER_UNIT >= bitsize)
13099 {
13100 tree v = native_interpret_expr (type, b,
13101 bitsize / BITS_PER_UNIT);
13102 if (v)
13103 return v;
13104 }
13105 }
13106 }
13107
13108 return NULL_TREE;
13109
13110 case VEC_PERM_EXPR:
13111 /* Perform constant folding of VEC_PERM_EXPR. */
13112 if (TREE_CODE (arg2) == VECTOR_CST
13113 && TREE_CODE (op0) == VECTOR_CST
13114 && TREE_CODE (op1) == VECTOR_CST)
13115 {
13116 /* Build a vector of integers from the tree mask. */
13117 vec_perm_builder builder;
13118 if (!tree_to_vec_perm_builder (&builder, arg2))
13119 return NULL_TREE;
13120
13121 /* Create a vec_perm_indices for the integer vector. */
13122 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13123 bool single_arg = (op0 == op1);
13124 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13125 return fold_vec_perm (type, op0, op1, sel);
13126 }
13127 return NULL_TREE;
13128
13129 case BIT_INSERT_EXPR:
13130 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13131 if (TREE_CODE (arg0) == INTEGER_CST
13132 && TREE_CODE (arg1) == INTEGER_CST)
13133 {
13134 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13135 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13136 wide_int tem = (wi::to_wide (arg0)
13137 & wi::shifted_mask (bitpos, bitsize, true,
13138 TYPE_PRECISION (type)));
13139 wide_int tem2
13140 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13141 bitsize), bitpos);
13142 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13143 }
13144 else if (TREE_CODE (arg0) == VECTOR_CST
13145 && CONSTANT_CLASS_P (arg1)
13146 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13147 TREE_TYPE (arg1)))
13148 {
13149 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13150 unsigned HOST_WIDE_INT elsize
13151 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13152 if (bitpos % elsize == 0)
13153 {
13154 unsigned k = bitpos / elsize;
13155 unsigned HOST_WIDE_INT nelts;
13156 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13157 return arg0;
13158 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13159 {
13160 tree_vector_builder elts (type, nelts, 1);
13161 elts.quick_grow (nelts);
13162 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13163 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13164 return elts.build ();
13165 }
13166 }
13167 }
13168 return NULL_TREE;
13169
13170 default:
13171 return NULL_TREE;
13172 } /* switch (code) */
13173 }
13174
13175 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13176 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13177 constructor element index of the value returned. If the element is
13178 not found NULL_TREE is returned and *CTOR_IDX is updated to
13179 the index of the element after the ACCESS_INDEX position (which
13180 may be outside of the CTOR array). */
13181
13182 tree
13183 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13184 unsigned *ctor_idx)
13185 {
13186 tree index_type = NULL_TREE;
13187 signop index_sgn = UNSIGNED;
13188 offset_int low_bound = 0;
13189
13190 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13191 {
13192 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13193 if (domain_type && TYPE_MIN_VALUE (domain_type))
13194 {
13195 /* Static constructors for variably sized objects make no sense. */
13196 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13197 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13198 /* ??? When it is obvious that the range is signed, treat it so. */
13199 if (TYPE_UNSIGNED (index_type)
13200 && TYPE_MAX_VALUE (domain_type)
13201 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13202 TYPE_MIN_VALUE (domain_type)))
13203 {
13204 index_sgn = SIGNED;
13205 low_bound
13206 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13207 SIGNED);
13208 }
13209 else
13210 {
13211 index_sgn = TYPE_SIGN (index_type);
13212 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13213 }
13214 }
13215 }
13216
13217 if (index_type)
13218 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13219 index_sgn);
13220
13221 offset_int index = low_bound;
13222 if (index_type)
13223 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13224
13225 offset_int max_index = index;
13226 unsigned cnt;
13227 tree cfield, cval;
13228 bool first_p = true;
13229
13230 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13231 {
13232 /* Array constructor might explicitly set index, or specify a range,
13233 or leave index NULL meaning that it is next index after previous
13234 one. */
13235 if (cfield)
13236 {
13237 if (TREE_CODE (cfield) == INTEGER_CST)
13238 max_index = index
13239 = offset_int::from (wi::to_wide (cfield), index_sgn);
13240 else
13241 {
13242 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13243 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13244 index_sgn);
13245 max_index
13246 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13247 index_sgn);
13248 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13249 }
13250 }
13251 else if (!first_p)
13252 {
13253 index = max_index + 1;
13254 if (index_type)
13255 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13256 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13257 max_index = index;
13258 }
13259 else
13260 first_p = false;
13261
13262 /* Do we have a match? */
13263 if (wi::cmp (access_index, index, index_sgn) >= 0)
13264 {
13265 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13266 {
13267 if (ctor_idx)
13268 *ctor_idx = cnt;
13269 return cval;
13270 }
13271 }
13272 else if (in_gimple_form)
13273 /* We're past the element we search for. Note during parsing
13274 the elements might not be sorted.
13275 ??? We should use a binary search and a flag on the
13276 CONSTRUCTOR as to whether elements are sorted in declaration
13277 order. */
13278 break;
13279 }
13280 if (ctor_idx)
13281 *ctor_idx = cnt;
13282 return NULL_TREE;
13283 }
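/* A hypothetical usage sketch (not part of GCC, excluded from the build):
   given the CONSTRUCTOR for an array such as  int a[4] = { [2] = 7 };
   looking up index 2 returns the INTEGER_CST 7, while looking up an index
   with no explicit initializer returns NULL_TREE with *CTOR_IDX left at
   the element that follows the missing position.  */
#if 0
static tree
lookup_example (tree array_ctor)
{
  unsigned ctor_idx;
  tree val = get_array_ctor_element_at_index (array_ctor, 2, &ctor_idx);
  /* Elements without an explicit initializer read as zero in the example.  */
  return val ? val : integer_zero_node;
}
#endif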
13284
13285 /* Perform constant folding and related simplification of EXPR.
13286 The related simplifications include x*1 => x, x*0 => 0, etc.,
13287 and application of the associative law.
13288 NOP_EXPR conversions may be removed freely (as long as we
13289 are careful not to change the type of the overall expression).
13290 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13291 but we can constant-fold them if they have constant operands. */
13292
13293 #ifdef ENABLE_FOLD_CHECKING
13294 # define fold(x) fold_1 (x)
13295 static tree fold_1 (tree);
13296 static
13297 #endif
13298 tree
13299 fold (tree expr)
13300 {
13301 const tree t = expr;
13302 enum tree_code code = TREE_CODE (t);
13303 enum tree_code_class kind = TREE_CODE_CLASS (code);
13304 tree tem;
13305 location_t loc = EXPR_LOCATION (expr);
13306
13307 /* Return right away if a constant. */
13308 if (kind == tcc_constant)
13309 return t;
13310
13311 /* CALL_EXPR-like objects with variable numbers of operands are
13312 treated specially. */
13313 if (kind == tcc_vl_exp)
13314 {
13315 if (code == CALL_EXPR)
13316 {
13317 tem = fold_call_expr (loc, expr, false);
13318 return tem ? tem : expr;
13319 }
13320 return expr;
13321 }
13322
13323 if (IS_EXPR_CODE_CLASS (kind))
13324 {
13325 tree type = TREE_TYPE (t);
13326 tree op0, op1, op2;
13327
13328 switch (TREE_CODE_LENGTH (code))
13329 {
13330 case 1:
13331 op0 = TREE_OPERAND (t, 0);
13332 tem = fold_unary_loc (loc, code, type, op0);
13333 return tem ? tem : expr;
13334 case 2:
13335 op0 = TREE_OPERAND (t, 0);
13336 op1 = TREE_OPERAND (t, 1);
13337 tem = fold_binary_loc (loc, code, type, op0, op1);
13338 return tem ? tem : expr;
13339 case 3:
13340 op0 = TREE_OPERAND (t, 0);
13341 op1 = TREE_OPERAND (t, 1);
13342 op2 = TREE_OPERAND (t, 2);
13343 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13344 return tem ? tem : expr;
13345 default:
13346 break;
13347 }
13348 }
13349
13350 switch (code)
13351 {
13352 case ARRAY_REF:
13353 {
13354 tree op0 = TREE_OPERAND (t, 0);
13355 tree op1 = TREE_OPERAND (t, 1);
13356
13357 if (TREE_CODE (op1) == INTEGER_CST
13358 && TREE_CODE (op0) == CONSTRUCTOR
13359 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13360 {
13361 tree val = get_array_ctor_element_at_index (op0,
13362 wi::to_offset (op1));
13363 if (val)
13364 return val;
13365 }
13366
13367 return t;
13368 }
13369
13370 /* Return a VECTOR_CST if possible. */
13371 case CONSTRUCTOR:
13372 {
13373 tree type = TREE_TYPE (t);
13374 if (TREE_CODE (type) != VECTOR_TYPE)
13375 return t;
13376
13377 unsigned i;
13378 tree val;
13379 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13380 if (! CONSTANT_CLASS_P (val))
13381 return t;
13382
13383 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13384 }
13385
13386 case CONST_DECL:
13387 return fold (DECL_INITIAL (t));
13388
13389 default:
13390 return t;
13391 } /* switch (code) */
13392 }
13393
13394 #ifdef ENABLE_FOLD_CHECKING
13395 #undef fold
13396
13397 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13398 hash_table<nofree_ptr_hash<const tree_node> > *);
13399 static void fold_check_failed (const_tree, const_tree);
13400 void print_fold_checksum (const_tree);
13401
13402 /* When --enable-checking=fold, compute a digest of EXPR before
13403 and after the actual fold call to verify that fold did not
13404 accidentally change the original expr. */
13405
13406 tree
13407 fold (tree expr)
13408 {
13409 tree ret;
13410 struct md5_ctx ctx;
13411 unsigned char checksum_before[16], checksum_after[16];
13412 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13413
13414 md5_init_ctx (&ctx);
13415 fold_checksum_tree (expr, &ctx, &ht);
13416 md5_finish_ctx (&ctx, checksum_before);
13417 ht.empty ();
13418
13419 ret = fold_1 (expr);
13420
13421 md5_init_ctx (&ctx);
13422 fold_checksum_tree (expr, &ctx, &ht);
13423 md5_finish_ctx (&ctx, checksum_after);
13424
13425 if (memcmp (checksum_before, checksum_after, 16))
13426 fold_check_failed (expr, ret);
13427
13428 return ret;
13429 }
13430
13431 void
13432 print_fold_checksum (const_tree expr)
13433 {
13434 struct md5_ctx ctx;
13435 unsigned char checksum[16], cnt;
13436 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13437
13438 md5_init_ctx (&ctx);
13439 fold_checksum_tree (expr, &ctx, &ht);
13440 md5_finish_ctx (&ctx, checksum);
13441 for (cnt = 0; cnt < 16; ++cnt)
13442 fprintf (stderr, "%02x", checksum[cnt]);
13443 putc ('\n', stderr);
13444 }
13445
13446 static void
13447 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13448 {
13449 internal_error ("fold check: original tree changed by fold");
13450 }
13451
13452 static void
13453 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13454 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13455 {
13456 const tree_node **slot;
13457 enum tree_code code;
13458 union tree_node *buf;
13459 int i, len;
13460
13461 recursive_label:
13462 if (expr == NULL)
13463 return;
13464 slot = ht->find_slot (expr, INSERT);
13465 if (*slot != NULL)
13466 return;
13467 *slot = expr;
13468 code = TREE_CODE (expr);
13469 if (TREE_CODE_CLASS (code) == tcc_declaration
13470 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13471 {
13472 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13473 size_t sz = tree_size (expr);
13474 buf = XALLOCAVAR (union tree_node, sz);
13475 memcpy ((char *) buf, expr, sz);
13476 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13477 buf->decl_with_vis.symtab_node = NULL;
13478 buf->base.nowarning_flag = 0;
13479 expr = (tree) buf;
13480 }
13481 else if (TREE_CODE_CLASS (code) == tcc_type
13482 && (TYPE_POINTER_TO (expr)
13483 || TYPE_REFERENCE_TO (expr)
13484 || TYPE_CACHED_VALUES_P (expr)
13485 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13486 || TYPE_NEXT_VARIANT (expr)
13487 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13488 {
13489 /* Allow these fields to be modified. */
13490 tree tmp;
13491 size_t sz = tree_size (expr);
13492 buf = XALLOCAVAR (union tree_node, sz);
13493 memcpy ((char *) buf, expr, sz);
13494 expr = tmp = (tree) buf;
13495 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13496 TYPE_POINTER_TO (tmp) = NULL;
13497 TYPE_REFERENCE_TO (tmp) = NULL;
13498 TYPE_NEXT_VARIANT (tmp) = NULL;
13499 TYPE_ALIAS_SET (tmp) = -1;
13500 if (TYPE_CACHED_VALUES_P (tmp))
13501 {
13502 TYPE_CACHED_VALUES_P (tmp) = 0;
13503 TYPE_CACHED_VALUES (tmp) = NULL;
13504 }
13505 }
13506 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13507 {
13508 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13509 and change builtins.c etc. instead - see PR89543. */
13510 size_t sz = tree_size (expr);
13511 buf = XALLOCAVAR (union tree_node, sz);
13512 memcpy ((char *) buf, expr, sz);
13513 buf->base.nowarning_flag = 0;
13514 expr = (tree) buf;
13515 }
13516 md5_process_bytes (expr, tree_size (expr), ctx);
13517 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13518 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13519 if (TREE_CODE_CLASS (code) != tcc_type
13520 && TREE_CODE_CLASS (code) != tcc_declaration
13521 && code != TREE_LIST
13522 && code != SSA_NAME
13523 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13524 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13525 switch (TREE_CODE_CLASS (code))
13526 {
13527 case tcc_constant:
13528 switch (code)
13529 {
13530 case STRING_CST:
13531 md5_process_bytes (TREE_STRING_POINTER (expr),
13532 TREE_STRING_LENGTH (expr), ctx);
13533 break;
13534 case COMPLEX_CST:
13535 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13536 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13537 break;
13538 case VECTOR_CST:
13539 len = vector_cst_encoded_nelts (expr);
13540 for (i = 0; i < len; ++i)
13541 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13542 break;
13543 default:
13544 break;
13545 }
13546 break;
13547 case tcc_exceptional:
13548 switch (code)
13549 {
13550 case TREE_LIST:
13551 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13552 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13553 expr = TREE_CHAIN (expr);
13554 goto recursive_label;
13555 break;
13556 case TREE_VEC:
13557 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13558 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13559 break;
13560 default:
13561 break;
13562 }
13563 break;
13564 case tcc_expression:
13565 case tcc_reference:
13566 case tcc_comparison:
13567 case tcc_unary:
13568 case tcc_binary:
13569 case tcc_statement:
13570 case tcc_vl_exp:
13571 len = TREE_OPERAND_LENGTH (expr);
13572 for (i = 0; i < len; ++i)
13573 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13574 break;
13575 case tcc_declaration:
13576 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13577 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13578 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13579 {
13580 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13581 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13582 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13583 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13584 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13585 }
13586
13587 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13588 {
13589 if (TREE_CODE (expr) == FUNCTION_DECL)
13590 {
13591 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13592 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13593 }
13594 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13595 }
13596 break;
13597 case tcc_type:
13598 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13599 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13600 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13601 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13602 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13603 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13604 if (INTEGRAL_TYPE_P (expr)
13605 || SCALAR_FLOAT_TYPE_P (expr))
13606 {
13607 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13608 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13609 }
13610 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13611 if (TREE_CODE (expr) == RECORD_TYPE
13612 || TREE_CODE (expr) == UNION_TYPE
13613 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13614 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13615 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13616 break;
13617 default:
13618 break;
13619 }
13620 }
13621
13622 /* Helper function for outputting the checksum of a tree T. When
13623 debugging with gdb, you can "define mynext" to be "next" followed
13624 by "call debug_fold_checksum (op0)", then just trace down till the
13625 outputs differ. */
13626
13627 DEBUG_FUNCTION void
13628 debug_fold_checksum (const_tree t)
13629 {
13630 int i;
13631 unsigned char checksum[16];
13632 struct md5_ctx ctx;
13633 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13634
13635 md5_init_ctx (&ctx);
13636 fold_checksum_tree (t, &ctx, &ht);
13637 md5_finish_ctx (&ctx, checksum);
13638 ht.empty ();
13639
13640 for (i = 0; i < 16; i++)
13641 fprintf (stderr, "%d ", checksum[i]);
13642
13643 fprintf (stderr, "\n");
13644 }
13645
13646 #endif
13647
13648 /* Fold a unary tree expression with code CODE of type TYPE with an
13649 operand OP0. LOC is the location of the resulting expression.
13650 Return a folded expression if successful. Otherwise, return a tree
13651 expression with code CODE of type TYPE with an operand OP0. */
13652
13653 tree
13654 fold_build1_loc (location_t loc,
13655 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13656 {
13657 tree tem;
13658 #ifdef ENABLE_FOLD_CHECKING
13659 unsigned char checksum_before[16], checksum_after[16];
13660 struct md5_ctx ctx;
13661 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13662
13663 md5_init_ctx (&ctx);
13664 fold_checksum_tree (op0, &ctx, &ht);
13665 md5_finish_ctx (&ctx, checksum_before);
13666 ht.empty ();
13667 #endif
13668
13669 tem = fold_unary_loc (loc, code, type, op0);
13670 if (!tem)
13671 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13672
13673 #ifdef ENABLE_FOLD_CHECKING
13674 md5_init_ctx (&ctx);
13675 fold_checksum_tree (op0, &ctx, &ht);
13676 md5_finish_ctx (&ctx, checksum_after);
13677
13678 if (memcmp (checksum_before, checksum_after, 16))
13679 fold_check_failed (op0, tem);
13680 #endif
13681 return tem;
13682 }
13683
13684 /* Fold a binary tree expression with code CODE of type TYPE with
13685 operands OP0 and OP1. LOC is the location of the resulting
13686 expression. Return a folded expression if successful. Otherwise,
13687 return a tree expression with code CODE of type TYPE with operands
13688 OP0 and OP1. */
13689
13690 tree
13691 fold_build2_loc (location_t loc,
13692 enum tree_code code, tree type, tree op0, tree op1
13693 MEM_STAT_DECL)
13694 {
13695 tree tem;
13696 #ifdef ENABLE_FOLD_CHECKING
13697 unsigned char checksum_before_op0[16],
13698 checksum_before_op1[16],
13699 checksum_after_op0[16],
13700 checksum_after_op1[16];
13701 struct md5_ctx ctx;
13702 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13703
13704 md5_init_ctx (&ctx);
13705 fold_checksum_tree (op0, &ctx, &ht);
13706 md5_finish_ctx (&ctx, checksum_before_op0);
13707 ht.empty ();
13708
13709 md5_init_ctx (&ctx);
13710 fold_checksum_tree (op1, &ctx, &ht);
13711 md5_finish_ctx (&ctx, checksum_before_op1);
13712 ht.empty ();
13713 #endif
13714
13715 tem = fold_binary_loc (loc, code, type, op0, op1);
13716 if (!tem)
13717 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13718
13719 #ifdef ENABLE_FOLD_CHECKING
13720 md5_init_ctx (&ctx);
13721 fold_checksum_tree (op0, &ctx, &ht);
13722 md5_finish_ctx (&ctx, checksum_after_op0);
13723 ht.empty ();
13724
13725 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13726 fold_check_failed (op0, tem);
13727
13728 md5_init_ctx (&ctx);
13729 fold_checksum_tree (op1, &ctx, &ht);
13730 md5_finish_ctx (&ctx, checksum_after_op1);
13731
13732 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13733 fold_check_failed (op1, tem);
13734 #endif
13735 return tem;
13736 }
13737
13738 /* Fold a ternary tree expression with code CODE of type TYPE with
13739 operands OP0, OP1, and OP2. Return a folded expression if
13740 successful. Otherwise, return a tree expression with code CODE of
13741 type TYPE with operands OP0, OP1, and OP2. */
13742
13743 tree
13744 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13745 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13746 {
13747 tree tem;
13748 #ifdef ENABLE_FOLD_CHECKING
13749 unsigned char checksum_before_op0[16],
13750 checksum_before_op1[16],
13751 checksum_before_op2[16],
13752 checksum_after_op0[16],
13753 checksum_after_op1[16],
13754 checksum_after_op2[16];
13755 struct md5_ctx ctx;
13756 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13757
13758 md5_init_ctx (&ctx);
13759 fold_checksum_tree (op0, &ctx, &ht);
13760 md5_finish_ctx (&ctx, checksum_before_op0);
13761 ht.empty ();
13762
13763 md5_init_ctx (&ctx);
13764 fold_checksum_tree (op1, &ctx, &ht);
13765 md5_finish_ctx (&ctx, checksum_before_op1);
13766 ht.empty ();
13767
13768 md5_init_ctx (&ctx);
13769 fold_checksum_tree (op2, &ctx, &ht);
13770 md5_finish_ctx (&ctx, checksum_before_op2);
13771 ht.empty ();
13772 #endif
13773
13774 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13775 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13776 if (!tem)
13777 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13778
13779 #ifdef ENABLE_FOLD_CHECKING
13780 md5_init_ctx (&ctx);
13781 fold_checksum_tree (op0, &ctx, &ht);
13782 md5_finish_ctx (&ctx, checksum_after_op0);
13783 ht.empty ();
13784
13785 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13786 fold_check_failed (op0, tem);
13787
13788 md5_init_ctx (&ctx);
13789 fold_checksum_tree (op1, &ctx, &ht);
13790 md5_finish_ctx (&ctx, checksum_after_op1);
13791 ht.empty ();
13792
13793 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13794 fold_check_failed (op1, tem);
13795
13796 md5_init_ctx (&ctx);
13797 fold_checksum_tree (op2, &ctx, &ht);
13798 md5_finish_ctx (&ctx, checksum_after_op2);
13799
13800 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13801 fold_check_failed (op2, tem);
13802 #endif
13803 return tem;
13804 }
13805
13806 /* Fold a CALL_EXPR expression of type TYPE with function FN and NARGS
13807 arguments in ARGARRAY, and a null static chain.
13808 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13809 of type TYPE from the given operands as constructed by build_call_array. */
13810
13811 tree
13812 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13813 int nargs, tree *argarray)
13814 {
13815 tree tem;
13816 #ifdef ENABLE_FOLD_CHECKING
13817 unsigned char checksum_before_fn[16],
13818 checksum_before_arglist[16],
13819 checksum_after_fn[16],
13820 checksum_after_arglist[16];
13821 struct md5_ctx ctx;
13822 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13823 int i;
13824
13825 md5_init_ctx (&ctx);
13826 fold_checksum_tree (fn, &ctx, &ht);
13827 md5_finish_ctx (&ctx, checksum_before_fn);
13828 ht.empty ();
13829
13830 md5_init_ctx (&ctx);
13831 for (i = 0; i < nargs; i++)
13832 fold_checksum_tree (argarray[i], &ctx, &ht);
13833 md5_finish_ctx (&ctx, checksum_before_arglist);
13834 ht.empty ();
13835 #endif
13836
13837 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13838 if (!tem)
13839 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13840
13841 #ifdef ENABLE_FOLD_CHECKING
13842 md5_init_ctx (&ctx);
13843 fold_checksum_tree (fn, &ctx, &ht);
13844 md5_finish_ctx (&ctx, checksum_after_fn);
13845 ht.empty ();
13846
13847 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13848 fold_check_failed (fn, tem);
13849
13850 md5_init_ctx (&ctx);
13851 for (i = 0; i < nargs; i++)
13852 fold_checksum_tree (argarray[i], &ctx, &ht);
13853 md5_finish_ctx (&ctx, checksum_after_arglist);
13854
13855 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13856 fold_check_failed (NULL_TREE, tem);
13857 #endif
13858 return tem;
13859 }
13860
13861 /* Perform constant folding and related simplification of initializer
13862 expression EXPR. These behave identically to "fold_buildN" but ignore
13863 potential run-time traps and exceptions that fold must preserve. */
13864
13865 #define START_FOLD_INIT \
13866 int saved_signaling_nans = flag_signaling_nans;\
13867 int saved_trapping_math = flag_trapping_math;\
13868 int saved_rounding_math = flag_rounding_math;\
13869 int saved_trapv = flag_trapv;\
13870 int saved_folding_initializer = folding_initializer;\
13871 flag_signaling_nans = 0;\
13872 flag_trapping_math = 0;\
13873 flag_rounding_math = 0;\
13874 flag_trapv = 0;\
13875 folding_initializer = 1;
13876
13877 #define END_FOLD_INIT \
13878 flag_signaling_nans = saved_signaling_nans;\
13879 flag_trapping_math = saved_trapping_math;\
13880 flag_rounding_math = saved_rounding_math;\
13881 flag_trapv = saved_trapv;\
13882 folding_initializer = saved_folding_initializer;
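/* The two macros above bracket a call to one of the fold_buildN routines,
   temporarily clearing the flags that would otherwise make fold preserve
   run-time traps.  A sketch of the pattern used by the functions below:

       tree result;
       START_FOLD_INIT;
       result = fold_build2_loc (loc, code, type, op0, op1);
       END_FOLD_INIT;
       return result;  */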
13883
13884 tree
13885 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13886 tree type, tree op)
13887 {
13888 tree result;
13889 START_FOLD_INIT;
13890
13891 result = fold_build1_loc (loc, code, type, op);
13892
13893 END_FOLD_INIT;
13894 return result;
13895 }
13896
13897 tree
13898 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13899 tree type, tree op0, tree op1)
13900 {
13901 tree result;
13902 START_FOLD_INIT;
13903
13904 result = fold_build2_loc (loc, code, type, op0, op1);
13905
13906 END_FOLD_INIT;
13907 return result;
13908 }
13909
13910 tree
13911 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13912 int nargs, tree *argarray)
13913 {
13914 tree result;
13915 START_FOLD_INIT;
13916
13917 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13918
13919 END_FOLD_INIT;
13920 return result;
13921 }
13922
13923 #undef START_FOLD_INIT
13924 #undef END_FOLD_INIT
13925
13926 /* Determine if the first argument is a multiple of the second argument.
13927 Return 0 if it is not, or if we cannot easily determine it to be.
13928
13929 An example of the sort of thing we care about (at this point; this routine
13930 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13931 fold cases do now) is discovering that
13932
13933 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13934
13935 is a multiple of
13936
13937 SAVE_EXPR (J * 8)
13938
13939 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13940
13941 This code also handles discovering that
13942
13943 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13944
13945 is a multiple of 8 so we don't have to worry about dealing with a
13946 possible remainder.
13947
13948 Note that we *look* inside a SAVE_EXPR only to determine how it was
13949 calculated; it is not safe for fold to do much of anything else with the
13950 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13951 at run time. For example, the latter example above *cannot* be implemented
13952 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13953 evaluation time of the original SAVE_EXPR is not necessarily the same at
13954 the time the new expression is evaluated. The only optimization of this
13955 sort that would be valid is changing
13956
13957 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13958
13959 divided by 8 to
13960
13961 SAVE_EXPR (I) * SAVE_EXPR (J)
13962
13963 (where the same SAVE_EXPR (J) is used in the original and the
13964 transformed version). */
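/* A few illustrative results (J and I stand for arbitrary non-constant
   expressions of TYPE, and the small integers for INTEGER_CST trees;
   these are sketches, not calls made elsewhere in this file):

       multiple_of_p (type, J * 8, 4)           returns 1
       multiple_of_p (type, J * 8, 16)          returns 0 (not provable)
       multiple_of_p (type, I & ~7, 8)          returns 1
       multiple_of_p (type, J * 8 - I * 4, 4)   returns 1  */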
13965
13966 int
13967 multiple_of_p (tree type, const_tree top, const_tree bottom)
13968 {
13969 gimple *stmt;
13970 tree t1, op1, op2;
13971
13972 if (operand_equal_p (top, bottom, 0))
13973 return 1;
13974
13975 if (TREE_CODE (type) != INTEGER_TYPE)
13976 return 0;
13977
13978 switch (TREE_CODE (top))
13979 {
13980 case BIT_AND_EXPR:
13981 /* Bitwise and provides a power of two multiple. If the mask is
13982 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13983 if (!integer_pow2p (bottom))
13984 return 0;
13985 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13986 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13987
13988 case MULT_EXPR:
13989 if (TREE_CODE (bottom) == INTEGER_CST)
13990 {
13991 op1 = TREE_OPERAND (top, 0);
13992 op2 = TREE_OPERAND (top, 1);
13993 if (TREE_CODE (op1) == INTEGER_CST)
13994 std::swap (op1, op2);
13995 if (TREE_CODE (op2) == INTEGER_CST)
13996 {
13997 if (multiple_of_p (type, op2, bottom))
13998 return 1;
13999 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14000 if (multiple_of_p (type, bottom, op2))
14001 {
14002 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14003 wi::to_widest (op2));
14004 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14005 {
14006 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14007 return multiple_of_p (type, op1, op2);
14008 }
14009 }
14010 return multiple_of_p (type, op1, bottom);
14011 }
14012 }
14013 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14014 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14015
14016 case MINUS_EXPR:
14017 /* It is impossible to prove precisely whether op0 - op1 is a multiple
14018 of bottom, so be conservative here and check that both op0 and op1
14019 are multiples of bottom. Note we check the second operand first
14020 since it's usually simpler. */
14021 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14022 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14023
14024 case PLUS_EXPR:
14025 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
14026 as op0 - 3 if the expression has unsigned type. For example,
14027 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
14028 op1 = TREE_OPERAND (top, 1);
14029 if (TYPE_UNSIGNED (type)
14030 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14031 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14032 return (multiple_of_p (type, op1, bottom)
14033 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14034
14035 case LSHIFT_EXPR:
14036 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14037 {
14038 op1 = TREE_OPERAND (top, 1);
14039 /* const_binop may not detect overflow correctly,
14040 so check for it explicitly here. */
14041 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
14042 wi::to_wide (op1))
14043 && (t1 = fold_convert (type,
14044 const_binop (LSHIFT_EXPR, size_one_node,
14045 op1))) != 0
14046 && !TREE_OVERFLOW (t1))
14047 return multiple_of_p (type, t1, bottom);
14048 }
14049 return 0;
14050
14051 case NOP_EXPR:
14052 /* Can't handle conversions from non-integral or wider integral type. */
14053 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14054 || (TYPE_PRECISION (type)
14055 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14056 return 0;
14057
14058 /* fall through */
14059
14060 case SAVE_EXPR:
14061 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14062
14063 case COND_EXPR:
14064 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14065 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14066
14067 case INTEGER_CST:
14068 if (TREE_CODE (bottom) != INTEGER_CST
14069 || integer_zerop (bottom)
14070 || (TYPE_UNSIGNED (type)
14071 && (tree_int_cst_sgn (top) < 0
14072 || tree_int_cst_sgn (bottom) < 0)))
14073 return 0;
14074 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14075 SIGNED);
14076
14077 case SSA_NAME:
14078 if (TREE_CODE (bottom) == INTEGER_CST
14079 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14080 && gimple_code (stmt) == GIMPLE_ASSIGN)
14081 {
14082 enum tree_code code = gimple_assign_rhs_code (stmt);
14083
14084 /* Check for special cases to see if top is defined as multiple
14085 of bottom:
14086
14087 top = (X & ~(bottom - 1)) ; bottom is a power of 2
14088
14089 or
14090
14091 Y = X % bottom
14092 top = X - Y. */
14093 if (code == BIT_AND_EXPR
14094 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14095 && TREE_CODE (op2) == INTEGER_CST
14096 && integer_pow2p (bottom)
14097 && wi::multiple_of_p (wi::to_widest (op2),
14098 wi::to_widest (bottom), UNSIGNED))
14099 return 1;
14100
14101 op1 = gimple_assign_rhs1 (stmt);
14102 if (code == MINUS_EXPR
14103 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14104 && TREE_CODE (op2) == SSA_NAME
14105 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14106 && gimple_code (stmt) == GIMPLE_ASSIGN
14107 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14108 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14109 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14110 return 1;
14111 }
14112
14113 /* fall through */
14114
14115 default:
14116 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14117 return multiple_p (wi::to_poly_widest (top),
14118 wi::to_poly_widest (bottom));
14119
14120 return 0;
14121 }
14122 }
14123
14124 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14125 This function returns true for integer expressions, and returns
14126 false if uncertain. */
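/* For instance (illustrative): a REAL_CST such as 1.0 is known finite, and
   fabs (y) is finite whenever y is, while an arbitrary SSA_NAME of type
   double is reported as false (uncertain) when NaNs or infinities are
   honored.  */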
14127
14128 bool
14129 tree_expr_finite_p (const_tree x)
14130 {
14131 machine_mode mode = element_mode (x);
14132 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14133 return true;
14134 switch (TREE_CODE (x))
14135 {
14136 case REAL_CST:
14137 return real_isfinite (TREE_REAL_CST_PTR (x));
14138 case COMPLEX_CST:
14139 return tree_expr_finite_p (TREE_REALPART (x))
14140 && tree_expr_finite_p (TREE_IMAGPART (x));
14141 case FLOAT_EXPR:
14142 return true;
14143 case ABS_EXPR:
14144 case CONVERT_EXPR:
14145 case NON_LVALUE_EXPR:
14146 case NEGATE_EXPR:
14147 case SAVE_EXPR:
14148 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14149 case MIN_EXPR:
14150 case MAX_EXPR:
14151 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14152 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14153 case COND_EXPR:
14154 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14155 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14156 case CALL_EXPR:
14157 switch (get_call_combined_fn (x))
14158 {
14159 CASE_CFN_FABS:
14160 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14161 CASE_CFN_FMAX:
14162 CASE_CFN_FMIN:
14163 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14164 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14165 default:
14166 return false;
14167 }
14168
14169 default:
14170 return false;
14171 }
14172 }
14173
14174 /* Return true if expression X evaluates to an infinity.
14175 This function returns false for integer expressions. */
14176
14177 bool
14178 tree_expr_infinite_p (const_tree x)
14179 {
14180 if (!HONOR_INFINITIES (x))
14181 return false;
14182 switch (TREE_CODE (x))
14183 {
14184 case REAL_CST:
14185 return real_isinf (TREE_REAL_CST_PTR (x));
14186 case ABS_EXPR:
14187 case NEGATE_EXPR:
14188 case NON_LVALUE_EXPR:
14189 case SAVE_EXPR:
14190 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14191 case COND_EXPR:
14192 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14193 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14194 default:
14195 return false;
14196 }
14197 }
14198
14199 /* Return true if expression X could evaluate to an infinity.
14200 This function returns false for integer expressions, and returns
14201 true if uncertain. */
14202
14203 bool
14204 tree_expr_maybe_infinite_p (const_tree x)
14205 {
14206 if (!HONOR_INFINITIES (x))
14207 return false;
14208 switch (TREE_CODE (x))
14209 {
14210 case REAL_CST:
14211 return real_isinf (TREE_REAL_CST_PTR (x));
14212 case FLOAT_EXPR:
14213 return false;
14214 case ABS_EXPR:
14215 case NEGATE_EXPR:
14216 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14217 case COND_EXPR:
14218 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14219 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14220 default:
14221 return true;
14222 }
14223 }
14224
14225 /* Return true if expression X evaluates to a signaling NaN.
14226 This function returns false for integer expressions. */
14227
14228 bool
14229 tree_expr_signaling_nan_p (const_tree x)
14230 {
14231 if (!HONOR_SNANS (x))
14232 return false;
14233 switch (TREE_CODE (x))
14234 {
14235 case REAL_CST:
14236 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14237 case NON_LVALUE_EXPR:
14238 case SAVE_EXPR:
14239 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14240 case COND_EXPR:
14241 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14242 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14243 default:
14244 return false;
14245 }
14246 }
14247
14248 /* Return true if expression X could evaluate to a signaling NaN.
14249 This function returns false for integer expressions, and returns
14250 true if uncertain. */
14251
14252 bool
14253 tree_expr_maybe_signaling_nan_p (const_tree x)
14254 {
14255 if (!HONOR_SNANS (x))
14256 return false;
14257 switch (TREE_CODE (x))
14258 {
14259 case REAL_CST:
14260 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14261 case FLOAT_EXPR:
14262 return false;
14263 case ABS_EXPR:
14264 case CONVERT_EXPR:
14265 case NEGATE_EXPR:
14266 case NON_LVALUE_EXPR:
14267 case SAVE_EXPR:
14268 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14269 case MIN_EXPR:
14270 case MAX_EXPR:
14271 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14272 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14273 case COND_EXPR:
14274 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14275 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14276 case CALL_EXPR:
14277 switch (get_call_combined_fn (x))
14278 {
14279 CASE_CFN_FABS:
14280 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14281 CASE_CFN_FMAX:
14282 CASE_CFN_FMIN:
14283 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14284 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14285 default:
14286 return true;
14287 }
14288 default:
14289 return true;
14290 }
14291 }
14292
14293 /* Return true if expression X evaluates to a NaN.
14294 This function returns false for integer expressions. */
14295
14296 bool
14297 tree_expr_nan_p (const_tree x)
14298 {
14299 if (!HONOR_NANS (x))
14300 return false;
14301 switch (TREE_CODE (x))
14302 {
14303 case REAL_CST:
14304 return real_isnan (TREE_REAL_CST_PTR (x));
14305 case NON_LVALUE_EXPR:
14306 case SAVE_EXPR:
14307 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14308 case COND_EXPR:
14309 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14310 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14311 default:
14312 return false;
14313 }
14314 }
14315
14316 /* Return true if expression X could evaluate to a NaN.
14317 This function returns false for integer expressions, and returns
14318 true if uncertain. */
14319
14320 bool
14321 tree_expr_maybe_nan_p (const_tree x)
14322 {
14323 if (!HONOR_NANS (x))
14324 return false;
14325 switch (TREE_CODE (x))
14326 {
14327 case REAL_CST:
14328 return real_isnan (TREE_REAL_CST_PTR (x));
14329 case FLOAT_EXPR:
14330 return false;
14331 case PLUS_EXPR:
14332 case MINUS_EXPR:
14333 case MULT_EXPR:
14334 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14335 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14336 case ABS_EXPR:
14337 case CONVERT_EXPR:
14338 case NEGATE_EXPR:
14339 case NON_LVALUE_EXPR:
14340 case SAVE_EXPR:
14341 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14342 case MIN_EXPR:
14343 case MAX_EXPR:
14344 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14345 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14346 case COND_EXPR:
14347 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14348 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14349 case CALL_EXPR:
14350 switch (get_call_combined_fn (x))
14351 {
14352 CASE_CFN_FABS:
14353 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14354 CASE_CFN_FMAX:
14355 CASE_CFN_FMIN:
14356 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14357 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14358 default:
14359 return true;
14360 }
14361 default:
14362 return true;
14363 }
14364 }
14365
14366 #define tree_expr_nonnegative_warnv_p(X, Y) \
14367 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14368
14369 #define RECURSE(X) \
14370 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
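/* The #define above turns any function-style use of
   tree_expr_nonnegative_warnv_p in the helpers below into a compile-time
   error, so recursion must go through RECURSE, which threads the depth
   counter and STRICT_OVERFLOW_P.  A typical recursive case therefore
   looks like (sketch):

       case MIN_EXPR:
         return RECURSE (op0) && RECURSE (op1);  */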
14371
14372 /* Return true if CODE or TYPE is known to be non-negative. */
14373
14374 static bool
14375 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14376 {
14377 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14378 && truth_value_p (code))
14379 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14380 have a signed:1 type (where the values are -1 and 0). */
14381 return true;
14382 return false;
14383 }
14384
14385 /* Return true if (CODE OP0) is known to be non-negative. If the return
14386 value is based on the assumption that signed overflow is undefined,
14387 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14388 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14389
14390 bool
14391 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14392 bool *strict_overflow_p, int depth)
14393 {
14394 if (TYPE_UNSIGNED (type))
14395 return true;
14396
14397 switch (code)
14398 {
14399 case ABS_EXPR:
14400 /* We can't return 1 if flag_wrapv is set because
14401 ABS_EXPR<INT_MIN> = INT_MIN. */
14402 if (!ANY_INTEGRAL_TYPE_P (type))
14403 return true;
14404 if (TYPE_OVERFLOW_UNDEFINED (type))
14405 {
14406 *strict_overflow_p = true;
14407 return true;
14408 }
14409 break;
14410
14411 case NON_LVALUE_EXPR:
14412 case FLOAT_EXPR:
14413 case FIX_TRUNC_EXPR:
14414 return RECURSE (op0);
14415
14416 CASE_CONVERT:
14417 {
14418 tree inner_type = TREE_TYPE (op0);
14419 tree outer_type = type;
14420
14421 if (TREE_CODE (outer_type) == REAL_TYPE)
14422 {
14423 if (TREE_CODE (inner_type) == REAL_TYPE)
14424 return RECURSE (op0);
14425 if (INTEGRAL_TYPE_P (inner_type))
14426 {
14427 if (TYPE_UNSIGNED (inner_type))
14428 return true;
14429 return RECURSE (op0);
14430 }
14431 }
14432 else if (INTEGRAL_TYPE_P (outer_type))
14433 {
14434 if (TREE_CODE (inner_type) == REAL_TYPE)
14435 return RECURSE (op0);
14436 if (INTEGRAL_TYPE_P (inner_type))
14437 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14438 && TYPE_UNSIGNED (inner_type);
14439 }
14440 }
14441 break;
14442
14443 default:
14444 return tree_simple_nonnegative_warnv_p (code, type);
14445 }
14446
14447 /* We don't know sign of `t', so be conservative and return false. */
14448 return false;
14449 }
14450
14451 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14452 value is based on the assumption that signed overflow is undefined,
14453 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14454 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14455
14456 bool
14457 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14458 tree op1, bool *strict_overflow_p,
14459 int depth)
14460 {
14461 if (TYPE_UNSIGNED (type))
14462 return true;
14463
14464 switch (code)
14465 {
14466 case POINTER_PLUS_EXPR:
14467 case PLUS_EXPR:
14468 if (FLOAT_TYPE_P (type))
14469 return RECURSE (op0) && RECURSE (op1);
14470
14471 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14472 both unsigned and at least 2 bits shorter than the result. */
14473 if (TREE_CODE (type) == INTEGER_TYPE
14474 && TREE_CODE (op0) == NOP_EXPR
14475 && TREE_CODE (op1) == NOP_EXPR)
14476 {
14477 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14478 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14479 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14480 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14481 {
14482 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14483 TYPE_PRECISION (inner2)) + 1;
14484 return prec < TYPE_PRECISION (type);
14485 }
14486 }
14487 break;
14488
14489 case MULT_EXPR:
14490 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14491 {
14492 /* x * x is always non-negative for floating point x
14493 or without overflow. */
14494 if (operand_equal_p (op0, op1, 0)
14495 || (RECURSE (op0) && RECURSE (op1)))
14496 {
14497 if (ANY_INTEGRAL_TYPE_P (type)
14498 && TYPE_OVERFLOW_UNDEFINED (type))
14499 *strict_overflow_p = true;
14500 return true;
14501 }
14502 }
14503
14504 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
14505 unsigned and the sum of their precisions is less than that of the result. */
14506 if (TREE_CODE (type) == INTEGER_TYPE
14507 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14508 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14509 {
14510 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14511 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14512 : TREE_TYPE (op0);
14513 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14514 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14515 : TREE_TYPE (op1);
14516
14517 bool unsigned0 = TYPE_UNSIGNED (inner0);
14518 bool unsigned1 = TYPE_UNSIGNED (inner1);
14519
14520 if (TREE_CODE (op0) == INTEGER_CST)
14521 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14522
14523 if (TREE_CODE (op1) == INTEGER_CST)
14524 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14525
14526 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14527 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14528 {
14529 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14530 ? tree_int_cst_min_precision (op0, UNSIGNED)
14531 : TYPE_PRECISION (inner0);
14532
14533 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14534 ? tree_int_cst_min_precision (op1, UNSIGNED)
14535 : TYPE_PRECISION (inner1);
14536
14537 return precision0 + precision1 < TYPE_PRECISION (type);
14538 }
14539 }
14540 return false;
14541
14542 case BIT_AND_EXPR:
14543 return RECURSE (op0) || RECURSE (op1);
14544
14545 case MAX_EXPR:
14546 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14547 things. */
14548 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14549 return RECURSE (op0) && RECURSE (op1);
14550 return RECURSE (op0) || RECURSE (op1);
14551
14552 case BIT_IOR_EXPR:
14553 case BIT_XOR_EXPR:
14554 case MIN_EXPR:
14555 case RDIV_EXPR:
14556 case TRUNC_DIV_EXPR:
14557 case CEIL_DIV_EXPR:
14558 case FLOOR_DIV_EXPR:
14559 case ROUND_DIV_EXPR:
14560 return RECURSE (op0) && RECURSE (op1);
14561
14562 case TRUNC_MOD_EXPR:
14563 return RECURSE (op0);
14564
14565 case FLOOR_MOD_EXPR:
14566 return RECURSE (op1);
14567
14568 case CEIL_MOD_EXPR:
14569 case ROUND_MOD_EXPR:
14570 default:
14571 return tree_simple_nonnegative_warnv_p (code, type);
14572 }
14573
14574 /* We don't know sign of `t', so be conservative and return false. */
14575 return false;
14576 }
14577
14578 /* Return true if T is known to be non-negative. If the return
14579 value is based on the assumption that signed overflow is undefined,
14580 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14581 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14582
14583 bool
14584 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14585 {
14586 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14587 return true;
14588
14589 switch (TREE_CODE (t))
14590 {
14591 case INTEGER_CST:
14592 return tree_int_cst_sgn (t) >= 0;
14593
14594 case REAL_CST:
14595 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14596
14597 case FIXED_CST:
14598 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14599
14600 case COND_EXPR:
14601 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14602
14603 case SSA_NAME:
14604 /* Limit the depth of recursion to avoid quadratic behavior.
14605 This is expected to catch almost all occurrences in practice.
14606 If this code misses important cases that unbounded recursion
14607 would not, passes that need this information could be revised
14608 to provide it through dataflow propagation. */
14609 return (!name_registered_for_update_p (t)
14610 && depth < param_max_ssa_name_query_depth
14611 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14612 strict_overflow_p, depth));
14613
14614 default:
14615 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14616 }
14617 }
14618
14619 /* Return true if T is known to be non-negative. If the return
14620 value is based on the assumption that signed overflow is undefined,
14621 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14622 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14623
14624 bool
14625 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14626 bool *strict_overflow_p, int depth)
14627 {
14628 switch (fn)
14629 {
14630 CASE_CFN_ACOS:
14631 CASE_CFN_ACOSH:
14632 CASE_CFN_CABS:
14633 CASE_CFN_COSH:
14634 CASE_CFN_ERFC:
14635 CASE_CFN_EXP:
14636 CASE_CFN_EXP10:
14637 CASE_CFN_EXP2:
14638 CASE_CFN_FABS:
14639 CASE_CFN_FDIM:
14640 CASE_CFN_HYPOT:
14641 CASE_CFN_POW10:
14642 CASE_CFN_FFS:
14643 CASE_CFN_PARITY:
14644 CASE_CFN_POPCOUNT:
14645 CASE_CFN_CLZ:
14646 CASE_CFN_CLRSB:
14647 case CFN_BUILT_IN_BSWAP16:
14648 case CFN_BUILT_IN_BSWAP32:
14649 case CFN_BUILT_IN_BSWAP64:
14650 case CFN_BUILT_IN_BSWAP128:
14651 /* Always true. */
14652 return true;
14653
14654 CASE_CFN_SQRT:
14655 CASE_CFN_SQRT_FN:
14656 /* sqrt(-0.0) is -0.0. */
14657 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14658 return true;
14659 return RECURSE (arg0);
14660
14661 CASE_CFN_ASINH:
14662 CASE_CFN_ATAN:
14663 CASE_CFN_ATANH:
14664 CASE_CFN_CBRT:
14665 CASE_CFN_CEIL:
14666 CASE_CFN_CEIL_FN:
14667 CASE_CFN_ERF:
14668 CASE_CFN_EXPM1:
14669 CASE_CFN_FLOOR:
14670 CASE_CFN_FLOOR_FN:
14671 CASE_CFN_FMOD:
14672 CASE_CFN_FREXP:
14673 CASE_CFN_ICEIL:
14674 CASE_CFN_IFLOOR:
14675 CASE_CFN_IRINT:
14676 CASE_CFN_IROUND:
14677 CASE_CFN_LCEIL:
14678 CASE_CFN_LDEXP:
14679 CASE_CFN_LFLOOR:
14680 CASE_CFN_LLCEIL:
14681 CASE_CFN_LLFLOOR:
14682 CASE_CFN_LLRINT:
14683 CASE_CFN_LLROUND:
14684 CASE_CFN_LRINT:
14685 CASE_CFN_LROUND:
14686 CASE_CFN_MODF:
14687 CASE_CFN_NEARBYINT:
14688 CASE_CFN_NEARBYINT_FN:
14689 CASE_CFN_RINT:
14690 CASE_CFN_RINT_FN:
14691 CASE_CFN_ROUND:
14692 CASE_CFN_ROUND_FN:
14693 CASE_CFN_ROUNDEVEN:
14694 CASE_CFN_ROUNDEVEN_FN:
14695 CASE_CFN_SCALB:
14696 CASE_CFN_SCALBLN:
14697 CASE_CFN_SCALBN:
14698 CASE_CFN_SIGNBIT:
14699 CASE_CFN_SIGNIFICAND:
14700 CASE_CFN_SINH:
14701 CASE_CFN_TANH:
14702 CASE_CFN_TRUNC:
14703 CASE_CFN_TRUNC_FN:
14704 /* True if the 1st argument is nonnegative. */
14705 return RECURSE (arg0);
14706
14707 CASE_CFN_FMAX:
14708 CASE_CFN_FMAX_FN:
14709 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14710 things. In the presence of sNaNs, we're only guaranteed to be
14711 non-negative if both operands are non-negative. In the presence
14712 of qNaNs, we're non-negative if either operand is non-negative
14713 and can't be a qNaN, or if both operands are non-negative. */
14714 if (tree_expr_maybe_signaling_nan_p (arg0)
14715 || tree_expr_maybe_signaling_nan_p (arg1))
14716 return RECURSE (arg0) && RECURSE (arg1);
14717 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14718 || RECURSE (arg1))
14719 : (RECURSE (arg1)
14720 && !tree_expr_maybe_nan_p (arg1));
14721
14722 CASE_CFN_FMIN:
14723 CASE_CFN_FMIN_FN:
14724 /* True if the 1st AND 2nd arguments are nonnegative. */
14725 return RECURSE (arg0) && RECURSE (arg1);
14726
14727 CASE_CFN_COPYSIGN:
14728 CASE_CFN_COPYSIGN_FN:
14729 /* True if the 2nd argument is nonnegative. */
14730 return RECURSE (arg1);
14731
14732 CASE_CFN_POWI:
14733 /* True if the 1st argument is nonnegative or the second
14734 argument is an even integer. */
14735 if (TREE_CODE (arg1) == INTEGER_CST
14736 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14737 return true;
14738 return RECURSE (arg0);
14739
14740 CASE_CFN_POW:
14741 /* True if the 1st argument is nonnegative or the second
14742 argument is an even integer valued real. */
14743 if (TREE_CODE (arg1) == REAL_CST)
14744 {
14745 REAL_VALUE_TYPE c;
14746 HOST_WIDE_INT n;
14747
14748 c = TREE_REAL_CST (arg1);
14749 n = real_to_integer (&c);
14750 if ((n & 1) == 0)
14751 {
14752 REAL_VALUE_TYPE cint;
14753 real_from_integer (&cint, VOIDmode, n, SIGNED);
14754 if (real_identical (&c, &cint))
14755 return true;
14756 }
14757 }
14758 return RECURSE (arg0);
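/* The POW handling above means, for instance (illustrative), that
   pow (x, 2.0) is treated as non-negative for any x, while pow (x, 3.0)
   is only known non-negative when x itself is.  */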
14759
14760 default:
14761 break;
14762 }
14763 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14764 }
14765
14766 /* Return true if T is known to be non-negative. If the return
14767 value is based on the assumption that signed overflow is undefined,
14768 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14769 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14770
14771 static bool
14772 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14773 {
14774 enum tree_code code = TREE_CODE (t);
14775 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14776 return true;
14777
14778 switch (code)
14779 {
14780 case TARGET_EXPR:
14781 {
14782 tree temp = TARGET_EXPR_SLOT (t);
14783 t = TARGET_EXPR_INITIAL (t);
14784
14785 /* If the initializer is non-void, then it's a normal expression
14786 that will be assigned to the slot. */
14787 if (!VOID_TYPE_P (t))
14788 return RECURSE (t);
14789
14790 /* Otherwise, the initializer sets the slot in some way. One common
14791 way is an assignment statement at the end of the initializer. */
14792 while (1)
14793 {
14794 if (TREE_CODE (t) == BIND_EXPR)
14795 t = expr_last (BIND_EXPR_BODY (t));
14796 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14797 || TREE_CODE (t) == TRY_CATCH_EXPR)
14798 t = expr_last (TREE_OPERAND (t, 0));
14799 else if (TREE_CODE (t) == STATEMENT_LIST)
14800 t = expr_last (t);
14801 else
14802 break;
14803 }
14804 if (TREE_CODE (t) == MODIFY_EXPR
14805 && TREE_OPERAND (t, 0) == temp)
14806 return RECURSE (TREE_OPERAND (t, 1));
14807
14808 return false;
14809 }
14810
14811 case CALL_EXPR:
14812 {
14813 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14814 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14815
14816 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14817 get_call_combined_fn (t),
14818 arg0,
14819 arg1,
14820 strict_overflow_p, depth);
14821 }
14822 case COMPOUND_EXPR:
14823 case MODIFY_EXPR:
14824 return RECURSE (TREE_OPERAND (t, 1));
14825
14826 case BIND_EXPR:
14827 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14828
14829 case SAVE_EXPR:
14830 return RECURSE (TREE_OPERAND (t, 0));
14831
14832 default:
14833 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14834 }
14835 }
14836
14837 #undef RECURSE
14838 #undef tree_expr_nonnegative_warnv_p
14839
14840 /* Return true if T is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14844
14845 bool
14846 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14847 {
14848 enum tree_code code;
14849 if (t == error_mark_node)
14850 return false;
14851
14852 code = TREE_CODE (t);
14853 switch (TREE_CODE_CLASS (code))
14854 {
14855 case tcc_binary:
14856 case tcc_comparison:
14857 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14858 TREE_TYPE (t),
14859 TREE_OPERAND (t, 0),
14860 TREE_OPERAND (t, 1),
14861 strict_overflow_p, depth);
14862
14863 case tcc_unary:
14864 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14865 TREE_TYPE (t),
14866 TREE_OPERAND (t, 0),
14867 strict_overflow_p, depth);
14868
14869 case tcc_constant:
14870 case tcc_declaration:
14871 case tcc_reference:
14872 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14873
14874 default:
14875 break;
14876 }
14877
14878 switch (code)
14879 {
14880 case TRUTH_AND_EXPR:
14881 case TRUTH_OR_EXPR:
14882 case TRUTH_XOR_EXPR:
14883 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14884 TREE_TYPE (t),
14885 TREE_OPERAND (t, 0),
14886 TREE_OPERAND (t, 1),
14887 strict_overflow_p, depth);
14888 case TRUTH_NOT_EXPR:
14889 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14890 TREE_TYPE (t),
14891 TREE_OPERAND (t, 0),
14892 strict_overflow_p, depth);
14893
14894 case COND_EXPR:
14895 case CONSTRUCTOR:
14896 case OBJ_TYPE_REF:
14897 case ASSERT_EXPR:
14898 case ADDR_EXPR:
14899 case WITH_SIZE_EXPR:
14900 case SSA_NAME:
14901 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14902
14903 default:
14904 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14905 }
14906 }
14907
14908 /* Return true if `t' is known to be non-negative. Handle warnings
14909 about undefined signed overflow. */
14910
14911 bool
14912 tree_expr_nonnegative_p (tree t)
14913 {
14914 bool ret, strict_overflow_p;
14915
14916 strict_overflow_p = false;
14917 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14918 if (strict_overflow_p)
14919 fold_overflow_warning (("assuming signed overflow does not occur when "
14920 "determining that expression is always "
14921 "non-negative"),
14922 WARN_STRICT_OVERFLOW_MISC);
14923 return ret;
14924 }
14925
14926
14927 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14928 For floating point we further ensure that T is not denormal.
14929 Similar logic is present in nonzero_address in rtlanal.h.
14930
14931 If the return value is based on the assumption that signed overflow
14932 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14933 change *STRICT_OVERFLOW_P. */
14934
14935 bool
14936 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14937 bool *strict_overflow_p)
14938 {
14939 switch (code)
14940 {
14941 case ABS_EXPR:
14942 return tree_expr_nonzero_warnv_p (op0,
14943 strict_overflow_p);
14944
14945 case NOP_EXPR:
14946 {
14947 tree inner_type = TREE_TYPE (op0);
14948 tree outer_type = type;
14949
14950 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14951 && tree_expr_nonzero_warnv_p (op0,
14952 strict_overflow_p));
14953 }
14954 break;
14955
14956 case NON_LVALUE_EXPR:
14957 return tree_expr_nonzero_warnv_p (op0,
14958 strict_overflow_p);
14959
14960 default:
14961 break;
14962 }
14963
14964 return false;
14965 }
14966
14967 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14968 For floating point we further ensure that T is not denormal.
14969 Similar logic is present in nonzero_address in rtlanal.h.
14970
14971 If the return value is based on the assumption that signed overflow
14972 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14973 change *STRICT_OVERFLOW_P. */
14974
14975 bool
14976 tree_binary_nonzero_warnv_p (enum tree_code code,
14977 tree type,
14978 tree op0,
14979 tree op1, bool *strict_overflow_p)
14980 {
14981 bool sub_strict_overflow_p;
14982 switch (code)
14983 {
14984 case POINTER_PLUS_EXPR:
14985 case PLUS_EXPR:
14986 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14987 {
14988 /* In the presence of negative values it is hard
14989 to say anything. */
14990 sub_strict_overflow_p = false;
14991 if (!tree_expr_nonnegative_warnv_p (op0,
14992 &sub_strict_overflow_p)
14993 || !tree_expr_nonnegative_warnv_p (op1,
14994 &sub_strict_overflow_p))
14995 return false;
14996 /* One of the operands must be positive and the other non-negative. */
14997 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14998 overflows, on a twos-complement machine the sum of two
14999 nonnegative numbers can never be zero. */
15000 return (tree_expr_nonzero_warnv_p (op0,
15001 strict_overflow_p)
15002 || tree_expr_nonzero_warnv_p (op1,
15003 strict_overflow_p));
15004 }
15005 break;
15006
15007 case MULT_EXPR:
15008 if (TYPE_OVERFLOW_UNDEFINED (type))
15009 {
15010 if (tree_expr_nonzero_warnv_p (op0,
15011 strict_overflow_p)
15012 && tree_expr_nonzero_warnv_p (op1,
15013 strict_overflow_p))
15014 {
15015 *strict_overflow_p = true;
15016 return true;
15017 }
15018 }
15019 break;
15020
15021 case MIN_EXPR:
15022 sub_strict_overflow_p = false;
15023 if (tree_expr_nonzero_warnv_p (op0,
15024 &sub_strict_overflow_p)
15025 && tree_expr_nonzero_warnv_p (op1,
15026 &sub_strict_overflow_p))
15027 {
15028 if (sub_strict_overflow_p)
15029 *strict_overflow_p = true;
15030 }
15031 break;
15032
15033 case MAX_EXPR:
15034 sub_strict_overflow_p = false;
15035 if (tree_expr_nonzero_warnv_p (op0,
15036 &sub_strict_overflow_p))
15037 {
15038 if (sub_strict_overflow_p)
15039 *strict_overflow_p = true;
15040
15041 /* When both operands are nonzero, then MAX must be too. */
15042 if (tree_expr_nonzero_warnv_p (op1,
15043 strict_overflow_p))
15044 return true;
15045
15046 /* MAX where operand 0 is positive is positive. */
15047 return tree_expr_nonnegative_warnv_p (op0,
15048 strict_overflow_p);
15049 }
15050 /* MAX where operand 1 is positive is positive. */
15051 else if (tree_expr_nonzero_warnv_p (op1,
15052 &sub_strict_overflow_p)
15053 && tree_expr_nonnegative_warnv_p (op1,
15054 &sub_strict_overflow_p))
15055 {
15056 if (sub_strict_overflow_p)
15057 *strict_overflow_p = true;
15058 return true;
15059 }
15060 break;
15061
15062 case BIT_IOR_EXPR:
15063 return (tree_expr_nonzero_warnv_p (op1,
15064 strict_overflow_p)
15065 || tree_expr_nonzero_warnv_p (op0,
15066 strict_overflow_p));
15067
15068 default:
15069 break;
15070 }
15071
15072 return false;
15073 }
15074
15075 /* Return true when T is an address and is known to be nonzero.
15076 For floating point we further ensure that T is not denormal.
15077 Similar logic is present in nonzero_address in rtlanal.h.
15078
15079 If the return value is based on the assumption that signed overflow
15080 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15081 change *STRICT_OVERFLOW_P. */
15082
15083 bool
15084 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15085 {
15086 bool sub_strict_overflow_p;
15087 switch (TREE_CODE (t))
15088 {
15089 case INTEGER_CST:
15090 return !integer_zerop (t);
15091
15092 case ADDR_EXPR:
15093 {
15094 tree base = TREE_OPERAND (t, 0);
15095
15096 if (!DECL_P (base))
15097 base = get_base_address (base);
15098
15099 if (base && TREE_CODE (base) == TARGET_EXPR)
15100 base = TARGET_EXPR_SLOT (base);
15101
15102 if (!base)
15103 return false;
15104
15105 /* For objects in the symbol table, check whether we know they are non-zero.
15106 Don't do anything for variables and functions before symtab is built;
15107 it is quite possible that they will be declared weak later. */
15108 int nonzero_addr = maybe_nonzero_address (base);
15109 if (nonzero_addr >= 0)
15110 return nonzero_addr;
15111
15112 /* Constants are never weak. */
15113 if (CONSTANT_CLASS_P (base))
15114 return true;
15115
15116 return false;
15117 }
15118
15119 case COND_EXPR:
15120 sub_strict_overflow_p = false;
15121 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15122 &sub_strict_overflow_p)
15123 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15124 &sub_strict_overflow_p))
15125 {
15126 if (sub_strict_overflow_p)
15127 *strict_overflow_p = true;
15128 return true;
15129 }
15130 break;
15131
15132 case SSA_NAME:
15133 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15134 break;
15135 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15136
15137 default:
15138 break;
15139 }
15140 return false;
15141 }
15142
15143 #define integer_valued_real_p(X) \
15144 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15145
15146 #define RECURSE(X) \
15147 ((integer_valued_real_p) (X, depth + 1))
15148
15149 /* Return true if the floating point result of (CODE OP0) has an
15150 integer value. We also allow +Inf, -Inf and NaN to be considered
15151 integer values. Return false for signaling NaN.
15152
15153 DEPTH is the current nesting depth of the query. */
15154
15155 bool
15156 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15157 {
15158 switch (code)
15159 {
15160 case FLOAT_EXPR:
15161 return true;
15162
15163 case ABS_EXPR:
15164 return RECURSE (op0);
15165
15166 CASE_CONVERT:
15167 {
15168 tree type = TREE_TYPE (op0);
15169 if (TREE_CODE (type) == INTEGER_TYPE)
15170 return true;
15171 if (TREE_CODE (type) == REAL_TYPE)
15172 return RECURSE (op0);
15173 break;
15174 }
15175
15176 default:
15177 break;
15178 }
15179 return false;
15180 }
15181
15182 /* Return true if the floating point result of (CODE OP0 OP1) has an
15183 integer value. We also allow +Inf, -Inf and NaN to be considered
15184 integer values. Return false for signaling NaN.
15185
15186 DEPTH is the current nesting depth of the query. */
15187
15188 bool
15189 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15190 {
15191 switch (code)
15192 {
15193 case PLUS_EXPR:
15194 case MINUS_EXPR:
15195 case MULT_EXPR:
15196 case MIN_EXPR:
15197 case MAX_EXPR:
15198 return RECURSE (op0) && RECURSE (op1);
15199
15200 default:
15201 break;
15202 }
15203 return false;
15204 }
15205
15206 /* Return true if the floating point result of calling FN with arguments
15207 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15208 considered integer values. Return false for signaling NaN. If FN
15209 takes fewer than 2 arguments, the remaining ARGn are null.
15210
15211 DEPTH is the current nesting depth of the query. */
15212
15213 bool
15214 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15215 {
15216 switch (fn)
15217 {
15218 CASE_CFN_CEIL:
15219 CASE_CFN_CEIL_FN:
15220 CASE_CFN_FLOOR:
15221 CASE_CFN_FLOOR_FN:
15222 CASE_CFN_NEARBYINT:
15223 CASE_CFN_NEARBYINT_FN:
15224 CASE_CFN_RINT:
15225 CASE_CFN_RINT_FN:
15226 CASE_CFN_ROUND:
15227 CASE_CFN_ROUND_FN:
15228 CASE_CFN_ROUNDEVEN:
15229 CASE_CFN_ROUNDEVEN_FN:
15230 CASE_CFN_TRUNC:
15231 CASE_CFN_TRUNC_FN:
15232 return true;
15233
15234 CASE_CFN_FMIN:
15235 CASE_CFN_FMIN_FN:
15236 CASE_CFN_FMAX:
15237 CASE_CFN_FMAX_FN:
15238 return RECURSE (arg0) && RECURSE (arg1);
15239
15240 default:
15241 break;
15242 }
15243 return false;
15244 }
15245
15246 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15247 has an integer value. We also allow +Inf, -Inf and NaN to be
15248 considered integer values. Return false for signaling NaN.
15249
15250 DEPTH is the current nesting depth of the query. */
15251
15252 bool
15253 integer_valued_real_single_p (tree t, int depth)
15254 {
15255 switch (TREE_CODE (t))
15256 {
15257 case REAL_CST:
15258 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15259
15260 case COND_EXPR:
15261 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15262
15263 case SSA_NAME:
15264 /* Limit the depth of recursion to avoid quadratic behavior.
15265 This is expected to catch almost all occurrences in practice.
15266 If this code misses important cases that unbounded recursion
15267 would not, passes that need this information could be revised
15268 to provide it through dataflow propagation. */
15269 return (!name_registered_for_update_p (t)
15270 && depth < param_max_ssa_name_query_depth
15271 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15272 depth));
15273
15274 default:
15275 break;
15276 }
15277 return false;
15278 }
15279
15280 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15281 has an integer value. We also allow +Inf, -Inf and NaN to be
15282 considered integer values. Return false for signaling NaN.
15283
15284 DEPTH is the current nesting depth of the query. */
15285
15286 static bool
15287 integer_valued_real_invalid_p (tree t, int depth)
15288 {
15289 switch (TREE_CODE (t))
15290 {
15291 case COMPOUND_EXPR:
15292 case MODIFY_EXPR:
15293 case BIND_EXPR:
15294 return RECURSE (TREE_OPERAND (t, 1));
15295
15296 case SAVE_EXPR:
15297 return RECURSE (TREE_OPERAND (t, 0));
15298
15299 default:
15300 break;
15301 }
15302 return false;
15303 }
15304
15305 #undef RECURSE
15306 #undef integer_valued_real_p
15307
15308 /* Return true if the floating point expression T has an integer value.
15309 We also allow +Inf, -Inf and NaN to be considered integer values.
15310 Return false for signaling NaN.
15311
15312 DEPTH is the current nesting depth of the query. */
15313
15314 bool
15315 integer_valued_real_p (tree t, int depth)
15316 {
15317 if (t == error_mark_node)
15318 return false;
15319
15320 STRIP_ANY_LOCATION_WRAPPER (t);
15321
15322 tree_code code = TREE_CODE (t);
15323 switch (TREE_CODE_CLASS (code))
15324 {
15325 case tcc_binary:
15326 case tcc_comparison:
15327 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15328 TREE_OPERAND (t, 1), depth);
15329
15330 case tcc_unary:
15331 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15332
15333 case tcc_constant:
15334 case tcc_declaration:
15335 case tcc_reference:
15336 return integer_valued_real_single_p (t, depth);
15337
15338 default:
15339 break;
15340 }
15341
15342 switch (code)
15343 {
15344 case COND_EXPR:
15345 case SSA_NAME:
15346 return integer_valued_real_single_p (t, depth);
15347
15348 case CALL_EXPR:
15349 {
15350 tree arg0 = (call_expr_nargs (t) > 0
15351 ? CALL_EXPR_ARG (t, 0)
15352 : NULL_TREE);
15353 tree arg1 = (call_expr_nargs (t) > 1
15354 ? CALL_EXPR_ARG (t, 1)
15355 : NULL_TREE);
15356 return integer_valued_real_call_p (get_call_combined_fn (t),
15357 arg0, arg1, depth);
15358 }
15359
15360 default:
15361 return integer_valued_real_invalid_p (t, depth);
15362 }
15363 }
15364
15365 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15366 attempt to fold the expression to a constant without modifying TYPE,
15367 OP0 or OP1.
15368
15369 If the expression could be simplified to a constant, then return
15370 the constant. If the expression would not be simplified to a
15371 constant, then return NULL_TREE. */
15372
15373 tree
15374 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15375 {
15376 tree tem = fold_binary (code, type, op0, op1);
15377 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15378 }
15379
15380 /* Given the components of a unary expression CODE, TYPE and OP0,
15381 attempt to fold the expression to a constant without modifying
15382 TYPE or OP0.
15383
15384 If the expression could be simplified to a constant, then return
15385 the constant. If the expression would not be simplified to a
15386 constant, then return NULL_TREE. */
15387
15388 tree
15389 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15390 {
15391 tree tem = fold_unary (code, type, op0);
15392 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15393 }
15394
15395 /* If EXP represents referencing an element in a constant string
15396 (either via pointer arithmetic or array indexing), return the
15397 tree representing the value accessed, otherwise return NULL. */
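/* For example (illustrative): given EXP equivalent to "abc"[1], i.e. an
   ARRAY_REF of a STRING_CST with constant index 1, this returns the
   INTEGER_CST 98 ('b') in the type of EXP.  */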
15398
15399 tree
15400 fold_read_from_constant_string (tree exp)
15401 {
15402 if ((TREE_CODE (exp) == INDIRECT_REF
15403 || TREE_CODE (exp) == ARRAY_REF)
15404 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15405 {
15406 tree exp1 = TREE_OPERAND (exp, 0);
15407 tree index;
15408 tree string;
15409 location_t loc = EXPR_LOCATION (exp);
15410
15411 if (TREE_CODE (exp) == INDIRECT_REF)
15412 string = string_constant (exp1, &index, NULL, NULL);
15413 else
15414 {
15415 tree low_bound = array_ref_low_bound (exp);
15416 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15417
15418 /* Optimize the special-case of a zero lower bound.
15419
15420 We convert the low_bound to sizetype to avoid some problems
15421 with constant folding. (E.g. suppose the lower bound is 1,
15422 and its mode is QI. Without the conversion, (ARRAY
15423 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15424 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15425 if (! integer_zerop (low_bound))
15426 index = size_diffop_loc (loc, index,
15427 fold_convert_loc (loc, sizetype, low_bound));
15428
15429 string = exp1;
15430 }
15431
15432 scalar_int_mode char_mode;
15433 if (string
15434 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15435 && TREE_CODE (string) == STRING_CST
15436 && TREE_CODE (index) == INTEGER_CST
15437 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15438 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15439 &char_mode)
15440 && GET_MODE_SIZE (char_mode) == 1)
15441 return build_int_cst_type (TREE_TYPE (exp),
15442 (TREE_STRING_POINTER (string)
15443 [TREE_INT_CST_LOW (index)]));
15444 }
15445 return NULL;
15446 }
15447
15448 /* Folds a read from vector element at IDX of vector ARG. */
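/* For example (illustrative): with ARG the VECTOR_CST { 1, 2, 3, 4 } and
   IDX 2, this returns the INTEGER_CST 3; reading beyond the explicit
   elements of a CONSTRUCTOR yields a zero of the element type.  */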
15449
15450 tree
15451 fold_read_from_vector (tree arg, poly_uint64 idx)
15452 {
15453 unsigned HOST_WIDE_INT i;
15454 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15455 && known_ge (idx, 0u)
15456 && idx.is_constant (&i))
15457 {
15458 if (TREE_CODE (arg) == VECTOR_CST)
15459 return VECTOR_CST_ELT (arg, i);
15460 else if (TREE_CODE (arg) == CONSTRUCTOR)
15461 {
15462 if (i >= CONSTRUCTOR_NELTS (arg))
15463 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15464 return CONSTRUCTOR_ELT (arg, i)->value;
15465 }
15466 }
15467 return NULL_TREE;
15468 }
15469
15470 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15471 an integer constant, real, or fixed-point constant.
15472
15473 TYPE is the type of the result. */
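/* For example (illustrative): negating the most negative INTEGER_CST of a
   signed TYPE yields the same value with TREE_OVERFLOW set on the
   result.  */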
15474
15475 static tree
15476 fold_negate_const (tree arg0, tree type)
15477 {
15478 tree t = NULL_TREE;
15479
15480 switch (TREE_CODE (arg0))
15481 {
15482 case REAL_CST:
15483 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15484 break;
15485
15486 case FIXED_CST:
15487 {
15488 FIXED_VALUE_TYPE f;
15489 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15490 &(TREE_FIXED_CST (arg0)), NULL,
15491 TYPE_SATURATING (type));
15492 t = build_fixed (type, f);
15493 /* Propagate overflow flags. */
15494 if (overflow_p | TREE_OVERFLOW (arg0))
15495 TREE_OVERFLOW (t) = 1;
15496 break;
15497 }
15498
15499 default:
15500 if (poly_int_tree_p (arg0))
15501 {
15502 wi::overflow_type overflow;
15503 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15504 t = force_fit_type (type, res, 1,
15505 (overflow && ! TYPE_UNSIGNED (type))
15506 || TREE_OVERFLOW (arg0));
15507 break;
15508 }
15509
15510 gcc_unreachable ();
15511 }
15512
15513 return t;
15514 }
15515
15516 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15517 an integer constant or real constant.
15518
15519 TYPE is the type of the result. */
15520
15521 tree
15522 fold_abs_const (tree arg0, tree type)
15523 {
15524 tree t = NULL_TREE;
15525
15526 switch (TREE_CODE (arg0))
15527 {
15528 case INTEGER_CST:
15529 {
15530 /* If the value is unsigned or non-negative, then the absolute value
15531 is the same as the ordinary value. */
15532 wide_int val = wi::to_wide (arg0);
15533 wi::overflow_type overflow = wi::OVF_NONE;
15534 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15535 ;
15536
15537 /* If the value is negative, then the absolute value is
15538 its negation. */
15539 else
15540 val = wi::neg (val, &overflow);
15541
15542 /* Force to the destination type, set TREE_OVERFLOW for signed
15543 TYPE only. */
15544 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15545 }
15546 break;
15547
15548 case REAL_CST:
15549 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15550 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15551 else
15552 t = arg0;
15553 break;
15554
15555 default:
15556 gcc_unreachable ();
15557 }
15558
15559 return t;
15560 }
15561
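/* Illustrative example (hypothetical trees): for the INTEGER_CST -5 of type
   int, fold_abs_const returns 5; for TYPE_MIN_VALUE (integer_type_node) the
   negation overflows, so the wrapped value is returned with TREE_OVERFLOW
   set (for a signed TYPE only, as noted above).  */
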
15562 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15563 constant. TYPE is the type of the result. */
15564
15565 static tree
15566 fold_not_const (const_tree arg0, tree type)
15567 {
15568 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15569
15570 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15571 }
15572
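/* Illustrative example (hypothetical trees): fold_not_const applied to the
   INTEGER_CST 5 of type int returns -6, i.e. ~5 forced back into the
   precision of the result type by force_fit_type.  */
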
15573 /* Given CODE, a relational operator, the target type TYPE, and two
15574 constant operands OP0 and OP1, return the result of the
15575 relational operation. If the result is not a compile-time
15576 constant, then return NULL_TREE. */
15577
15578 static tree
15579 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15580 {
15581 int result, invert;
15582
15583 /* From here on, the only cases we handle are when the result is
15584 known to be a constant. */
15585
15586 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15587 {
15588 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15589 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15590
15591 /* Handle the cases where either operand is a NaN. */
15592 if (real_isnan (c0) || real_isnan (c1))
15593 {
15594 switch (code)
15595 {
15596 case EQ_EXPR:
15597 case ORDERED_EXPR:
15598 result = 0;
15599 break;
15600
15601 case NE_EXPR:
15602 case UNORDERED_EXPR:
15603 case UNLT_EXPR:
15604 case UNLE_EXPR:
15605 case UNGT_EXPR:
15606 case UNGE_EXPR:
15607 case UNEQ_EXPR:
15608 result = 1;
15609 break;
15610
15611 case LT_EXPR:
15612 case LE_EXPR:
15613 case GT_EXPR:
15614 case GE_EXPR:
15615 case LTGT_EXPR:
15616 if (flag_trapping_math)
15617 return NULL_TREE;
15618 result = 0;
15619 break;
15620
15621 default:
15622 gcc_unreachable ();
15623 }
15624
15625 return constant_boolean_node (result, type);
15626 }
15627
15628 return constant_boolean_node (real_compare (code, c0, c1), type);
15629 }
15630
15631 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15632 {
15633 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15634 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15635 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15636 }
15637
15638 /* Handle equality/inequality of complex constants. */
15639 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15640 {
15641 tree rcond = fold_relational_const (code, type,
15642 TREE_REALPART (op0),
15643 TREE_REALPART (op1));
15644 tree icond = fold_relational_const (code, type,
15645 TREE_IMAGPART (op0),
15646 TREE_IMAGPART (op1));
15647 if (code == EQ_EXPR)
15648 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15649 else if (code == NE_EXPR)
15650 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15651 else
15652 return NULL_TREE;
15653 }
15654
15655 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15656 {
15657 if (!VECTOR_TYPE_P (type))
15658 {
15659 /* Have vector comparison with scalar boolean result. */
15660 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15661 && known_eq (VECTOR_CST_NELTS (op0),
15662 VECTOR_CST_NELTS (op1)));
15663 unsigned HOST_WIDE_INT nunits;
15664 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15665 return NULL_TREE;
15666 for (unsigned i = 0; i < nunits; i++)
15667 {
15668 tree elem0 = VECTOR_CST_ELT (op0, i);
15669 tree elem1 = VECTOR_CST_ELT (op1, i);
15670 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15671 if (tmp == NULL_TREE)
15672 return NULL_TREE;
15673 if (integer_zerop (tmp))
15674 return constant_boolean_node (code == NE_EXPR, type);
15675 }
15676 return constant_boolean_node (code == EQ_EXPR, type);
15677 }
15678 tree_vector_builder elts;
15679 if (!elts.new_binary_operation (type, op0, op1, false))
15680 return NULL_TREE;
15681 unsigned int count = elts.encoded_nelts ();
15682 for (unsigned i = 0; i < count; i++)
15683 {
15684 tree elem_type = TREE_TYPE (type);
15685 tree elem0 = VECTOR_CST_ELT (op0, i);
15686 tree elem1 = VECTOR_CST_ELT (op1, i);
15687
15688 tree tem = fold_relational_const (code, elem_type,
15689 elem0, elem1);
15690
15691 if (tem == NULL_TREE)
15692 return NULL_TREE;
15693
15694 elts.quick_push (build_int_cst (elem_type,
15695 integer_zerop (tem) ? 0 : -1));
15696 }
15697
15698 return elts.build ();
15699 }
15700
15701 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15702
15703 To compute GT, swap the arguments and do LT.
15704 To compute GE, do LT and invert the result.
15705 To compute LE, swap the arguments, do LT and invert the result.
15706 To compute NE, do EQ and invert the result.
15707
15708 Therefore, the code below must handle only EQ and LT. */
15709
15710 if (code == LE_EXPR || code == GT_EXPR)
15711 {
15712 std::swap (op0, op1);
15713 code = swap_tree_comparison (code);
15714 }
15715
15716 /* Note that it is safe to invert for real values here because we
15717 have already handled the one case where it matters. */
15718
15719 invert = 0;
15720 if (code == NE_EXPR || code == GE_EXPR)
15721 {
15722 invert = 1;
15723 code = invert_tree_comparison (code, false);
15724 }
15725
15726 /* Compute a result for LT or EQ if the arguments permit;
15727 otherwise return NULL_TREE. */
15728 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15729 {
15730 if (code == EQ_EXPR)
15731 result = tree_int_cst_equal (op0, op1);
15732 else
15733 result = tree_int_cst_lt (op0, op1);
15734 }
15735 else
15736 return NULL_TREE;
15737
15738 if (invert)
15739 result ^= 1;
15740 return constant_boolean_node (result, type);
15741 }
15742
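/* Illustrative examples (hypothetical trees): with flag_trapping_math
   clear, fold_relational_const (LT_EXPR, boolean_type_node, nan, one)
   returns boolean_false_node, because every ordered comparison involving a
   NaN is false, whereas the same call with UNLT_EXPR returns
   boolean_true_node.  For two INTEGER_CSTs, GT_EXPR is evaluated by
   swapping the operands and doing LT_EXPR, and GE_EXPR by doing LT_EXPR
   and inverting the result, exactly as described above.  */
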
15743 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15744 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15745 itself. */
15746
15747 tree
15748 fold_build_cleanup_point_expr (tree type, tree expr)
15749 {
15750 /* If the expression does not have side effects then we don't have to wrap
15751 it with a cleanup point expression. */
15752 if (!TREE_SIDE_EFFECTS (expr))
15753 return expr;
15754
15755 /* If the expression is a RETURN_EXPR, check whether the expression inside
15756 the return, or the right-hand side of the MODIFY_EXPR inside the return,
15757 has no side effects. If either has none, we don't need to wrap the
15758 expression in a cleanup point expression. Note we don't check the
15759 left-hand side of the MODIFY_EXPR because it should always be a return decl. */
15760 if (TREE_CODE (expr) == RETURN_EXPR)
15761 {
15762 tree op = TREE_OPERAND (expr, 0);
15763 if (!op || !TREE_SIDE_EFFECTS (op))
15764 return expr;
15765 op = TREE_OPERAND (op, 1);
15766 if (!TREE_SIDE_EFFECTS (op))
15767 return expr;
15768 }
15769
15770 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15771 }
15772
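/* Illustrative example (hypothetical trees): a RETURN_EXPR whose
   MODIFY_EXPR assigns a plain constant to the return decl is returned
   unchanged, while returning the result of a call with side effects gets
   wrapped as CLEANUP_POINT_EXPR so that temporaries are cleaned up at the
   end of the full expression.  */
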
15773 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15774 of an indirection through OP0, or NULL_TREE if no simplification is
15775 possible. */
15776
15777 tree
15778 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15779 {
15780 tree sub = op0;
15781 tree subtype;
15782 poly_uint64 const_op01;
15783
15784 STRIP_NOPS (sub);
15785 subtype = TREE_TYPE (sub);
15786 if (!POINTER_TYPE_P (subtype)
15787 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15788 return NULL_TREE;
15789
15790 if (TREE_CODE (sub) == ADDR_EXPR)
15791 {
15792 tree op = TREE_OPERAND (sub, 0);
15793 tree optype = TREE_TYPE (op);
15794
15795 /* *&CONST_DECL -> the value of the const decl. */
15796 if (TREE_CODE (op) == CONST_DECL)
15797 return DECL_INITIAL (op);
15798 /* *&p => p; make sure to handle *&"str"[cst] here. */
15799 if (type == optype)
15800 {
15801 tree fop = fold_read_from_constant_string (op);
15802 if (fop)
15803 return fop;
15804 else
15805 return op;
15806 }
15807 /* *(foo *)&fooarray => fooarray[0] */
15808 else if (TREE_CODE (optype) == ARRAY_TYPE
15809 && type == TREE_TYPE (optype)
15810 && (!in_gimple_form
15811 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15812 {
15813 tree type_domain = TYPE_DOMAIN (optype);
15814 tree min_val = size_zero_node;
15815 if (type_domain && TYPE_MIN_VALUE (type_domain))
15816 min_val = TYPE_MIN_VALUE (type_domain);
15817 if (in_gimple_form
15818 && TREE_CODE (min_val) != INTEGER_CST)
15819 return NULL_TREE;
15820 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15821 NULL_TREE, NULL_TREE);
15822 }
15823 /* *(foo *)&complexfoo => __real__ complexfoo */
15824 else if (TREE_CODE (optype) == COMPLEX_TYPE
15825 && type == TREE_TYPE (optype))
15826 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15827 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15828 else if (VECTOR_TYPE_P (optype)
15829 && type == TREE_TYPE (optype))
15830 {
15831 tree part_width = TYPE_SIZE (type);
15832 tree index = bitsize_int (0);
15833 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15834 index);
15835 }
15836 }
15837
15838 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15839 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15840 {
15841 tree op00 = TREE_OPERAND (sub, 0);
15842 tree op01 = TREE_OPERAND (sub, 1);
15843
15844 STRIP_NOPS (op00);
15845 if (TREE_CODE (op00) == ADDR_EXPR)
15846 {
15847 tree op00type;
15848 op00 = TREE_OPERAND (op00, 0);
15849 op00type = TREE_TYPE (op00);
15850
15851 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15852 if (VECTOR_TYPE_P (op00type)
15853 && type == TREE_TYPE (op00type)
15854 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15855 but we want to treat offsets with MSB set as negative.
15856 For the code below negative offsets are invalid and
15857 TYPE_SIZE of the element is something unsigned, so
15858 check whether op01 fits into poly_int64, which implies
15859 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15860 then just use poly_uint64 because we want to treat the
15861 value as unsigned. */
15862 && tree_fits_poly_int64_p (op01))
15863 {
15864 tree part_width = TYPE_SIZE (type);
15865 poly_uint64 max_offset
15866 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15867 * TYPE_VECTOR_SUBPARTS (op00type));
15868 if (known_lt (const_op01, max_offset))
15869 {
15870 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15871 return fold_build3_loc (loc,
15872 BIT_FIELD_REF, type, op00,
15873 part_width, index);
15874 }
15875 }
15876 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15877 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15878 && type == TREE_TYPE (op00type))
15879 {
15880 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15881 const_op01))
15882 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15883 }
15884 /* ((foo *)&fooarray)[1] => fooarray[1] */
15885 else if (TREE_CODE (op00type) == ARRAY_TYPE
15886 && type == TREE_TYPE (op00type))
15887 {
15888 tree type_domain = TYPE_DOMAIN (op00type);
15889 tree min_val = size_zero_node;
15890 if (type_domain && TYPE_MIN_VALUE (type_domain))
15891 min_val = TYPE_MIN_VALUE (type_domain);
15892 poly_uint64 type_size, index;
15893 if (poly_int_tree_p (min_val)
15894 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15895 && multiple_p (const_op01, type_size, &index))
15896 {
15897 poly_offset_int off = index + wi::to_poly_offset (min_val);
15898 op01 = wide_int_to_tree (sizetype, off);
15899 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15900 NULL_TREE, NULL_TREE);
15901 }
15902 }
15903 }
15904 }
15905
15906 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15907 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15908 && type == TREE_TYPE (TREE_TYPE (subtype))
15909 && (!in_gimple_form
15910 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15911 {
15912 tree type_domain;
15913 tree min_val = size_zero_node;
15914 sub = build_fold_indirect_ref_loc (loc, sub);
15915 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15916 if (type_domain && TYPE_MIN_VALUE (type_domain))
15917 min_val = TYPE_MIN_VALUE (type_domain);
15918 if (in_gimple_form
15919 && TREE_CODE (min_val) != INTEGER_CST)
15920 return NULL_TREE;
15921 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15922 NULL_TREE);
15923 }
15924
15925 return NULL_TREE;
15926 }
15927
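/* Illustrative examples of the simplifications above, written at the source
   level (hypothetical objects; int fooarray[4] and _Complex double
   complexfoo are assumed):
     *(int *)&fooarray          => fooarray[0]
     ((int *)&fooarray)[1]      => fooarray[1]
     *(double *)&complexfoo     => __real__ complexfoo
     ((double *)&complexfoo)[1] => __imag__ complexfoo
   The second and fourth forms reach here as POINTER_PLUS_EXPRs whose byte
   offsets (4 and 8 respectively, assuming a 4-byte int and an 8-byte
   double) are matched against TYPE_SIZE_UNIT of the accessed type.  */
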
15928 /* Builds an expression for an indirection through T, simplifying some
15929 cases. */
15930
15931 tree
15932 build_fold_indirect_ref_loc (location_t loc, tree t)
15933 {
15934 tree type = TREE_TYPE (TREE_TYPE (t));
15935 tree sub = fold_indirect_ref_1 (loc, type, t);
15936
15937 if (sub)
15938 return sub;
15939
15940 return build1_loc (loc, INDIRECT_REF, type, t);
15941 }
15942
15943 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15944
15945 tree
15946 fold_indirect_ref_loc (location_t loc, tree t)
15947 {
15948 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15949
15950 if (sub)
15951 return sub;
15952 else
15953 return t;
15954 }
15955
15956 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15957 whose result is ignored. The type of the returned tree need not be
15958 the same as the original expression. */
15959
15960 tree
15961 fold_ignored_result (tree t)
15962 {
15963 if (!TREE_SIDE_EFFECTS (t))
15964 return integer_zero_node;
15965
15966 for (;;)
15967 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15968 {
15969 case tcc_unary:
15970 t = TREE_OPERAND (t, 0);
15971 break;
15972
15973 case tcc_binary:
15974 case tcc_comparison:
15975 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15976 t = TREE_OPERAND (t, 0);
15977 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15978 t = TREE_OPERAND (t, 1);
15979 else
15980 return t;
15981 break;
15982
15983 case tcc_expression:
15984 switch (TREE_CODE (t))
15985 {
15986 case COMPOUND_EXPR:
15987 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15988 return t;
15989 t = TREE_OPERAND (t, 0);
15990 break;
15991
15992 case COND_EXPR:
15993 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15994 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15995 return t;
15996 t = TREE_OPERAND (t, 0);
15997 break;
15998
15999 default:
16000 return t;
16001 }
16002 break;
16003
16004 default:
16005 return t;
16006 }
16007 }
16008
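/* Illustrative example (hypothetical trees): for the ignored expression
   (x = f ()) + 3, the tcc_binary case peels off the PLUS_EXPR because the
   constant operand has no side effects, and the MODIFY_EXPR x = f () is
   returned; an expression with no side effects at all is replaced by
   integer_zero_node up front.  */
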
16009 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16010
16011 tree
16012 round_up_loc (location_t loc, tree value, unsigned int divisor)
16013 {
16014 tree div = NULL_TREE;
16015
16016 if (divisor == 1)
16017 return value;
16018
16019 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16020 have to do anything. Only do this when we are not given a const,
16021 because in that case, this check is more expensive than just
16022 doing it. */
16023 if (TREE_CODE (value) != INTEGER_CST)
16024 {
16025 div = build_int_cst (TREE_TYPE (value), divisor);
16026
16027 if (multiple_of_p (TREE_TYPE (value), value, div))
16028 return value;
16029 }
16030
16031 /* If divisor is a power of two, simplify this to bit manipulation. */
16032 if (pow2_or_zerop (divisor))
16033 {
16034 if (TREE_CODE (value) == INTEGER_CST)
16035 {
16036 wide_int val = wi::to_wide (value);
16037 bool overflow_p;
16038
16039 if ((val & (divisor - 1)) == 0)
16040 return value;
16041
16042 overflow_p = TREE_OVERFLOW (value);
16043 val += divisor - 1;
16044 val &= (int) -divisor;
16045 if (val == 0)
16046 overflow_p = true;
16047
16048 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16049 }
16050 else
16051 {
16052 tree t;
16053
16054 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16055 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16056 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16057 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16058 }
16059 }
16060 else
16061 {
16062 if (!div)
16063 div = build_int_cst (TREE_TYPE (value), divisor);
16064 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16065 value = size_binop_loc (loc, MULT_EXPR, value, div);
16066 }
16067
16068 return value;
16069 }
16070
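/* Illustrative example (hypothetical values): for a power-of-two divisor
   the rounding reduces to (value + divisor - 1) & -divisor, so a constant
   value of 37 rounded up to a multiple of 16 folds to 48; for a
   non-power-of-two divisor such as 24, a value of 50 goes through
   CEIL_DIV_EXPR and MULT_EXPR and folds to 72.  */
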
16071 /* Likewise, but round down. */
16072
16073 tree
16074 round_down_loc (location_t loc, tree value, int divisor)
16075 {
16076 tree div = NULL_TREE;
16077
16078 gcc_assert (divisor > 0);
16079 if (divisor == 1)
16080 return value;
16081
16082 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16083 have to do anything. Only do this when we are not given a const,
16084 because in that case, this check is more expensive than just
16085 doing it. */
16086 if (TREE_CODE (value) != INTEGER_CST)
16087 {
16088 div = build_int_cst (TREE_TYPE (value), divisor);
16089
16090 if (multiple_of_p (TREE_TYPE (value), value, div))
16091 return value;
16092 }
16093
16094 /* If divisor is a power of two, simplify this to bit manipulation. */
16095 if (pow2_or_zerop (divisor))
16096 {
16097 tree t;
16098
16099 t = build_int_cst (TREE_TYPE (value), -divisor);
16100 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16101 }
16102 else
16103 {
16104 if (!div)
16105 div = build_int_cst (TREE_TYPE (value), divisor);
16106 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16107 value = size_binop_loc (loc, MULT_EXPR, value, div);
16108 }
16109
16110 return value;
16111 }
16112
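/* Illustrative example (hypothetical values): rounding down to a
   power-of-two divisor is just value & -divisor, so a constant value of 37
   rounded down to a multiple of 16 folds to 32.  */
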
16113 /* Returns a pointer to the base of the object addressed by EXP and
16114 extracts the information about the offset of the access, storing it
16115 in *PBITPOS and *POFFSET. */
16116
16117 static tree
16118 split_address_to_core_and_offset (tree exp,
16119 poly_int64_pod *pbitpos, tree *poffset)
16120 {
16121 tree core;
16122 machine_mode mode;
16123 int unsignedp, reversep, volatilep;
16124 poly_int64 bitsize;
16125 location_t loc = EXPR_LOCATION (exp);
16126
16127 if (TREE_CODE (exp) == ADDR_EXPR)
16128 {
16129 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16130 poffset, &mode, &unsignedp, &reversep,
16131 &volatilep);
16132 core = build_fold_addr_expr_loc (loc, core);
16133 }
16134 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16135 {
16136 core = TREE_OPERAND (exp, 0);
16137 STRIP_NOPS (core);
16138 *pbitpos = 0;
16139 *poffset = TREE_OPERAND (exp, 1);
16140 if (poly_int_tree_p (*poffset))
16141 {
16142 poly_offset_int tem
16143 = wi::sext (wi::to_poly_offset (*poffset),
16144 TYPE_PRECISION (TREE_TYPE (*poffset)));
16145 tem <<= LOG2_BITS_PER_UNIT;
16146 if (tem.to_shwi (pbitpos))
16147 *poffset = NULL_TREE;
16148 }
16149 }
16150 else
16151 {
16152 core = exp;
16153 *pbitpos = 0;
16154 *poffset = NULL_TREE;
16155 }
16156
16157 return core;
16158 }
16159
16160 /* Returns true if addresses of E1 and E2 differ by a constant, false
16161 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16162
16163 bool
16164 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16165 {
16166 tree core1, core2;
16167 poly_int64 bitpos1, bitpos2;
16168 tree toffset1, toffset2, tdiff, type;
16169
16170 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16171 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16172
16173 poly_int64 bytepos1, bytepos2;
16174 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16175 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16176 || !operand_equal_p (core1, core2, 0))
16177 return false;
16178
16179 if (toffset1 && toffset2)
16180 {
16181 type = TREE_TYPE (toffset1);
16182 if (type != TREE_TYPE (toffset2))
16183 toffset2 = fold_convert (type, toffset2);
16184
16185 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16186 if (!cst_and_fits_in_hwi (tdiff))
16187 return false;
16188
16189 *diff = int_cst_value (tdiff);
16190 }
16191 else if (toffset1 || toffset2)
16192 {
16193 /* If only one of the offsets is non-constant, the difference cannot
16194 be a constant. */
16195 return false;
16196 }
16197 else
16198 *diff = 0;
16199
16200 *diff += bytepos1 - bytepos2;
16201 return true;
16202 }
16203
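/* Illustrative example (hypothetical trees, assuming a 4-byte int): for
   int a[10], ptr_difference_const applied to &a[7] and &a[2] splits both
   addresses into the common core &a plus constant bit positions, stores
   (7 - 2) * 4 = 20 in *DIFF and returns true; if either offset were a
   non-constant expression it would return false instead.  */
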
16204 /* Return OFF converted to a pointer offset type suitable as offset for
16205 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16206 tree
16207 convert_to_ptrofftype_loc (location_t loc, tree off)
16208 {
16209 if (ptrofftype_p (TREE_TYPE (off)))
16210 return off;
16211 return fold_convert_loc (loc, sizetype, off);
16212 }
16213
16214 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16215 tree
16216 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16217 {
16218 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16219 ptr, convert_to_ptrofftype_loc (loc, off));
16220 }
16221
16222 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16223 tree
16224 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16225 {
16226 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16227 ptr, size_int (off));
16228 }
16229
16230 /* Return a pointer to a NUL-terminated string containing the sequence
16231 of bytes corresponding to the representation of the object referred to
16232 by SRC (or a subsequence of such bytes within it if SRC is a reference
16233 to an initialized constant array plus some constant offset).
16234 Set *STRSIZE to the number of bytes in the constant sequence including
16235 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16236 where A is the array that stores the constant sequence that SRC points
16237 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16238 need not point to a string or even an array of characters but may point
16239 to an object of any type. */
16240
16241 const char *
16242 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16243 {
16244 /* The offset into the array A storing the string, and A's byte size. */
16245 tree offset_node;
16246 tree mem_size;
16247
16248 if (strsize)
16249 *strsize = 0;
16250
16251 if (strsize)
16252 src = byte_representation (src, &offset_node, &mem_size, NULL);
16253 else
16254 src = string_constant (src, &offset_node, &mem_size, NULL);
16255 if (!src)
16256 return NULL;
16257
16258 unsigned HOST_WIDE_INT offset = 0;
16259 if (offset_node != NULL_TREE)
16260 {
16261 if (!tree_fits_uhwi_p (offset_node))
16262 return NULL;
16263 else
16264 offset = tree_to_uhwi (offset_node);
16265 }
16266
16267 if (!tree_fits_uhwi_p (mem_size))
16268 return NULL;
16269
16270 /* ARRAY_SIZE is the byte size of the array the constant sequence
16271 is stored in and equal to sizeof A. INIT_BYTES is the number
16272 of bytes in the constant sequence used to initialize the array,
16273 including any embedded NULs as well as the terminating NUL (for
16274 strings), but not including any trailing zeros/NULs past
16275 the terminating one appended implicitly to a string literal to
16276 zero out the remainder of the array it's stored in. For example,
16277 given:
16278 const char a[7] = "abc\0d";
16279 n = strlen (a + 1);
16280 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16281 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16282 is equal to strlen (A) + 1. */
16283 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16284 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16285 const char *string = TREE_STRING_POINTER (src);
16286
16287 /* Ideally this would turn into a gcc_checking_assert over time. */
16288 if (init_bytes > array_size)
16289 init_bytes = array_size;
16290
16291 if (init_bytes == 0 || offset >= array_size)
16292 return NULL;
16293
16294 if (strsize)
16295 {
16296 /* Compute and store the number of characters from the beginning
16297 of the substring at OFFSET to the end, including the terminating
16298 nul. Offsets past the initial length refer to empty strings. */
16299 if (offset < init_bytes)
16300 *strsize = init_bytes - offset;
16301 else
16302 *strsize = 1;
16303 }
16304 else
16305 {
16306 tree eltype = TREE_TYPE (TREE_TYPE (src));
16307 /* Support only properly NUL-terminated single byte strings. */
16308 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16309 return NULL;
16310 if (string[init_bytes - 1] != '\0')
16311 return NULL;
16312 }
16313
16314 return offset < init_bytes ? string + offset : "";
16315 }
16316
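/* Illustrative example, continuing the array from the comment above
   (hypothetical trees): for
     const char a[7] = "abc\0d";
   getbyterep applied to the address of a[1] with a non-null STRSIZE returns
   a pointer to the remaining bytes and sets *STRSIZE to 6 - 1 = 5, while
   c_getstr (below) on the same address returns the NUL-terminated
   string "bc".  */
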
16317 /* Return a pointer to a NUL-terminated string corresponding to
16318 the expression STR referencing a constant string, possibly
16319 involving a constant offset. Return null if STR either doesn't
16320 reference a constant string or if it involves a nonconstant
16321 offset. */
16322
16323 const char *
16324 c_getstr (tree str)
16325 {
16326 return getbyterep (str, NULL);
16327 }
16328
16329 /* Given a tree T, compute which bits in T may be nonzero. */
16330
16331 wide_int
16332 tree_nonzero_bits (const_tree t)
16333 {
16334 switch (TREE_CODE (t))
16335 {
16336 case INTEGER_CST:
16337 return wi::to_wide (t);
16338 case SSA_NAME:
16339 return get_nonzero_bits (t);
16340 case NON_LVALUE_EXPR:
16341 case SAVE_EXPR:
16342 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16343 case BIT_AND_EXPR:
16344 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16345 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16346 case BIT_IOR_EXPR:
16347 case BIT_XOR_EXPR:
16348 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16349 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16350 case COND_EXPR:
16351 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16352 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16353 CASE_CONVERT:
16354 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16355 TYPE_PRECISION (TREE_TYPE (t)),
16356 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16357 case PLUS_EXPR:
16358 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16359 {
16360 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16361 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16362 if (wi::bit_and (nzbits1, nzbits2) == 0)
16363 return wi::bit_or (nzbits1, nzbits2);
16364 }
16365 break;
16366 case LSHIFT_EXPR:
16367 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16368 {
16369 tree type = TREE_TYPE (t);
16370 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16371 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16372 TYPE_PRECISION (type));
16373 return wi::neg_p (arg1)
16374 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16375 : wi::lshift (nzbits, arg1);
16376 }
16377 break;
16378 case RSHIFT_EXPR:
16379 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16380 {
16381 tree type = TREE_TYPE (t);
16382 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16383 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16384 TYPE_PRECISION (type));
16385 return wi::neg_p (arg1)
16386 ? wi::lshift (nzbits, -arg1)
16387 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16388 }
16389 break;
16390 default:
16391 break;
16392 }
16393
16394 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16395 }
16396
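/* Illustrative example (hypothetical trees): for (x & 0xf0) >> 4 with x an
   unsigned int SSA_NAME whose nonzero bits are unknown, the BIT_AND_EXPR
   case yields the mask 0xf0 and the RSHIFT_EXPR case shifts it right,
   giving 0xf; a PLUS_EXPR whose operands have disjoint nonzero bits, such
   as (x & 0xf0) + 1, gives 0xf1.  */
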
16397 #if CHECKING_P
16398
16399 namespace selftest {
16400
16401 /* Helper functions for writing tests of folding trees. */
16402
16403 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16404
16405 static void
16406 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16407 tree constant)
16408 {
16409 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16410 }
16411
16412 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
16413 wrapping WRAPPED_EXPR. */
16414
16415 static void
16416 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16417 tree wrapped_expr)
16418 {
16419 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16420 ASSERT_NE (wrapped_expr, result);
16421 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16422 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16423 }
16424
16425 /* Verify that various arithmetic binary operations are folded
16426 correctly. */
16427
16428 static void
16429 test_arithmetic_folding ()
16430 {
16431 tree type = integer_type_node;
16432 tree x = create_tmp_var_raw (type, "x");
16433 tree zero = build_zero_cst (type);
16434 tree one = build_int_cst (type, 1);
16435
16436 /* Addition. */
16437 /* 1 <-- (0 + 1) */
16438 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
16439 one);
16440 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
16441 one);
16442
16443 /* (nonlvalue)x <-- (x + 0) */
16444 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
16445 x);
16446
16447 /* Subtraction. */
16448 /* 0 <-- (x - x) */
16449 assert_binop_folds_to_const (x, MINUS_EXPR, x,
16450 zero);
16451 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
16452 x);
16453
16454 /* Multiplication. */
16455 /* 0 <-- (x * 0) */
16456 assert_binop_folds_to_const (x, MULT_EXPR, zero,
16457 zero);
16458
16459 /* (nonlvalue)x <-- (x * 1) */
16460 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
16461 x);
16462 }
16463
16464 /* Verify that various binary operations on vectors are folded
16465 correctly. */
16466
16467 static void
16468 test_vector_folding ()
16469 {
16470 tree inner_type = integer_type_node;
16471 tree type = build_vector_type (inner_type, 4);
16472 tree zero = build_zero_cst (type);
16473 tree one = build_one_cst (type);
16474 tree index = build_index_vector (type, 0, 1);
16475
16476 /* Verify equality tests that return a scalar boolean result. */
16477 tree res_type = boolean_type_node;
16478 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
16479 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
16480 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
16481 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
16482 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
16483 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16484 index, one)));
16485 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
16486 index, index)));
16487 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16488 index, index)));
16489 }
16490
16491 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16492
16493 static void
16494 test_vec_duplicate_folding ()
16495 {
16496 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
16497 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
16498 /* This will be 1 if VEC_MODE isn't a vector mode. */
16499 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
16500
16501 tree type = build_vector_type (ssizetype, nunits);
16502 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
16503 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
16504 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
16505 }
16506
16507 /* Run all of the selftests within this file. */
16508
16509 void
16510 fold_const_c_tests ()
16511 {
16512 test_arithmetic_folding ();
16513 test_vector_folding ();
16514 test_vec_duplicate_folding ();
16515 }
16516
16517 } // namespace selftest
16518
16519 #endif /* CHECKING_P */