on ARRAY_REFs sign-extend offsets only from sizetype's precision [PR98255]
authorJakub Jelinek <jakub@redhat.com>
Fri, 22 Jan 2021 10:42:03 +0000 (11:42 +0100)
committerJakub Jelinek <jakub@redhat.com>
Fri, 22 Jan 2021 10:42:03 +0000 (11:42 +0100)
As discussed in the PR, the problem here is that the routines changed in
this patch sign-extend the difference of index and low_bound from the
precision of the index, so e.g. when index is unsigned int and contains
value -2U, we treat it as index -2 rather than 0x00000000fffffffeU on 64-bit
arches.
On the other hand, get_inner_reference which is used during expansion, does:
            if (! integer_zerop (low_bound))
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                   index, low_bound);

            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (MULT_EXPR,
                                             fold_convert (sizetype, index),
                                             unit_size));
which effectively requires that either low_bound is constant 0, in which case
index in ARRAY_REFs can be of an arbitrary type which is then sign- or zero-
extended to sizetype, or low_bound is something else, in which case index and
low_bound must have compatible types and the difference is still converted
afterwards to sizetype; from there, a few lines later:
expr.c-  if (poly_int_tree_p (offset))
expr.c-    {
expr.c:      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
expr.c-                               TYPE_PRECISION (sizetype));
The following patch makes those routines match what get_inner_reference is
doing.

2021-01-22  Jakub Jelinek  <jakub@redhat.com>

PR tree-optimization/98255
* tree-dfa.c (get_ref_base_and_extent): For ARRAY_REFs, sign
extend index - low_bound from sizetype's precision rather than index
precision.
(get_addr_base_and_unit_offset_1): Likewise.
* tree-ssa-sccvn.c (ao_ref_init_from_vn_reference): Likewise.
* gimple-fold.c (fold_const_aggregate_ref_1): Likewise.

* gcc.dg/pr98255.c: New test.

gcc/gimple-fold.c
gcc/testsuite/gcc.dg/pr98255.c [new file with mode: 0644]
gcc/tree-dfa.c
gcc/tree-ssa-sccvn.c

index fe46d987faab657b7d5c2c55a4c637b5bb996070..de5a6c22395d8114000eea7599aba2d4a39f6c6d 100644 (file)
@@ -8007,7 +8007,7 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
              poly_offset_int woffset
                = wi::sext (wi::to_poly_offset (idx)
                            - wi::to_poly_offset (low_bound),
-                           TYPE_PRECISION (TREE_TYPE (idx)));
+                           TYPE_PRECISION (sizetype));
              woffset *= tree_to_uhwi (unit_size);
              woffset *= BITS_PER_UNIT;
              if (woffset.to_shwi (&offset))
diff --git a/gcc/testsuite/gcc.dg/pr98255.c b/gcc/testsuite/gcc.dg/pr98255.c
new file mode 100644 (file)
index 0000000..5cbed68
--- /dev/null
@@ -0,0 +1,49 @@
+/* PR tree-optimization/98255 */
+/* { dg-do run } */
+/* { dg-options "-Os" } */
+/* { dg-additional-options "-fPIC" { target fpic } } */
+
+struct A { volatile unsigned b; unsigned c; };
+int d, *e, h, k, l;
+static struct A f;
+long g;
+static unsigned i = -2U;
+volatile int j;
+
+long
+foo (void)
+{
+  char n[4][4][3]
+    = { { {9, 2, 8}, {9, 2, 8}, {9, 2, 8}, {9} }, { {8} }, { {8} }, { {2} } };
+  while (d)
+    {
+      for (; f.c < 4; f.c++)
+       {
+         *e = 0;
+         h = n[f.c + 4][0][d];
+       }
+      while (g)
+       return n[0][3][i];
+      while (1)
+       {
+         if (k)
+           {
+             j = 0;
+             if (j)
+               continue;
+           }
+         if (l)
+           break;
+       }
+    }
+  return 0;
+}
+
+int
+main ()
+{
+  asm volatile ("" : "+g" (d), "+g" (g), "+g" (f.c));
+  asm volatile ("" : "+g" (e), "+g" (k), "+g" (l));
+  foo ();
+  return 0;
+}
index 648fdd0e0d79474baf90a30afc781d696b6cf97c..0482b05e26cf9dd96a1c6df556d39f1324a68450 100644 (file)
@@ -503,7 +503,7 @@ get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
                poly_offset_int woffset
                  = wi::sext (wi::to_poly_offset (index)
                              - wi::to_poly_offset (low_bound),
-                             TYPE_PRECISION (TREE_TYPE (index)));
+                             TYPE_PRECISION (sizetype));
                woffset *= wi::to_offset (unit_size);
                woffset <<= LOG2_BITS_PER_UNIT;
                bit_offset += woffset;
@@ -564,7 +564,7 @@ get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
                      {
                        poly_offset_int woffset
                          = wi::sext (omin - lbound,
-                                     TYPE_PRECISION (TREE_TYPE (index)));
+                                     TYPE_PRECISION (sizetype));
                        woffset *= wi::to_offset (unit_size);
                        woffset <<= LOG2_BITS_PER_UNIT;
                        bit_offset += woffset;
@@ -822,7 +822,7 @@ get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
            poly_offset_int woffset
                = wi::sext (wi::to_poly_offset (index)
                            - wi::to_poly_offset (low_bound),
-                           TYPE_PRECISION (TREE_TYPE (index)));
+                           TYPE_PRECISION (sizetype));
            woffset *= wi::to_offset (unit_size);
            byte_offset += woffset.force_shwi ();
          }
index 588f1b82478ee48406e4a99d656272b7c71589b7..d45aee8e502090f0af2394442c200f91e66a4355 100644 (file)
@@ -1108,7 +1108,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
              poly_offset_int woffset
                = wi::sext (wi::to_poly_offset (op->op0)
                            - wi::to_poly_offset (op->op1),
-                           TYPE_PRECISION (TREE_TYPE (op->op0)));
+                           TYPE_PRECISION (sizetype));
              woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
              woffset <<= LOG2_BITS_PER_UNIT;
              offset += woffset;