https://gcc.gnu.org/git/?p=gcc.git;a=commit;h=d54151df3ba0ee3203e0b8cb8f8fcd168a766c51
https://gcc.gnu.org/PR114965

From d54151df3ba0ee3203e0b8cb8f8fcd168a766c51 Mon Sep 17 00:00:00 2001
From: Jakub Jelinek <jakub@redhat.com>
Date: Wed, 8 May 2024 10:17:32 +0200
Subject: [PATCH] reassoc: Fix up optimize_range_tests_to_bit_test [PR114965]

The optimize_range_tests_to_bit_test optimization normally emits a range
test first:
          if (entry_test_needed)
            {
              tem = build_range_check (loc, optype, unshare_expr (exp),
                                       false, lowi, high);
              if (tem == NULL_TREE || is_gimple_val (tem))
                continue;
            }
so during the bit test we already know that exp is in the [lowi, high]
range; this entry test is skipped when range info tells us it isn't
necessary.
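As a hand-written sketch of that shape (example values of mine, not
compiler output), a source test like
  exp == 43 || exp == 48 || exp == 60
is emitted roughly as
  if (exp - 43U <= 60U - 43U                 /* entry range test */
      && ((1UL << (exp - 43)) & 0x20021UL))  /* mask bits 0, 5, 17 */
    ...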
Also, it normally emits shifts with an exp - lowi counter, but has an
optimization to use exp directly as the counter if the mask isn't a more
expensive constant in that case, lowi is > 0 and high is smaller than
prec.
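A sketch of that variant (again example values of mine): with lowi == 43
and high == 48, the normal form
  (1UL << (exp - 43)) & 0x21UL     /* mask bits 0 and 5: values 43, 48 */
can, when it is known to be safe, be replaced by
  (1UL << exp) & (0x21UL << 43)    /* same mask pre-shifted by lowi */
which saves the subtraction at the cost of a possibly more expensive mask
constant.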

The following testcase is miscompiled because both of these unusual cases
are triggered.  The range of exp is [43, 43][48, 48][95, 95], so on a
64-bit arch we decide we don't need the entry test, because 95 - 43 < 64.
And we also decide to use just exp as the counter, because the range test
tests just for exp == 43 || exp == 48, so high is smaller than 64 too.
Because 95 is in the exp range, we can't do that; we'd either need to
do a range test first, i.e.
if (exp - 43U <= 48U - 43U) if ((1UL << exp) & mask1)
or subtract lowi from the shift counter, i.e.
if ((1UL << (exp - 43)) & mask2)
but we can't omit both unless r.upper_bound () is < prec.

The following patch ensures that.
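To make the failure concrete (a sketch of mine built from the values in
the testcase below, where '+' is 43, '0' is 48 and '_' is 95):
  (1UL << exp) & ((1UL << 43) | (1UL << 48))
    /* broken: exp == 95 shifts past bit 63 of a 64-bit word, invalid */
  (1UL << (exp - 43)) & ((1UL << 0) | (1UL << 5))
    /* subtraction kept: exp == 95 shifts by 52, stays in range and
       correctly yields 0 */
With the patch, the exp - lowi subtraction is only dropped when the entry
test is emitted or when exp's recorded upper bound is below prec.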

2024-05-08  Jakub Jelinek  <jakub@redhat.com>

	PR tree-optimization/114965
	* tree-ssa-reassoc.cc (optimize_range_tests_to_bit_test): Don't try to
	optimize away exp - lowi subtraction from shift count unless entry
	test is emitted or unless r.upper_bound () is smaller than prec.

	* gcc.c-torture/execute/pr114965.c: New test.

(cherry picked from commit 9adec2d91e62a479474ae79df5b455fd4b8463ba)
---
 .../gcc.c-torture/execute/pr114965.c          | 30 +++++++++++++++++++
 gcc/tree-ssa-reassoc.cc                       |  3 +-
 2 files changed, 32 insertions(+), 1 deletion(-)
 create mode 100644 gcc/testsuite/gcc.c-torture/execute/pr114965.c

diff --git a/gcc/testsuite/gcc.c-torture/execute/pr114965.c b/gcc/testsuite/gcc.c-torture/execute/pr114965.c
new file mode 100644
index 000000000000..89d68e187015
--- /dev/null
+++ b/gcc/testsuite/gcc.c-torture/execute/pr114965.c
@@ -0,0 +1,30 @@
+/* PR tree-optimization/114965 */
+
+static void
+foo (const char *x)
+{
+
+  char a = '0';
+  while (1)
+    {
+      switch (*x)
+	{
+	case '_':
+	case '+':
+	  a = *x;
+	  x++;
+	  continue;
+	default:
+	  break;
+	}
+      break;
+    }
+  if (a == '0' || a == '+')
+    __builtin_abort ();
+}
+
+int
+main ()
+{
+  foo ("_");
+}
diff --git a/gcc/tree-ssa-reassoc.cc b/gcc/tree-ssa-reassoc.cc
index 61f54f07b577..556ecdebe2d7 100644
--- a/gcc/tree-ssa-reassoc.cc
+++ b/gcc/tree-ssa-reassoc.cc
@@ -3418,7 +3418,8 @@ optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length,
 	     We can avoid then subtraction of the minimum value, but the
 	     mask constant could be perhaps more expensive.  */
 	  if (compare_tree_int (lowi, 0) > 0
-	      && compare_tree_int (high, prec) < 0)
+	      && compare_tree_int (high, prec) < 0
+	      && (entry_test_needed || wi::ltu_p (r.upper_bound (), prec)))
 	    {
 	      int cost_diff;
 	      HOST_WIDE_INT m = tree_to_uhwi (lowi);
-- 
2.39.3
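For reference, the new test can also be exercised by hand (command line is
mine, not from the commit):
  gcc -O2 gcc/testsuite/gcc.c-torture/execute/pr114965.c -o pr114965 && ./pr114965
With an affected compiler and optimization enabled the program is expected
to abort; with the patch applied it should exit normally, which is what the
c-torture execute harness checks across its optimization levels.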