-rw-r--r--  gcc/ChangeLog                          |  5
-rw-r--r--  gcc/testsuite/ChangeLog                |  4
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/vrp53.c  | 24
-rw-r--r--  gcc/tree-vrp.c                         | 92
4 files changed, 125 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 0b7b39e0353..0652932b5f9 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,8 @@
+2010-07-12 Jakub Jelinek <jakub@redhat.com>
+
+ * tree-vrp.c (simplify_bit_ops_using_ranges): New function.
+ (simplify_stmt_using_ranges): Use it.
+
2010-07-12 Ulrich Weigand <Ulrich.Weigand@de.ibm.com>
* config/spu/spu.h (struct spu_builtin_description): Remove FNDECL
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index f9870585d2b..94f7e77f928 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,7 @@
+2010-07-12 Jakub Jelinek <jakub@redhat.com>
+
+ * gcc.dg/tree-ssa/vrp53.c: New test.
+
2010-07-12 Paolo Carlini <paolo.carlini@oracle.com>
PR c++/44907
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/vrp53.c b/gcc/testsuite/gcc.dg/tree-ssa/vrp53.c
new file mode 100644
index 00000000000..66bbfb67b90
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/vrp53.c
@@ -0,0 +1,24 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-vrp1" } */
+
+int
+f1 (int x)
+{
+  x &= 0xff;
+  x += 0x400;
+  x &= 0x7ff;
+  return x;
+}
+
+int
+f2 (int x)
+{
+  x &= 0xff;
+  x += 0x5400;
+  x |= 0x4400;
+  return x;
+}
+
+/* { dg-final { scan-tree-dump-not "\& (2047|0x7ff)" "vrp1" } } */
+/* { dg-final { scan-tree-dump-not "\\| (17408|0x4400)" "vrp1" } } */
+/* { dg-final { cleanup-tree-dump "vrp1" } } */
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index c8555110a59..afe69a3b8a1 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -6913,6 +6913,89 @@ simplify_abs_using_ranges (gimple stmt)
return false;
}
+/* Optimize away redundant BIT_AND_EXPR and BIT_IOR_EXPR.
+   If all the bits that are being cleared by & are already
+   known to be zero from VR, or all the bits that are being
+   set by | are already known to be one from VR, the bit
+   operation is redundant.  */
+
+static bool
+simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+{
+  tree op0 = gimple_assign_rhs1 (stmt);
+  tree op1 = gimple_assign_rhs2 (stmt);
+  tree op = NULL_TREE;
+  value_range_t vr0 = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
+  value_range_t vr1 = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
+  double_int may_be_nonzero0, may_be_nonzero1;
+  double_int must_be_nonzero0, must_be_nonzero1;
+  double_int mask;
+
+  if (TREE_CODE (op0) == SSA_NAME)
+    vr0 = *(get_value_range (op0));
+  else if (is_gimple_min_invariant (op0))
+    set_value_range_to_value (&vr0, op0, NULL);
+  else
+    return false;
+
+  if (TREE_CODE (op1) == SSA_NAME)
+    vr1 = *(get_value_range (op1));
+  else if (is_gimple_min_invariant (op1))
+    set_value_range_to_value (&vr1, op1, NULL);
+  else
+    return false;
+
+  if (!zero_nonzero_bits_from_vr (&vr0, &may_be_nonzero0, &must_be_nonzero0))
+    return false;
+  if (!zero_nonzero_bits_from_vr (&vr1, &may_be_nonzero1, &must_be_nonzero1))
+    return false;
+
+  switch (gimple_assign_rhs_code (stmt))
+    {
+    case BIT_AND_EXPR:
+      mask = double_int_and (may_be_nonzero0,
+                             double_int_not (must_be_nonzero1));
+      if (double_int_zero_p (mask))
+        {
+          op = op0;
+          break;
+        }
+      mask = double_int_and (may_be_nonzero1,
+                             double_int_not (must_be_nonzero0));
+      if (double_int_zero_p (mask))
+        {
+          op = op1;
+          break;
+        }
+      break;
+    case BIT_IOR_EXPR:
+      mask = double_int_and (may_be_nonzero0,
+                             double_int_not (must_be_nonzero1));
+      if (double_int_zero_p (mask))
+        {
+          op = op1;
+          break;
+        }
+      mask = double_int_and (may_be_nonzero1,
+                             double_int_not (must_be_nonzero0));
+      if (double_int_zero_p (mask))
+        {
+          op = op0;
+          break;
+        }
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
+  if (op == NULL_TREE)
+    return false;
+
+  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op), op, NULL);
+  update_stmt (gsi_stmt (*gsi));
+  return true;
+}
+
/* We are comparing trees OP0 and OP1 using COND_CODE. OP0 has
a known value range VR.
@@ -7198,6 +7281,15 @@ simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
return simplify_abs_using_ranges (stmt);
break;
+        case BIT_AND_EXPR:
+        case BIT_IOR_EXPR:
+          /* Optimize away BIT_AND_EXPR and BIT_IOR_EXPR
+             if all the bits being cleared are already cleared or
+             all the bits being set are already set.  */
+          if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
+            return simplify_bit_ops_using_ranges (gsi, stmt);
+          break;
+
default:
break;
}
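
For illustration only (not part of the commit): given the scan-tree-dump-not directives in vrp53.c, the vrp1 pass is expected to leave the two test functions roughly equivalent to the sketch below. The reasoning mirrors simplify_bit_ops_using_ranges: after the preceding statements VRP knows a value range for x, and from that range every bit the & would clear is already zero, respectively every bit the | would set is already one, so the statement is rewritten into a plain copy.

/* Hedged sketch of the expected post-vrp1 shape of the test functions;
   the pass itself rewrites the GIMPLE assignment into a copy, this is
   only the source-level equivalent, not actual pass output.  */

int
f1 (int x)
{
  x &= 0xff;   /* x in [0x000, 0x0ff].  */
  x += 0x400;  /* x in [0x400, 0x4ff]; may-be-nonzero bits = 0x4ff.  */
  /* "x &= 0x7ff" is dropped: 0x4ff & ~0x7ff == 0, so the AND clears nothing.  */
  return x;
}

int
f2 (int x)
{
  x &= 0xff;    /* x in [0x000, 0x0ff].  */
  x += 0x5400;  /* x in [0x5400, 0x54ff]; must-be-nonzero bits include 0x5400.  */
  /* "x |= 0x4400" is dropped: 0x4400 & ~0x5400 == 0, so the OR sets nothing new.  */
  return x;
}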