Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r-- | gcc/fold-const.c | 251
1 file changed, 187 insertions, 64 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 553a9c37d7a..bece8d74b46 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -1,5 +1,5 @@
 /* Fold a constant sub-tree into a single node for C-compiler
-   Copyright (C) 1987-2015 Free Software Foundation, Inc.
+   Copyright (C) 1987-2016 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -74,6 +74,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-into-ssa.h"
 #include "md5.h"
 #include "case-cfn-macros.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
 
 #ifndef LOAD_EXTEND_OP
 #define LOAD_EXTEND_OP(M) UNKNOWN
@@ -1150,9 +1152,10 @@ const_binop (enum tree_code code, tree arg1, tree arg2)
       mode = TYPE_MODE (type);
 
       /* Don't perform operation if we honor signaling NaNs and
-	 either operand is a NaN.  */
+	 either operand is a signaling NaN.  */
       if (HONOR_SNANS (mode)
-	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
+	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
+	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
 	return NULL_TREE;
 
       /* Don't perform operation if it would raise a division
@@ -1165,9 +1168,21 @@ const_binop (enum tree_code code, tree arg1, tree arg2)
       /* If either operand is a NaN, just return it.  Otherwise, set up
 	 for floating-point trap; we return an overflow.  */
       if (REAL_VALUE_ISNAN (d1))
-	return arg1;
+	{
+	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
+	     is off.  */
+	  d1.signalling = 0;
+	  t = build_real (type, d1);
+	  return t;
+	}
       else if (REAL_VALUE_ISNAN (d2))
-	return arg2;
+	{
+	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
+	     is off.  */
+	  d2.signalling = 0;
+	  t = build_real (type, d2);
+	  return t;
+	}
 
       inexact = real_arithmetic (&value, code, &d1, &d2);
       real_convert (&result, mode, &value);
@@ -1537,6 +1552,15 @@ const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
 tree
 const_unop (enum tree_code code, tree type, tree arg0)
 {
+  /* Don't perform the operation, other than NEGATE and ABS, if
+     flag_signaling_nans is on and the operand is a signaling NaN.  */
+  if (TREE_CODE (arg0) == REAL_CST
+      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
+      && code != NEGATE_EXPR
+      && code != ABS_EXPR)
+    return NULL_TREE;
+
   switch (code)
     {
     CASE_CONVERT:
@@ -1948,6 +1972,12 @@ fold_convert_const_real_from_real (tree type, const_tree arg1)
   REAL_VALUE_TYPE value;
   tree t;
 
+  /* Don't perform the operation if flag_signaling_nans is on
+     and the operand is a signaling NaN.  */
+  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
+      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
+    return NULL_TREE;
+
   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
   t = build_real (type, value);
 
@@ -2152,11 +2182,8 @@ fold_convertible_p (const_tree type, const_tree arg)
     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
     case POINTER_TYPE: case REFERENCE_TYPE:
     case OFFSET_TYPE:
-      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
-	  || TREE_CODE (orig) == OFFSET_TYPE)
-	return true;
-      return (TREE_CODE (orig) == VECTOR_TYPE
-	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
+      return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+	      || TREE_CODE (orig) == OFFSET_TYPE);
 
     case REAL_TYPE:
     case FIXED_POINT_TYPE:
@@ -2211,11 +2238,11 @@ fold_convert_loc (location_t loc, tree type, tree arg)
 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
       if (TREE_CODE (orig) == COMPLEX_TYPE)
 	return fold_convert_loc (loc, type,
-			     fold_build1_loc (loc, REALPART_EXPR,
-					  TREE_TYPE (orig), arg));
+				 fold_build1_loc (loc, REALPART_EXPR,
+						  TREE_TYPE (orig), arg));
       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
-      return fold_build1_loc (loc, NOP_EXPR, type, arg);
+      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
 
     case REAL_TYPE:
       if (TREE_CODE (arg) == INTEGER_CST)
@@ -6419,13 +6446,17 @@ fold_binary_op_with_conditional_arg (location_t loc,
       if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
     }
-  else
+  else if (!(TREE_CODE (type) != VECTOR_TYPE
+	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
     {
       tree testtype = TREE_TYPE (cond);
       test = cond;
       true_value = constant_boolean_node (true, testtype);
       false_value = constant_boolean_node (false, testtype);
     }
+  else
+    /* Detect the case of mixing vector and scalar types - bail out.  */
+    return NULL_TREE;
 
   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
     cond_code = VEC_COND_EXPR;
@@ -8266,20 +8297,6 @@ pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
   return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
 }
 
-/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
-   kind INTEGER_CST.  This makes sure to properly sign-extend the
-   constant.  */
-
-static HOST_WIDE_INT
-size_low_cst (const_tree t)
-{
-  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
-  int prec = TYPE_PRECISION (TREE_TYPE (t));
-  if (prec < HOST_BITS_PER_WIDE_INT)
-    return sext_hwi (w, prec);
-  return w;
-}
-
 /* Subroutine of fold_binary.  This routine performs all of the
    transformations that are common to the equality/inequality
    operators (EQ_EXPR and NE_EXPR) and the ordering operators
@@ -8408,18 +8425,30 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
-	      base0 = TREE_OPERAND (base0, 0);
-	      indirect_base0 = true;
+	      base0
+		= get_inner_reference (TREE_OPERAND (base0, 0),
+				       &bitsize, &bitpos0, &offset0, &mode,
+				       &unsignedp, &reversep, &volatilep,
+				       false);
+	      if (TREE_CODE (base0) == INDIRECT_REF)
+		base0 = TREE_OPERAND (base0, 0);
+	      else
+		indirect_base0 = true;
	    }
-	  offset0 = TREE_OPERAND (arg0, 1);
-	  if (tree_fits_shwi_p (offset0))
+	  if (offset0 == NULL_TREE || integer_zerop (offset0))
+	    offset0 = TREE_OPERAND (arg0, 1);
+	  else
+	    offset0 = size_binop (PLUS_EXPR, offset0,
+				  TREE_OPERAND (arg0, 1));
+	  if (TREE_CODE (offset0) == INTEGER_CST)
	    {
-	      HOST_WIDE_INT off = size_low_cst (offset0);
-	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
-				   * BITS_PER_UNIT)
-		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
+	      offset_int tem = wi::sext (wi::to_offset (offset0),
+					 TYPE_PRECISION (sizetype));
+	      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
+	      tem += bitpos0;
+	      if (wi::fits_shwi_p (tem))
		{
-		  bitpos0 = off * BITS_PER_UNIT;
+		  bitpos0 = tem.to_shwi ();
		  offset0 = NULL_TREE;
		}
	    }
@@ -8443,18 +8472,30 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
-	      base1 = TREE_OPERAND (base1, 0);
-	      indirect_base1 = true;
+	      base1
+		= get_inner_reference (TREE_OPERAND (base1, 0),
+				       &bitsize, &bitpos1, &offset1, &mode,
+				       &unsignedp, &reversep, &volatilep,
+				       false);
+	      if (TREE_CODE (base1) == INDIRECT_REF)
+		base1 = TREE_OPERAND (base1, 0);
+	      else
+		indirect_base1 = true;
	    }
-	  offset1 = TREE_OPERAND (arg1, 1);
-	  if (tree_fits_shwi_p (offset1))
+	  if (offset1 == NULL_TREE || integer_zerop (offset1))
+	    offset1 = TREE_OPERAND (arg1, 1);
+	  else
+	    offset1 = size_binop (PLUS_EXPR, offset1,
+				  TREE_OPERAND (arg1, 1));
+	  if (TREE_CODE (offset1) == INTEGER_CST)
	    {
-	      HOST_WIDE_INT off = size_low_cst (offset1);
-	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
-				   * BITS_PER_UNIT)
-		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
+	      offset_int tem = wi::sext (wi::to_offset (offset1),
+					 TYPE_PRECISION (sizetype));
+	      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
+	      tem += bitpos1;
+	      if (wi::fits_shwi_p (tem))
		{
-		  bitpos1 = off * BITS_PER_UNIT;
+		  bitpos1 = tem.to_shwi ();
		  offset1 = NULL_TREE;
		}
	    }
@@ -9063,6 +9104,45 @@ tree_expr_nonzero_p (tree t)
   return ret;
 }
 
+/* Return true if T is known not to be equal to an integer W.  */
+
+bool
+expr_not_equal_to (tree t, const wide_int &w)
+{
+  wide_int min, max, nz;
+  value_range_type rtype;
+  switch (TREE_CODE (t))
+    {
+    case INTEGER_CST:
+      return wi::ne_p (t, w);
+
+    case SSA_NAME:
+      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
+	return false;
+      rtype = get_range_info (t, &min, &max);
+      if (rtype == VR_RANGE)
+	{
+	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
+	    return true;
+	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
+	    return true;
+	}
+      else if (rtype == VR_ANTI_RANGE
+	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
+	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
+	return true;
+      /* If T has some known zero bits and W has any of those bits set,
+	 then T is known not to be equal to W.  */
+      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
+			      TYPE_PRECISION (TREE_TYPE (t))), 0))
+	return true;
+      return false;
+
+    default:
+      return false;
+    }
+}
+
 /* Fold a binary expression of code CODE and type TYPE with operands
    OP0 and OP1.  LOC is the location of the resulting expression.
    Return the folded expression if folding is successful.  Otherwise,
@@ -10547,12 +10627,27 @@ fold_binary_loc (location_t loc,
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
-	  return omit_two_operands_loc (loc, type,
-					fold_build2_loc (loc, code, type,
-							 val,
-							 build_int_cst (TREE_TYPE (val),
-									0)),
-					TREE_OPERAND (arg0, 0), arg1);
+	  val = fold_build2_loc (loc, code, type, val,
+				 build_int_cst (TREE_TYPE (val), 0));
+	  return omit_two_operands_loc (loc, type, val,
+					TREE_OPERAND (arg0, 0), arg1);
+	}
+
+      /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
+      if ((TREE_CODE (arg1) == PLUS_EXPR
+	   || TREE_CODE (arg1) == POINTER_PLUS_EXPR
+	   || TREE_CODE (arg1) == MINUS_EXPR)
+	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
+									 0)),
+			      arg0, 0)
+	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
+	      || POINTER_TYPE_P (TREE_TYPE (arg1))))
+	{
+	  tree val = TREE_OPERAND (arg1, 1);
+	  val = fold_build2_loc (loc, code, type, val,
+				 build_int_cst (TREE_TYPE (val), 0));
+	  return omit_two_operands_loc (loc, type, val,
+					TREE_OPERAND (arg1, 0), arg0);
	}
 
       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
@@ -10562,12 +10657,22 @@ fold_binary_loc (location_t loc,
									 1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
-	{
-	  return omit_two_operands_loc (loc, type,
-					code == NE_EXPR
-					? boolean_true_node : boolean_false_node,
-					TREE_OPERAND (arg0, 1), arg1);
-	}
+	return omit_two_operands_loc (loc, type,
+				      code == NE_EXPR
+				      ? boolean_true_node : boolean_false_node,
+				      TREE_OPERAND (arg0, 1), arg1);
+
+      /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
+      if (TREE_CODE (arg1) == MINUS_EXPR
+	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
+	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
+									 1)),
+			      arg0, 0)
+	  && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
+	return omit_two_operands_loc (loc, type,
+				      code == NE_EXPR
+				      ? boolean_true_node : boolean_false_node,
+				      TREE_OPERAND (arg1, 1), arg0);
 
       /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
@@ -13413,7 +13518,7 @@ tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
 
 /* Return true if the floating point result of (CODE OP0) has an
    integer value.  We also allow +Inf, -Inf and NaN to be considered
-   integer values.
+   integer values.  Return false for signaling NaN.
 
    DEPTH is the current nesting depth of the query.  */
 
@@ -13446,7 +13551,7 @@ integer_valued_real_unary_p (tree_code code, tree op0, int depth)
 
 /* Return true if the floating point result of (CODE OP0 OP1) has an
    integer value.  We also allow +Inf, -Inf and NaN to be considered
-   integer values.
+   integer values.  Return false for signaling NaN.
 
    DEPTH is the current nesting depth of the query.  */
 
@@ -13470,8 +13575,8 @@ integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
 
 /* Return true if the floating point result of calling FNDECL with arguments
    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
-   considered integer values.  If FNDECL takes fewer than 2 arguments,
-   the remaining ARGn are null.
+   considered integer values.  Return false for signaling NaN.  If FNDECL
+   takes fewer than 2 arguments, the remaining ARGn are null.
 
    DEPTH is the current nesting depth of the query.  */
 
@@ -13500,7 +13605,7 @@ integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
 
 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
    has an integer value.  We also allow +Inf, -Inf and NaN to be
-   considered integer values.
+   considered integer values.  Return false for signaling NaN.
 
    DEPTH is the current nesting depth of the query.  */
 
@@ -13534,7 +13639,7 @@ integer_valued_real_single_p (tree t, int depth)
 
 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
    has an integer value.  We also allow +Inf, -Inf and NaN to be
-   considered integer values.
+   considered integer values.  Return false for signaling NaN.
 
    DEPTH is the current nesting depth of the query.  */
 
@@ -13562,6 +13667,7 @@ integer_valued_real_invalid_p (tree t, int depth)
 
 /* Return true if the floating point expression T has an integer value.
    We also allow +Inf, -Inf and NaN to be considered integer values.
+   Return false for signaling NaN.
 
    DEPTH is the current nesting depth of the query.  */
 
@@ -13882,6 +13988,23 @@ fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
 
   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
     {
+      if (!VECTOR_TYPE_P (type))
+	{
+	  /* Have vector comparison with scalar boolean result.  */
+	  bool result = true;
+	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
+		      && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
+	  for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
+	    {
+	      tree elem0 = VECTOR_CST_ELT (op0, i);
+	      tree elem1 = VECTOR_CST_ELT (op1, i);
+	      tree tmp = fold_relational_const (code, type, elem0, elem1);
+	      result &= integer_onep (tmp);
+	    }
+	  if (code == NE_EXPR)
+	    result = !result;
+	  return constant_boolean_node (result, type);
+	}
       unsigned count = VECTOR_CST_NELTS (op0);
       tree *elts = XALLOCAVEC (tree, count);
       gcc_assert (VECTOR_CST_NELTS (op1) == count