Diffstat (limited to 'gcc/gimple-fold.c')
 -rw-r--r--  gcc/gimple-fold.c  161
 1 file changed, 80 insertions(+), 81 deletions(-)
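
The patch below is part of GCC's wide-int conversion: double_int values, tree_low_cst, host_integerp, and the raw TREE_INT_CST_LOW/HIGH accessors give way to tree_to_uhwi/tree_to_shwi, tree_fits_uhwi_p/tree_fits_shwi_p, addr_wide_int, and the wi:: routines, with TYPE_SIGN replacing the old TYPE_UNSIGNED flag in extension calls. As a self-contained sketch of the semantics of the two wi:: helpers the hunks lean on most (plain C++, no GCC headers; uint64_t stands in for addr_wide_int, and this models the semantics, not the real implementation):

  #include <cstdint>
  #include <cassert>

  // wi::udiv_trunc: unsigned division, truncating toward zero.
  static uint64_t udiv_trunc (uint64_t a, uint64_t b) { return a / b; }

  // wi::ext (x, prec, sgn): take the low PREC bits of X and zero- or
  // sign-extend them according to SGN (the patch passes TYPE_SIGN where
  // the double_int API took the TYPE_UNSIGNED flag).
  enum signop { SIGNED, UNSIGNED };
  static uint64_t ext (uint64_t x, unsigned prec, signop sgn)
  {
    if (prec >= 64)
      return x;
    uint64_t mask = (uint64_t{1} << prec) - 1;
    x &= mask;
    if (sgn == SIGNED && (x >> (prec - 1)))  // sign bit set -> extend
      x |= ~mask;
    return x;
  }

  int main ()
  {
    assert (udiv_trunc (7, 2) == 3);
    assert (ext (0xFF, 8, UNSIGNED) == 0xFF);      // zero-extended
    assert (ext (0xFF, 8, SIGNED) == UINT64_MAX);  // -1 sign-extended
    return 0;
  }
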
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index d1349588fbd..0f35e3ddb39 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -1065,7 +1065,7 @@ gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
continue;
pos = int_bit_position (fld);
- size = tree_low_cst (DECL_SIZE (fld), 1);
+ size = tree_to_uhwi (DECL_SIZE (fld));
if (pos <= offset && (pos + size) > offset)
break;
}
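
For context on the unchanged test in this hunk: the loop scans the record's fields for the one whose bit range covers offset. A self-contained model of that search (plain C++; the field table is made up, standing in for the FIELD_DECL chain and int_bit_position/DECL_SIZE):

  #include <cstdio>

  struct field { long pos; unsigned long size; };  // bit position / bit size

  int main ()
  {
    field flds[] = { {0, 64}, {64, 32}, {96, 32} };
    long offset = 70;  // bit offset we are devirtualizing at

    for (const field &f : flds)
      if (f.pos <= offset && f.pos + (long) f.size > offset)
        {
          printf ("field at bit %ld covers offset %ld\n", f.pos, offset);
          break;
        }
    return 0;
  }
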
@@ -1142,7 +1142,7 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
if (binfo)
{
HOST_WIDE_INT token
- = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
+ = tree_to_hwi (OBJ_TYPE_REF_TOKEN (callee));
tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
if (fndecl)
{
@@ -2709,9 +2709,9 @@ get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
{
if (!integer_zerop (TREE_OPERAND (base, 1)))
{
- if (!host_integerp (TREE_OPERAND (base, 1), 0))
+ if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
return NULL_TREE;
- *bit_offset += (mem_ref_offset (base).low
+ *bit_offset += (mem_ref_offset (base).to_short_addr ()
* BITS_PER_UNIT);
}
@@ -2806,9 +2806,10 @@ fold_array_ctor_reference (tree type, tree ctor,
{
unsigned HOST_WIDE_INT cnt;
tree cfield, cval;
- double_int low_bound, elt_size;
- double_int index, max_index;
- double_int access_index;
+ addr_wide_int low_bound;
+ addr_wide_int elt_size;
+ addr_wide_int index, max_index;
+ addr_wide_int access_index;
tree domain_type = NULL_TREE, index_type = NULL_TREE;
HOST_WIDE_INT inner_offset;
@@ -2820,31 +2821,29 @@ fold_array_ctor_reference (tree type, tree ctor,
/* Static constructors for variably sized objects make no sense. */
gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
- low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
+ low_bound = TYPE_MIN_VALUE (domain_type);
}
else
- low_bound = double_int_zero;
+ low_bound = 0;
/* Static constructors for variably sized objects make no sense. */
gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
== INTEGER_CST);
- elt_size =
- tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
-
+ elt_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)));
/* We can handle only constantly sized accesses that are known to not
be larger than size of array element. */
if (!TYPE_SIZE_UNIT (type)
|| TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
- || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
+ || wi::lts_p (elt_size, TYPE_SIZE_UNIT (type)))
return NULL_TREE;
/* Compute the array index we look for. */
- access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
- .udiv (elt_size, TRUNC_DIV_EXPR);
+ access_index = wi::udiv_trunc (addr_wide_int (offset / BITS_PER_UNIT),
+ elt_size);
access_index += low_bound;
if (index_type)
- access_index = access_index.ext (TYPE_PRECISION (index_type),
- TYPE_UNSIGNED (index_type));
+ access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
+ TYPE_SIGN (index_type));
/* And offset within the access. */
inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
@@ -2854,9 +2853,10 @@ fold_array_ctor_reference (tree type, tree ctor,
if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
return NULL_TREE;
- index = low_bound - double_int_one;
+ index = low_bound - 1;
if (index_type)
- index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
+ index = wi::ext (index, TYPE_PRECISION (index_type),
+ TYPE_SIGN (index_type));
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
{
@@ -2866,26 +2866,26 @@ fold_array_ctor_reference (tree type, tree ctor,
if (cfield)
{
if (TREE_CODE (cfield) == INTEGER_CST)
- max_index = index = tree_to_double_int (cfield);
+ max_index = index = cfield;
else
{
gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
- index = tree_to_double_int (TREE_OPERAND (cfield, 0));
- max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
+ index = TREE_OPERAND (cfield, 0);
+ max_index = TREE_OPERAND (cfield, 1);
}
}
else
{
- index += double_int_one;
+ index += 1;
if (index_type)
- index = index.ext (TYPE_PRECISION (index_type),
- TYPE_UNSIGNED (index_type));
+ index = wi::ext (index, TYPE_PRECISION (index_type),
+ TYPE_SIGN (index_type));
max_index = index;
}
/* Do we have match? */
- if (access_index.cmp (index, 1) >= 0
- && access_index.cmp (max_index, 1) <= 0)
+ if (wi::cmpu (access_index, index) >= 0
+ && wi::cmpu (access_index, max_index) <= 0)
return fold_ctor_reference (type, cval, inner_offset, size,
from_decl);
}
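
The arithmetic rewritten in the fold_array_ctor_reference hunks above maps a constant bit offset into an array constructor to an (index, offset-within-element) pair. A self-contained model with made-up values, uint64_t again standing in for addr_wide_int:

  #include <cstdio>
  #include <cstdint>

  int main ()
  {
    const uint64_t BITS_PER_UNIT = 8;
    uint64_t offset = 96;      // bit offset of the access
    uint64_t elt_size = 4;     // TYPE_SIZE_UNIT of the element, in bytes
    uint64_t low_bound = 0;    // TYPE_MIN_VALUE of the index domain

    // wi::udiv_trunc (offset / BITS_PER_UNIT, elt_size) + low_bound
    uint64_t access_index = (offset / BITS_PER_UNIT) / elt_size + low_bound;
    // offset % (elt_size.to_uhwi () * BITS_PER_UNIT)
    uint64_t inner_offset = offset % (elt_size * BITS_PER_UNIT);

    printf ("access_index=%llu inner_offset=%llu\n",  // 3 and 0
            (unsigned long long) access_index,
            (unsigned long long) inner_offset);
    return 0;
  }
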
@@ -2912,10 +2912,9 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
tree byte_offset = DECL_FIELD_OFFSET (cfield);
tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
tree field_size = DECL_SIZE (cfield);
- double_int bitoffset;
- double_int byte_offset_cst = tree_to_double_int (byte_offset);
- double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
- double_int bitoffset_end, access_end;
+ addr_wide_int bitoffset;
+ addr_wide_int byte_offset_cst = byte_offset;
+ addr_wide_int bitoffset_end, access_end;
/* Variable sized objects in static constructors make no sense,
but field_size can be NULL for flexible array members. */
@@ -2926,30 +2925,28 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
: TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
/* Compute bit offset of the field. */
- bitoffset = tree_to_double_int (field_offset)
- + byte_offset_cst * bits_per_unit_cst;
+ bitoffset = wi::add (field_offset, byte_offset_cst * BITS_PER_UNIT);
/* Compute bit offset where the field ends. */
if (field_size != NULL_TREE)
- bitoffset_end = bitoffset + tree_to_double_int (field_size);
+ bitoffset_end = bitoffset + field_size;
else
- bitoffset_end = double_int_zero;
+ bitoffset_end = 0;
- access_end = double_int::from_uhwi (offset)
- + double_int::from_uhwi (size);
+ access_end = addr_wide_int (offset) + size;
/* Is there any overlap between [OFFSET, OFFSET+SIZE) and
[BITOFFSET, BITOFFSET_END)? */
- if (access_end.cmp (bitoffset, 0) > 0
+ if (wi::cmps (access_end, bitoffset) > 0
&& (field_size == NULL_TREE
- || double_int::from_uhwi (offset).slt (bitoffset_end)))
+ || wi::lts_p (offset, bitoffset_end)))
{
- double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
+ addr_wide_int inner_offset = addr_wide_int (offset) - bitoffset;
/* We do have overlap. Now see if field is large enough to
cover the access. Give up for accesses spanning multiple
fields. */
- if (access_end.cmp (bitoffset_end, 0) > 0)
+ if (wi::cmps (access_end, bitoffset_end) > 0)
return NULL_TREE;
- if (double_int::from_uhwi (offset).slt (bitoffset))
+ if (wi::lts_p (offset, bitoffset))
return NULL_TREE;
return fold_ctor_reference (type, cval,
inner_offset.to_uhwi (), size,
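
The comparisons converted above implement a half-open interval test: the access [offset, offset+size) must overlap the field [bitoffset, bitoffset_end), and folding gives up unless the field covers the whole access. A standalone model with made-up bit offsets:

  #include <cstdio>
  #include <cstdint>

  int main ()
  {
    int64_t offset = 32, size = 16;             // access: bits [32, 48)
    int64_t bitoffset = 0, bitoffset_end = 64;  // field: bits [0, 64)

    int64_t access_end = offset + size;
    bool overlap = access_end > bitoffset && offset < bitoffset_end;
    // Give up for accesses spanning multiple fields, as the code does.
    bool covered = overlap && access_end <= bitoffset_end
                   && offset >= bitoffset;

    printf ("overlap=%d covered=%d\n", overlap, covered);  // 1 1
    return 0;
  }
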
@@ -3039,38 +3036,42 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
&& (idx = (*valueize) (TREE_OPERAND (t, 1)))
&& TREE_CODE (idx) == INTEGER_CST)
{
- tree low_bound, unit_size;
- double_int doffset;
+ tree low_bound = array_ref_low_bound (t);
+ tree unit_size = array_ref_element_size (t);
/* If the resulting bit-offset is constant, track it. */
- if ((low_bound = array_ref_low_bound (t),
- TREE_CODE (low_bound) == INTEGER_CST)
- && (unit_size = array_ref_element_size (t),
- host_integerp (unit_size, 1))
- && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
- .sext (TYPE_PRECISION (TREE_TYPE (idx))),
- doffset.fits_shwi ()))
+ if ((TREE_CODE (low_bound) == INTEGER_CST)
+ && (tree_fits_uhwi_p (unit_size)))
{
- offset = doffset.to_shwi ();
- offset *= TREE_INT_CST_LOW (unit_size);
- offset *= BITS_PER_UNIT;
-
- base = TREE_OPERAND (t, 0);
- ctor = get_base_constructor (base, &offset, valueize);
- /* Empty constructor. Always fold to 0. */
- if (ctor == error_mark_node)
- return build_zero_cst (TREE_TYPE (t));
- /* Out of bound array access. Value is undefined,
- but don't fold. */
- if (offset < 0)
- return NULL_TREE;
- /* We can not determine ctor. */
- if (!ctor)
- return NULL_TREE;
- return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
- TREE_INT_CST_LOW (unit_size)
- * BITS_PER_UNIT,
- base);
+ addr_wide_int woffset
+ = wi::sext (addr_wide_int (idx) - low_bound,
+ TYPE_PRECISION (TREE_TYPE (idx)));
+
+ if (wi::fits_shwi_p (woffset))
+ {
+ offset = woffset.to_shwi ();
+ /* TODO: This code seems wrong, multiply then check
+ to see if it fits. */
+ offset *= tree_to_hwi (unit_size);
+ offset *= BITS_PER_UNIT;
+
+ base = TREE_OPERAND (t, 0);
+ ctor = get_base_constructor (base, &offset, valueize);
+ /* Empty constructor. Always fold to 0. */
+ if (ctor == error_mark_node)
+ return build_zero_cst (TREE_TYPE (t));
+ /* Out of bound array access. Value is undefined,
+ but don't fold. */
+ if (offset < 0)
+ return NULL_TREE;
+ /* We can not determine ctor. */
+ if (!ctor)
+ return NULL_TREE;
+ return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
+ tree_to_uhwi (unit_size)
+ * BITS_PER_UNIT,
+ base);
+ }
}
}
/* Fallthru. */
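
The TODO added in the hunk above flags that woffset is multiplied by the unit size and BITS_PER_UNIT before anything verifies the product still fits. A sketch of the check-first alternative it suggests; __builtin_mul_overflow is a later GCC builtin used here purely for illustration, and the values are made up:

  #include <cstdio>

  int main ()
  {
    long offset = 1L << 40;          // element offset scaled to bytes
    unsigned long unit_size = 1UL << 20;
    const long BITS_PER_UNIT = 8;

    long bits;
    if (__builtin_mul_overflow (offset, (long) unit_size, &bits)
        || __builtin_mul_overflow (bits, BITS_PER_UNIT, &bits))
      {
        puts ("offset does not fit; give up folding");
        return 0;
      }
    printf ("bit offset = %ld\n", bits);
    return 0;
  }
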
@@ -3140,7 +3141,7 @@ gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
if (TREE_CODE (v) == POINTER_PLUS_EXPR)
{
- offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
+ offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
v = TREE_OPERAND (v, 0);
}
else
@@ -3166,7 +3167,7 @@ gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
return NULL_TREE;
}
gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
- size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
+ size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
offset += token * size;
fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
offset, size, v);
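
The two gimple_get_virt_method_for_binfo hunks leave the underlying arithmetic untouched: the vtable entry for token sits token * size bits past any POINTER_PLUS_EXPR displacement, where size is the bit size of one vtable slot. A worked example with illustrative numbers (64-bit slots):

  #include <cstdio>

  int main ()
  {
    unsigned long size = 64;        // TYPE_SIZE of one vtable element, bits
    unsigned long offset = 2 * 64;  // from POINTER_PLUS_EXPR, in bits
    long token = 3;                 // OBJ_TYPE_REF_TOKEN

    offset += token * size;         // bit offset of the target slot
    printf ("fold_ctor_reference at bit %lu, size %lu\n", offset, size);
    return 0;
  }
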
@@ -3285,7 +3286,7 @@ gimple_val_nonnegative_real_p (tree val)
arg1 = gimple_call_arg (def_stmt, 1);
if (TREE_CODE (arg1) == INTEGER_CST
- && (TREE_INT_CST_LOW (arg1) & 1) == 0)
+ && (tree_to_hwi (arg1) & 1) == 0)
return true;
break;
@@ -3306,7 +3307,7 @@ gimple_val_nonnegative_real_p (tree val)
if ((n & 1) == 0)
{
REAL_VALUE_TYPE cint;
- real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
+ real_from_integer (&cint, VOIDmode, n, SIGNED);
if (real_identical (&c, &cint))
return true;
}
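
Both gimple_val_nonnegative_real_p hunks encode the same fact: a power with an even integer exponent is nonnegative whatever the sign of the base, which is all the (arg1 & 1) == 0 and (n & 1) == 0 tests check. For instance:

  #include <cmath>
  #include <cstdio>

  int main ()
  {
    int exps[] = {2, 4, 6};
    for (int n : exps)
      printf ("pow(-3, %d) = %g (even exponent => nonnegative)\n",
              n, pow (-3.0, n));
    return 0;
  }
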
@@ -3390,12 +3391,12 @@ gimple_fold_indirect_ref (tree t)
if (TREE_CODE (addr) == ADDR_EXPR
&& TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
&& useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
- && host_integerp (off, 1))
+ && tree_fits_uhwi_p (off))
{
- unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
+ unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
tree part_width = TYPE_SIZE (type);
unsigned HOST_WIDE_INT part_widthi
- = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
+ = tree_to_shwi (part_width) / BITS_PER_UNIT;
unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
tree index = bitsize_int (indexi);
if (offset / part_widthi
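
The hunk above, for accesses of the form *(scalar *)(&vec + off), computes which vector lane the byte offset selects: part_widthi is the element width in bytes and indexi the bit position handed to BIT_FIELD_REF. A standalone model with made-up sizes:

  #include <cstdio>

  int main ()
  {
    const unsigned long BITS_PER_UNIT = 8;
    unsigned long offset = 8;       // byte offset from the pointer
    unsigned long part_width = 32;  // TYPE_SIZE of one element, in bits
    unsigned long part_widthi = part_width / BITS_PER_UNIT;  // bytes

    unsigned long indexi = offset * BITS_PER_UNIT;  // bit offset for BIT_FIELD_REF
    if (offset % part_widthi == 0)
      printf ("lane %lu, BIT_FIELD_REF at bit %lu\n",
              offset / part_widthi, indexi);
    return 0;
  }
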
@@ -3419,9 +3420,7 @@ gimple_fold_indirect_ref (tree t)
|| DECL_P (TREE_OPERAND (addr, 0)))
return fold_build2 (MEM_REF, type,
addr,
- build_int_cst_wide (ptype,
- TREE_INT_CST_LOW (off),
- TREE_INT_CST_HIGH (off)));
+ wide_int_to_tree (ptype, off));
}
/* *(foo *)fooarrptr => (*fooarrptr)[0] */