diff options
author | edlinger <edlinger@138bc75d-0d04-0410-961f-82ee72b054a4> | 2013-11-27 16:33:01 +0000 |
---|---|---|
committer | edlinger <edlinger@138bc75d-0d04-0410-961f-82ee72b054a4> | 2013-11-27 16:33:01 +0000 |
commit | dc317fc847d66f8d9aa1f22b20c2264227003604 (patch) | |
tree | 57109bd0c5a05fb6134524e9eba3e9890ae43804 | |
parent | fb930d292eac663fe3b4770caf9ce9ce4a85def3 (diff) | |
download | gcc-dc317fc847d66f8d9aa1f22b20c2264227003604.tar.gz |
2013-11-27 Bernd Edlinger <bernd.edlinger@hotmail.de>
Reverted r205398 on request: Remove parameter keep_aligning from
get_inner_reference.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205452 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r-- | gcc/ChangeLog | 31 | ||||
-rw-r--r-- | gcc/ada/ChangeLog | 7 | ||||
-rw-r--r-- | gcc/ada/gcc-interface/decl.c | 2 | ||||
-rw-r--r-- | gcc/ada/gcc-interface/trans.c | 2 | ||||
-rw-r--r-- | gcc/ada/gcc-interface/utils2.c | 3 | ||||
-rw-r--r-- | gcc/asan.c | 2 | ||||
-rw-r--r-- | gcc/builtins.c | 6 | ||||
-rw-r--r-- | gcc/cfgexpand.c | 2 | ||||
-rw-r--r-- | gcc/config/mips/mips.c | 2 | ||||
-rw-r--r-- | gcc/dbxout.c | 2 | ||||
-rw-r--r-- | gcc/dwarf2out.c | 6 | ||||
-rw-r--r-- | gcc/expr.c | 37 | ||||
-rw-r--r-- | gcc/fold-const.c | 15 | ||||
-rw-r--r-- | gcc/gimple-ssa-strength-reduction.c | 2 | ||||
-rw-r--r-- | gcc/simplify-rtx.c | 2 | ||||
-rw-r--r-- | gcc/tree-affine.c | 5 | ||||
-rw-r--r-- | gcc/tree-data-ref.c | 4 | ||||
-rw-r--r-- | gcc/tree-scalar-evolution.c | 2 | ||||
-rw-r--r-- | gcc/tree-ssa-loop-ivopts.c | 4 | ||||
-rw-r--r-- | gcc/tree-vect-data-refs.c | 4 | ||||
-rw-r--r-- | gcc/tree.h | 3 | ||||
-rw-r--r-- | gcc/tsan.c | 2 |
22 files changed, 69 insertions, 76 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog index 1ef702c50fe..39a4f1e7192 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -337,37 +337,6 @@ for the inner loop if collapse_bb is non-NULL. (expand_omp_simd): Use cont_bb rather than e->dest as latch. -2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de> - - Remove parameter keep_aligning from get_inner_reference. - * tree.h (get_inner_reference): Adjust header. - * expr.c (get_inner_reference): Remove parameter keep_aligning. - (get_bit_range, expand_assignment, - expand_expr_addr_expr_1, expand_expr_real_1): Adjust. - * asan.c (instrument_derefs): Adjust. - * builtins.c (get_object_alignment_2): Adjust. Remove handling of - VIEW_CONVERT_EXPR. - * cfgexpand.c (expand_debug_expr): Adjust. - * dbxout.c (dbxout_expand_expr): Adjust. - * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref, - loc_list_from_tree, fortran_common): Adjust. - * fold-const.c (optimize_bit_field_compare, - decode_field_reference, fold_unary_loc, fold_comparison, - split_address_to_core_and_offset): Adjust. - * gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust. - * simplifx-rtx.c (delegitimize_mem_from_attrs): Adjust. - * tree-affine.c (tree_to_aff_combination, - get_inner_reference_aff): Adjust. - * tree-data-ref.c (split_constant_offset_1, - dr_analyze_innermost): Adjust. - * tree-vect-data-refs.c (vect_check_gather, - vect_analyze_data_refs): Adjust. - * tree-scalar-evolution.c (interpret_rhs_expr): Adjust. - * tree-ssa-loop-ivopts.c (may_be_unaligned_p, - split_address_cost): Adjust. - * tsan.c (instrument_expr): Adjust. - * config/mips/mips.c (r10k_safe_mem_expr_p): Adjust. 
- 2013-11-26 Yufeng Zhang <yufeng.zhang@arm.com> * config/arm/arm.c (arm_legitimize_address): Check xop1 is not diff --git a/gcc/ada/ChangeLog b/gcc/ada/ChangeLog index d4908a0d80b..8bb3462bc5a 100644 --- a/gcc/ada/ChangeLog +++ b/gcc/ada/ChangeLog @@ -1,10 +1,3 @@ -2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de> - - Remove parameter keep_aligning from get_inner_reference. - * gcc-interface/decl.c (elaborate_expression_1): Adjust. - * gcc-interface/trans.c (Attribute_to_gnu): Adjust. - * gcc-interface/utils2.c (build_unary_op): Adjust. - 2013-11-23 Eric Botcazou <ebotcazou@adacore.com> * gcc-interface/trans.c (Loop_Statement_to_gnu): Set TREE_SIDE_EFFECTS diff --git a/gcc/ada/gcc-interface/decl.c b/gcc/ada/gcc-interface/decl.c index 51adf18e0a3..ee76a9d160f 100644 --- a/gcc/ada/gcc-interface/decl.c +++ b/gcc/ada/gcc-interface/decl.c @@ -6269,7 +6269,7 @@ elaborate_expression_1 (tree gnu_expr, Entity_Id gnat_entity, tree gnu_name, int unsignedp, volatilep; inner = get_inner_reference (inner, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); /* If the offset is variable, err on the side of caution. 
*/ if (offset) inner = NULL_TREE; diff --git a/gcc/ada/gcc-interface/trans.c b/gcc/ada/gcc-interface/trans.c index 6c7a418662f..e533de6dcbf 100644 --- a/gcc/ada/gcc-interface/trans.c +++ b/gcc/ada/gcc-interface/trans.c @@ -2060,7 +2060,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute) && TREE_CODE (gnu_prefix) == FIELD_DECL)); get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); if (TREE_CODE (gnu_prefix) == COMPONENT_REF) { diff --git a/gcc/ada/gcc-interface/utils2.c b/gcc/ada/gcc-interface/utils2.c index b6299cbeefa..224a87d8777 100644 --- a/gcc/ada/gcc-interface/utils2.c +++ b/gcc/ada/gcc-interface/utils2.c @@ -1312,7 +1312,8 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand) int unsignedp, volatilep; inner = get_inner_reference (operand, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, + false); /* If INNER is a padding type whose field has a self-referential size, convert to that inner type. We know the offset is zero diff --git a/gcc/asan.c b/gcc/asan.c index c901e1da380..677435e05ae 100644 --- a/gcc/asan.c +++ b/gcc/asan.c @@ -1488,7 +1488,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t, enum machine_mode mode; int volatilep = 0, unsignedp = 0; tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); if (bitpos % (size_in_bytes * BITS_PER_UNIT) || bitsize != size_in_bytes * BITS_PER_UNIT) { diff --git a/gcc/builtins.c b/gcc/builtins.c index 3f03b01281b..d2248ea5ec6 100644 --- a/gcc/builtins.c +++ b/gcc/builtins.c @@ -329,7 +329,7 @@ get_object_alignment_2 (tree exp, unsigned int *alignp, /* Get the innermost object and the constant (bitpos) and possibly variable (offset) offset of the access. 
*/ exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, true); /* Extract alignment information from the innermost object and possibly adjust bitpos and offset. */ @@ -360,6 +360,10 @@ get_object_alignment_2 (tree exp, unsigned int *alignp, align = DECL_ALIGN (exp); known_alignment = true; } + else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR) + { + align = TYPE_ALIGN (TREE_TYPE (exp)); + } else if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF) diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c index 98983f40b3b..207f8767573 100644 --- a/gcc/cfgexpand.c +++ b/gcc/cfgexpand.c @@ -3941,7 +3941,7 @@ expand_debug_expr (tree exp) tree offset; int volatilep = 0; tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode1, &unsignedp, &volatilep); + &mode1, &unsignedp, &volatilep, false); rtx orig_op0; if (bitsize == 0) diff --git a/gcc/config/mips/mips.c b/gcc/config/mips/mips.c index 69e67be4973..36ba6df7a4c 100644 --- a/gcc/config/mips/mips.c +++ b/gcc/config/mips/mips.c @@ -14948,7 +14948,7 @@ r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset) int unsigned_p, volatile_p; inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode, - &unsigned_p, &volatile_p); + &unsigned_p, &volatile_p, false); if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset) return false; diff --git a/gcc/dbxout.c b/gcc/dbxout.c index 5988c7e1bd4..bc6a3af0f96 100644 --- a/gcc/dbxout.c +++ b/gcc/dbxout.c @@ -2515,7 +2515,7 @@ dbxout_expand_expr (tree expr) rtx x; tem = get_inner_reference (expr, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, true); x = dbxout_expand_expr (tem); if (x == NULL || !MEM_P (x)) diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c index 6376306802a..3448ec4a98c 100644 --- a/gcc/dwarf2out.c +++ b/gcc/dwarf2out.c @@ -13934,7 +13934,7 @@ 
loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev) obj = get_inner_reference (TREE_OPERAND (loc, 0), &bitsize, &bitpos, &offset, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, false); STRIP_NOPS (obj); if (bitpos % BITS_PER_UNIT) { @@ -14211,7 +14211,7 @@ loc_list_from_tree (tree loc, int want_address) int unsignedp, volatilep = 0; obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, false); gcc_assert (obj != loc); @@ -15521,7 +15521,7 @@ fortran_common (tree decl, HOST_WIDE_INT *value) return NULL_TREE; cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, true); if (cvar == NULL_TREE || TREE_CODE (cvar) != VAR_DECL diff --git a/gcc/expr.c b/gcc/expr.c index 8f8b5272846..4815c886f77 100644 --- a/gcc/expr.c +++ b/gcc/expr.c @@ -4657,7 +4657,7 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart, int unsignedp; int volatilep = 0; get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos, - &roffset, &rmode, &unsignedp, &volatilep); + &roffset, &rmode, &unsignedp, &volatilep, false); if ((rbitpos % BITS_PER_UNIT) != 0) { *bitstart = *bitend = 0; @@ -4810,7 +4810,7 @@ expand_assignment (tree to, tree from, bool nontemporal) push_temp_slots (); tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, - &unsignedp, &volatilep); + &unsignedp, &volatilep, true); /* Make sure bitpos is not negative, it can wreak havoc later. */ if (bitpos < 0) @@ -6652,13 +6652,27 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, If the field describes a variable-sized object, *PMODE is set to BLKmode and *PBITSIZE is set to -1. An access cannot be made in - this case, but the address of the object can be found. */ + this case, but the address of the object can be found. 
+ + If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't + look through nodes that serve as markers of a greater alignment than + the one that can be deduced from the expression. These nodes make it + possible for front-ends to prevent temporaries from being created by + the middle-end on alignment considerations. For that purpose, the + normal operating mode at high-level is to always pass FALSE so that + the ultimate containing object is really returned; moreover, the + associated predicate handled_component_p will always return TRUE + on these nodes, thus indicating that they are essentially handled + by get_inner_reference. TRUE should only be passed when the caller + is scanning the expression in order to build another representation + and specifically knows how to handle these nodes; as such, this is + the normal operating mode in the RTL expanders. */ tree get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, HOST_WIDE_INT *pbitpos, tree *poffset, enum machine_mode *pmode, int *punsignedp, - int *pvolatilep) + int *pvolatilep, bool keep_aligning) { tree size_tree = 0; enum machine_mode mode = VOIDmode; @@ -6778,6 +6792,14 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, break; case VIEW_CONVERT_EXPR: + if (keep_aligning && STRICT_ALIGNMENT + && (TYPE_ALIGN (TREE_TYPE (exp)) + > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))) + && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) + < BIGGEST_ALIGNMENT) + && (TYPE_ALIGN_OK (TREE_TYPE (exp)) + || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0))))) + goto done; break; case MEM_REF: @@ -7642,7 +7664,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode, they won't change the final object whose address will be returned (they actually exist only for that purpose). 
*/ inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode1, &unsignedp, &volatilep); + &mode1, &unsignedp, &volatilep, false); break; } @@ -9919,7 +9941,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, tree offset; int volatilep = 0, must_force_mem; tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode1, &unsignedp, &volatilep); + &mode1, &unsignedp, &volatilep, true); rtx orig_op0, memloc; bool mem_attrs_from_type = false; @@ -10280,7 +10302,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, int volatilep = 0; tree tem = get_inner_reference (treeop0, &bitsize, &bitpos, - &offset, &mode1, &unsignedp, &volatilep); + &offset, &mode1, &unsignedp, &volatilep, + true); rtx orig_op0; /* ??? We should work harder and deal with non-zero offsets. */ diff --git a/gcc/fold-const.c b/gcc/fold-const.c index fcd7f087be8..5cf8ed196d7 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -3503,7 +3503,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, do anything if the inner expression is a PLACEHOLDER_EXPR since we then will no longer be able to replace it. */ linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, - &lunsignedp, &lvolatilep); + &lunsignedp, &lvolatilep, false); if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep) return 0; @@ -3513,7 +3513,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, /* If this is not a constant, we can only do something if bit positions, sizes, and signedness are the same. 
*/ rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, - &runsignedp, &rvolatilep); + &runsignedp, &rvolatilep, false); if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize || lunsignedp != runsignedp || offset != 0 @@ -3687,7 +3687,7 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, } inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, - punsignedp, pvolatilep); + punsignedp, pvolatilep, false); if ((inner == exp && and_mask == 0) || *pbitsize < 0 || offset != 0 || TREE_CODE (inner) == PLACEHOLDER_EXPR) @@ -8071,7 +8071,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) int unsignedp, volatilep; tree base = TREE_OPERAND (op0, 0); base = get_inner_reference (base, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); /* If the reference was to a (constant) zero offset, we can use the address of the base if it has the same base type as the result type and the pointer type is unqualified. 
*/ @@ -9096,7 +9096,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, { base0 = get_inner_reference (TREE_OPERAND (arg0, 0), &bitsize, &bitpos0, &offset0, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, false); if (TREE_CODE (base0) == INDIRECT_REF) base0 = TREE_OPERAND (base0, 0); else @@ -9130,7 +9130,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, { base1 = get_inner_reference (TREE_OPERAND (arg1, 0), &bitsize, &bitpos1, &offset1, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, false); if (TREE_CODE (base1) == INDIRECT_REF) base1 = TREE_OPERAND (base1, 0); else @@ -16997,7 +16997,8 @@ split_address_to_core_and_offset (tree exp, if (TREE_CODE (exp) == ADDR_EXPR) { core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos, - poffset, &mode, &unsignedp, &volatilep); + poffset, &mode, &unsignedp, &volatilep, + false); core = build_fold_addr_expr_loc (loc, core); } else diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c index 1f881350021..bc2484b5b13 100644 --- a/gcc/gimple-ssa-strength-reduction.c +++ b/gcc/gimple-ssa-strength-reduction.c @@ -940,7 +940,7 @@ slsr_process_ref (gimple gs) return; base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, false); index = double_int::from_uhwi (bitpos); if (!restructure_reference (&base, &offset, &index, &type)) diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c index f680e1773a7..ec138584c38 100644 --- a/gcc/simplify-rtx.c +++ b/gcc/simplify-rtx.c @@ -296,7 +296,7 @@ delegitimize_mem_from_attrs (rtx x) int unsignedp, volatilep = 0; decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); if (bitsize != GET_MODE_BITSIZE (mode) || (bitpos % BITS_PER_UNIT) || (toffset && !tree_fits_shwi_p (toffset))) diff --git a/gcc/tree-affine.c b/gcc/tree-affine.c index 
ea80e7593e6..f93f186a761 100644 --- a/gcc/tree-affine.c +++ b/gcc/tree-affine.c @@ -325,7 +325,8 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb) return; } core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos, - &toffset, &mode, &unsignedp, &volatilep); + &toffset, &mode, &unsignedp, &volatilep, + false); if (bitpos % BITS_PER_UNIT != 0) break; aff_combination_const (comb, type, @@ -894,7 +895,7 @@ get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size) int uns, vol; aff_tree tmp; tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode, - &uns, &vol); + &uns, &vol, false); tree base_addr = build_fold_addr_expr (base); /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */ diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c index 559a546d29f..fef6a716b7a 100644 --- a/gcc/tree-data-ref.c +++ b/gcc/tree-data-ref.c @@ -619,7 +619,7 @@ split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1, op0 = TREE_OPERAND (op0, 0); base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset, - &pmode, &punsignedp, &pvolatilep); + &pmode, &punsignedp, &pvolatilep, false); if (pbitpos % BITS_PER_UNIT != 0) return false; @@ -769,7 +769,7 @@ dr_analyze_innermost (struct data_reference *dr, struct loop *nest) fprintf (dump_file, "analyze_innermost: "); base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset, - &pmode, &punsignedp, &pvolatilep); + &pmode, &punsignedp, &pvolatilep, false); gcc_assert (base != NULL_TREE); if (pbitpos % BITS_PER_UNIT != 0) diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c index ddea81b3b8e..ada942df389 100644 --- a/gcc/tree-scalar-evolution.c +++ b/gcc/tree-scalar-evolution.c @@ -1658,7 +1658,7 @@ interpret_rhs_expr (struct loop *loop, gimple at_stmt, base = get_inner_reference (TREE_OPERAND (rhs1, 0), &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); if (TREE_CODE (base) == MEM_REF) { diff 
--git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c index f790bb180cd..1f5590a7ac2 100644 --- a/gcc/tree-ssa-loop-ivopts.c +++ b/gcc/tree-ssa-loop-ivopts.c @@ -1684,7 +1684,7 @@ may_be_unaligned_p (tree ref, tree step) does to check whether the object must be loaded by parts when STRICT_ALIGNMENT is true. */ base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, true); base_type = TREE_TYPE (base); base_align = get_object_alignment (base); base_align = MAX (base_align, TYPE_ALIGN (base_type)); @@ -3781,7 +3781,7 @@ split_address_cost (struct ivopts_data *data, int unsignedp, volatilep; core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode, - &unsignedp, &volatilep); + &unsignedp, &volatilep, false); if (toffset != 0 || bitpos % BITS_PER_UNIT != 0 diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c index a61f2a1a868..76a3563f397 100644 --- a/gcc/tree-vect-data-refs.c +++ b/gcc/tree-vect-data-refs.c @@ -2971,7 +2971,7 @@ vect_check_gather (gimple stmt, loop_vec_info loop_vinfo, tree *basep, SSA_NAME OFF and put the loop invariants into a tree BASE that can be gimplified before the loop. 
*/ base = get_inner_reference (DR_REF (dr), &pbitsize, &pbitpos, &off, - &pmode, &punsignedp, &pvolatilep); + &pmode, &punsignedp, &pvolatilep, false); gcc_assert (base != NULL_TREE && (pbitpos % BITS_PER_UNIT) == 0); if (TREE_CODE (base) == MEM_REF) @@ -3518,7 +3518,7 @@ again: } outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos, - &poffset, &pmode, &punsignedp, &pvolatilep); + &poffset, &pmode, &punsignedp, &pvolatilep, false); gcc_assert (outer_base != NULL_TREE); if (pbitpos % BITS_PER_UNIT != 0) diff --git a/gcc/tree.h b/gcc/tree.h index 88c8d56bf3a..11ab1ce3eb0 100644 --- a/gcc/tree.h +++ b/gcc/tree.h @@ -4515,7 +4515,8 @@ extern tree build_personality_function (const char *); look for the ultimate containing object, which is returned and specify the access position and size. */ extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *, - tree *, enum machine_mode *, int *, int *); + tree *, enum machine_mode *, int *, int *, + bool); /* Return a tree representing the lower bound of the array mentioned in EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ diff --git a/gcc/tsan.c b/gcc/tsan.c index 10b74fd96ee..4efcfe565aa 100644 --- a/gcc/tsan.c +++ b/gcc/tsan.c @@ -121,7 +121,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write) enum machine_mode mode; int volatilep = 0, unsignedp = 0; base = get_inner_reference (expr, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + &mode, &unsignedp, &volatilep, false); /* No need to instrument accesses to decls that don't escape, they can't escape to other threads then. */ |