Diffstat (limited to 'gcc/expr.c')
-rw-r--r--  gcc/expr.c | 105
1 file changed, 62 insertions(+), 43 deletions(-)
diff --git a/gcc/expr.c b/gcc/expr.c
index df1c1e88f6f..916a8aa889a 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -1,5 +1,5 @@
 /* Convert tree expression to rtl instructions, for GNU compiler.
-   Copyright (C) 1988-2013 Free Software Foundation, Inc.
+   Copyright (C) 1988-2014 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -719,7 +719,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
       && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
-    x = gen_lowpart (mode, x);
+    x = gen_lowpart (mode, SUBREG_REG (x));
 
   if (GET_MODE (x) != VOIDmode)
     oldmode = GET_MODE (x);
@@ -2061,12 +2061,14 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
       enum machine_mode mode = GET_MODE (tmps[i]);
       unsigned int bytelen = GET_MODE_SIZE (mode);
-      unsigned int adj_bytelen = bytelen;
+      unsigned int adj_bytelen;
       rtx dest = dst;
 
       /* Handle trailing fragments that run over the size of the struct.  */
       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
         adj_bytelen = ssize - bytepos;
+      else
+        adj_bytelen = bytelen;
 
       if (GET_CODE (dst) == CONCAT)
         {
@@ -2107,6 +2109,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
             }
         }
 
+      /* Handle trailing fragments that run over the size of the struct.  */
       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
         {
           /* store_bit_field always takes its value from the lsb.
@@ -2124,16 +2127,22 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
               tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                       shift, tmps[i], 0);
             }
-          bytelen = adj_bytelen;
+
+          /* Make sure not to write past the end of the struct.  */
+          store_bit_field (dest,
+                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
+                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
+                           VOIDmode, tmps[i]);
         }
 
       /* Optimize the access just a bit.  */
-      if (MEM_P (dest)
-          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
-              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
-          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
-          && bytelen == GET_MODE_SIZE (mode))
+      else if (MEM_P (dest)
+               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
+                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
+               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
+               && bytelen == GET_MODE_SIZE (mode))
         emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
+
       else
         store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                          0, 0, mode, tmps[i]);
@@ -4776,8 +4785,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
           expand_insn (icode, 2, ops);
         }
       else
-        store_bit_field (mem, GET_MODE_BITSIZE (mode),
-                         0, 0, 0, mode, reg);
+        store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
       return;
     }
 
@@ -4816,16 +4824,27 @@ expand_assignment (tree to, tree from, bool nontemporal)
       if (TREE_CODE (to) == COMPONENT_REF
           && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
         get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
+      /* The C++ memory model naturally applies to byte-aligned fields.
+         However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
+         BITSIZE are not byte-aligned, there is no need to limit the range
+         we can access.  This can occur with packed structures in Ada.  */
+      else if (bitsize > 0
+               && bitsize % BITS_PER_UNIT == 0
+               && bitpos % BITS_PER_UNIT == 0)
+        {
+          bitregion_start = bitpos;
+          bitregion_end = bitpos + bitsize - 1;
+        }
 
       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
 
-      /* If the bitfield is volatile, we want to access it in the
+      /* If the field has a mode, we want to access it in the
          field's mode, not the computed mode.
         If a MEM has VOIDmode (external with incomplete type),
         use BLKmode for it instead.  */
       if (MEM_P (to_rtx))
         {
-          if (volatilep && flag_strict_volatile_bitfields > 0)
+          if (mode1 != VOIDmode)
             to_rtx = adjust_address (to_rtx, mode1, 0);
           else if (GET_MODE (to_rtx) == VOIDmode)
             to_rtx = adjust_address (to_rtx, BLKmode, 0);
@@ -4850,8 +4869,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
           if (GET_MODE (offset_rtx) != address_mode)
             offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
 
-          /* A constant address in TO_RTX can have VOIDmode, we must not try
-             to call force_reg for that case.  Avoid that case.  */
+          /* The check for a constant address in TO_RTX not having VOIDmode
+             is probably no longer necessary.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
@@ -4861,6 +4880,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
+             bitregion_start = 0;
+             if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
+               bitregion_end -= bitpos;
              bitpos = 0;
            }
 
@@ -8782,12 +8804,6 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
        {
          rtx insn;
 
-         /* ??? Same problem as in expmed.c: emit_conditional_move
-            forces a stack adjustment via compare_from_rtx, and we
-            lose the stack adjustment if the sequence we are about
-            to create is discarded.  */
-         do_pending_stack_adjust ();
-
          start_sequence ();
 
          /* Try to emit the conditional move.  */
@@ -9466,13 +9482,11 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
             the same mode we got when the variable was declared.  */
          if (code == SSA_NAME
              && (g = SSA_NAME_DEF_STMT (ssa_name))
-             && gimple_code (g) == GIMPLE_CALL)
-           {
-             gcc_assert (!gimple_call_internal_p (g));
-             pmode = promote_function_mode (type, mode, &unsignedp,
-                                            gimple_call_fntype (g),
-                                            2);
-           }
+             && gimple_code (g) == GIMPLE_CALL
+             && !gimple_call_internal_p (g))
+           pmode = promote_function_mode (type, mode, &unsignedp,
+                                          gimple_call_fntype (g),
+                                          2);
          else
            pmode = promote_decl_mode (exp, &unsignedp);
          gcc_assert (GET_MODE (decl_rtl) == pmode);
@@ -9956,13 +9970,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                              VOIDmode,
                              modifier == EXPAND_SUM ? EXPAND_NORMAL
                                                     : modifier);
 
-      /* If the bitfield is volatile, we want to access it in the
+      /* If the field has a mode, we want to access it in the
         field's mode, not the computed mode.
         If a MEM has VOIDmode (external with incomplete type),
         use BLKmode for it instead.  */
       if (MEM_P (op0))
        {
-         if (volatilep && flag_strict_volatile_bitfields > 0)
+         if (mode1 != VOIDmode)
            op0 = adjust_address (op0, mode1, 0);
          else if (GET_MODE (op0) == VOIDmode)
            op0 = adjust_address (op0, BLKmode, 0);
@@ -10049,8 +10063,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
            offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
 
          if (GET_MODE (op0) == BLKmode
-             /* A constant address in OP0 can have VOIDmode, we must
-                not try to call force_reg in that case.  */
+             /* The check for a constant address in OP0 not having VOIDmode
+                is probably no longer necessary.  */
             && GET_MODE (XEXP (op0, 0)) != VOIDmode
             && bitsize != 0
             && (bitpos % bitsize) == 0
@@ -10094,17 +10108,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
              && modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_MEMORY)
-             /* If the field is volatile, we always want an aligned
-                access.  Do this in following two situations:
-                1. the access is not already naturally
-                aligned, otherwise "normal" (non-bitfield) volatile fields
-                become non-addressable.
-                2. the bitsize is narrower than the access size. Need
-                to extract bitfields from the access.  */
-             || (volatilep && flag_strict_volatile_bitfields > 0
-                 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
-                     || (mode1 != BLKmode
-                         && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
+             /* If the bitfield is volatile and the bitsize
+                is narrower than the access size of the bitfield,
+                we need to extract bitfields from the access.  */
+             || (volatilep && TREE_CODE (exp) == COMPONENT_REF
+                 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
+                 && mode1 != BLKmode
+                 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
             /* If the field isn't aligned enough to fetch as a memref,
                fetch it as a bit field.  */
             || (mode1 != BLKmode
@@ -10141,6 +10151,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
          if (target == 0)
            target = assign_temp (type, 1, 1);
 
+         /* ??? Unlike the similar test a few lines below, this one is
+            very likely obsolete.  */
          if (bitsize == 0)
            return target;
@@ -10161,6 +10173,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
          return target;
        }
 
+      /* If we have nothing to extract, the result will be 0 for targets
+        with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
+        return 0 for the sake of consistency, as reading a zero-sized
+        bitfield is valid in Ada and the value is fully specified.  */
+      if (bitsize == 0)
+       return const0_rtx;
+
       op0 = validize_mem (op0);
 
       if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
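
The three sketches below are illustrations added alongside the diff, not part of the commit; every struct, field, and function name in them is invented.

First, the emit_group_store change.  When a small aggregate comes back from a call in registers, the final register fragment can be wider than what is left of the object, and a full-width store would then write past the end of the struct; the new store_bit_field call clamps the store to ssize.  A minimal C sketch of an aggregate where this can arise, assuming a target that returns it in a 64-bit register:

/* Hypothetical example: a 5-byte aggregate that may be returned in a
   single 64-bit register.  Spilling that register with a full-width
   store would touch bytes 5..7 past the object; limiting the store to
   adj_bytelen bytes avoids clobbering whatever follows it.  */
struct five
{
  char c[5];
};

struct five
make_five (void)
{
  struct five f = { { 1, 2, 3, 4, 5 } };
  return f;
}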
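
Second, the new bitregion computation in expand_assignment.  The added comment cites packed structures in Ada, where a field can be byte-aligned yet have no DECL_BIT_FIELD_TYPE; a packed struct in C gives a roughly comparable layout, so the following is only an approximation of the situation the 'else if' handles:

/* Hypothetical example: in a packed struct, 'b' is byte-aligned but is
   not a bitfield.  Deriving bitregion_start/bitregion_end from the
   field itself means the store to 'b' may only touch bytes 1..4 of *p,
   so the neighbouring fields 'a' and 'c' cannot be clobbered by a
   wider read-modify-write sequence.  */
struct __attribute__ ((packed)) triple
{
  char a;
  int b;
  char c;
};

void
set_b (struct triple *p, int v)
{
  p->b = v;
}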
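
Third, the reworked volatile test in expand_expr_real_1.  Instead of keying on flag_strict_volatile_bitfields, the new condition asks only whether exp is a volatile bitfield (a COMPONENT_REF with a DECL_BIT_FIELD_TYPE) whose bitsize is narrower than the access size of mode1, in which case the value is extracted from the wider access.  A minimal C sketch of that situation, assuming 'ctrl' is accessed through its 32-bit container:

/* Hypothetical example: 'ctrl' is a volatile bitfield whose bitsize (3)
   is narrower than the 32-bit access its container uses, so the
   expander reads the containing word and extracts the 3 bits rather
   than addressing the field directly.  */
struct device
{
  volatile unsigned int ctrl : 3;
  volatile unsigned int rest : 29;
};

unsigned int
read_ctrl (struct device *d)
{
  return d->ctrl;
}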