re PR target/52080 (Stores to bitfields introduce a store-data-race on adjacent data)

2012-03-14  Richard Guenther  <rguenther@suse.de>

	* tree.h (DECL_BIT_FIELD_REPRESENTATIVE): New define.
	* stor-layout.c (start_bitfield_representative): New function.
	(finish_bitfield_representative): Likewise.
	(finish_bitfield_layout): Likewise.
	(finish_record_layout): Call finish_bitfield_layout.
	* tree.c (free_lang_data_in_decl): Only free DECL_QUALIFIER
	for QUAL_UNION_TYPE fields.
	* tree-streamer-in.c (lto_input_ts_field_decl_tree_pointers):
	Stream DECL_BIT_FIELD_REPRESENTATIVE.
	* tree-streamer-out.c (write_ts_field_decl_tree_pointers): Likewise.

	PR middle-end/52080
	PR middle-end/52097
	PR middle-end/48124
	* expr.c (get_bit_range): Unconditionally extract bitrange
	from DECL_BIT_FIELD_REPRESENTATIVE.
	(expand_assignment): Adjust call to get_bit_range.

	* gcc.dg/torture/pr48124-1.c: New testcase.
	* gcc.dg/torture/pr48124-2.c: Likewise.
	* gcc.dg/torture/pr48124-3.c: Likewise.
	* gcc.dg/torture/pr48124-4.c: Likewise.

From-SVN: r185379
Author:    Richard Guenther <rguenther@suse.de>  2012-03-14 10:55:09 +0000
Committer: Richard Biener
Commit:    26c71b9368 (parent 1caf8dd66c)
12 changed files with 370 additions and 98 deletions
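
Background for the patch: under the C11/C++11 memory model, a sequence of
adjacent bit-fields forms a single memory location, while every other member
is a location of its own, and a store to one location must not introduce a
write to another.  A minimal sketch of the constraint being enforced (the
struct is hypothetical, not taken from the patch):

#include <stdio.h>

struct S
{
  unsigned a : 4;    /* a, b and c together form one memory location */
  unsigned b : 1;
  unsigned c : 3;
  unsigned char d;   /* d is a separate location: a store to b must not
                        read-modify-write the byte that holds d */
};

int
main (void)
{
  struct S s = { 1, 0, 2, 42 };
  s.b = 1;   /* may touch only the byte containing a, b and c */
  printf ("%u %u %u %u\n", s.a, s.b, s.c, s.d);
  return 0;
}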

--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,23 @@
+2012-03-14  Richard Guenther  <rguenther@suse.de>
+
+	* tree.h (DECL_BIT_FIELD_REPRESENTATIVE): New define.
+	* stor-layout.c (start_bitfield_representative): New function.
+	(finish_bitfield_representative): Likewise.
+	(finish_bitfield_layout): Likewise.
+	(finish_record_layout): Call finish_bitfield_layout.
+	* tree.c (free_lang_data_in_decl): Only free DECL_QUALIFIER
+	for QUAL_UNION_TYPE fields.
+	* tree-streamer-in.c (lto_input_ts_field_decl_tree_pointers):
+	Stream DECL_BIT_FIELD_REPRESENTATIVE.
+	* tree-streamer-out.c (write_ts_field_decl_tree_pointers): Likewise.
+
+	PR middle-end/52080
+	PR middle-end/52097
+	PR middle-end/48124
+	* expr.c (get_bit_range): Unconditionally extract bitrange
+	from DECL_BIT_FIELD_REPRESENTATIVE.
+	(expand_assignment): Adjust call to get_bit_range.
+
 2012-03-14  Richard Guenther  <rguenther@suse.de>
 
 	PR middle-end/52578

--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -4439,113 +4439,43 @@ optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
 /* In the C++ memory model, consecutive bit fields in a structure are
    considered one memory location.
 
-   Given a COMPONENT_REF, this function returns the bit range of
-   consecutive bits in which this COMPONENT_REF belongs in.  The
-   values are returned in *BITSTART and *BITEND.  If either the C++
-   memory model is not activated, or this memory access is not thread
-   visible, 0 is returned in *BITSTART and *BITEND.
-
-   EXP is the COMPONENT_REF.
-   INNERDECL is the actual object being referenced.
-   BITPOS is the position in bits where the bit starts within the structure.
-   BITSIZE is size in bits of the field being referenced in EXP.
-
-   For example, while storing into FOO.A here...
-
-      struct {
-        BIT 0:
-          unsigned int a : 4;
-          unsigned int b : 1;
-        BIT 8:
-          unsigned char c;
-          unsigned int d : 6;
-      } foo;
-
-   ...we are not allowed to store past <b>, so for the layout above, a
-   range of 0..7 (because no one cares if we store into the
-   padding).  */
+   Given a COMPONENT_REF EXP at bit position BITPOS, this function
+   returns the bit range of consecutive bits in which this COMPONENT_REF
+   belongs in.  The values are returned in *BITSTART and *BITEND.
+   If the access does not need to be restricted 0 is returned in
+   *BITSTART and *BITEND.  */
 
 static void
 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
                unsigned HOST_WIDE_INT *bitend,
-               tree exp, tree innerdecl,
-               HOST_WIDE_INT bitpos, HOST_WIDE_INT bitsize)
+               tree exp,
+               HOST_WIDE_INT bitpos)
 {
-  tree field, record_type, fld;
-  bool found_field = false;
-  bool prev_field_is_bitfield;
+  unsigned HOST_WIDE_INT bitoffset;
+  tree field, repr, offset;
 
   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
 
-  /* If other threads can't see this value, no need to restrict stores.  */
-  if (ALLOW_STORE_DATA_RACES
-      || ((TREE_CODE (innerdecl) == MEM_REF
-           || TREE_CODE (innerdecl) == TARGET_MEM_REF)
-          && !ptr_deref_may_alias_global_p (TREE_OPERAND (innerdecl, 0)))
-      || (DECL_P (innerdecl)
-          && ((TREE_CODE (innerdecl) == VAR_DECL
-               && DECL_THREAD_LOCAL_P (innerdecl))
-              || !TREE_STATIC (innerdecl))))
+  field = TREE_OPERAND (exp, 1);
+  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
+     need to limit the range we can access.  */
+  if (!repr)
     {
       *bitstart = *bitend = 0;
       return;
     }
 
-  /* Bit field we're storing into.  */
-  field = TREE_OPERAND (exp, 1);
-  record_type = DECL_FIELD_CONTEXT (field);
+  /* Compute the adjustment to bitpos from the offset of the field
+     relative to the representative.  */
+  offset = size_diffop (DECL_FIELD_OFFSET (field),
+                        DECL_FIELD_OFFSET (repr));
+  bitoffset = (tree_low_cst (offset, 1) * BITS_PER_UNIT
+               + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+               - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
 
-  /* Count the contiguous bitfields for the memory location that
-     contains FIELD.  */
-  *bitstart = 0;
-  prev_field_is_bitfield = true;
-  for (fld = TYPE_FIELDS (record_type); fld; fld = DECL_CHAIN (fld))
-    {
-      tree t, offset;
-      enum machine_mode mode;
-      int unsignedp, volatilep;
-
-      if (TREE_CODE (fld) != FIELD_DECL)
-        continue;
-
-      t = build3 (COMPONENT_REF, TREE_TYPE (exp),
-                  unshare_expr (TREE_OPERAND (exp, 0)),
-                  fld, NULL_TREE);
-      get_inner_reference (t, &bitsize, &bitpos, &offset,
-                           &mode, &unsignedp, &volatilep, true);
-
-      if (field == fld)
-        found_field = true;
-
-      if (DECL_BIT_FIELD_TYPE (fld) && bitsize > 0)
-        {
-          if (prev_field_is_bitfield == false)
-            {
-              *bitstart = bitpos;
-              prev_field_is_bitfield = true;
-            }
-        }
-      else
-        {
-          prev_field_is_bitfield = false;
-          if (found_field)
-            break;
-        }
-    }
-  gcc_assert (found_field);
-
-  if (fld)
-    {
-      /* We found the end of the bit field sequence.  Include the
-         padding up to the next field and be done.  */
-      *bitend = bitpos - 1;
-    }
-  else
-    {
-      /* If this is the last element in the structure, include the padding
-         at the end of structure.  */
-      *bitend = TREE_INT_CST_LOW (TYPE_SIZE (record_type)) - 1;
-    }
+  *bitstart = bitpos - bitoffset;
+  *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
 }
 
 /* Returns true if the MEM_REF REF refers to an object that does not
@@ -4682,8 +4612,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       if (TREE_CODE (to) == COMPONENT_REF
           && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
-        get_bit_range (&bitregion_start, &bitregion_end,
-                       to, tem, bitpos, bitsize);
+        get_bit_range (&bitregion_start, &bitregion_end, to, bitpos);
 
       /* If we are going to use store_bit_field and extract_bit_field,
          make sure to_rtx will be safe for multiple use.  */
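
With the representative in place, the new get_bit_range reduces to plain
offset arithmetic.  A hedged sketch with ordinary integers (the layout is
hypothetical, and the byte-offset term from DECL_FIELD_OFFSET is zero here
and therefore omitted):

#include <assert.h>

int
main (void)
{
  /* Model struct { unsigned a : 4; unsigned b : 1; unsigned char c; }
     storing to b, whose representative is the QImode byte at bits 0..7.  */
  unsigned bitpos = 4;            /* bit position of b within the struct */
  unsigned field_bit_offset = 4;  /* DECL_FIELD_BIT_OFFSET (field) */
  unsigned repr_bit_offset = 0;   /* DECL_FIELD_BIT_OFFSET (repr) */
  unsigned repr_size = 8;         /* DECL_SIZE (repr), in bits */

  unsigned bitoffset = field_bit_offset - repr_bit_offset;
  unsigned bitstart = bitpos - bitoffset;      /* *bitstart in the patch */
  unsigned bitend = bitstart + repr_size - 1;  /* *bitend in the patch */

  assert (bitstart == 0 && bitend == 7);  /* the byte holding c is off-limits */
  return 0;
}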

--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -1722,6 +1722,190 @@ finalize_type_size (tree type)
     }
 }
 
+/* Return a new underlying object for a bitfield started with FIELD.  */
+
+static tree
+start_bitfield_representative (tree field)
+{
+  tree repr = make_node (FIELD_DECL);
+  DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
+  /* Force the representative to begin at a BITS_PER_UNIT aligned
+     boundary - C++ may use tail-padding of a base object to
+     continue packing bits so the bitfield region does not start
+     at bit zero (see g++.dg/abi/bitfield5.C for example).
+     Unallocated bits may happen for other reasons as well,
+     for example Ada which allows explicit bit-granular structure layout.  */
+  DECL_FIELD_BIT_OFFSET (repr)
+    = size_binop (BIT_AND_EXPR,
+                  DECL_FIELD_BIT_OFFSET (field),
+                  bitsize_int (~(BITS_PER_UNIT - 1)));
+  SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
+  DECL_SIZE (repr) = DECL_SIZE (field);
+  DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
+  DECL_PACKED (repr) = DECL_PACKED (field);
+  DECL_CONTEXT (repr) = DECL_CONTEXT (field);
+  return repr;
+}
+
+/* Finish up a bitfield group that was started by creating the underlying
+   object REPR with the last field in the bitfield group FIELD.  */
+
+static void
+finish_bitfield_representative (tree repr, tree field)
+{
+  unsigned HOST_WIDE_INT bitsize, maxbitsize;
+  enum machine_mode mode;
+  tree nextf, size;
+
+  size = size_diffop (DECL_FIELD_OFFSET (field),
+                      DECL_FIELD_OFFSET (repr));
+  gcc_assert (host_integerp (size, 1));
+  bitsize = (tree_low_cst (size, 1) * BITS_PER_UNIT
+             + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+             - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1)
+             + tree_low_cst (DECL_SIZE (field), 1));
+
+  /* Now nothing tells us how to pad out bitsize ...  */
+  nextf = DECL_CHAIN (field);
+  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
+    nextf = DECL_CHAIN (nextf);
+  if (nextf)
+    {
+      tree maxsize;
+      /* If there was an error, the field may be not layed out
+         correctly.  Don't bother to do anything.  */
+      if (TREE_TYPE (nextf) == error_mark_node)
+        return;
+      maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
+                             DECL_FIELD_OFFSET (repr));
+      gcc_assert (host_integerp (maxsize, 1));
+      maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
+                    + tree_low_cst (DECL_FIELD_BIT_OFFSET (nextf), 1)
+                    - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+    }
+  else
+    {
+      /* ??? If you consider that tail-padding of this struct might be
+         re-used when deriving from it we cannot really do the following
+         and thus need to set maxsize to bitsize?  */
+      tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)),
+                                  DECL_FIELD_OFFSET (repr));
+      gcc_assert (host_integerp (maxsize, 1));
+      maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
+                    - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+    }
+
+  /* Only if we don't artificially break up the representative in
+     the middle of a large bitfield with different possibly
+     overlapping representatives.  And all representatives start
+     at byte offset.  */
+  gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
+
+  /* Round up bitsize to multiples of BITS_PER_UNIT.  */
+  bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
+
+  /* Find the smallest nice mode to use.  */
+  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+       mode = GET_MODE_WIDER_MODE (mode))
+    if (GET_MODE_BITSIZE (mode) >= bitsize)
+      break;
+  if (mode != VOIDmode
+      && (GET_MODE_BITSIZE (mode) > maxbitsize
+          || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE))
+    mode = VOIDmode;
+
+  if (mode == VOIDmode)
+    {
+      /* We really want a BLKmode representative only as a last resort,
+         considering the member b in
+           struct { int a : 7; int b : 17; int c; } __attribute__((packed));
+         Otherwise we simply want to split the representative up
+         allowing for overlaps within the bitfield region as required for
+           struct { int a : 7; int b : 7;
+                    int c : 10; int d; } __attribute__((packed));
+         [0, 15] HImode for a and b, [8, 23] HImode for c.  */
+      DECL_SIZE (repr) = bitsize_int (bitsize);
+      DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
+      DECL_MODE (repr) = BLKmode;
+      TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
+                                                 bitsize / BITS_PER_UNIT);
+    }
+  else
+    {
+      unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
+      DECL_SIZE (repr) = bitsize_int (modesize);
+      DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
+      DECL_MODE (repr) = mode;
+      TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
+    }
+
+  /* Remember whether the bitfield group is at the end of the
+     structure or not.  */
+  DECL_CHAIN (repr) = nextf;
+}
+
+/* Compute and set FIELD_DECLs for the underlying objects we should
+   use for bitfield access for the structure layed out with RLI.  */
+
+static void
+finish_bitfield_layout (record_layout_info rli)
+{
+  tree field, prev;
+  tree repr = NULL_TREE;
+
+  /* Unions would be special, for the ease of type-punning optimizations
+     we could use the underlying type as hint for the representative
+     if the bitfield would fit and the representative would not exceed
+     the union in size.  */
+  if (TREE_CODE (rli->t) != RECORD_TYPE)
+    return;
+
+  for (prev = NULL_TREE, field = TYPE_FIELDS (rli->t);
+       field; field = DECL_CHAIN (field))
+    {
+      if (TREE_CODE (field) != FIELD_DECL)
+        continue;
+
+      /* In the C++ memory model, consecutive bit fields in a structure are
+         considered one memory location and updating a memory location
+         may not store into adjacent memory locations.  */
+      if (!repr
+          && DECL_BIT_FIELD_TYPE (field))
+        {
+          /* Start new representative.  */
+          repr = start_bitfield_representative (field);
+        }
+      else if (repr
+               && ! DECL_BIT_FIELD_TYPE (field))
+        {
+          /* Finish off new representative.  */
+          finish_bitfield_representative (repr, prev);
+          repr = NULL_TREE;
+        }
+      else if (DECL_BIT_FIELD_TYPE (field))
+        {
+          /* Zero-size bitfields finish off a representative and
+             do not have a representative themselves.  This is
+             required by the C++ memory model.  */
+          if (integer_zerop (DECL_SIZE (field)))
+            {
+              finish_bitfield_representative (repr, prev);
+              repr = NULL_TREE;
+            }
+        }
+      else
+        continue;
+
+      if (repr)
+        DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
+
+      prev = field;
+    }
+
+  if (repr)
+    finish_bitfield_representative (repr, prev);
+}
+
 /* Do all of the work required to layout the type indicated by RLI,
    once the fields have been laid out.  This function will call `free'
    for RLI, unless FREE_P is false.  Passing a value other than false
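
The grouping the loop above performs can be simulated with a flat field
table.  A hedged sketch (the struct is made up, plain ints stand in for
FIELD_DECL chains, and it assumes a group never starts with a zero-size
bit-field):

#include <stdio.h>

struct field { const char *name; int is_bitfield; int bits; };

int
main (void)
{
  /* Models struct T { unsigned a:3; unsigned b:5; int :0;
                       unsigned c:7; char d; };  */
  const struct field fields[] = {
    { "a", 1, 3 }, { "b", 1, 5 }, { ":0", 1, 0 },
    { "c", 1, 7 }, { "d", 0, 8 },
  };
  int repr = 0;       /* 0 means no open representative */
  int next_repr = 1;
  unsigned i;

  for (i = 0; i < sizeof fields / sizeof *fields; i++)
    {
      const struct field *f = &fields[i];
      if (!repr && f->is_bitfield && f->bits > 0)
        repr = next_repr++;   /* start a new representative */
      else if (repr && !f->is_bitfield)
        repr = 0;             /* plain field finishes the group */
      else if (f->is_bitfield && f->bits == 0)
        repr = 0;             /* zero-size bit-field finishes it too */
      if (f->is_bitfield && f->bits > 0 && repr)
        printf ("%s -> R%d\n", f->name, repr);  /* a,b -> R1; c -> R2 */
    }
  return 0;
}
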
@@ -1742,6 +1926,9 @@ finish_record_layout (record_layout_info rli, int free_p)
   /* Perform any last tweaks to the TYPE_SIZE, etc.  */
   finalize_type_size (rli->t);
 
+  /* Compute bitfield representatives.  */
+  finish_bitfield_layout (rli);
+
   /* Propagate TYPE_PACKED to variants.  With C++ templates,
      handle_packed_attribute is too early to do this.  */
   for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,13 @@
+2012-03-14  Richard Guenther  <rguenther@suse.de>
+
+	PR middle-end/52080
+	PR middle-end/52097
+	PR middle-end/48124
+	* gcc.dg/torture/pr48124-1.c: New testcase.
+	* gcc.dg/torture/pr48124-2.c: Likewise.
+	* gcc.dg/torture/pr48124-3.c: Likewise.
+	* gcc.dg/torture/pr48124-4.c: Likewise.
+
 2012-03-14  Richard Guenther  <rguenther@suse.de>
 
 	PR middle-end/52578

--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr48124-1.c
@@ -0,0 +1,33 @@
+/* { dg-do run } */
+/* { dg-options "-fno-toplevel-reorder" } */
+
+extern void abort (void);
+
+struct S
+{
+  signed a : 26;
+  signed b : 16;
+  signed c : 10;
+  volatile signed d : 14;
+};
+
+static struct S e = { 0, 0, 0, 1 };
+static int f = 1;
+
+void __attribute__((noinline))
+foo (void)
+{
+  e.d = 0;
+  f = 2;
+}
+
+int
+main ()
+{
+  if (e.a || e.b || e.c || e.d != 1 || f != 1)
+    abort ();
+  foo ();
+  if (e.a || e.b || e.c || e.d || f != 2)
+    abort ();
+  return 0;
+}

--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr48124-2.c
@@ -0,0 +1,27 @@
+/* { dg-do run } */
+
+extern void abort (void);
+
+static volatile struct S0 {
+  short f3[9];
+  unsigned f8 : 15;
+} s = {1};
+static unsigned short sh = 0x1234;
+
+struct S0 a, b;
+int vi = 0;
+
+void func_4()
+{
+  s.f8 |= 1;
+  sh = 15;
+  if (vi) a = b;
+}
+
+int main()
+{
+  func_4();
+  if (sh != 15)
+    abort ();
+  return 0;
+}

--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr48124-3.c
@@ -0,0 +1,32 @@
+/* { dg-do run } */
+extern void abort (void);
+
+struct S1
+{
+  int f0;
+  int:1;
+  int f3;
+  int:1;
+  int:0;
+  int f6:1;
+};
+int g_13 = 1;
+volatile struct S1 g_118 = {
+  1
+};
+
+void __attribute__((noinline))
+func_46 ()
+{
+  for (g_13 = 0; g_13 >= 0; g_13 -= 1)
+    g_118.f6 = 0;
+}
+
+int
+main ()
+{
+  func_46 ();
+  if (g_13 != -1)
+    abort ();
+  return 0;
+}

--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr48124-4.c
@@ -0,0 +1,28 @@
+/* { dg-do run } */
+extern void abort (void);
+
+struct S1 {
+  unsigned f0, f1;
+  unsigned short f2, f3;
+  unsigned f4 : 16;
+  unsigned f5, f6;
+  volatile unsigned f7 : 28;
+};
+static struct S1 g_76;
+static struct S1 g_245 = {0,0,0,0,0,0,0,1};
+static signed char g_323 = 0x80;
+static void func_1(void)
+{
+  g_245.f7 &= 1;
+  for (g_323 = 0; g_323 <= -1; g_323 -= 2) {
+    g_76 = g_76;
+    g_76.f4 ^= 11;
+  }
+}
+int main()
+{
+  func_1();
+  if (g_323 != 0 || g_245.f7 != 1)
+    abort ();
+  return 0;
+}

--- a/gcc/tree-streamer-in.c
+++ b/gcc/tree-streamer-in.c
@@ -640,7 +640,7 @@ lto_input_ts_field_decl_tree_pointers (struct lto_input_block *ib,
 {
   DECL_FIELD_OFFSET (expr) = stream_read_tree (ib, data_in);
   DECL_BIT_FIELD_TYPE (expr) = stream_read_tree (ib, data_in);
-  /* Do not stream DECL_QUALIFIER, it is useless after gimplification.  */
+  DECL_BIT_FIELD_REPRESENTATIVE (expr) = stream_read_tree (ib, data_in);
   DECL_FIELD_BIT_OFFSET (expr) = stream_read_tree (ib, data_in);
   DECL_FCONTEXT (expr) = stream_read_tree (ib, data_in);
 }

--- a/gcc/tree-streamer-out.c
+++ b/gcc/tree-streamer-out.c
@@ -552,7 +552,7 @@ write_ts_field_decl_tree_pointers (struct output_block *ob, tree expr,
 {
   stream_write_tree (ob, DECL_FIELD_OFFSET (expr), ref_p);
   stream_write_tree (ob, DECL_BIT_FIELD_TYPE (expr), ref_p);
-  /* Do not stream DECL_QUALIFIER, it is useless after gimplification.  */
+  stream_write_tree (ob, DECL_BIT_FIELD_REPRESENTATIVE (expr), ref_p);
  stream_write_tree (ob, DECL_FIELD_BIT_OFFSET (expr), ref_p);
   stream_write_tree (ob, DECL_FCONTEXT (expr), ref_p);
 }

--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -4633,7 +4633,8 @@ free_lang_data_in_decl (tree decl)
   if (TREE_CODE (decl) == FIELD_DECL)
     {
       free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
-      DECL_QUALIFIER (decl) = NULL_TREE;
+      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
+        DECL_QUALIFIER (decl) = NULL_TREE;
     }
 
   if (TREE_CODE (decl) == FUNCTION_DECL)

--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -3024,6 +3024,11 @@ struct GTY(()) tree_decl_with_rtl {
 #define DECL_BIT_FIELD_TYPE(NODE) \
   (FIELD_DECL_CHECK (NODE)->field_decl.bit_field_type)
 
+/* In a FIELD_DECL of a RECORD_TYPE, this is a pointer to the storage
+   representative FIELD_DECL.  */
+#define DECL_BIT_FIELD_REPRESENTATIVE(NODE) \
+  (FIELD_DECL_CHECK (NODE)->field_decl.qualifier)
+
 /* For a FIELD_DECL in a QUAL_UNION_TYPE, records the expression, which
    if nonzero, indicates that the field occupies the type.  */
 #define DECL_QUALIFIER(NODE) (FIELD_DECL_CHECK (NODE)->field_decl.qualifier)
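
Note that DECL_BIT_FIELD_REPRESENTATIVE deliberately reuses the
field_decl.qualifier slot of DECL_QUALIFIER, which is why the tree.c hunk
above clears that slot only for QUAL_UNION_TYPE fields.  A hedged
illustration of the overlay (not GCC's actual declarations):

#include <assert.h>
#include <stddef.h>

/* One pointer slot with two meanings, discriminated by the context type -
   mirroring how field_decl.qualifier is shared.  */
enum context_kind { RECORD_CONTEXT, QUAL_UNION_CONTEXT };

struct field_decl_sketch
{
  enum context_kind context;
  void *qualifier;  /* DECL_QUALIFIER or DECL_BIT_FIELD_REPRESENTATIVE */
};

static void
free_lang_data_sketch (struct field_decl_sketch *f)
{
  /* Clearing unconditionally would also throw away the representative;
     only QUAL_UNION_TYPE fields carry a language-specific qualifier.  */
  if (f->context == QUAL_UNION_CONTEXT)
    f->qualifier = NULL;
}

int
main (void)
{
  int repr_stand_in = 0;
  struct field_decl_sketch f = { RECORD_CONTEXT, &repr_stand_in };
  free_lang_data_sketch (&f);
  assert (f.qualifier == &repr_stand_in);  /* representative survives */
  return 0;
}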