Fix a couple of endianness issues in fold_ctor_reference

fold_ctor_reference attempts to use a recursive local processing in order
to call native_encode_expr on the leaf nodes of the constructor, before
falling back to calling native_encode_initializer if this fails.

There are a couple of issues related to endianness present in it:
  1) it does not specifically handle integral bit-fields; these are left-
justified on big-endian platforms, so they cannot be treated like ordinary fields.
  2) it does not check that the constructor uses the native storage order.

gcc/
	* gimple-fold.cc (fold_array_ctor_reference): Fix head comment.
	(fold_nonarray_ctor_reference): Likewise.  Specifically deal
	with integral bit-fields.
	(fold_ctor_reference): Make sure that the constructor uses the
	native storage order.

gcc/testsuite/
	* gcc.c-torture/execute/20230630-1.c: New test.
	* gcc.c-torture/execute/20230630-2.c: Likewise.
	* gcc.c-torture/execute/20230630-3.c: Likewise.
	* gcc.c-torture/execute/20230630-4.c: Likewise.
This commit is contained in:
Eric Botcazou 2023-06-30 18:05:34 +02:00
parent 1fb30818cf
commit d972c592b3
5 changed files with 148 additions and 21 deletions

View File

@ -7770,12 +7770,11 @@ get_base_constructor (tree base, poly_int64_pod *bit_offset,
}
}
/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
to the memory at bit OFFSET. When non-null, TYPE is the expected
type of the reference; otherwise the type of the referenced element
is used instead. When SIZE is zero, attempt to fold a reference to
the entire element which OFFSET refers to. Increment *SUBOFF by
the bit offset of the accessed element. */
/* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
the reference; otherwise the type of the referenced element is used instead.
When SIZE is zero, attempt to fold a reference to the entire element OFFSET
refers to. Increment *SUBOFF by the bit offset of the accessed element. */
static tree
fold_array_ctor_reference (tree type, tree ctor,
@ -7940,13 +7939,11 @@ fold_array_ctor_reference (tree type, tree ctor,
return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
is the expected type of the reference; otherwise the type of
the referenced member is used instead. When SIZE is zero,
attempt to fold a reference to the entire member which OFFSET
refers to; in this case. Increment *SUBOFF by the bit offset
of the accessed member. */
/* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
the reference; otherwise the type of the referenced member is used instead.
When SIZE is zero, attempt to fold a reference to the entire member OFFSET
refers to. Increment *SUBOFF by the bit offset of the accessed member. */
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
@ -7958,8 +7955,7 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
unsigned HOST_WIDE_INT cnt;
tree cfield, cval;
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
cval)
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
{
tree byte_offset = DECL_FIELD_OFFSET (cfield);
tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
@ -8031,6 +8027,19 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
return NULL_TREE;
offset_int inner_offset = offset_int (offset) - bitoffset;
/* Integral bit-fields are left-justified on big-endian targets, so
we must arrange for native_encode_int to start at their MSB. */
if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
{
if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
return NULL_TREE;
const unsigned int encoding_size
= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
if (BYTES_BIG_ENDIAN)
inner_offset += encoding_size - wi::to_offset (field_size);
}
return fold_ctor_reference (type, cval,
inner_offset.to_uhwi (), size,
from_decl, suboff);
@ -8043,7 +8052,7 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
return build_zero_cst (type);
}
/* CTOR is value initializing memory. Fold a reference of TYPE and
/* CTOR is a value initializing memory. Fold a reference of TYPE and
bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
is zero, attempt to fold a reference to the entire subobject
which OFFSET refers to. This is used when folding accesses to
@ -8084,7 +8093,8 @@ fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
}
return ret;
}
/* For constants and byte-aligned/sized reads try to go through
/* For constants and byte-aligned/sized reads, try to go through
native_encode/interpret. */
if (CONSTANT_CLASS_P (ctor)
&& BITS_PER_UNIT == 8
@ -8100,7 +8110,12 @@ fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
if (len > 0)
return native_interpret_expr (type, buf, len);
}
if (TREE_CODE (ctor) == CONSTRUCTOR)
/* For constructors, try first a recursive local processing, but in any case
this requires the native storage order. */
if (TREE_CODE (ctor) == CONSTRUCTOR
&& !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
&& TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
{
unsigned HOST_WIDE_INT dummy = 0;
if (!suboff)
@ -8115,9 +8130,9 @@ fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
from_decl, suboff);
/* Fall back to native_encode_initializer. Needs to be done
only in the outermost fold_ctor_reference call (because it itself
recurses into CONSTRUCTORs) and doesn't update suboff. */
/* Otherwise fall back to native_encode_initializer. This may be done
only from the outermost fold_ctor_reference call (because it itself
recurses into CONSTRUCTORs and doesn't update suboff). */
if (ret == NULL_TREE
&& suboff == &dummy
&& BITS_PER_UNIT == 8

View File

@ -0,0 +1,23 @@
/* Regression test for fold_ctor_reference: folding a byte read from a
   fully-constant CONSTRUCTOR containing integral bit-fields must honor
   the target's byte order (bit-fields are left-justified on big-endian
   targets).  The exact shape of this test matters for the optimization
   being exercised -- do not restructure it.  */
struct S {
short int i : 12;
char c1 : 1;
char c2 : 1;
char c3 : 1;
char c4 : 1;
};
int main (void)
{
/* 341 = 0b0001'0101'0101 in the 12-bit field i.  */
struct S s0 = { 341, 1, 1, 1, 1 };
char *p = (char *) &s0;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
/* Little-endian: first byte holds the low 8 bits of i, 0x55 = 85.  */
if (*p != 85)
__builtin_abort ();
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
/* Big-endian: i is left-justified, so the first byte holds its top
   8 bits, 0b00010101 = 21.  */
if (*p != 21)
__builtin_abort ();
#endif
return 0;
}

View File

@ -0,0 +1,29 @@
/* Regression test for fold_ctor_reference on a struct with reverse
   scalar storage order: the constant folder must not apply its
   native-order recursive processing to such a CONSTRUCTOR, so the
   expected first byte is the opposite of the native-order case.  */

/* No trailing semicolon in the macro: it is used as "} REVERSE_SSO;",
   and a semicolon here would expand to a stray empty file-scope
   declaration (";;"), which is invalid in strict ISO C.  */
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define REVERSE_SSO __attribute__((scalar_storage_order("big-endian")))
#else
#define REVERSE_SSO __attribute__((scalar_storage_order("little-endian")))
#endif

struct S {
short int i : 12;
char c1 : 1;
char c2 : 1;
char c3 : 1;
char c4 : 1;
} REVERSE_SSO;

int main (void)
{
/* 341 = 0b0001'0101'0101 in the 12-bit field i.  */
struct S s0 = { 341, 1, 1, 1, 1 };
char *p = (char *) &s0;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
/* Storage order is reversed (big-endian layout on an LE target):
   the first byte holds the top 8 bits of i, 0b00010101 = 21.  */
if (*p != 21)
__builtin_abort ();
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
/* Storage order is reversed (little-endian layout on a BE target):
   the first byte holds the low 8 bits of i, 0x55 = 85.  */
if (*p != 85)
__builtin_abort ();
#endif
return 0;
}

View File

@ -0,0 +1,27 @@
/* Regression test for fold_ctor_reference: like 20230630-1.c but with a
   24-bit bit-field, so the left-justification adjustment on big-endian
   targets spans more than one byte.  The exact shape of this test
   matters for the optimization being exercised -- do not restructure
   it.  */
struct S {
int i : 24;
char c1 : 1;
char c2 : 1;
char c3 : 1;
char c4 : 1;
char c5 : 1;
char c6 : 1;
char c7 : 1;
char c8 : 1;
};
int main (void)
{
/* 1193046 = 0x123456 in the 24-bit field i.  */
struct S s0 = { 1193046, 1, 1, 1, 1, 1, 1, 1, 1 };
char *p = (char *) &s0;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
/* Little-endian: first byte holds the low 8 bits of i, 0x56 = 86.  */
if (*p != 86)
__builtin_abort ();
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
/* Big-endian: i is left-justified, so the first byte holds its top
   8 bits, 0x12 = 18.  */
if (*p != 18)
__builtin_abort ();
#endif
return 0;
}

View File

@ -0,0 +1,33 @@
/* Regression test for fold_ctor_reference on a struct with reverse
   scalar storage order and a 24-bit bit-field: the constant folder must
   not apply its native-order recursive processing to such a
   CONSTRUCTOR, so the expected first byte is the opposite of the
   native-order case.  */

/* No trailing semicolon in the macro: it is used as "} REVERSE_SSO;",
   and a semicolon here would expand to a stray empty file-scope
   declaration (";;"), which is invalid in strict ISO C.  */
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define REVERSE_SSO __attribute__((scalar_storage_order("big-endian")))
#else
#define REVERSE_SSO __attribute__((scalar_storage_order("little-endian")))
#endif

struct S {
int i : 24;
char c1 : 1;
char c2 : 1;
char c3 : 1;
char c4 : 1;
char c5 : 1;
char c6 : 1;
char c7 : 1;
char c8 : 1;
} REVERSE_SSO;

int main (void)
{
/* 1193046 = 0x123456 in the 24-bit field i.  */
struct S s0 = { 1193046, 1, 1, 1, 1, 1, 1, 1, 1 };
char *p = (char *) &s0;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
/* Storage order is reversed (big-endian layout on an LE target):
   the first byte holds the top 8 bits of i, 0x12 = 18.  */
if (*p != 18)
__builtin_abort ();
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
/* Storage order is reversed (little-endian layout on a BE target):
   the first byte holds the low 8 bits of i, 0x56 = 86.  */
if (*p != 86)
__builtin_abort ();
#endif
return 0;
}