OpenMP/OpenACC: Unordered/non-constant component offset runtime diagnostic

This patch adds support for non-constant component offsets in "map"
clauses for OpenMP (and the equivalents for OpenACC), which cannot be
sorted into order at compile time.  Normally struct accesses in
such clauses are gathered together and sorted into increasing address
order after a "GOMP_MAP_STRUCT" node: if we have variable indices,
that is no longer possible.
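
For illustration, a minimal C sketch in the style of the new testcases
below (the struct, array and index names are purely illustrative):

  struct st { int *p; };
  struct st s[10];

  void f (int i, int j)
  {
    /* Constant indices: the component accesses can be gathered and
       sorted into increasing address order at compile time.  */
    #pragma omp target map(s[0].p, s[1].p, s[0].p[0:2], s[1].p[1:3])
      { s[0].p[0] = 5; s[1].p[1] = 7; }

    /* Variable indices: the relative order of s[i].p and s[j].p is not
       known at compile time, so no such sorting is possible.  */
    #pragma omp target map(s[i].p, s[j].p, s[i].p[0:2], s[j].p[1:3])
      { s[i].p[0]++; s[j].p[1]++; }
  }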

This version of the patch scales back the previously-posted version to
merely add a diagnostic for incorrect usage of component accesses with
variably-indexed arrays of structs: the only permitted case is one where
multiple indices happen to be the same, but that could not be proven at
compile time.  Rather than silently producing the wrong result for
cases where the indices are in fact different, we error out (e.g.,
"map(dtarr(i)%arrptr, dtarr(j)%arrptr(4:8))", for different i/j).

For now, multiple *constant* array indices are still supported (see
map-arrayofstruct-1.c).  That could perhaps be addressed with a follow-up
patch, if necessary.

This version of the patch renumbers the GOMP_MAP_STRUCT_UNORD kind to
avoid clashing with the OpenACC "non-contiguous" dynamic array support
(though that is not yet applied to mainline).

2023-08-18  Julian Brown  <julian@codesourcery.com>

gcc/
	* gimplify.cc (extract_base_bit_offset): Add VARIABLE_OFFSET parameter.
	(omp_get_attachment, omp_group_last, omp_group_base,
	omp_directive_maps_explicitly): Add GOMP_MAP_STRUCT_UNORD support.
	(omp_accumulate_sibling_list): Update calls to extract_base_bit_offset.
	Support GOMP_MAP_STRUCT_UNORD.
	(omp_build_struct_sibling_lists, gimplify_scan_omp_clauses,
	gimplify_adjust_omp_clauses, gimplify_omp_target_update): Add
	GOMP_MAP_STRUCT_UNORD support.
	* omp-low.cc (lower_omp_target): Add GOMP_MAP_STRUCT_UNORD support.
	* tree-pretty-print.cc (dump_omp_clause): Likewise.

include/
	* gomp-constants.h (gomp_map_kind): Add GOMP_MAP_STRUCT_UNORD.

libgomp/
	* oacc-mem.c (find_group_last, goacc_enter_data_internal,
	goacc_exit_data_internal, GOACC_enter_exit_data): Add
	GOMP_MAP_STRUCT_UNORD support.
	* target.c (gomp_map_vars_internal): Add GOMP_MAP_STRUCT_UNORD support.
	Detect incorrect use of variable indexing of arrays of structs.
	(GOMP_target_enter_exit_data, gomp_target_task_fn): Add
	GOMP_MAP_STRUCT_UNORD support.
	* testsuite/libgomp.c-c++-common/map-arrayofstruct-1.c: New test.
	* testsuite/libgomp.c-c++-common/map-arrayofstruct-2.c: New test.
	* testsuite/libgomp.c-c++-common/map-arrayofstruct-3.c: New test.
	* testsuite/libgomp.fortran/map-subarray-5.f90: New test.
commit f5745dc142 (parent ef9754dfdf), Julian Brown, 2022-10-09 20:26:09 +00:00
10 changed files with 390 additions and 27 deletions

gcc/gimplify.cc

@@ -9238,7 +9238,8 @@ build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
static tree
extract_base_bit_offset (tree base, poly_int64 *bitposp,
poly_offset_int *poffsetp)
poly_offset_int *poffsetp,
bool *variable_offset)
{
tree offset;
poly_int64 bitsize, bitpos;
@@ -9256,10 +9257,13 @@ extract_base_bit_offset (tree base, poly_int64 *bitposp,
if (offset && poly_int_tree_p (offset))
{
poffset = wi::to_poly_offset (offset);
offset = NULL_TREE;
*variable_offset = false;
}
else
poffset = 0;
{
poffset = 0;
*variable_offset = (offset != NULL_TREE);
}
if (maybe_ne (bitpos, 0))
poffset += bits_to_bytes_round_down (bitpos);
@@ -9439,6 +9443,7 @@ omp_get_attachment (omp_mapping_group *grp)
return error_mark_node;
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
case GOMP_MAP_FORCE_DEVICEPTR:
case GOMP_MAP_DEVICE_RESIDENT:
case GOMP_MAP_LINK:
@@ -9544,6 +9549,7 @@ omp_group_last (tree *start_p)
break;
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
{
unsigned HOST_WIDE_INT num_mappings
= tree_to_uhwi (OMP_CLAUSE_SIZE (c));
@@ -9710,6 +9716,7 @@ omp_group_base (omp_mapping_group *grp, unsigned int *chained,
return error_mark_node;
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
{
unsigned HOST_WIDE_INT num_mappings
= tree_to_uhwi (OMP_CLAUSE_SIZE (node));
@@ -10352,7 +10359,8 @@ omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
/* We might be called during omp_build_struct_sibling_lists, when
GOMP_MAP_STRUCT might have been inserted at the start of the group.
Skip over that, and also possibly the node after it. */
if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT)
if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
|| OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
{
grp_first = OMP_CLAUSE_CHAIN (grp_first);
if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
@@ -11040,7 +11048,9 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
}
}
tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
bool variable_offset;
tree base
= extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
int base_token;
for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
@@ -11074,14 +11084,20 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
{
tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
OMP_CLAUSE_DECL (l) = unshare_expr (base);
OMP_CLAUSE_SIZE (l) = size_int (1);
enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;
if (struct_map_to_clause == NULL)
struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
if (variable_offset)
str_kind = GOMP_MAP_STRUCT_UNORD;
tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
OMP_CLAUSE_DECL (l) = unshare_expr (base);
OMP_CLAUSE_SIZE (l) = size_int (1);
struct_map_to_clause->put (base, l);
/* On first iterating through the clause list, we insert the struct node
@@ -11321,6 +11337,11 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
{
tree *osc = struct_map_to_clause->get (base);
tree *sc = NULL, *scp = NULL;
bool unordered = false;
if (osc && OMP_CLAUSE_MAP_KIND (*osc) == GOMP_MAP_STRUCT_UNORD)
unordered = true;
unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
sc = &OMP_CLAUSE_CHAIN (*osc);
/* The struct mapping might be immediately followed by a
@@ -11361,12 +11382,20 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
== REFERENCE_TYPE))
sc_decl = TREE_OPERAND (sc_decl, 0);
tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
bool variable_offset2;
tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
&variable_offset2);
if (!base2 || !operand_equal_p (base2, base, 0))
break;
if (scp)
continue;
if ((region_type & ORT_ACC) != 0)
if (variable_offset2)
{
OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
unordered = true;
break;
}
else if ((region_type & ORT_ACC) != 0)
{
/* For OpenACC, allow (ignore) duplicate struct accesses in
the middle of a mapping clause, e.g. "mystruct->foo" in:
@@ -11398,6 +11427,15 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
}
}
/* If this is an unordered struct, just insert the new element at the
end of the list. */
if (unordered)
{
for (; i < elems; i++)
sc = &OMP_CLAUSE_CHAIN (*sc);
scp = NULL;
}
OMP_CLAUSE_SIZE (*osc)
= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
@@ -11789,14 +11827,42 @@ omp_build_struct_sibling_lists (enum tree_code code,
/* This is the first sorted node in the struct sibling list. Use it
to recalculate the correct bias to use.
(&first_node - attach_decl). */
tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
first_node = build_fold_addr_expr (first_node);
first_node = fold_convert (ptrdiff_type_node, first_node);
(&first_node - attach_decl).
For GOMP_MAP_STRUCT_UNORD, we need e.g. the
min(min(min(first,second),third),fourth) element, because the
elements aren't in any particular order. */
tree lowest_addr;
if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
{
tree first_node = OMP_CLAUSE_CHAIN (attach);
unsigned HOST_WIDE_INT num_mappings
= tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
lowest_addr = OMP_CLAUSE_DECL (first_node);
lowest_addr = build_fold_addr_expr (lowest_addr);
lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
tree next_node = OMP_CLAUSE_CHAIN (first_node);
while (num_mappings > 1)
{
tree tmp = OMP_CLAUSE_DECL (next_node);
tmp = build_fold_addr_expr (tmp);
tmp = fold_convert (pointer_sized_int_node, tmp);
lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
lowest_addr, tmp);
next_node = OMP_CLAUSE_CHAIN (next_node);
num_mappings--;
}
lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
}
else
{
tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
first_node = build_fold_addr_expr (first_node);
lowest_addr = fold_convert (ptrdiff_type_node, first_node);
}
tree attach_decl = OMP_CLAUSE_DECL (attach);
attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
OMP_CLAUSE_SIZE (attach)
= fold_build2 (MINUS_EXPR, ptrdiff_type_node, first_node,
= fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
attach_decl);
/* Remove GOMP_MAP_ATTACH node from after struct node. */
@@ -12364,7 +12430,8 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
GOVD_FIRSTPRIVATE | GOVD_SEEN);
}
if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
&& (addr_tokens[0]->type == STRUCTURE_BASE
|| addr_tokens[0]->type == ARRAY_BASE)
&& addr_tokens[0]->u.structure_base_kind == BASE_DECL)
@@ -13990,7 +14057,8 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
}
}
}
if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
&& (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
{
remove = true;
@@ -14034,7 +14102,8 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
in target block and none of the mapping has always modifier,
remove all the struct element mappings, which immediately
follow the GOMP_MAP_STRUCT map clause. */
if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
{
HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
while (cnt--)
@@ -16814,6 +16883,7 @@ gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
have_clause = false;
break;
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
have_clause = false;
break;
default:

gcc/omp-low.cc

@@ -12712,6 +12712,7 @@ lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
case GOMP_MAP_FIRSTPRIVATE_POINTER:
case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
case GOMP_MAP_ALWAYS_POINTER:
case GOMP_MAP_ATTACH:
case GOMP_MAP_DETACH:

gcc/tree-pretty-print.cc

@@ -974,6 +974,9 @@ dump_omp_clause (pretty_printer *pp, tree clause, int spc, dump_flags_t flags)
case GOMP_MAP_STRUCT:
pp_string (pp, "struct");
break;
case GOMP_MAP_STRUCT_UNORD:
pp_string (pp, "struct_unord");
break;
case GOMP_MAP_ALWAYS_POINTER:
pp_string (pp, "always_pointer");
break;

include/gomp-constants.h

@@ -153,6 +153,12 @@ enum gomp_map_kind
(address of the last adjacent entry plus its size). */
GOMP_MAP_STRUCT = (GOMP_MAP_FLAG_SPECIAL_2
| GOMP_MAP_FLAG_SPECIAL | 0),
/* As above, but followed by an unordered list of adjacent entries.
At present, this is used only to diagnose incorrect usage of variable
indices into arrays of structs. */
GOMP_MAP_STRUCT_UNORD = (GOMP_MAP_FLAG_SPECIAL_4
| GOMP_MAP_FLAG_SPECIAL_2
| GOMP_MAP_FLAG_SPECIAL | 0),
/* On a location of a pointer/reference that is assumed to be already mapped
earlier, store the translated address of the preceeding mapping.
No refcount is bumped by this, and the store is done unconditionally. */

libgomp/oacc-mem.c

@@ -1028,6 +1028,7 @@ find_group_last (int pos, size_t mapnum, size_t *sizes, unsigned short *kinds)
break;
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
pos += sizes[pos];
break;
@@ -1088,6 +1089,7 @@ goacc_enter_data_internal (struct gomp_device_descr *acc_dev, size_t mapnum,
switch (kinds[i] & 0xff)
{
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
{
size = (uintptr_t) hostaddrs[group_last] + sizes[group_last]
- (uintptr_t) hostaddrs[i];
@@ -1334,6 +1336,7 @@ goacc_exit_data_internal (struct gomp_device_descr *acc_dev, size_t mapnum,
break;
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
/* Skip the 'GOMP_MAP_STRUCT' itself, and use the regular processing
for all its entries. This special handling exists for GCC 10.1
compatibility; afterwards, we're not generating these no-op
@@ -1472,7 +1475,8 @@ GOACC_enter_exit_data (int flags_m, size_t mapnum, void **hostaddrs,
if (kind == GOMP_MAP_POINTER
|| kind == GOMP_MAP_TO_PSET
|| kind == GOMP_MAP_STRUCT)
|| kind == GOMP_MAP_STRUCT
|| kind == GOMP_MAP_STRUCT_UNORD)
continue;
if (kind == GOMP_MAP_FORCE_ALLOC

libgomp/target.c

@@ -1084,7 +1084,8 @@ gomp_map_vars_internal (struct gomp_device_descr *devicep,
tgt->list[i].offset = 0;
continue;
}
else if ((kind & typemask) == GOMP_MAP_STRUCT)
else if ((kind & typemask) == GOMP_MAP_STRUCT
|| (kind & typemask) == GOMP_MAP_STRUCT_UNORD)
{
size_t first = i + 1;
size_t last = i + sizes[i];
@@ -1468,6 +1469,20 @@ gomp_map_vars_internal (struct gomp_device_descr *devicep,
tgt->list[i].offset = OFFSET_INLINED;
}
continue;
case GOMP_MAP_STRUCT_UNORD:
if (sizes[i] > 1)
{
void *first = hostaddrs[i + 1];
for (size_t j = i + 1; j < i + sizes[i]; j++)
if (hostaddrs[j + 1] != first)
{
gomp_mutex_unlock (&devicep->lock);
gomp_fatal ("Mapped array elements must be the "
"same (%p vs %p)", first,
hostaddrs[j + 1]);
}
}
/* Fallthrough. */
case GOMP_MAP_STRUCT:
first = i + 1;
last = i + sizes[i];
@@ -1586,9 +1601,40 @@ gomp_map_vars_internal (struct gomp_device_descr *devicep,
k->host_end = k->host_start + sizeof (void *);
splay_tree_key n = splay_tree_lookup (mem_map, k);
if (n && n->refcount != REFCOUNT_LINK)
gomp_map_vars_existing (devicep, aq, n, k, &tgt->list[i],
kind & typemask, false, implicit, cbufp,
refcount_set);
{
if (field_tgt_clear != FIELD_TGT_EMPTY)
{
/* For this condition to be true, there must be a
duplicate struct element mapping. This can happen with
GOMP_MAP_STRUCT_UNORD mappings, for example. */
tgt->list[i].key = n;
if (openmp_p)
{
assert ((n->refcount & REFCOUNT_STRUCTELEM) != 0);
assert (field_tgt_structelem_first != NULL);
if (i == field_tgt_clear)
{
n->refcount |= REFCOUNT_STRUCTELEM_FLAG_LAST;
field_tgt_structelem_first = NULL;
}
}
if (i == field_tgt_clear)
field_tgt_clear = FIELD_TGT_EMPTY;
gomp_increment_refcount (n, refcount_set);
tgt->list[i].copy_from
= GOMP_MAP_COPY_FROM_P (kind & typemask);
tgt->list[i].always_copy_from
= GOMP_MAP_ALWAYS_FROM_P (kind & typemask);
tgt->list[i].is_attach = false;
tgt->list[i].offset = 0;
tgt->list[i].length = k->host_end - k->host_start;
}
else
gomp_map_vars_existing (devicep, aq, n, k, &tgt->list[i],
kind & typemask, false, implicit,
cbufp, refcount_set);
}
else
{
k->aux = NULL;
@@ -4166,7 +4212,8 @@ GOMP_target_enter_exit_data (int device, size_t mapnum, void **hostaddrs,
size_t i, j;
if ((flags & GOMP_TARGET_FLAG_EXIT_DATA) == 0)
for (i = 0; i < mapnum; i++)
if ((kinds[i] & 0xff) == GOMP_MAP_STRUCT)
if ((kinds[i] & 0xff) == GOMP_MAP_STRUCT
|| (kinds[i] & 0xff) == GOMP_MAP_STRUCT_UNORD)
{
gomp_map_vars (devicep, sizes[i] + 1, &hostaddrs[i], NULL, &sizes[i],
&kinds[i], true, &refcount_set,
@@ -4264,7 +4311,8 @@ gomp_target_task_fn (void *data)
htab_t refcount_set = htab_create (ttask->mapnum);
if ((ttask->flags & GOMP_TARGET_FLAG_EXIT_DATA) == 0)
for (i = 0; i < ttask->mapnum; i++)
if ((ttask->kinds[i] & 0xff) == GOMP_MAP_STRUCT)
if ((ttask->kinds[i] & 0xff) == GOMP_MAP_STRUCT
|| (ttask->kinds[i] & 0xff) == GOMP_MAP_STRUCT_UNORD)
{
gomp_map_vars (devicep, ttask->sizes[i] + 1, &ttask->hostaddrs[i],
NULL, &ttask->sizes[i], &ttask->kinds[i], true,

libgomp/testsuite/libgomp.c-c++-common/map-arrayofstruct-1.c (new file)

@@ -0,0 +1,51 @@
#include <stdlib.h>
#include <assert.h>
struct st {
int *p;
};
int main (void)
{
struct st s[2];
s[0].p = (int *) calloc (5, sizeof (int));
s[1].p = (int *) calloc (5, sizeof (int));
/* These mappings are not supported by the OpenMP spec, and are currently
implemented as an extension by GCC for legacy compatibility only. See
e.g. OpenMP 5.2, "5.8.3 map Clause":
"If multiple list items are explicitly mapped on the same construct and
have the same containing array or have base pointers that share original
storage, and if any of the list items do not have corresponding list
items that are present in the device data environment prior to a task
encountering the construct, then the list items must refer to the same
array elements of either the containing array or the implicit array of
the base pointers."
*/
#pragma omp target map(s[0].p, s[1].p, s[0].p[0:2], s[1].p[1:3])
{
s[0].p[0] = 5;
s[1].p[1] = 7;
}
#pragma omp target map(s, s[0].p[0:2], s[1].p[1:3])
{
s[0].p[0]++;
s[1].p[1]++;
}
#pragma omp target map(s[0:2], s[0].p[0:2], s[1].p[1:3])
{
s[0].p[0]++;
s[1].p[1]++;
}
assert (s[0].p[0] == 7);
assert (s[1].p[1] == 9);
free (s[0].p);
free (s[1].p);
return 0;
}

libgomp/testsuite/libgomp.c-c++-common/map-arrayofstruct-2.c (new file)

@@ -0,0 +1,58 @@
#include <stdlib.h>
#include <assert.h>
struct st {
int *p;
};
int main (void)
{
struct st s[10];
for (int i = 0; i < 10; i++)
s[i].p = (int *) calloc (5, sizeof (int));
for (int i = 0; i < 10; i++)
for (int j = 0; j < 10; j++)
for (int k = 0; k < 10; k++)
{
if (i == j || j == k || i == k)
continue;
#pragma omp target map(s[i].p, s[j].p, s[k].p, s[i].p[0:2], s[j].p[1:3], \
s[k].p[2])
{
s[i].p[0]++;
s[j].p[1]++;
s[k].p[2]++;
}
#pragma omp target map(s, s[i].p[0:2], s[j].p[1:3], s[k].p[2])
{
s[i].p[0]++;
s[j].p[1]++;
s[k].p[2]++;
}
#pragma omp target map(s[0:10], s[i].p[0:2], s[j].p[1:3], s[k].p[2])
{
s[i].p[0]++;
s[j].p[1]++;
s[k].p[2]++;
}
}
for (int i = 0; i < 10; i++)
{
assert (s[i].p[0] == 216);
assert (s[i].p[1] == 216);
assert (s[i].p[2] == 216);
free (s[i].p);
}
return 0;
}
/* { dg-output "(\n|\r|\r\n)" { target offload_device_nonshared_as } } */
/* { dg-output "libgomp: Mapped array elements must be the same .*(\n|\r|\r\n)+" { target offload_device_nonshared_as } } */
/* { dg-shouldfail "" { offload_device_nonshared_as } } */

libgomp/testsuite/libgomp.c-c++-common/map-arrayofstruct-3.c (new file)

@@ -0,0 +1,68 @@
#include <stdlib.h>
#include <assert.h>
struct st {
int *p;
};
struct tt {
struct st a[10];
};
struct ut {
struct tt *t;
};
int main (void)
{
struct tt *t = (struct tt *) malloc (sizeof *t);
struct ut *u = (struct ut *) malloc (sizeof *u);
for (int i = 0; i < 10; i++)
t->a[i].p = (int *) calloc (5, sizeof (int));
u->t = t;
for (int i = 0; i < 10; i++)
for (int j = 0; j < 10; j++)
for (int k = 0; k < 10; k++)
{
if (i == j || j == k || i == k)
continue;
/* This one can use "firstprivate" for T... */
#pragma omp target map(t->a[i].p, t->a[j].p, t->a[k].p, \
t->a[i].p[0:2], t->a[j].p[1:3], t->a[k].p[2])
{
t->a[i].p[0]++;
t->a[j].p[1]++;
t->a[k].p[2]++;
}
/* ...but this one must use attach/detach for T. */
#pragma omp target map(u->t, u->t->a[i].p, u->t->a[j].p, u->t->a[k].p, \
u->t->a[i].p[0:2], u->t->a[j].p[1:3], u->t->a[k].p[2])
{
u->t->a[i].p[0]++;
u->t->a[j].p[1]++;
u->t->a[k].p[2]++;
}
}
for (int i = 0; i < 10; i++)
{
assert (t->a[i].p[0] == 144);
assert (t->a[i].p[1] == 144);
assert (t->a[i].p[2] == 144);
free (t->a[i].p);
}
free (u);
free (t);
return 0;
}
/* { dg-output "(\n|\r|\r\n)" { target offload_device_nonshared_as } } */
/* { dg-output "libgomp: Mapped array elements must be the same .*(\n|\r|\r\n)+" { target offload_device_nonshared_as } } */
/* { dg-shouldfail "" { offload_device_nonshared_as } } */

libgomp/testsuite/libgomp.fortran/map-subarray-5.f90 (new file)

@@ -0,0 +1,54 @@
! { dg-do run }
type t
integer, pointer :: p(:)
end type t
type(t) :: var(3)
integer :: i, j
allocate (var(1)%p, source=[1,2,3,5])
allocate (var(2)%p, source=[2,3,5])
allocate (var(3)%p(1:3))
var(3)%p = 0
do i = 1, 3
do j = 1, 3
!$omp target map(var(i)%p, var(j)%p)
var(i)%p(1) = 5
var(j)%p(2) = 7
!$omp end target
if (i.ne.j) then
!$omp target map(var(i)%p(1:3), var(i)%p, var(j)%p)
var(i)%p(1) = var(i)%p(1) + 1
var(j)%p(2) = var(j)%p(2) + 1
!$omp end target
!$omp target map(var(i)%p, var(j)%p, var(j)%p(1:3))
var(i)%p(1) = var(i)%p(1) + 1
var(j)%p(2) = var(j)%p(2) + 1
!$omp end target
!$omp target map(var(i)%p, var(i)%p(1:3), var(j)%p, var(j)%p(2))
var(i)%p(1) = var(i)%p(1) + 1
var(j)%p(2) = var(j)%p(2) + 1
!$omp end target
end if
if (i.eq.j) then
if (var(i)%p(1).ne.5) stop 1
if (var(j)%p(2).ne.7) stop 2
else
if (var(i)%p(1).ne.8) stop 3
if (var(j)%p(2).ne.10) stop 4
end if
end do
end do
end
! { dg-output "(\n|\r|\r\n)" }
! { dg-output "libgomp: Mapped array elements must be the same .*(\n|\r|\r\n)+" }
! { dg-shouldfail "" { offload_device_nonshared_as } }