blob: 43d59abe9e098a35ce76fbb65aa51491dff2e1ce [file] [log] [blame]
/* OpenMP directive translation -- generate GCC trees from gfc_code.
Copyright (C) 2005-2022 Free Software Foundation, Inc.
Contributed by Jakub Jelinek <jakub@redhat.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "options.h"
#include "tree.h"
#include "gfortran.h"
#include "gimple-expr.h"
#include "trans.h"
#include "stringpool.h"
#include "fold-const.h"
#include "gimplify.h" /* For create_tmp_var_raw. */
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
#include "constructor.h"
#include "gomp-constants.h"
#include "omp-general.h"
#include "omp-low.h"
#include "memmodel.h" /* For MEMMODEL_ enums. */
#undef GCC_DIAG_STYLE
#define GCC_DIAG_STYLE __gcc_tdiag__
#include "diagnostic-core.h"
#undef GCC_DIAG_STYLE
#define GCC_DIAG_STYLE __gcc_gfc__
#include "attribs.h"
#include "function.h"
/* State flags for OpenMP WORKSHARE translation (presumably a bitmask of
   OMPWS_* values shared with other trans-* files — TODO confirm).  */
int ompws_flags;
/* Return true if OpenMP should treat DECL as a scalar with Fortran's
   ALLOCATABLE or POINTER attribute.  */

bool
gfc_omp_is_allocatable_or_ptr (const_tree decl)
{
  if (!DECL_P (decl))
    return false;
  return (GFC_DECL_GET_SCALAR_POINTER (decl)
	  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl));
}
/* Return true if DECL is an OPTIONAL dummy argument that is represented as
   a pointer.  False is deliberately returned for value-attribute arguments
   (nonpointers) and for assumed-shape dummies (where DECL is a local
   variable holding arg->data), and also for 'procedure(), optional' since
   that is always a pointer with no extra indirection.  Note that
   pvoid_type_node corresponds to 'type(c_ptr), value' (and c_funloc).  */

static bool
gfc_omp_is_optional_argument (const_tree decl)
{
  /* VAR_DECLs can occur here with BIND(C) and with array descriptors.  */
  if (TREE_CODE (decl) != PARM_DECL && TREE_CODE (decl) != VAR_DECL)
    return false;
  if (!DECL_LANG_SPECIFIC (decl))
    return false;
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != POINTER_TYPE)
    return false;
  tree pointee = TREE_TYPE (type);
  if (VOID_TYPE_P (pointee) || TREE_CODE (pointee) == FUNCTION_TYPE)
    return false;
  return GFC_DECL_OPTIONAL_ARGUMENT (decl);
}
/* Check whether this DECL belongs to a Fortran optional argument.
   With 'for_present_check' set to false, decls which are optional parameters
   themselves are returned as tree - or a NULL_TREE otherwise.  Those decls are
   always pointers.  With 'for_present_check' set to true, the decl for checking
   whether an argument is present is returned; for arguments with value
   attribute this is the hidden argument and of BOOLEAN_TYPE.  If the decl is
   unrelated to optional arguments, NULL_TREE is returned.  */

tree
gfc_omp_check_optional_argument (tree decl, bool for_present_check)
{
  if (!for_present_check)
    return gfc_omp_is_optional_argument (decl) ? decl : NULL_TREE;

  if (!DECL_LANG_SPECIFIC (decl))
    return NULL_TREE;

  tree orig_decl = decl;

  /* For assumed-shape arrays, a local decl with arg->data is used.  */
  if (TREE_CODE (decl) != PARM_DECL
      && (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
	  || GFC_ARRAY_TYPE_P (TREE_TYPE (decl))))
    decl = GFC_DECL_SAVED_DESCRIPTOR (decl);

  /* Note: With BIND(C), array descriptors are converted to a VAR_DECL.  */
  if (decl == NULL_TREE
      || (TREE_CODE (decl) != PARM_DECL && TREE_CODE (decl) != VAR_DECL)
      || !DECL_LANG_SPECIFIC (decl)
      || !GFC_DECL_OPTIONAL_ARGUMENT (decl))
    return NULL_TREE;

  /* Scalars with VALUE attribute which are passed by value use a hidden
     argument to denote the present status.  They are passed as nonpointer type
     with one exception: 'type(c_ptr), value' as 'void*'.  */
  /* Cf. trans-expr.cc's gfc_conv_expr_present.  */
  if (TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE
      || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      /* The hidden presence argument is named "_<argname>"; build that
	 identifier and look it up.  */
      char name[GFC_MAX_SYMBOL_LEN + 2];
      tree tree_name;

      name[0] = '_';
      strcpy (&name[1], IDENTIFIER_POINTER (DECL_NAME (decl)));
      tree_name = get_identifier (name);

      /* Walk function argument list to find the hidden arg.  */
      decl = DECL_ARGUMENTS (DECL_CONTEXT (decl));
      for ( ; decl != NULL_TREE; decl = TREE_CHAIN (decl))
	if (DECL_NAME (decl) == tree_name
	    && DECL_ARTIFICIAL (decl))
	  break;

      /* The hidden argument must exist for a VALUE optional dummy.  */
      gcc_assert (decl);
      return decl;
    }

  /* Otherwise presence is simply "argument pointer != NULL".  */
  return fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			  orig_decl, null_pointer_node);
}
/* If DECL is (a reference to) an array descriptor, return a tree accessing
   its 'data' component; otherwise return NULL_TREE.  With TYPE_ONLY set,
   just return the type of the data pointer without building a new tree.  */

tree
gfc_omp_array_data (tree decl, bool type_only)
{
  tree type = TREE_TYPE (decl);
  bool by_ref = POINTER_TYPE_P (type);

  if (by_ref)
    type = TREE_TYPE (type);
  if (!GFC_DESCRIPTOR_TYPE_P (type))
    return NULL_TREE;
  if (type_only)
    return GFC_TYPE_ARRAY_DATAPTR_TYPE (type);

  tree desc = by_ref ? build_fold_indirect_ref (decl) : decl;
  tree data = gfc_conv_descriptor_data_get (desc);
  STRIP_NOPS (data);
  return data;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  /* Genuine references (but not compiler-generated ones other than
     PARM_DECLs) are privatized by what they point to.  */
  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  /* Pointer-represented OPTIONAL dummies are dereferenced as well.  */
  if (TREE_CODE (type) == POINTER_TYPE
      && gfc_omp_is_optional_argument (decl))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      while (TREE_CODE (decl) == COMPONENT_REF)
	decl = TREE_OPERAND (decl, 1);

      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
	 that have POINTER_TYPE type and aren't scalar pointers, scalar
	 allocatables, Cray pointees or C pointers are supposed to be
	 privatized by reference.  */
      if (GFC_DECL_GET_SCALAR_POINTER (decl)
	  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  || GFC_DECL_CRAY_POINTEE (decl)
	  || GFC_DECL_ASSOCIATE_VAR_P (decl)
	  || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return false;

      if (!DECL_ARTIFICIAL (decl)
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
	 by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
	  && GFC_DECL_SAVED_DESCRIPTOR (decl))
	return true;
    }

  return false;
}
/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
   of DECL is predetermined.  The order of the checks below is
   significant: each rule takes priority over the ones that follow.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  /* Associate names preserve the association established during ASSOCIATE.
     As they are implemented either as pointers to the selector or array
     descriptor and shouldn't really change in the ASSOCIATE region,
     this decl can be either shared or firstprivate.  If it is a pointer,
     use firstprivate, as it is cheaper that way, otherwise make it shared.  */
  if (GFC_DECL_ASSOCIATE_VAR_P (decl))
    {
      if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
      else
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Compiler-generated decls (other than function results and decls with
     a saved descriptor) are shared.  */
  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
	    && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined shared.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
				GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
	 == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* These are either array or derived parameters, or vtables.
     In the former cases, the OpenMP standard doesn't consider them to be
     variables at all (they can't be redefined), but they can nevertheless appear
     in parallel/task regions and for default(none) purposes treat them as shared.
     For vtables likely the same handling is desirable.  */
  if (VAR_P (decl) && TREE_READONLY (decl)
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Return the predetermined OpenMP mapping attribute for DECL, or
   OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED when there is none.  */

enum omp_clause_defaultmap_kind
gfc_omp_predetermined_mapping (tree decl)
{
  /* Compiler-generated decls without a saved descriptor and which are not
     function results get a 'to' mapping.  */
  bool artificial_to = (DECL_ARTIFICIAL (decl)
			&& ! GFC_DECL_RESULT (decl)
			&& ! (DECL_LANG_SPECIFIC (decl)
			      && GFC_DECL_SAVED_DESCRIPTOR (decl)));

  /* These are either array or derived parameters, or vtables.  */
  bool readonly_static = (VAR_P (decl) && TREE_READONLY (decl)
			  && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));

  if (artificial_to || readonly_static)
    return OMP_CLAUSE_DEFAULTMAP_TO;

  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
}
/* Return the decl that should be used when reporting DEFAULT(NONE)
   diagnostics: the saved descriptor if DECL is its artificial stand-in,
   otherwise DECL itself.  */

tree
gfc_omp_report_decl (tree decl)
{
  bool use_saved_desc = (DECL_ARTIFICIAL (decl)
			 && DECL_LANG_SPECIFIC (decl)
			 && GFC_DECL_SAVED_DESCRIPTOR (decl) != NULL_TREE);
  return use_saved_desc ? GFC_DECL_SAVED_DESCRIPTOR (decl) : decl;
}
/* Return true if TYPE (the type of DECL, possibly behind a pointer) has
   any allocatable components, searched recursively.  */

static bool
gfc_has_alloc_comps (tree type, tree decl)
{
  if (POINTER_TYPE_P (type))
    {
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
	type = TREE_TYPE (type);
      else if (GFC_DECL_GET_SCALAR_POINTER (decl))
	return false;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* POINTER-attribute arrays do not own their components.  */
      enum gfc_array_kind akind = GFC_TYPE_ARRAY_AKIND (type);
      if (akind == GFC_ARRAY_POINTER || akind == GFC_ARRAY_POINTER_CONT)
	return false;
      type = gfc_get_element_type (type);
    }
  else if (GFC_ARRAY_TYPE_P (type))
    type = gfc_get_element_type (type);

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* A component qualifies if it is a scalar allocatable, an allocatable
     array, or itself contains allocatable components.  */
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree ftype = TREE_TYPE (field);
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
	  || (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	  || gfc_has_alloc_comps (ftype, field))
	return true;
    }
  return false;
}
/* Return true if TYPE is polymorphic (a class container) but does not
   have the pointer attribute.  */

static bool
gfc_is_polymorphic_nonptr (tree type)
{
  tree t = POINTER_TYPE_P (type) ? TREE_TYPE (type) : type;
  return GFC_CLASS_TYPE_P (t);
}
/* Return true if TYPE is unlimited polymorphic but without the pointer
   attribute; unlimited means also intrinsic types are handled and the
   class container carries a _len field.  */

static bool
gfc_is_unlimited_polymorphic_nonptr (tree type)
{
  tree t = POINTER_TYPE_P (type) ? TREE_TYPE (type) : type;
  if (!GFC_CLASS_TYPE_P (t))
    return false;

  /* The class container's fields are _data, _vptr and - only for
     unlimited polymorphic entities - _len.  */
  tree data_field = TYPE_FIELDS (t);
  gcc_assert (data_field);
  tree vptr_field = DECL_CHAIN (data_field);
  gcc_assert (vptr_field);
  tree len_field = DECL_CHAIN (vptr_field);
  if (len_field == NULL_TREE)
    return false;
  gcc_assert (strcmp ("_len", IDENTIFIER_POINTER (DECL_NAME (len_field))) == 0);
  return true;
}
/* Return true if DECL is an allocatable array or an allocatable scalar.  */

bool
gfc_omp_allocatable_p (tree decl)
{
  if (!DECL_P (decl))
    return false;
  if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
    return true;

  tree type = TREE_TYPE (decl);
  if (gfc_omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);
  return (GFC_DESCRIPTOR_TYPE_P (type)
	  && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
}
/* Return true if DECL in a private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause: allocatable arrays
   and scalars, and anything with allocatable components.  */

bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (gfc_omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  bool alloc_array = (GFC_DESCRIPTOR_TYPE_P (type)
		      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
  return (alloc_array
	  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  || gfc_has_alloc_comps (type, decl));
}
/* walk_tree callback for gfc_omp_unshare_expr.  */

static tree
gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
{
  tree node = *tp;
  enum tree_code_class cls = TREE_CODE_CLASS (TREE_CODE (node));

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (cls == tcc_type
      || cls == tcc_declaration
      || cls == tcc_constant
      || TREE_CODE (node) == BLOCK)
    *walk_subtrees = 0;
  else if (handled_component_p (node) || TREE_CODE (node) == MEM_REF)
    {
      /* Component references and MEM_REFs are unshared wholesale.  */
      *tp = unshare_expr (node);
      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* Unshare in EXPR anything that the FE (which normally doesn't
   care much about tree sharing, because during gimplification
   everything is unshared) could cause problems with tree sharing
   at omp-low.cc time.  Returns the possibly-modified EXPR.  */

static tree
gfc_omp_unshare_expr (tree expr)
{
  walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
  return expr;
}
/* Kind of processing gfc_walk_alloc_comps performs on the allocatable
   components it visits.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,		/* Deallocate/free components.  */
  WALK_ALLOC_COMPS_DEFAULT_CTOR,	/* Allocate, but don't copy contents.  */
  WALK_ALLOC_COMPS_COPY_CTOR		/* Allocate and copy contents.  */
};
/* Handle allocatable components in OpenMP clauses.  Walk the components of
   DECL (with corresponding destination DEST for the constructor kinds, or
   NULL_TREE for the dtor) and emit, per KIND, deallocation, allocation or
   allocation-plus-copy code for each allocatable component, recursing into
   nested derived types and array elements.  VAR is the original clause decl,
   used to query scalar-allocatable status.  Returns the generated code.  */

static tree
gfc_walk_alloc_comps (tree decl, tree dest, tree var,
		      enum walk_alloc_comps kind)
{
  stmtblock_t block, tmpblock;
  tree type = TREE_TYPE (decl), then_b, tem, field;
  gfc_init_block (&block);

  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* For arrays, compute the element count and then process the
	 components of each element inside a scalarizing loop.  */
      if (GFC_DESCRIPTOR_TYPE_P (type))
	{
	  /* Descriptor case: number of elements comes from the descriptor
	     bounds; subtract one to get the last valid index.  */
	  gfc_init_block (&tmpblock);
	  tem = gfc_full_array_size (&tmpblock, decl,
				     GFC_TYPE_ARRAY_RANK (type));
	  then_b = gfc_finish_block (&tmpblock);
	  gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
	  tem = gfc_omp_unshare_expr (tem);
	  tem = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_index_one_node);
	}
      else
	{
	  /* Plain array type: use the TYPE_DOMAIN when it is usable,
	     otherwise derive the element count from the type sizes.  */
	  bool compute_nelts = false;
	  if (!TYPE_DOMAIN (type)
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
	      || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
	    compute_nelts = true;
	  else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	    {
	      /* An "omp dummy var" bound is a placeholder, not a real
		 extent; fall back to the size computation.  */
	      tree a = DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
	      if (lookup_attribute ("omp dummy var", a))
		compute_nelts = true;
	    }
	  if (compute_nelts)
	    {
	      tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
				 TYPE_SIZE_UNIT (type),
				 TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      tem = size_binop (MINUS_EXPR, tem, size_one_node);
	    }
	  else
	    tem = array_type_nelts (type);
	  tem = fold_convert (gfc_array_index_type, tem);
	}

      tree nelems = gfc_evaluate_now (tem, &block);
      tree index = gfc_create_var (gfc_array_index_type, "S");

      /* Build references to the INDEX-th element of source (and
	 destination, if any) and recurse on those elements.  */
      gfc_init_block (&tmpblock);
      tem = gfc_conv_array_data (decl);
      tree declvar = build_fold_indirect_ref_loc (input_location, tem);
      tree declvref = gfc_build_array_ref (declvar, index, NULL);
      tree destvar, destvref = NULL_TREE;
      if (dest)
	{
	  tem = gfc_conv_array_data (dest);
	  destvar = build_fold_indirect_ref_loc (input_location, tem);
	  destvref = gfc_build_array_ref (destvar, index, NULL);
	}
      gfc_add_expr_to_block (&tmpblock,
			     gfc_walk_alloc_comps (declvref, destvref,
						   var, kind));

      /* Wrap the per-element code in a loop from 0 to NELEMS.  */
      gfc_loopinfo loop;
      gfc_init_loopinfo (&loop);
      loop.dimen = 1;
      loop.from[0] = gfc_index_zero_node;
      loop.loopvar[0] = index;
      loop.to[0] = nelems;
      gfc_trans_scalarizing_loops (&loop, &tmpblock);
      gfc_add_block_to_block (&block, &loop.pre);
      return gfc_finish_block (&block);
    }
  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
    {
      /* Scalar allocatables are pointers; work on what they point to.  */
      decl = build_fold_indirect_ref_loc (input_location, decl);
      if (dest)
	dest = build_fold_indirect_ref_loc (input_location, dest);
      type = TREE_TYPE (decl);
    }

  gcc_assert (TREE_CODE (type) == RECORD_TYPE);
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree ftype = TREE_TYPE (field);
      tree declf, destf = NULL_TREE;
      bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
      /* Skip fields that are neither allocatable themselves nor contain
	 allocatable components.  */
      if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
	   || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
	  && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
	  && !has_alloc_comps)
	continue;
      declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
			       decl, field, NULL_TREE);
      if (dest)
	destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
				 dest, field, NULL_TREE);

      /* First, the field-level action for the requested KIND (the dtor
	 action is handled separately after the recursion below, so the
	 contained components are freed before the field itself).  */
      tem = NULL_TREE;
      switch (kind)
	{
	case WALK_ALLOC_COMPS_DTOR:
	  break;
	case WALK_ALLOC_COMPS_DEFAULT_CTOR:
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    {
	      /* Copy the descriptor (for bounds etc.), then allocate fresh
		 storage without copying the payload.  */
	      gfc_add_modify (&block, unshare_expr (destf),
			      unshare_expr (declf));
	      tem = gfc_duplicate_allocatable_nocopy
					(destf, declf, ftype,
					 GFC_TYPE_ARRAY_RANK (ftype));
	    }
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
	  break;
	case WALK_ALLOC_COMPS_COPY_CTOR:
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    tem = gfc_duplicate_allocatable (destf, declf, ftype,
					     GFC_TYPE_ARRAY_RANK (ftype),
					     NULL_TREE);
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = gfc_duplicate_allocatable (destf, declf, ftype, 0,
					     NULL_TREE);
	  break;
	}
      if (tem)
	gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
      if (has_alloc_comps)
	{
	  /* Recurse into the field's own allocatable components, guarded
	     by an is-allocated check when the field itself is
	     allocatable.  */
	  gfc_init_block (&tmpblock);
	  gfc_add_expr_to_block (&tmpblock,
				 gfc_walk_alloc_comps (declf, destf,
						       field, kind));
	  then_b = gfc_finish_block (&tmpblock);
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = unshare_expr (declf);
	  else
	    tem = NULL_TREE;
	  if (tem)
	    {
	      tem = fold_convert (pvoid_type_node, tem);
	      tem = fold_build2_loc (input_location, NE_EXPR,
				     logical_type_node, tem,
				     null_pointer_node);
	      then_b = build3_loc (input_location, COND_EXPR, void_type_node,
				   tem, then_b,
				   build_empty_stmt (input_location));
	    }
	  gfc_add_expr_to_block (&block, then_b);
	}
      if (kind == WALK_ALLOC_COMPS_DTOR)
	{
	  /* Free the field itself, after any nested components above.  */
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    {
	      tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
	      tem = gfc_deallocate_with_status (tem, NULL_TREE, NULL_TREE,
						NULL_TREE, NULL_TREE, true,
						NULL,
						GFC_CAF_COARRAY_NOCOARRAY);
	      gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
	    }
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    {
	      tem = gfc_call_free (unshare_expr (declf));
	      gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
	    }
	}
    }

  return gfc_finish_block (&block);
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  CLAUSE is the OpenMP clause the decl
   appears on; OUTER is the decl in the enclosing context (needed for
   allocatables, whose allocation status must mirror the outer decl's).  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  switch (OMP_CLAUSE_CODE (clause))
    {
    /* Compiler-internal temporaries need no construction.  */
    case OMP_CLAUSE__LOOPTEMP_:
    case OMP_CLAUSE__REDUCTEMP_:
    case OMP_CLAUSE__CONDTEMP_:
    case OMP_CLAUSE__SCANTEMP_:
      return NULL;
    case OMP_CLAUSE_PRIVATE:
    case OMP_CLAUSE_LASTPRIVATE:
    case OMP_CLAUSE_LINEAR:
    case OMP_CLAUSE_REDUCTION:
    case OMP_CLAUSE_IN_REDUCTION:
    case OMP_CLAUSE_TASK_REDUCTION:
      break;
    default:
      gcc_unreachable ();
    }

  /* Non-allocatable case: only allocatable components (if any) need
     default construction.  */
  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
	  || !POINTER_TYPE_P (type)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gcc_assert (outer);
	  gfc_start_block (&block);
	  tree tem = gfc_walk_alloc_comps (outer, decl,
					   OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_DEFAULT_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      return NULL_TREE;
    }

  gcc_assert (outer != NULL_TREE);

  /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* Copy the outer descriptor, then compute the allocation size in
	 bytes from the (innermost-contiguous) bounds and strides.  */
      gfc_add_modify (&cond_block, decl, outer);
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (decl, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));

  /* Allocate and store the data pointer into the private decl.  */
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
  else
    gfc_add_modify (&cond_block, unshare_expr (decl),
		    fold_convert (TREE_TYPE (decl), ptr));

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (outer, decl,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_DEFAULT_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }

  then_b = gfc_finish_block (&cond_block);

  /* Reduction clause requires allocated ALLOCATABLE.  */
  if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION
      && OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_IN_REDUCTION
      && OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_TASK_REDUCTION)
    {
      /* For the other clauses, guard the allocation on the outer decl
	 being allocated; otherwise mark the private copy unallocated.  */
      gfc_init_block (&cond_block);
      if (GFC_DESCRIPTOR_TYPE_P (type))
	gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
				      null_pointer_node);
      else
	gfc_add_modify (&cond_block, unshare_expr (decl),
			build_zero_cst (TREE_TYPE (decl)));
      else_b = gfc_finish_block (&cond_block);

      tree tem = fold_convert (pvoid_type_node,
			       GFC_DESCRIPTOR_TYPE_P (type)
			       ? gfc_conv_descriptor_data_get (outer) : outer);
      tem = unshare_expr (tem);
      cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
			      tem, null_pointer_node);
      gfc_add_expr_to_block (&block,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node, cond, then_b,
					 else_b));
      /* Avoid -W*uninitialized warnings.  */
      if (DECL_P (decl))
	suppress_warning (decl, OPT_Wuninitialized);
    }
  else
    gfc_add_expr_to_block (&block, then_b);

  return gfc_finish_block (&block);
}
/* Build and return code for a copy constructor from SRC to DEST, for a
   FIRSTPRIVATE or LINEAR clause.  Handles polymorphic (class) scalars,
   allocatable arrays/scalars (allocate + memcpy, guarded on SRC being
   allocated), and allocatable components.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, call;
  tree decl_type = TREE_TYPE (OMP_CLAUSE_DECL (clause));
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);

  /* Privatize pointer, only; cf. gfc_omp_predetermined_sharing.  */
  if (DECL_P (OMP_CLAUSE_DECL (clause))
      && GFC_DECL_ASSOCIATE_VAR_P (OMP_CLAUSE_DECL (clause)))
    return build2 (MODIFY_EXPR, TREE_TYPE (dest), dest, src);

  /* For artificial stand-ins, classify based on the saved descriptor's
     type rather than the stand-in's.  */
  if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause))
      && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause))
      && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause)))
    decl_type
      = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause)));

  if (gfc_is_polymorphic_nonptr (decl_type))
    {
      if (POINTER_TYPE_P (decl_type))
	decl_type = TREE_TYPE (decl_type);
      decl_type = TREE_TYPE (TYPE_FIELDS (decl_type));
      if (GFC_DESCRIPTOR_TYPE_P (decl_type) || GFC_ARRAY_TYPE_P (decl_type))
	fatal_error (input_location,
		     "Sorry, polymorphic arrays not yet supported for "
		     "firstprivate");
      tree src_len;
      tree nelems = build_int_cst (size_type_node, 1);  /* Scalar.  */
      tree src_data = gfc_class_data_get (unshare_expr (src));
      tree dest_data = gfc_class_data_get (unshare_expr (dest));
      bool unlimited = gfc_is_unlimited_polymorphic_nonptr (type);

      /* Copy the vptr unconditionally; the rest is guarded below.  */
      gfc_start_block (&block);
      gfc_add_modify (&block, gfc_class_vptr_get (dest),
		      gfc_class_vptr_get (src));
      gfc_init_block (&cond_block);

      if (unlimited)
	{
	  src_len = gfc_class_len_get (src);
	  gfc_add_modify (&cond_block, gfc_class_len_get (unshare_expr (dest)), src_len);
	}

      /* Use: size = class._vtab._size * (class._len > 0 ? class._len : 1).  */
      size = fold_convert (size_type_node, gfc_class_vtab_size_get (src));
      if (unlimited)
	{
	  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
				  unshare_expr (src_len),
				  build_zero_cst (TREE_TYPE (src_len)));
	  cond = build3_loc (input_location, COND_EXPR, size_type_node, cond,
			     fold_convert (size_type_node,
					   unshare_expr (src_len)),
			     build_int_cst (size_type_node, 1));
	  size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
				  size, cond);
	}

      /* Malloc memory + call class->_vpt->_copy.  */
      call = builtin_decl_explicit (BUILT_IN_MALLOC);
      call = build_call_expr_loc (input_location, call, 1, size);
      gfc_add_modify (&cond_block, dest_data,
		      fold_convert (TREE_TYPE (dest_data), call));
      gfc_add_expr_to_block (&cond_block,
			     gfc_copy_class_to_class (src, dest, nelems,
						      unlimited));

      gcc_assert (TREE_CODE (dest_data) == COMPONENT_REF);
      if (!GFC_DECL_GET_SCALAR_ALLOCATABLE (TREE_OPERAND (dest_data, 1)))
	{
	  /* Non-allocatable _data: always allocated; copy unconditionally.  */
	  gfc_add_block_to_block (&block, &cond_block);
	}
      else
	{
	  /* Create: if (class._data != 0) <cond_block> else class._data = NULL;  */
	  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				  src_data, null_pointer_node);
	  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
				 void_type_node, cond,
				 gfc_finish_block (&cond_block),
				 fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
						  unshare_expr (dest_data), null_pointer_node)));
	}
      return gfc_finish_block (&block);
    }

  /* Non-allocatable case: plain assignment, plus a deep copy of any
     allocatable components.  */
  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
	  || !POINTER_TYPE_P (type)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gfc_start_block (&block);
	  gfc_add_modify (&block, dest, src);
	  tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_COPY_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      else
	return build2_v (MODIFY_EXPR, dest, src);
    }

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, dest, fold_convert (TREE_TYPE (dest), src));
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* Compute the allocation size in bytes from the descriptor bounds
	 and strides (same scheme as in gfc_omp_clause_default_ctor).  */
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (dest, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (dest, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (dest, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));

  /* Allocate DEST's storage and memcpy the payload from SRC.  */
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
  else
    gfc_add_modify (&cond_block, unshare_expr (dest),
		    fold_convert (TREE_TYPE (dest), ptr));

  tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
		? gfc_conv_descriptor_data_get (src) : src;
  srcptr = unshare_expr (srcptr);
  srcptr = fold_convert (pvoid_type_node, srcptr);
  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
			      srcptr, size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (src, dest,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_COPY_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  /* Only copy when SRC is allocated; otherwise mark DEST unallocated.  */
  gfc_init_block (&cond_block);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
				  null_pointer_node);
  else
    gfc_add_modify (&cond_block, unshare_expr (dest),
		    build_zero_cst (TREE_TYPE (dest)));
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
			  unshare_expr (srcptr), null_pointer_node);
  gfc_add_expr_to_block (&block,
			 build3_loc (input_location, COND_EXPR,
				     void_type_node, cond, then_b, else_b));
  /* Avoid -W*uninitialized warnings.  */
  if (DECL_P (dest))
    suppress_warning (dest, OPT_Wuninitialized);

  return gfc_finish_block (&block);
}
/* Similarly, except use an intrinsic or pointer assignment operator
instead. */
tree
gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
{
tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
tree cond, then_b, else_b;
stmtblock_t block, cond_block, cond_block2, inner_block;
if ((! GFC_DESCRIPTOR_TYPE_P (type)
|| GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
&& (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
|| !POINTER_TYPE_P (type)))
{
if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
{
gfc_start_block (&block);
/* First dealloc any allocatable components in DEST. */
tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
OMP_CLAUSE_DECL (clause),
WALK_ALLOC_COMPS_DTOR);
gfc_add_expr_to_block (&block, tem);
/* Then copy over toplevel data. */
gfc_add_modify (&block, dest, src);
/* Finally allocate any allocatable components and copy. */
tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
WALK_ALLOC_COMPS_COPY_CTOR);
gfc_add_expr_to_block (&block, tem);
return gfc_finish_block (&block);
}
else
return build2_v (MODIFY_EXPR, dest, src);
}
gfc_start_block (&block);
if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
{
then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
WALK_ALLOC_COMPS_DTOR);
tree tem = fold_convert (pvoid_type_node,
GFC_DESCRIPTOR_TYPE_P (type)
? gfc_conv_descriptor_data_get (dest) : dest);
tem = unshare_expr (tem);
cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
tem, null_pointer_node);
tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
then_b, build_empty_stmt (input_location));
gfc_add_expr_to_block (&block, tem);
}
gfc_init_block (&cond_block);
if (GFC_DESCRIPTOR_TYPE_P (type))
{
tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
size = gfc_conv_descriptor_ubound_get (src, rank);
size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
size,
gfc_conv_descriptor_lbound_get (src, rank));
size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
size, gfc_index_one_node);
if (GFC_TYPE_ARRAY_RANK (type) > 1)
size = fold_build2_loc (input_location, MULT_EXPR,
gfc_array_index_type, size,
gfc_conv_descriptor_stride_get (src, rank));
tree esize = fold_convert (gfc_array_index_type,
TYPE_SIZE_UNIT (gfc_get_element_type (type)));
size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
size, esize);
size = unshare_expr (size);
size = gfc_evaluate_now (fold_convert (size_type_node, size),
&cond_block);
}
else
size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
ptr = gfc_create_var (pvoid_type_node, NULL);
tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
? gfc_conv_descriptor_data_get (dest) : dest;
destptr = unshare_expr (destptr);
destptr = fold_convert (pvoid_type_node, destptr);
gfc_add_modify (&cond_block, ptr, destptr);
nonalloc = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
destptr, null_pointer_node);
cond = nonalloc;
if (GFC_DESCRIPTOR_TYPE_P (type))
{
int i;
for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
{
tree rank = gfc_rank_cst[i];
tree tem = gfc_conv_descriptor_ubound_get (src, rank);
tem = fold_build2_loc (input_location, MINUS_EXPR,
gfc_array_index_type, tem,
gfc_conv_descriptor_lbound_get (src, rank));
tem = fold_build2_loc (input_location, PLUS_EXPR,
gfc_array_index_type, tem,
gfc_conv_descriptor_lbound_get (dest, rank));
tem = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
tem, gfc_conv_descriptor_ubound_get (dest,
rank));
cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
logical_type_node, cond, tem);
}
}
gfc_init_block (&cond_block2);
if (GFC_DESCRIPTOR_TYPE_P (type))
{
gfc_init_block (&inner_block);
gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
then_b = gfc_finish_block (&inner_block);
gfc_init_block (&inner_block);
gfc_add_modify (&inner_block, ptr,
gfc_call_realloc (&inner_block, ptr, size));
else_b = gfc_finish_block (&inner_block);
gfc_add_expr_to_block (&cond_block2,
build3_loc (input_location, COND_EXPR,
void_type_node,
unshare_expr (nonalloc),
then_b, else_b));
gfc_add_modify (&cond_block2, dest, src);
gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
}
else
{
gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
gfc_add_modify (&cond_block2, unshare_expr (dest),
fold_convert (type, ptr));
}
then_b = gfc_finish_block (&cond_block2);
else_b = build_empty_stmt (input_location);
gfc_add_expr_to_block (&cond_block,
build3_loc (input_location, COND_EXPR,
void_type_node, unshare_expr (cond),
then_b, else_b));
tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
? gfc_conv_descriptor_data_get (src) : src;
srcptr = unshare_expr (srcptr);
srcptr = fold_convert (pvoid_type_node, srcptr);
call = build_call_expr_loc (input_location,
builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
srcptr, size);
gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
{
tree tem = gfc_walk_alloc_comps (src, dest,
OMP_CLAUSE_DECL (clause),
WALK_ALLOC_COMPS_COPY_CTOR);
gfc_add_expr_to_block (&cond_block, tem);
}
then_b = gfc_finish_block (&cond_block);
if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
{
gfc_init_block (&cond_block);
if (GFC_DESCRIPTOR_TYPE_P (type))
{
tree tmp = gfc_conv_descriptor_data_get (unshare_expr (dest));
tmp = gfc_deallocate_with_status (tmp, NULL_TREE, NULL_TREE,
NULL_TREE, NULL_TREE, true, NULL,
GFC_CAF_COARRAY_NOCOARRAY);
gfc_add_expr_to_block (&cond_block, tmp);
}
else
{
destptr = gfc_evaluate_now (destptr, &cond_block);
gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
gfc_add_modify (&cond_block, unshare_expr (dest),
build_zero_cst (TREE_TYPE (dest)));
}
else_b = gfc_finish_block (&cond_block);
cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
unshare_expr (srcptr), null_pointer_node);
gfc_add_expr_to_block (&block,
build3_loc (input_location, COND_EXPR,
void_type_node, cond,
then_b, else_b));
}
else
gfc_add_expr_to_block (&block, then_b);
return gfc_finish_block (&block);
}
/* Emit code into BLOCK that performs, element by element,
     DEST[i] = SRC[i] + ADD	for i = 0 .. NELEMS (inclusive)
   using the front end's scalarizer to build the loop.  DEST and SRC
   are either ARRAY_TYPE variables (indexed directly) or pointers to
   the element type (indexed via byte offsets); ADD is a scalar
   integer.  Used when translating the OpenMP LINEAR clause for array
   variables.  */

static void
gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
				tree add, tree nelems)
{
  stmtblock_t tmpblock;
  tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
  /* Evaluate NELEMS once up front; it becomes the loop upper bound.  */
  nelems = gfc_evaluate_now (nelems, block);

  gfc_init_block (&tmpblock);
  if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
    {
      /* Plain arrays: ordinary element references.  */
      desta = gfc_build_array_ref (dest, index, NULL);
      srca = gfc_build_array_ref (src, index, NULL);
    }
  else
    {
      /* Otherwise DEST/SRC are pointers to the element type: compute
	 the byte offset index * sizeof (element) and dereference.  */
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
      tree idx = fold_build2 (MULT_EXPR, sizetype,
			      fold_convert (sizetype, index),
			      TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
      desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
						    TREE_TYPE (dest), dest,
						    idx));
      srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
						   TREE_TYPE (src), src,
						   idx));
    }
  /* Loop body: dest[i] = src[i] + add.  */
  gfc_add_modify (&tmpblock, desta,
		  fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
			       srca, add));

  /* Wrap the body in a one-dimensional scalarizer loop from 0 to
     NELEMS.  */
  gfc_loopinfo loop;
  gfc_init_loopinfo (&loop);
  loop.dimen = 1;
  loop.from[0] = gfc_index_zero_node;
  loop.loopvar[0] = index;
  loop.to[0] = nelems;
  gfc_trans_scalarizing_loops (&loop, &tmpblock);
  gfc_add_block_to_block (block, &loop.pre);
}
/* Build and return code for a constructor of DEST that initializes
   it to SRC plus ADD (ADD is scalar integer), i.e. each element of
   DEST becomes the corresponding element of SRC incremented by ADD.
   Used for the OpenMP LINEAR clause.  For allocatable arrays the
   private copy DEST is additionally allocated here (its descriptor
   is first copied from SRC).  */

tree
gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
{
  tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
  stmtblock_t block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
  gfc_start_block (&block);
  add = gfc_evaluate_now (add, &block);
  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
	  || !POINTER_TYPE_P (type)))
    {
      /* Non-allocatable array: DEST already has storage; just fill
	 it with SRC + ADD element-wise.  */
      bool compute_nelts = false;
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      if (!TYPE_DOMAIN (type)
	  || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
	  || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
	  || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
	compute_nelts = true;
      else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	{
	  /* Bounds carrying the "omp dummy var" attribute are
	     placeholders; derive the extent from the type size
	     instead.  */
	  tree a = DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
	  if (lookup_attribute ("omp dummy var", a))
	    compute_nelts = true;
	}
      if (compute_nelts)
	{
	  /* Element count minus one: total size / element size - 1.  */
	  nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
				TYPE_SIZE_UNIT (type),
				TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
	}
      else
	nelems = array_type_nelts (type);
      nelems = fold_convert (gfc_array_index_type, nelems);
      gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
      return gfc_finish_block (&block);
    }

  /* Allocatable arrays in LINEAR clauses need to be allocated
     and copied from SRC.  */
  gfc_add_modify (&block, dest, src);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* Byte size from the descriptor bounds:
	 (ubound - lbound + 1) [* stride of last dim for rank > 1]
	 * element size.  */
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (dest, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (dest, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (dest, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      nelems = gfc_evaluate_now (unshare_expr (size), &block);
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      nelems, unshare_expr (esize));
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &block);
      /* gfc_omp_linear_clause_add_loop iterates 0..NELEMS inclusive,
	 so pass the element count minus one.  */
      nelems = fold_build2_loc (input_location, MINUS_EXPR,
				gfc_array_index_type, nelems,
				gfc_index_one_node);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* Point DEST's descriptor at the fresh storage, then fill it.  */
      gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
      tree etype = gfc_get_element_type (type);
      ptr = fold_convert (build_pointer_type (etype), ptr);
      tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
      srcptr = fold_convert (build_pointer_type (etype), srcptr);
      gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
    }
  else
    {
      /* Scalar allocatable: *dest = *src + add.  */
      gfc_add_modify (&block, unshare_expr (dest),
		      fold_convert (TREE_TYPE (dest), ptr));
      ptr = fold_convert (TREE_TYPE (dest), ptr);
      tree dstm = build_fold_indirect_ref (ptr);
      tree srcm = build_fold_indirect_ref (unshare_expr (src));
      gfc_add_modify (&block, dstm,
		      fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
    }
  return gfc_finish_block (&block);
}
/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  Handles deallocation of allocatable arrays/scalars,
   finalization of non-pointer polymorphic entities, and destruction
   of allocatable components of derived types.  */

tree
gfc_omp_clause_dtor (tree clause, tree decl)
{
  tree type = TREE_TYPE (decl), tem;
  tree decl_type = TREE_TYPE (OMP_CLAUSE_DECL (clause));

  /* Only pointer was privatized; cf. gfc_omp_clause_copy_ctor.  */
  if (DECL_P (OMP_CLAUSE_DECL (clause))
      && GFC_DECL_ASSOCIATE_VAR_P (OMP_CLAUSE_DECL (clause)))
    return NULL_TREE;

  /* For an artificial decl carrying a saved descriptor, classify the
     entity by the saved descriptor's type.  */
  if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause))
      && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause))
      && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause)))
    decl_type
      = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause)));

  if (gfc_is_polymorphic_nonptr (decl_type))
    {
      /* Non-pointer polymorphic (CLASS) scalar: run the _final
	 procedure from the vtab when there is one, then free the
	 _data component.  */
      if (POINTER_TYPE_P (decl_type))
	decl_type = TREE_TYPE (decl_type);
      decl_type = TREE_TYPE (TYPE_FIELDS (decl_type));
      if (GFC_DESCRIPTOR_TYPE_P (decl_type) || GFC_ARRAY_TYPE_P (decl_type))
	fatal_error (input_location,
		     "Sorry, polymorphic arrays not yet supported for "
		     "firstprivate");
      stmtblock_t block, cond_block;
      gfc_start_block (&block);
      gfc_init_block (&cond_block);
      tree final = gfc_class_vtab_final_get (decl);
      tree size = fold_convert (size_type_node, gfc_class_vtab_size_get (decl));
      gfc_se se;
      gfc_init_se (&se, NULL);
      symbol_attribute attr = {};
      tree data = gfc_class_data_get (decl);
      /* The finalizer expects a descriptor, so wrap the scalar.  */
      tree desc = gfc_conv_scalar_to_descriptor (&se, data, attr);

      /* Call class->_vpt->_finalize + free.  */
      tree call = build_fold_indirect_ref (final);
      call = build_call_expr_loc (input_location, call, 3,
				  gfc_build_addr_expr (NULL, desc),
				  size, boolean_false_node);
      gfc_add_block_to_block (&cond_block, &se.pre);
      gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
      gfc_add_block_to_block (&cond_block, &se.post);
      /* Create: if (_vtab && _final) <cond_block>  */
      tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   gfc_class_vptr_get (decl),
				   null_pointer_node);
      tree cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				    final, null_pointer_node);
      cond = fold_build2_loc (input_location, TRUTH_ANDIF_EXPR,
			      boolean_type_node, cond, cond2);
      gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
						 void_type_node, cond,
						 gfc_finish_block (&cond_block), NULL_TREE));
      call = builtin_decl_explicit (BUILT_IN_FREE);
      call = build_call_expr_loc (input_location, call, 1, data);
      gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
      return gfc_finish_block (&block);
    }

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
	  || !POINTER_TYPE_P (type)))
    {
      /* Not itself allocatable: only allocatable components (if any)
	 need destruction.  */
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	return gfc_walk_alloc_comps (decl, NULL_TREE,
				     OMP_CLAUSE_DECL (clause),
				     WALK_ALLOC_COMPS_DTOR);
      return NULL_TREE;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
	 to be deallocated if they were allocated.  */
      tem = gfc_conv_descriptor_data_get (decl);
      tem = gfc_deallocate_with_status (tem, NULL_TREE, NULL_TREE, NULL_TREE,
					NULL_TREE, true, NULL,
					GFC_CAF_COARRAY_NOCOARRAY);
    }
  else
    tem = gfc_call_free (decl);
  tem = gfc_omp_unshare_expr (tem);

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      /* Destruct allocatable components first, then free the object
	 itself -- guarded so it only runs when actually allocated.  */
      stmtblock_t block;
      tree then_b;

      gfc_init_block (&block);
      gfc_add_expr_to_block (&block,
			     gfc_walk_alloc_comps (decl, NULL_TREE,
						   OMP_CLAUSE_DECL (clause),
						   WALK_ALLOC_COMPS_DTOR));
      gfc_add_expr_to_block (&block, tem);
      then_b = gfc_finish_block (&block);

      tem = fold_convert (pvoid_type_node,
			  GFC_DESCRIPTOR_TYPE_P (type)
			  ? gfc_conv_descriptor_data_get (decl) : decl);
      tem = unshare_expr (tem);
      tree cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
				   tem, null_pointer_node);
      tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
			then_b, build_empty_stmt (input_location));
    }
  return tem;
}
/* Append to BLOCK the conditional

     if (COND_VAL) THEN_B; else VAL = ELSE_VAL;

   COND_VAL is evaluated at run time.  When ELSE_VAL is NULL_TREE no
   else branch is emitted.  */

static void
gfc_build_cond_assign (stmtblock_t *block, tree val, tree cond_val,
		       tree then_b, tree else_val)
{
  tree else_branch = NULL_TREE;

  if (else_val)
    {
      /* Build the else branch: VAL = (typeof (VAL)) ELSE_VAL.  */
      stmtblock_t tmp;
      gfc_init_block (&tmp);
      gfc_add_modify (&tmp, val, fold_convert (TREE_TYPE (val), else_val));
      else_branch = gfc_finish_block (&tmp);
    }

  tree cond_expr = build3_loc (input_location, COND_EXPR, void_type_node,
			       cond_val, then_b, else_branch);
  gfc_add_expr_to_block (block, cond_expr);
}
/* Append to BLOCK a conditional assignment to a fresh temporary and
   return that temporary: if COND_VAL evaluates to true at run time
   the temporary gets THEN_VAL, otherwise ELSE_VAL (when non-NULL).  */

static tree
gfc_build_cond_assign_expr (stmtblock_t *block, tree cond_val,
			    tree then_val, tree else_val)
{
  tree result = create_tmp_var (TREE_TYPE (then_val));

  /* Wrap the then-assignment in its own statement block.  */
  stmtblock_t tmp;
  gfc_init_block (&tmp);
  gfc_add_modify (&tmp, result, then_val);
  tree then_branch = gfc_finish_block (&tmp);

  gfc_build_cond_assign (block, result, cond_val, then_branch, else_val);
  return result;
}
/* Language-specific hook invoked from the gimplifier to finish an
   implicit OMP_CLAUSE_MAP clause C for a Fortran entity: it splits
   the mapping of descriptors, (optional) pointers and allocatables
   into the extra map clauses (c2/c3/c4) the middle end expects, and
   computes the byte size being mapped.  Run-time statements (size
   computation, PRESENT checks for OPTIONAL dummies) are appended to
   PRE_P.  OPENACC is true when lowering an OpenACC construct.  */

void
gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
{
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
    return;

  tree decl = OMP_CLAUSE_DECL (c);

  /* Assumed-size arrays can't be mapped implicitly, they have to be
     mapped explicitly using array sections.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
				GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
	 == NULL)
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"implicit mapping of assumed size array %qD", decl);
      return;
    }

  tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
  /* For OPTIONAL dummies, PRESENT is a run-time expression testing
     whether the argument was actually passed; NULL_TREE otherwise.  */
  tree present = gfc_omp_check_optional_argument (decl, true);
  if (POINTER_TYPE_P (TREE_TYPE (decl)))
    {
      if (!gfc_omp_privatize_by_reference (decl)
	  && !GFC_DECL_GET_SCALAR_POINTER (decl)
	  && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  && !GFC_DECL_CRAY_POINTEE (decl)
	  && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return;
      tree orig_decl = decl;

      /* Map the pointer itself (c4) and continue with what it points
	 to.  */
      c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c4, GOMP_MAP_POINTER);
      OMP_CLAUSE_DECL (c4) = decl;
      OMP_CLAUSE_SIZE (c4) = size_int (0);
      decl = build_fold_indirect_ref (decl);
      if (present
	  && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
	      || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
	{
	  /* Optional scalar pointer/allocatable: dereference only when
	     present, otherwise substitute a NULL pointer.  */
	  c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_POINTER);
	  OMP_CLAUSE_DECL (c2) = decl;
	  OMP_CLAUSE_SIZE (c2) = size_int (0);

	  stmtblock_t block;
	  gfc_start_block (&block);
	  tree ptr = decl;
	  ptr = gfc_build_cond_assign_expr (&block, present, decl,
					    null_pointer_node);
	  gimplify_and_add (gfc_finish_block (&block), pre_p);
	  ptr = build_fold_indirect_ref (ptr);
	  OMP_CLAUSE_DECL (c) = ptr;
	  OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
	}
      else
	{
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SIZE (c) = NULL_TREE;
	}
      if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
	  && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
	      || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
	{
	  /* Reference to a scalar pointer/allocatable: one more level
	     of indirection, mapped via c3.  */
	  c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_POINTER);
	  OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
	  OMP_CLAUSE_SIZE (c3) = size_int (0);
	  decl = build_fold_indirect_ref (decl);
	  OMP_CLAUSE_DECL (c) = decl;
	}
    }
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
    {
      /* Array descriptor: map the data (c), the descriptor itself
	 (c2, GOMP_MAP_TO_PSET) and the data pointer field (c3).  */
      stmtblock_t block;
      gfc_start_block (&block);
      tree type = TREE_TYPE (decl);
      tree ptr = gfc_conv_descriptor_data_get (decl);

      /* OpenMP: automatically map pointer targets with the pointer;
	 hence, always update the descriptor/pointer itself.
	 NOTE: This also remaps the pointer for allocatable arrays with
	 'target' attribute which also don't have the 'restrict' qualifier.  */
      bool always_modifier = false;
      if (!openacc
	  && !(TYPE_QUALS (TREE_TYPE (ptr)) & TYPE_QUAL_RESTRICT))
	always_modifier = true;

      if (present)
	ptr = gfc_build_cond_assign_expr (&block, present, ptr,
					  null_pointer_node);
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ptr = build_fold_indirect_ref (ptr);
      OMP_CLAUSE_DECL (c) = ptr;
      c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_TO_PSET);
      if (present)
	{
	  /* Guarded access to the descriptor through a temporary.  */
	  ptr = create_tmp_var (TREE_TYPE (TREE_OPERAND (decl, 0)));
	  gfc_add_modify (&block, ptr, TREE_OPERAND (decl, 0));
	  OMP_CLAUSE_DECL (c2) = build_fold_indirect_ref (ptr);
	}
      else
	OMP_CLAUSE_DECL (c2) = decl;
      OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
      c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, always_modifier ? GOMP_MAP_ALWAYS_POINTER
						   : GOMP_MAP_POINTER);
      if (present)
	{
	  ptr = gfc_conv_descriptor_data_get (decl);
	  ptr = gfc_build_addr_expr (NULL, ptr);
	  ptr = gfc_build_cond_assign_expr (&block, present,
					    ptr, null_pointer_node);
	  ptr = build_fold_indirect_ref (ptr);
	  OMP_CLAUSE_DECL (c3) = ptr;
	}
      else
	OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
      OMP_CLAUSE_SIZE (c3) = size_int (0);

      /* Compute the number of bytes to map: full array size times
	 element size, or zero when the array is unallocated /
	 disassociated / absent.  */
      tree size = create_tmp_var (gfc_array_index_type);
      tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
      elemsz = fold_convert (gfc_array_index_type, elemsz);
      if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
	{
	  stmtblock_t cond_block;
	  tree tem, then_b, else_b, zero, cond;

	  gfc_init_block (&cond_block);
	  tem = gfc_full_array_size (&cond_block, decl,
				     GFC_TYPE_ARRAY_RANK (type));
	  gfc_add_modify (&cond_block, size, tem);
	  gfc_add_modify (&cond_block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	  then_b = gfc_finish_block (&cond_block);
	  gfc_init_block (&cond_block);
	  zero = build_int_cst (gfc_array_index_type, 0);
	  gfc_add_modify (&cond_block, size, zero);
	  else_b = gfc_finish_block (&cond_block);
	  /* Size is nonzero only when the data pointer is non-NULL
	     (and, for optional dummies, the argument is present).  */
	  tem = gfc_conv_descriptor_data_get (decl);
	  tem = fold_convert (pvoid_type_node, tem);
	  cond = fold_build2_loc (input_location, NE_EXPR,
				  boolean_type_node, tem, null_pointer_node);
	  if (present)
	    {
	      cond = fold_build2_loc (input_location, TRUTH_ANDIF_EXPR,
				      boolean_type_node, present, cond);
	    }
	  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
						     void_type_node, cond,
						     then_b, else_b));
	}
      else if (present)
	{
	  stmtblock_t cond_block;
	  tree then_b;

	  gfc_init_block (&cond_block);
	  gfc_add_modify (&cond_block, size,
			  gfc_full_array_size (&cond_block, decl,
					       GFC_TYPE_ARRAY_RANK (type)));
	  gfc_add_modify (&cond_block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	  then_b = gfc_finish_block (&cond_block);

	  gfc_build_cond_assign (&block, size, present, then_b,
				 build_int_cst (gfc_array_index_type, 0));
	}
      else
	{
	  gfc_add_modify (&block, size,
			  gfc_full_array_size (&block, decl,
					       GFC_TYPE_ARRAY_RANK (type)));
	  gfc_add_modify (&block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	}
      OMP_CLAUSE_SIZE (c) = size;
      tree stmt = gfc_finish_block (&block);
      gimplify_and_add (stmt, pre_p);
    }
  tree last = c;
  /* Default the size to the decl/type size, and gimplify it to a
     value the middle end can use.  */
  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
    OMP_CLAUSE_SIZE (c)
      = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
		      : TYPE_SIZE_UNIT (TREE_TYPE (decl));
  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
		     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
    OMP_CLAUSE_SIZE (c) = size_int (0);
  /* Chain the helper clauses c2/c3/c4 immediately after C.  */
  if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c2;
      last = c2;
    }
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c3;
      last = c3;
    }
  if (c4)
    {
      OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c4;
    }
}
/* Return true if DECL is a scalar variable (for the purpose of
   implicit firstprivatization/mapping).  Allocatables and pointers
   are only permitted when PTR_ALLOC_OK is true.  */

bool
gfc_omp_scalar_p (tree decl, bool ptr_alloc_ok)
{
  tree t = TREE_TYPE (decl);

  /* Look through a reference (e.g. a by-reference dummy).  */
  if (TREE_CODE (t) == REFERENCE_TYPE)
    t = TREE_TYPE (t);

  if (TREE_CODE (t) == POINTER_TYPE)
    {
      bool alloc_or_ptr = (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
			   || GFC_DECL_GET_SCALAR_POINTER (decl));
      if (alloc_or_ptr)
	{
	  if (!ptr_alloc_ok)
	    return false;
	  t = TREE_TYPE (t);
	}
      /* Arrays and polymorphic entities are never scalars here.  */
      if (GFC_ARRAY_TYPE_P (t) || GFC_CLASS_TYPE_P (t))
	return false;
    }

  /* Character entities (string flag on an array or integer type)
     do not count as scalars.  */
  if ((TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
      && TYPE_STRING_FLAG (t))
    return false;

  return (INTEGRAL_TYPE_P (t)
	  || SCALAR_FLOAT_TYPE_P (t)
	  || COMPLEX_FLOAT_TYPE_P (t));
}
/* Return true if DECL is a scalar with the TARGET attribute but
   without the allocatable or pointer attribute (for the purpose of
   implicit mapping).  */

bool
gfc_omp_scalar_target_p (tree decl)
{
  if (!DECL_P (decl))
    return false;
  if (!GFC_DECL_GET_SCALAR_TARGET (decl))
    return false;
  return gfc_omp_scalar_p (decl, false);
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      if (GFC_DECL_COMMON_OR_EQUIV (decl))
	{
	  tree value = DECL_VALUE_EXPR (decl);
	  bool common_member
	    = (TREE_CODE (value) == COMPONENT_REF
	       && VAR_P (TREE_OPERAND (value, 0))
	       && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)));
	  /* If variable in COMMON or EQUIVALENCE is privatized, return
	     true, as just that variable is supposed to be privatized,
	     not the whole COMMON or whole EQUIVALENCE.
	     For shared variables in COMMON or EQUIVALENCE, let them be
	     gimplified to DECL_VALUE_EXPR, so that for multiple shared
	     vars from the same COMMON or EQUIVALENCE just one sharing
	     of the whole COMMON or EQUIVALENCE is enough.  */
	  if (common_member)
	    return !shared;
	}
      if (GFC_DECL_RESULT (decl))
	return !shared;
    }
  return false;
}
/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  /* Cray pointees always get the debug-private treatment.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (!GFC_DECL_COMMON_OR_EQUIV (decl) || !DECL_HAS_VALUE_EXPR_P (decl))
    return false;

  /* COMMON/EQUIVALENCE members: debug-private iff shared.  */
  tree value = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (value) != COMPONENT_REF)
    return false;
  tree base = TREE_OPERAND (value, 0);
  if (VAR_P (base) && GFC_DECL_COMMON_OR_EQUIV (base))
    return shared;
  return false;
}
/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  For Fortran array types these are the
   per-dimension bound/stride decls plus the overall size and offset.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (!GFC_ARRAY_TYPE_P (type) && !GFC_DESCRIPTOR_TYPE_P (type))
    return;

  gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
  for (int r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
    {
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
    }
  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
}
/* Prepend clause NODE to the clause chain TAIL and return the new
   head of the chain.  */

static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}
/* Translate symbol SYM to the decl used in an OpenMP clause.  For
   'declare simd' (DECLARE_SIMD true), dummy arguments are identified
   by their position in the argument list, so an INTEGER_CST holding
   that position is returned instead of a decl.  Function results and
   alternate entry points are mapped to the corresponding fake result
   decl.  */

static tree
gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
{
  if (declare_simd)
    {
      /* Compute SYM's position among the procedure's arguments,
	 counting the hidden ones first: the entry-master selector
	 and (for by-reference results) the result and, for character
	 results, its length.  */
      int cnt = 0;
      gfc_symbol *proc_sym;
      gfc_formal_arglist *f;

      gcc_assert (sym->attr.dummy);
      proc_sym = sym->ns->proc_name;
      if (proc_sym->attr.entry_master)
	++cnt;
      if (gfc_return_by_reference (proc_sym))
	{
	  ++cnt;
	  if (proc_sym->ts.type == BT_CHARACTER)
	    ++cnt;
	}
      for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
	if (f->sym == sym)
	  break;
	else if (f->sym)
	  ++cnt;
      gcc_assert (f);
      return build_int_cst (integer_type_node, cnt);
    }

  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
		    && sym->result == sym;
  entry_master = sym->attr.result
		 && sym->ns->proc_name->attr.entry_master
		 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = current_function_decl
		? DECL_CONTEXT (current_function_decl) : NULL_TREE;

  /* Detect whether SYM belongs to the parent procedure (e.g. when
     translating a contained procedure) so the right fake result decl
     is selected below.  */
  if ((t == parent_decl && return_value)
      || (sym->ns && sym->ns->proc_name
	  && sym->ns->proc_name->backend_decl == parent_decl
	  && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);
  /* Similarly for alternate entry points.  */
  else if (alternate_entry
	   && (sym->ns->proc_name->backend_decl == current_function_decl
	       || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
	if (sym == el->sym)
	  {
	    t = gfc_get_fake_result_decl (sym, parent_flag);
	    break;
	  }
    }
  else if (entry_master
	   && (sym->ns->proc_name->backend_decl == current_function_decl
	       || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}
/* Translate NAMELIST into a chain of OpenMP clauses of kind CODE and
   prepend them to LIST, returning the new head.  Unreferenced symbols
   are skipped unless DECLARE_SIMD.  */

static tree
gfc_trans_omp_variable_list (enum omp_clause_code code,
			     gfc_omp_namelist *namelist, tree list,
			     bool declare_simd)
{
  for (gfc_omp_namelist *n = namelist; n != NULL; n = n->next)
    {
      if (!n->sym->attr.referenced && !declare_simd)
	continue;

      tree t = gfc_trans_omp_variable (n->sym, declare_simd);
      if (t == error_mark_node)
	continue;

      /* For HAS_DEVICE_ADDR of an array descriptor, firstprivatize the
	 descriptor such that the bounds are available; its data component
	 is unmodified; it is handled as device address inside target.  */
      tree node;
      bool descriptor_p
	= (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (t))
	   || (POINTER_TYPE_P (TREE_TYPE (t))
	       && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (t)))));
      if (code == OMP_CLAUSE_HAS_DEVICE_ADDR && descriptor_p)
	node = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
      else
	node = build_omp_clause (input_location, code);
      OMP_CLAUSE_DECL (node) = t;
      list = gfc_trans_add_clause (node, list);

      if (code == OMP_CLAUSE_LASTPRIVATE && n->u.lastprivate_conditional)
	OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (node) = 1;
    }
  return list;
}
/* Callback data for omp_udr_find_orig: records whether a reference to
   the user-defined reduction's OMP_ORIG symbol was seen while walking
   its initializer.  */
struct omp_udr_find_orig_data
{
  gfc_omp_udr *omp_udr;	/* The user-defined reduction being searched.  */
  bool omp_orig_seen;	/* Set when a reference to omp_orig is found.  */
};
/* gfc_code_walker expression callback: set omp_orig_seen in the
   omp_udr_find_orig_data passed via DATA when *E is a reference to
   the UDR's omp_orig symbol.  Always returns 0 (keep walking).  */

static int
omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
		   void *data)
{
  struct omp_udr_find_orig_data *cd
    = (struct omp_udr_find_orig_data *) data;
  gfc_expr *expr = *e;

  if (expr->expr_type == EXPR_VARIABLE
      && expr->symtree->n.sym == cd->omp_udr->omp_orig)
    cd->omp_orig_seen = true;

  return 0;
}
/* Fill in OMP_CLAUSE_REDUCTION_INIT, OMP_CLAUSE_REDUCTION_MERGE and
   OMP_CLAUSE_REDUCTION_PLACEHOLDER of clause C for an array reduction
   or a user-defined reduction (UDR) on namelist entry N.  Fake
   symbols and symtrees are created so the front end's resolution and
   translation machinery can be reused to build the initializer and
   combiner statements; WHERE is the clause's source location.  */

static void
gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
{
  gfc_symbol *sym = n->sym;
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_symbol omp_var_copy[4];
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt, type, outer_decl;
  locus old_loc = gfc_current_locus;
  const char *iname;
  bool t;
  gfc_omp_udr *udr = n->u2.udr ? n->u2.udr->udr : NULL;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;
  type = TREE_TYPE (decl);
  outer_decl = create_tmp_var_raw (type);
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (type) == REFERENCE_TYPE
      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* Allocatable array dummy passed by reference: operate on the
	 pointed-to descriptor.  */
      decl = build_fold_indirect_ref (decl);
      type = TREE_TYPE (type);
    }

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
    backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  else if (udr->initializer_ns)
    backend_decl = NULL;
  else
    switch (sym->ts.type)
      {
      case BT_LOGICAL:
      case BT_INTEGER:
      case BT_REAL:
      case BT_COMPLEX:
	backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
	break;
      default:
	backend_decl = NULL_TREE;
	break;
      }
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  if (sym->as)
    outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = outer_decl;
  if (decl != OMP_CLAUSE_DECL (c))
    outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);
  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);
  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  memset (omp_var_copy, 0, sizeof omp_var_copy);
  if (udr)
    {
      /* Temporarily rebind the UDR's omp_out/omp_in (and, when it has
	 an initializer, omp_priv/omp_orig) symbols to this clause's
	 variables; the originals are restored at the end.  */
      omp_var_copy[0] = *udr->omp_out;
      omp_var_copy[1] = *udr->omp_in;
      *udr->omp_out = outer_sym;
      *udr->omp_in = *sym;
      if (udr->initializer_ns)
	{
	  omp_var_copy[2] = *udr->omp_priv;
	  omp_var_copy[3] = *udr->omp_orig;
	  *udr->omp_priv = *sym;
	  *udr->omp_orig = outer_sym;
	}
    }

  /* Create expressions.  e1 is the private variable (full array
     reference when dimensioned).  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  if (sym->attr.dimension)
    {
      e1->ref = ref = gfc_get_ref ();
      ref->type = REF_ARRAY;
      ref->u.ar.where = where;
      ref->u.ar.as = sym->as;
      ref->u.ar.type = AR_FULL;
      ref->u.ar.dimen = 0;
    }
  t = gfc_resolve_expr (e1);
  gcc_assert (t);

  /* e2 is the initialization value.  */
  e2 = NULL;
  if (backend_decl != NULL_TREE)
    {
      e2 = gfc_get_expr ();
      e2->expr_type = EXPR_VARIABLE;
      e2->where = where;
      e2->symtree = symtree2;
      e2->ts = sym->ts;
      t = gfc_resolve_expr (e2);
      gcc_assert (t);
    }
  else if (udr->initializer_ns == NULL)
    {
      gcc_assert (sym->ts.type == BT_DERIVED);
      e2 = gfc_default_initializer (&sym->ts);
      gcc_assert (e2);
      t = gfc_resolve_expr (e2);
      gcc_assert (t);
    }
  else if (n->u2.udr->initializer->op == EXEC_ASSIGN)
    {
      e2 = gfc_copy_expr (n->u2.udr->initializer->expr2);
      t = gfc_resolve_expr (e2);
      gcc_assert (t);
    }
  if (udr && udr->initializer_ns)
    {
      /* Record whether the UDR initializer references omp_orig.  */
      struct omp_udr_find_orig_data cd;
      cd.omp_udr = udr;
      cd.omp_orig_seen = false;
      gfc_code_walker (&n->u2.udr->initializer,
		       gfc_dummy_code_callback, omp_udr_find_orig, &cd);
      if (cd.omp_orig_seen)
	OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
    }

  /* e3 refers to the outer (original) variable; e4 is the combined
     value (e3 <op> e1 or <intrinsic> (e3, e1)).  */
  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t);

  iname = NULL;
  e4 = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    case ERROR_MARK:
      /* ERROR_MARK is used for user-defined reductions; when the
	 combiner is a plain assignment, use its operands directly.  */
      if (n->u2.udr->combiner->op == EXEC_ASSIGN)
	{
	  gfc_free_expr (e3);
	  e3 = gfc_copy_expr (n->u2.udr->combiner->expr1);
	  e4 = gfc_copy_expr (n->u2.udr->combiner->expr2);
	  t = gfc_resolve_expr (e3);
	  gcc_assert (t);
	  t = gfc_resolve_expr (e4);
	  gcc_assert (t);
	}
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      /* MIN/MAX/IAND/IOR/IEOR combine via an intrinsic function;
	 build a fake intrinsic symbol and call expression.  */
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.attr.implicit_type = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
    {
      /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
      e1 = gfc_copy_expr (e1);
      e3 = gfc_copy_expr (e3);
      t = gfc_resolve_expr (e4);
      gcc_assert (t);
    }

  /* Create the init statement list.  */
  pushlevel ();
  if (e2)
    stmt = gfc_trans_assignment (e1, e2, false, false);
  else
    stmt = gfc_trans_call (n->u2.udr->initializer, false,
			   NULL_TREE, NULL_TREE, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel ();
  if (e4)
    stmt = gfc_trans_assignment (e3, e4, false, true);
  else
    stmt = gfc_trans_call (n->u2.udr->combiner, false,
			   NULL_TREE, NULL_TREE, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;

  gfc_current_locus = old_loc;

  /* Release the fake expressions, symtrees and array spec.  */
  gfc_free_expr (e1);
  if (e2)
    gfc_free_expr (e2);
  gfc_free_expr (e3);
  if (e4)
    gfc_free_expr (e4);
  free (symtree1);
  free (symtree2);
  free (symtree3);
  free (symtree4);
  if (outer_sym.as)
    gfc_free_array_spec (outer_sym.as);

  /* Restore the UDR's original omp_* symbols.  */
  if (udr)
    {
      *udr->omp_out = omp_var_copy[0];
      *udr->omp_in = omp_var_copy[1];
      if (udr->initializer_ns)
	{
	  *udr->omp_priv = omp_var_copy[2];
	  *udr->omp_orig = omp_var_copy[3];
	}
    }
}
/* Append clauses for a reduction-style list (reduction, in_reduction or
   task_reduction) to LIST and return the new list head.  KIND is the
   OMP_LIST_* flavor and selects the clause code as well as the INSCAN/TASK
   modifier bits; NAMELIST holds the reduction symbols; WHERE is the
   directive location passed on for array/UDR handling; MARK_ADDRESSABLE
   forces each reduction variable addressable (used for OpenACC async
   copy-out).  */

static tree
gfc_trans_omp_reduction_list (int kind, gfc_omp_namelist *namelist, tree list,
			      locus where, bool mark_addressable)
{
  omp_clause_code clause;

  /* Pick the clause code matching the list kind.  */
  switch (kind)
    {
    case OMP_LIST_REDUCTION:
    case OMP_LIST_REDUCTION_INSCAN:
    case OMP_LIST_REDUCTION_TASK:
      clause = OMP_CLAUSE_REDUCTION;
      break;
    case OMP_LIST_IN_REDUCTION:
      clause = OMP_CLAUSE_IN_REDUCTION;
      break;
    case OMP_LIST_TASK_REDUCTION:
      clause = OMP_CLAUSE_TASK_REDUCTION;
      break;
    default:
      gcc_unreachable ();
    }

  for (gfc_omp_namelist *n = namelist; n != NULL; n = n->next)
    {
      if (!n->sym->attr.referenced)
	continue;
      tree t = gfc_trans_omp_variable (n->sym, false);
      if (t == error_mark_node)
	continue;

      tree node = build_omp_clause (gfc_get_location (&n->where), clause);
      OMP_CLAUSE_DECL (node) = t;
      if (mark_addressable)
	TREE_ADDRESSABLE (t) = 1;
      if (kind == OMP_LIST_REDUCTION_INSCAN)
	OMP_CLAUSE_REDUCTION_INSCAN (node) = 1;
      if (kind == OMP_LIST_REDUCTION_TASK)
	OMP_CLAUSE_REDUCTION_TASK (node) = 1;

      /* Translate the Fortran reduction operator into the middle-end
	 tree code; user-defined reductions use ERROR_MARK as a marker.  */
      enum tree_code rcode;
      switch (n->u.reduction_op)
	{
	case OMP_REDUCTION_PLUS:
	  rcode = PLUS_EXPR;
	  break;
	case OMP_REDUCTION_MINUS:
	  rcode = MINUS_EXPR;
	  break;
	case OMP_REDUCTION_TIMES:
	  rcode = MULT_EXPR;
	  break;
	case OMP_REDUCTION_AND:
	  rcode = TRUTH_ANDIF_EXPR;
	  break;
	case OMP_REDUCTION_OR:
	  rcode = TRUTH_ORIF_EXPR;
	  break;
	case OMP_REDUCTION_EQV:
	  rcode = EQ_EXPR;
	  break;
	case OMP_REDUCTION_NEQV:
	  rcode = NE_EXPR;
	  break;
	case OMP_REDUCTION_MAX:
	  rcode = MAX_EXPR;
	  break;
	case OMP_REDUCTION_MIN:
	  rcode = MIN_EXPR;
	  break;
	case OMP_REDUCTION_IAND:
	  rcode = BIT_AND_EXPR;
	  break;
	case OMP_REDUCTION_IOR:
	  rcode = BIT_IOR_EXPR;
	  break;
	case OMP_REDUCTION_IEOR:
	  rcode = BIT_XOR_EXPR;
	  break;
	case OMP_REDUCTION_USER:
	  rcode = ERROR_MARK;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_REDUCTION_CODE (node) = rcode;

      /* Arrays, allocatables and user-defined reductions need the
	 init/merge placeholder machinery.  */
      if (n->sym->attr.dimension
	  || n->u.reduction_op == OMP_REDUCTION_USER
	  || n->sym->attr.allocatable)
	gfc_trans_omp_array_reduction_or_udr (node, n, where);
      list = gfc_trans_add_clause (node, list);
    }
  return list;
}
/* Evaluate the expression EXPR, appending any required setup and cleanup
   code to BLOCK, and return its value saved in a temporary so it can be
   used multiple times.  */

static inline tree
gfc_convert_expr_to_tree (stmtblock_t *block, gfc_expr *expr)
{
  gfc_se expr_se;

  gfc_init_se (&expr_se, NULL);
  gfc_conv_expr (&expr_se, expr);
  gfc_add_block_to_block (block, &expr_se.pre);
  tree result = gfc_evaluate_now (expr_se.expr, block);
  gfc_add_block_to_block (block, &expr_se.post);
  return result;
}
static vec<tree, va_heap, vl_embed> *doacross_steps;
/* Translate an array section or array element.  */

/* Build the map clauses for the array section or single array element
   N->EXPR whose base variable tree is DECL.  ELEMENT is true for a single
   element, false for a section.  PTR_KIND selects how the data pointer is
   mapped (e.g. GOMP_MAP_POINTER, GOMP_MAP_ALWAYS_POINTER,
   GOMP_MAP_ATTACH_DETACH).  NODE is the pre-built data map clause; up to
   three companion clauses are returned via NODE2..NODE4 (descriptor,
   pointer, and pointer-to-descriptor or string-length mappings); NODE may
   be swapped with the descriptor clause so that one comes first.  */
static void
gfc_trans_omp_array_section (stmtblock_t *block, gfc_omp_namelist *n,
			     tree decl, bool element, gomp_map_kind ptr_kind,
			     tree &node, tree &node2, tree &node3, tree &node4)
{
  gfc_se se;
  tree ptr, ptr2;
  tree elemsz = NULL_TREE;

  gfc_init_se (&se, NULL);
  if (element)
    {
      /* Single element: the mapped size is just that element's size.  */
      gfc_conv_expr_reference (&se, n->expr);
      gfc_add_block_to_block (block, &se.pre);
      ptr = se.expr;
      OMP_CLAUSE_SIZE (node) = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr)));
      elemsz = OMP_CLAUSE_SIZE (node);
    }
  else
    {
      /* Section: size = element count of the section * element size.  */
      gfc_conv_expr_descriptor (&se, n->expr);
      ptr = gfc_conv_array_data (se.expr);
      tree type = TREE_TYPE (se.expr);
      gfc_add_block_to_block (block, &se.pre);
      OMP_CLAUSE_SIZE (node) = gfc_full_array_size (block, se.expr,
						    GFC_TYPE_ARRAY_RANK (type));
      elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
      elemsz = fold_convert (gfc_array_index_type, elemsz);
      OMP_CLAUSE_SIZE (node) = fold_build2 (MULT_EXPR, gfc_array_index_type,
					    OMP_CLAUSE_SIZE (node), elemsz);
    }
  gcc_assert (se.post.head == NULL_TREE);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
  OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
  ptr = fold_convert (ptrdiff_type_node, ptr);

  if (POINTER_TYPE_P (TREE_TYPE (decl))
      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl)))
      && ptr_kind == GOMP_MAP_POINTER)
    {
      /* Pointer to a descriptor: add a zero-size POINTER mapping for the
	 pointer itself and continue below with the pointed-to
	 descriptor.  */
      node4 = build_omp_clause (input_location,
				OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (node4, GOMP_MAP_POINTER);
      OMP_CLAUSE_DECL (node4) = decl;
      OMP_CLAUSE_SIZE (node4) = size_int (0);
      decl = build_fold_indirect_ref (decl);
    }
  else if (ptr_kind == GOMP_MAP_ALWAYS_POINTER
	   && n->expr->ts.type == BT_CHARACTER
	   && n->expr->ts.deferred)
    {
      /* Deferred-length character: additionally map the string length,
	 with a map kind matching the data clause's direction.  */
      gomp_map_kind map_kind;
      if (GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (node)))
	map_kind = GOMP_MAP_TO;
      else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_RELEASE
	       || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DELETE)
	map_kind = OMP_CLAUSE_MAP_KIND (node);
      else
	map_kind = GOMP_MAP_ALLOC;
      gcc_assert (se.string_length);
      node4 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (node4, map_kind);
      OMP_CLAUSE_DECL (node4) = se.string_length;
      OMP_CLAUSE_SIZE (node4) = TYPE_SIZE_UNIT (gfc_charlen_type_node);
    }

  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
    {
      /* Descriptor-backed array: map the descriptor itself plus the data
	 pointer stored inside it.  */
      tree desc_node;
      tree type = TREE_TYPE (decl);
      ptr2 = gfc_conv_descriptor_data_get (decl);
      desc_node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (desc_node) = decl;
      OMP_CLAUSE_SIZE (desc_node) = TYPE_SIZE_UNIT (type);
      if (ptr_kind == GOMP_MAP_ALWAYS_POINTER)
	{
	  OMP_CLAUSE_SET_MAP_KIND (desc_node, GOMP_MAP_TO);
	  node2 = node;
	  node = desc_node;  /* Needs to come first.  */
	}
      else
	{
	  OMP_CLAUSE_SET_MAP_KIND (desc_node, GOMP_MAP_TO_PSET);
	  node2 = desc_node;
	}
      node3 = build_omp_clause (input_location,
				OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (node3, ptr_kind);
      OMP_CLAUSE_DECL (node3)
	= gfc_conv_descriptor_data_get (decl);
      /* This purposely does not include GOMP_MAP_ALWAYS_POINTER.  The extra
	 cast prevents gimplify.cc from recognising it as being part of the
	 struct – and adding an 'alloc: for the 'desc.data' pointer, which
	 would break as the 'desc' (the descriptor) is also mapped
	 (see node4 above).  */
      if (ptr_kind == GOMP_MAP_ATTACH_DETACH)
	STRIP_NOPS (OMP_CLAUSE_DECL (node3));
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	{
	  /* Plain array: rewrite the clause decl as decl[offset] where
	     offset is the element distance from the array base to the
	     section start.  */
	  tree offset;
	  ptr2 = build_fold_addr_expr (decl);
	  offset = fold_build2 (MINUS_EXPR, ptrdiff_type_node, ptr,
				fold_convert (ptrdiff_type_node, ptr2));
	  offset = build2 (TRUNC_DIV_EXPR, ptrdiff_type_node,
			   offset, fold_convert (ptrdiff_type_node, elemsz));
	  offset = build4_loc (input_location, ARRAY_REF,
			       TREE_TYPE (TREE_TYPE (decl)),
			       decl, offset, NULL_TREE, NULL_TREE);
	  OMP_CLAUSE_DECL (node) = offset;
	  /* No pointer clause needed in this case.  */
	  if (ptr_kind == GOMP_MAP_ALWAYS_POINTER)
	    return;
	}
      else
	{
	  gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
	  ptr2 = decl;
	}
      node3 = build_omp_clause (input_location,
				OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (node3, ptr_kind);
      OMP_CLAUSE_DECL (node3) = decl;
    }
  /* The pointer clause's "size" carries the bias: the byte distance from
     the base object to the start of the mapped data.  */
  ptr2 = fold_convert (ptrdiff_type_node, ptr2);
  OMP_CLAUSE_SIZE (node3) = fold_build2 (MINUS_EXPR, ptrdiff_type_node,
					 ptr, ptr2);
}
/* Translate the iterator definitions of namespace NS (as used by OpenMP
   'affinity' and 'depend' clause iterator modifiers) into a TREE_LIST of
   TREE_VECs in the form the middle end expects.  Each TREE_VEC has six
   slots: 0 = iterator variable, 1 = begin, 2 = end, 3 = step,
   4 = original step, 5 = the enclosing BLOCK (slot 5 is filled in later
   by the caller).  Evaluation code for begin/end/step is appended to
   ITER_BLOCK, and the iterator decls are chained onto BLOCK's
   BLOCK_VARS.  */
static tree
handle_iterator (gfc_namespace *ns, stmtblock_t *iter_block, tree block)
{
  tree list = NULL_TREE;
  for (gfc_symbol *sym = ns->omp_affinity_iterators; sym; sym = sym->tlink)
    {
      gfc_constructor *c;
      gfc_se se;
      tree last = make_tree_vec (6);
      tree iter_var = gfc_get_symbol_decl (sym);
      tree type = TREE_TYPE (iter_var);
      TREE_VEC_ELT (last, 0) = iter_var;
      DECL_CHAIN (iter_var) = BLOCK_VARS (block);
      BLOCK_VARS (block) = iter_var;
      /* begin */
      c = gfc_constructor_first (sym->value->value.constructor);
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, c->expr);
      gfc_add_block_to_block (iter_block, &se.pre);
      gfc_add_block_to_block (iter_block, &se.post);
      TREE_VEC_ELT (last, 1) = fold_convert (type,
					     gfc_evaluate_now (se.expr,
							       iter_block));
      /* end */
      c = gfc_constructor_next (c);
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, c->expr);
      gfc_add_block_to_block (iter_block, &se.pre);
      gfc_add_block_to_block (iter_block, &se.post);
      TREE_VEC_ELT (last, 2) = fold_convert (type,
					     gfc_evaluate_now (se.expr,
							       iter_block));
      /* step; defaults to 1 when omitted.  */
      c = gfc_constructor_next (c);
      tree step;
      if (c)
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr (&se, c->expr);
	  gfc_add_block_to_block (iter_block, &se.pre);
	  gfc_add_block_to_block (iter_block, &se.post);
	  /* Bug fix: the original called gfc_conv_expr a second time here,
	     after se.pre/se.post had already been flushed to ITER_BLOCK;
	     any side-effect code from that re-conversion was silently
	     dropped, so se.expr could refer to statements never emitted.
	     Convert exactly once.  */
	  step = fold_convert (type,
			       gfc_evaluate_now (se.expr,
						 iter_block));
	}
      else
	step = build_int_cst (type, 1);
      TREE_VEC_ELT (last, 3) = step;
      /* orig_step */
      TREE_VEC_ELT (last, 4) = save_expr (step);
      TREE_CHAIN (last) = list;
      list = last;
    }
  return list;
}
static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
locus where, bool declare_simd = false,
bool openacc = false)
{
tree omp_clauses = NULL_TREE, prev_clauses, chunk_size, c;
tree iterator = NULL_TREE;
tree tree_block = NULL_TREE;
stmtblock_t iter_block;
int list, ifc;
enum omp_clause_code clause_code;
gfc_omp_namelist *prev = NULL;
gfc_se se;
if (clauses == NULL)
return NULL_TREE;
for (list = 0; list < OMP_LIST_NUM; list++)
{
gfc_omp_namelist *n = clauses->lists[list];
if (n == NULL)
continue;
switch (list)
{
case OMP_LIST_REDUCTION:
case OMP_LIST_REDUCTION_INSCAN:
case OMP_LIST_REDUCTION_TASK:
case OMP_LIST_IN_REDUCTION:
case OMP_LIST_TASK_REDUCTION:
/* An OpenACC async clause indicates the need to set reduction
arguments addressable, to allow asynchronous copy-out. */
omp_clauses = gfc_trans_omp_reduction_list (list, n, omp_clauses,
where, clauses->async);
break;
case OMP_LIST_PRIVATE:
clause_code = OMP_CLAUSE_PRIVATE;
goto add_clause;
case OMP_LIST_SHARED:
clause_code = OMP_CLAUSE_SHARED;
goto add_clause;
case OMP_LIST_FIRSTPRIVATE:
clause_code = OMP_CLAUSE_FIRSTPRIVATE;
goto add_clause;
case OMP_LIST_LASTPRIVATE:
clause_code = OMP_CLAUSE_LASTPRIVATE;
goto add_clause;
case OMP_LIST_COPYIN:
clause_code = OMP_CLAUSE_COPYIN;
goto add_clause;
case OMP_LIST_COPYPRIVATE:
clause_code = OMP_CLAUSE_COPYPRIVATE;
goto add_clause;
case OMP_LIST_UNIFORM:
clause_code = OMP_CLAUSE_UNIFORM;
goto add_clause;
case OMP_LIST_USE_DEVICE:
case OMP_LIST_USE_DEVICE_PTR:
clause_code = OMP_CLAUSE_USE_DEVICE_PTR;
goto add_clause;
case OMP_LIST_USE_DEVICE_ADDR:
clause_code = OMP_CLAUSE_USE_DEVICE_ADDR;
goto add_clause;
case OMP_LIST_IS_DEVICE_PTR:
clause_code = OMP_CLAUSE_IS_DEVICE_PTR;
goto add_clause;
case OMP_LIST_HAS_DEVICE_ADDR:
clause_code = OMP_CLAUSE_HAS_DEVICE_ADDR;
goto add_clause;
case OMP_LIST_NONTEMPORAL:
clause_code = OMP_CLAUSE_NONTEMPORAL;
goto add_clause;
case OMP_LIST_SCAN_IN:
clause_code = OMP_CLAUSE_INCLUSIVE;
goto add_clause;
case OMP_LIST_SCAN_EX:
clause_code = OMP_CLAUSE_EXCLUSIVE;
goto add_clause;
add_clause:
omp_clauses
= gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
declare_simd);
break;
case OMP_LIST_ALIGNED:
for (; n != NULL; n = n->next)
if (n->sym->attr.referenced || declare_simd)
{
tree t = gfc_trans_omp_variable (n->sym, declare_simd);
if (t != error_mark_node)
{
tree node = build_omp_clause (input_location,
OMP_CLAUSE_ALIGNED);
OMP_CLAUSE_DECL (node) = t;
if (n->expr)
{
tree alignment_var;
if (declare_simd)
alignment_var = gfc_conv_constant_to_tree (n->expr);
else
{
gfc_init_se (&se, NULL);
gfc_conv_expr (&se, n->expr);
gfc_add_block_to_block (block, &se.pre);
alignment_var = gfc_evaluate_now (se.expr, block);
gfc_add_block_to_block (block, &se.post);
}
OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
}
omp_clauses = gfc_trans_add_clause (node, omp_clauses);
}
}
break;
case OMP_LIST_ALLOCATE:
for (; n != NULL; n = n->next)
if (n->sym->attr.referenced)
{
tree t = gfc_trans_omp_variable (n->sym, false);
if (t != error_mark_node)
{
tree node = build_omp_clause (input_location,
OMP_CLAUSE_ALLOCATE);
OMP_CLAUSE_DECL (node) = t;
if (n->expr)
{
tree allocator_;
gfc_init_se (&se, NULL);
gfc_conv_expr (&se, n->expr);
allocator_ = gfc_evaluate_now (se.expr, block);
OMP_CLAUSE_ALLOCATE_ALLOCATOR (node) = allocator_;
}
omp_clauses = gfc_trans_add_clause (node, omp_clauses);
}
}
break;
case OMP_LIST_LINEAR:
{
gfc_expr *last_step_expr = NULL;
tree last_step = NULL_TREE;
bool last_step_parm = false;
for (; n != NULL; n = n->next)
{
if (n->expr)
{
last_step_expr = n->expr;
last_step = NULL_TREE;
last_step_parm = false;
}
if (n->sym->attr.referenced || declare_simd)
{
tree t = gfc_trans_omp_variable (n->sym, declare_simd);
if (t != error_mark_node)
{
tree node = build_omp_clause (input_location,
OMP_CLAUSE_LINEAR);
OMP_CLAUSE_DECL (node) = t;
omp_clause_linear_kind kind;
switch (n->u.linear_op)
{
case OMP_LINEAR_DEFAULT:
kind = OMP_CLAUSE_LINEAR_DEFAULT;
break;
case OMP_LINEAR_REF:
kind = OMP_CLAUSE_LINEAR_REF;
break;
case OMP_LINEAR_VAL:
kind = OMP_CLAUSE_LINEAR_VAL;
break;
case OMP_LINEAR_UVAL:
kind = OMP_CLAUSE_LINEAR_UVAL;
break;
default:
gcc_unreachable ();
}
OMP_CLAUSE_LINEAR_KIND (node) = kind;
if (last_step_expr && last_step == NULL_TREE)
{
if (!declare_simd)
{
gfc_init_se (&se, NULL);
gfc_conv_expr (&se, last_step_expr);
gfc_add_block_to_block (block, &se.pre);
last_step = gfc_evaluate_now (se.expr, block);
gfc_add_block_to_block (block, &se.post);
}
else if (last_step_expr->expr_type == EXPR_VARIABLE)
{
gfc_symbol *s = last_step_expr->symtree->n.sym;
last_step = gfc_trans_omp_variable (s, true);
last_step_parm = true;
}
else
last_step
= gfc_conv_constant_to_tree (last_step_expr);
}
if (last_step_parm)
{
OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node) = 1;
OMP_CLAUSE_LINEAR_STEP (node) = last_step;
}
else
{
if (kind == OMP_CLAUSE_LINEAR_REF)
{
tree type;
if (n->sym->attr.flavor == FL_PROCEDURE)
{
type = gfc_get_function_type (n->sym);
type = build_pointer_type (type);
}
else
type = gfc_sym_type (n->sym);
if (POINTER_TYPE_P (type))
type = TREE_TYPE (type);
/* Otherwise to be determined what exactly
should be done. */
tree t = fold_convert (sizetype, last_step);
t = size_binop (MULT_EXPR, t,
TYPE_SIZE_UNIT (type));
OMP_CLAUSE_LINEAR_STEP (node) = t;
}
else
{
tree type
= gfc_typenode_for_spec (&n->sym->ts);
OMP_CLAUSE_LINEAR_STEP (node)
= fold_convert (type, last_step);
}
}
if (n->sym->attr.dimension || n->sym->attr.allocatable)
OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
omp_clauses = gfc_trans_add_clause (node, omp_clauses);
}
}
}
}
break;
case OMP_LIST_AFFINITY:
case OMP_LIST_DEPEND:
iterator = NULL_TREE;
prev = NULL;
prev_clauses = omp_clauses;
for (; n != NULL; n = n->next)
{
if (iterator && prev->u2.ns != n->u2.ns)
{
BLOCK_SUBBLOCKS (tree_block) = gfc_finish_block (&iter_block);
TREE_VEC_ELT (iterator, 5) = tree_block;
for (tree c = omp_clauses; c != prev_clauses;
c = OMP_CLAUSE_CHAIN (c))
OMP_CLAUSE_DECL (c) = build_tree_list (iterator,
OMP_CLAUSE_DECL (c));
prev_clauses = omp_clauses;
iterator = NULL_TREE;
}
if (n->u2.ns && (!prev || prev->u2.ns != n->u2.ns))
{
gfc_init_block (&iter_block);
tree_block = make_node (BLOCK);
TREE_USED (tree_block) = 1;
BLOCK_VARS (tree_block) = NULL_TREE;
iterator = handle_iterator (n->u2.ns, block,
tree_block);
}
if (!iterator)
gfc_init_block (&iter_block);
prev = n;
if (list == OMP_LIST_DEPEND
&& n->u.depend_op == OMP_DEPEND_SINK_FIRST)
{
tree vec = NULL_TREE;
unsigned int i;
for (i = 0; ; i++)
{
tree addend = integer_zero_node, t;
bool neg = false;
if (n->expr)
{
addend = gfc_conv_constant_to_tree (n->expr);
if (TREE_CODE (addend) == INTEGER_CST
&& tree_int_cst_sgn (addend) == -1)
{
neg = true;
addend = const_unop (NEGATE_EXPR,
TREE_TYPE (addend), addend);
}
}
t = gfc_trans_omp_variable (n->sym, false);
if (t != error_mark_node)
{
if (i < vec_safe_length (doacross_steps)
&& !integer_zerop (addend)
&& (*doacross_steps)[i])
{
tree step = (*doacross_steps)[i];
addend = fold_convert (TREE_TYPE (step), addend);
addend = build2 (TRUNC_DIV_EXPR,
TREE_TYPE (step), addend, step);
}
vec = tree_cons (addend, t, vec);
if (neg)
OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec) = 1;
}
if (n->next == NULL
|| n->next->u.depend_op != OMP_DEPEND_SINK)
break;
n = n->next;
}
if (vec == NULL_TREE)
continue;
tree node = build_omp_clause (input_location,
OMP_CLAUSE_DEPEND);
OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_SINK;
OMP_CLAUSE_DECL (node) = nreverse (vec);
omp_clauses = gfc_trans_add_clause (node, omp_clauses);
continue;
}
if (!n->sym->attr.referenced)
continue;
tree node = build_omp_clause (input_location,
list == OMP_LIST_DEPEND
? OMP_CLAUSE_DEPEND
: OMP_CLAUSE_AFFINITY);
if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
{
tree decl = gfc_trans_omp_variable (n->sym, false);
if (gfc_omp_privatize_by_reference (decl))
decl = build_fold_indirect_ref (decl);
if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
{
decl = gfc_conv_descriptor_data_get (decl);
gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
decl = build_fold_indirect_ref (decl);
}
else if (n->sym->attr.allocatable || n->sym->attr.pointer)
decl = build_fold_indirect_ref (decl);
else if (DECL_P (decl))
TREE_ADDRESSABLE (decl) = 1;
OMP_CLAUSE_DECL (node) = decl;
}
else
{
tree ptr;
gfc_init_se (&se, NULL);
if (n->expr->ref->u.ar.type == AR_ELEMENT)
{
gfc_conv_expr_reference (&se, n->expr);
ptr = se.expr;
}
else
{
gfc_conv_expr_descriptor (&se, n->expr);
ptr = gfc_conv_array_data (se.expr);
}
gfc_add_block_to_block (&iter_block, &se.pre);
gfc_add_block_to_block (&iter_block, &se.post);
gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
}
if (list == OMP_LIST_DEPEND)
switch (n->u.depend_op)
{
case OMP_DEPEND_IN:
OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
break;
case OMP_DEPEND_OUT:
OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
break;
case OMP_DEPEND_INOUT:
OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
break;
case OMP_DEPEND_MUTEXINOUTSET:
OMP_CLAUSE_DEPEND_KIND (node)
= OMP_CLAUSE_DEPEND_MUTEXINOUTSET;
break;
case OMP_DEPEND_DEPOBJ:
OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_DEPOBJ;
break;
default:
gcc_unreachable ();
}
if (!iterator)
gfc_add_block_to_block (block, &iter_block);
omp_clauses = gfc_trans_add_clause (node, omp_clauses);
}
if (iterator)
{
BLOCK_SUBBLOCKS (tree_block) = gfc_finish_block (&iter_block);
TREE_VEC_ELT (iterator, 5) = tree_block;
for (tree c = omp_clauses; c != prev_clauses;
c = OMP_CLAUSE_CHAIN (c))
OMP_CLAUSE_DECL (c) = build_tree_list (iterator,
OMP_CLAUSE_DECL (c));
}
break;
case OMP_LIST_MAP:
for (; n != NULL; n = n->next)
{
if (!n->sym->attr.referenced)
continue;
bool always_modifier = false;
tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
tree node2 = NULL_TREE;
tree node3 = NULL_TREE;
tree node4 = NULL_TREE;
/* OpenMP: automatically map pointer targets with the pointer;
hence, always update the descriptor/pointer itself. */
if (!openacc
&& ((n->expr == NULL && n->sym->attr.pointer)
|| (n->expr && gfc_expr_attr (n->expr).pointer)))
always_modifier = true;
switch (n->u.map_op)
{
case OMP_MAP_ALLOC:
OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_ALLOC);
break;