/* Build expressions with type checking for C compiler.
Copyright (C) 1987-2015 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
/* This file is part of the C front end.
It contains routines to build C expressions given their operands,
including computing the types of the result, C-specific error checks,
and some optimization. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "symtab.h"
#include "input.h"
#include "alias.h"
#include "double-int.h"
#include "machmode.h"
#include "inchash.h"
#include "real.h"
#include "fixed-value.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "trans-mem.h"
#include "varasm.h"
#include "stmt.h"
#include "langhooks.h"
#include "c-tree.h"
#include "c-lang.h"
#include "flags.h"
#include "intl.h"
#include "target.h"
#include "tree-iterator.h"
#include "bitmap.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "tree-inline.h"
#include "omp-low.h"
#include "c-family/c-objc.h"
#include "c-family/c-common.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "wide-int.h"
#include "gomp-constants.h"
/* Possible cases of implicit bad conversions. Used to select
diagnostic messages in convert_for_assignment. */
enum impl_conv {
ic_argpass,
ic_assign,
ic_init,
ic_return
};
/* The level of nesting inside "__alignof__". */
int in_alignof;
/* The level of nesting inside "sizeof". */
int in_sizeof;
/* The level of nesting inside "typeof". */
int in_typeof;
/* The argument of last parsed sizeof expression, only to be tested
if expr.original_code == SIZEOF_EXPR. */
tree c_last_sizeof_arg;
/* Nonzero if we might need to print a "missing braces around
initializer" message within this initializer. */
static int found_missing_braces;
static int require_constant_value;
static int require_constant_elements;
static bool null_pointer_constant_p (const_tree);
static tree qualify_type (tree, tree);
static int tagged_types_tu_compatible_p (const_tree, const_tree, bool *,
bool *);
static int comp_target_types (location_t, tree, tree);
static int function_types_compatible_p (const_tree, const_tree, bool *,
bool *);
static int type_lists_compatible_p (const_tree, const_tree, bool *, bool *);
static tree lookup_field (tree, tree);
static int convert_arguments (location_t, vec<location_t>, tree,
vec<tree, va_gc> *, vec<tree, va_gc> *, tree,
tree);
static tree pointer_diff (location_t, tree, tree);
static tree convert_for_assignment (location_t, location_t, tree, tree, tree,
enum impl_conv, bool, tree, tree, int);
static tree valid_compound_expr_initializer (tree, tree);
static void push_string (const char *);
static void push_member_name (tree);
static int spelling_length (void);
static char *print_spelling (char *);
static void warning_init (location_t, int, const char *);
static tree digest_init (location_t, tree, tree, tree, bool, bool, int);
static void output_init_element (location_t, tree, tree, bool, tree, tree, int,
bool, struct obstack *);
static void output_pending_init_elements (int, struct obstack *);
static int set_designator (location_t, int, struct obstack *);
static void push_range_stack (tree, struct obstack *);
static void add_pending_init (location_t, tree, tree, tree, bool,
struct obstack *);
static void set_nonincremental_init (struct obstack *);
static void set_nonincremental_init_from_string (tree, struct obstack *);
static tree find_init_member (tree, struct obstack *);
static void readonly_warning (tree, enum lvalue_use);
static int lvalue_or_else (location_t, const_tree, enum lvalue_use);
static void record_maybe_used_decl (tree);
static int comptypes_internal (const_tree, const_tree, bool *, bool *);
/* Return true if EXP is a null pointer constant, false otherwise. */
static bool
null_pointer_constant_p (const_tree expr)
{
/* This should really operate on c_expr structures, but they aren't
yet available everywhere required. */
tree type = TREE_TYPE (expr);
return (TREE_CODE (expr) == INTEGER_CST
&& !TREE_OVERFLOW (expr)
&& integer_zerop (expr)
&& (INTEGRAL_TYPE_P (type)
|| (TREE_CODE (type) == POINTER_TYPE
&& VOID_TYPE_P (TREE_TYPE (type))
&& TYPE_QUALS (TREE_TYPE (type)) == TYPE_UNQUALIFIED)));
}
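/* Illustrative examples (not part of GCC itself): under the test above,
   the expressions 0, 0L and (void *) 0 are null pointer constants, while
   (char *) 0 and (const void *) 0 are not: the cast, if any, must be to
   unqualified "void *", and the operand must be an integer constant
   expression with value zero (cf. C11 6.3.2.3p3). */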
/* EXPR may appear in an unevaluated part of an integer constant
expression, but not in an evaluated part. Wrap it in a
C_MAYBE_CONST_EXPR, or mark it with TREE_OVERFLOW if it is just an
INTEGER_CST and we cannot create a C_MAYBE_CONST_EXPR. */
static tree
note_integer_operands (tree expr)
{
tree ret;
if (TREE_CODE (expr) == INTEGER_CST && in_late_binary_op)
{
ret = copy_node (expr);
TREE_OVERFLOW (ret) = 1;
}
else
{
ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (expr), NULL_TREE, expr);
C_MAYBE_CONST_EXPR_INT_OPERANDS (ret) = 1;
}
return ret;
}
/* Having checked whether EXPR may appear in an unevaluated part of an
integer constant expression and found that it may, remove any
C_MAYBE_CONST_EXPR noting this fact and return the resulting
expression. */
static inline tree
remove_c_maybe_const_expr (tree expr)
{
if (TREE_CODE (expr) == C_MAYBE_CONST_EXPR)
return C_MAYBE_CONST_EXPR_EXPR (expr);
else
return expr;
}
/* This is a cache recording whether two tagged types have been found to
   be compatible or not. */
struct tagged_tu_seen_cache {
const struct tagged_tu_seen_cache * next;
const_tree t1;
const_tree t2;
/* The return value of tagged_types_tu_compatible_p if we had seen
these two types already. */
int val;
};
static const struct tagged_tu_seen_cache * tagged_tu_seen_base;
static void free_all_tagged_tu_seen_up_to (const struct tagged_tu_seen_cache *);
/* Do `exp = require_complete_type (exp);' to make sure exp
does not have an incomplete type. (That includes void types.) */
tree
require_complete_type (tree value)
{
tree type = TREE_TYPE (value);
if (error_operand_p (value))
return error_mark_node;
/* First, detect a valid value with a complete type. */
if (COMPLETE_TYPE_P (type))
return value;
c_incomplete_type_error (value, type);
return error_mark_node;
}
/* Print an error message for invalid use of an incomplete type.
VALUE is the expression that was used (or 0 if that isn't known)
and TYPE is the type that was invalid. */
void
c_incomplete_type_error (const_tree value, const_tree type)
{
const char *type_code_string;
/* Avoid duplicate error message. */
if (TREE_CODE (type) == ERROR_MARK)
return;
if (value != 0 && (TREE_CODE (value) == VAR_DECL
|| TREE_CODE (value) == PARM_DECL))
error ("%qD has an incomplete type", value);
else
{
retry:
/* We must print an error message. Be clever about what it says. */
switch (TREE_CODE (type))
{
case RECORD_TYPE:
type_code_string = "struct";
break;
case UNION_TYPE:
type_code_string = "union";
break;
case ENUMERAL_TYPE:
type_code_string = "enum";
break;
case VOID_TYPE:
error ("invalid use of void expression");
return;
case ARRAY_TYPE:
if (TYPE_DOMAIN (type))
{
if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL)
{
error ("invalid use of flexible array member");
return;
}
type = TREE_TYPE (type);
goto retry;
}
error ("invalid use of array with unspecified bounds");
return;
default:
gcc_unreachable ();
}
if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
error ("invalid use of undefined type %<%s %E%>",
type_code_string, TYPE_NAME (type));
else
/* If this type has a typedef-name, the TYPE_NAME is a TYPE_DECL. */
error ("invalid use of incomplete typedef %qD", TYPE_NAME (type));
}
}
/* Given a type, apply default promotions wrt unnamed function
arguments and return the new type. */
tree
c_type_promotes_to (tree type)
{
tree ret = NULL_TREE;
if (TYPE_MAIN_VARIANT (type) == float_type_node)
ret = double_type_node;
else if (c_promoting_integer_type_p (type))
{
/* Preserve unsignedness if not really getting any wider. */
if (TYPE_UNSIGNED (type)
&& (TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node)))
ret = unsigned_type_node;
else
ret = integer_type_node;
}
if (ret != NULL_TREE)
return (TYPE_ATOMIC (type)
? c_build_qualified_type (ret, TYPE_QUAL_ATOMIC)
: ret);
return type;
}
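/* Illustrative sketch (not part of GCC): for an argument passed to an
   unprototyped or variadic function, the promotions computed here give

     float          -> double
     char, short    -> int
     unsigned short -> int, or unsigned int on targets where short and
                       int have the same precision

   so e.g. passing a float to printf actually passes a double. */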
/* Return true if there is a superset named address space that encompasses
   both AS1 and AS2. If there is one, store it in *COMMON and return true;
   otherwise return false. */
static bool
addr_space_superset (addr_space_t as1, addr_space_t as2, addr_space_t *common)
{
if (as1 == as2)
{
*common = as1;
return true;
}
else if (targetm.addr_space.subset_p (as1, as2))
{
*common = as2;
return true;
}
else if (targetm.addr_space.subset_p (as2, as1))
{
*common = as1;
return true;
}
else
return false;
}
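/* Hypothetical illustration (the address space names are target-defined
   and only assumed here; compare the AVR __flash/__memx spaces): if a
   target declares space B to be a superset of space A via
   targetm.addr_space.subset_p, then addr_space_superset (A, B, &as)
   returns true with as == B, while two unrelated spaces yield false. */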
/* Return a variant of TYPE which has all the type qualifiers of LIKE
as well as those of TYPE. */
static tree
qualify_type (tree type, tree like)
{
addr_space_t as_type = TYPE_ADDR_SPACE (type);
addr_space_t as_like = TYPE_ADDR_SPACE (like);
addr_space_t as_common;
/* If the two named address spaces are different, determine the common
superset address space. If there isn't one, raise an error. */
if (!addr_space_superset (as_type, as_like, &as_common))
{
as_common = as_type;
error ("%qT and %qT are in disjoint named address spaces",
type, like);
}
return c_build_qualified_type (type,
TYPE_QUALS_NO_ADDR_SPACE (type)
| TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (like)
| ENCODE_QUAL_ADDR_SPACE (as_common));
}
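/* Illustrative example (not part of GCC): qualifying "int" like
   "const volatile int" yields "const volatile int". Note that _Atomic is
   deliberately not copied from LIKE (only its
   TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC is used), and the common named
   address space of the two operands is substituted. */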
/* Return true iff the given type T is a variable length array type. */
bool
c_vla_type_p (const_tree t)
{
if (TREE_CODE (t) == ARRAY_TYPE
&& C_TYPE_VARIABLE_SIZE (t))
return true;
return false;
}
/* Return the composite type of two compatible types.
We assume that comptypes has already been done and returned
nonzero; if that isn't so, this may crash. In particular, we
assume that qualifiers match. */
tree
composite_type (tree t1, tree t2)
{
enum tree_code code1;
enum tree_code code2;
tree attributes;
/* Save time if the two types are the same. */
if (t1 == t2) return t1;
/* If one type is nonsense, use the other. */
if (t1 == error_mark_node)
return t2;
if (t2 == error_mark_node)
return t1;
code1 = TREE_CODE (t1);
code2 = TREE_CODE (t2);
/* Merge the attributes. */
attributes = targetm.merge_type_attributes (t1, t2);
/* If one is an enumerated type and the other is the compatible
integer type, the composite type might be either of the two
(DR#013 question 3). For consistency, use the enumerated type as
the composite type. */
if (code1 == ENUMERAL_TYPE && code2 == INTEGER_TYPE)
return t1;
if (code2 == ENUMERAL_TYPE && code1 == INTEGER_TYPE)
return t2;
gcc_assert (code1 == code2);
switch (code1)
{
case POINTER_TYPE:
/* For two pointers, do this recursively on the target type. */
{
tree pointed_to_1 = TREE_TYPE (t1);
tree pointed_to_2 = TREE_TYPE (t2);
tree target = composite_type (pointed_to_1, pointed_to_2);
t1 = build_pointer_type_for_mode (target, TYPE_MODE (t1), false);
t1 = build_type_attribute_variant (t1, attributes);
return qualify_type (t1, t2);
}
case ARRAY_TYPE:
{
tree elt = composite_type (TREE_TYPE (t1), TREE_TYPE (t2));
int quals;
tree unqual_elt;
tree d1 = TYPE_DOMAIN (t1);
tree d2 = TYPE_DOMAIN (t2);
bool d1_variable, d2_variable;
bool d1_zero, d2_zero;
bool t1_complete, t2_complete;
/* We should not have any type quals on arrays at all. */
gcc_assert (!TYPE_QUALS_NO_ADDR_SPACE (t1)
&& !TYPE_QUALS_NO_ADDR_SPACE (t2));
t1_complete = COMPLETE_TYPE_P (t1);
t2_complete = COMPLETE_TYPE_P (t2);
d1_zero = d1 == 0 || !TYPE_MAX_VALUE (d1);
d2_zero = d2 == 0 || !TYPE_MAX_VALUE (d2);
d1_variable = (!d1_zero
&& (TREE_CODE (TYPE_MIN_VALUE (d1)) != INTEGER_CST
|| TREE_CODE (TYPE_MAX_VALUE (d1)) != INTEGER_CST));
d2_variable = (!d2_zero
&& (TREE_CODE (TYPE_MIN_VALUE (d2)) != INTEGER_CST
|| TREE_CODE (TYPE_MAX_VALUE (d2)) != INTEGER_CST));
d1_variable = d1_variable || (d1_zero && c_vla_type_p (t1));
d2_variable = d2_variable || (d2_zero && c_vla_type_p (t2));
/* Save space: see if the result is identical to one of the args. */
if (elt == TREE_TYPE (t1) && TYPE_DOMAIN (t1)
&& (d2_variable || d2_zero || !d1_variable))
return build_type_attribute_variant (t1, attributes);
if (elt == TREE_TYPE (t2) && TYPE_DOMAIN (t2)
&& (d1_variable || d1_zero || !d2_variable))
return build_type_attribute_variant (t2, attributes);
if (elt == TREE_TYPE (t1) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1))
return build_type_attribute_variant (t1, attributes);
if (elt == TREE_TYPE (t2) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1))
return build_type_attribute_variant (t2, attributes);
/* Merge the element types, and have a size if either arg has
one. We may have qualifiers on the element types. To set
up TYPE_MAIN_VARIANT correctly, we need to form the
composite of the unqualified types and add the qualifiers
back at the end. */
quals = TYPE_QUALS (strip_array_types (elt));
unqual_elt = c_build_qualified_type (elt, TYPE_UNQUALIFIED);
t1 = build_array_type (unqual_elt,
TYPE_DOMAIN ((TYPE_DOMAIN (t1)
&& (d2_variable
|| d2_zero
|| !d1_variable))
? t1
: t2));
/* Ensure a composite type involving a zero-length array type
is a zero-length type, not an incomplete type. */
if (d1_zero && d2_zero
&& (t1_complete || t2_complete)
&& !COMPLETE_TYPE_P (t1))
{
TYPE_SIZE (t1) = bitsize_zero_node;
TYPE_SIZE_UNIT (t1) = size_zero_node;
}
t1 = c_build_qualified_type (t1, quals);
return build_type_attribute_variant (t1, attributes);
}
case ENUMERAL_TYPE:
case RECORD_TYPE:
case UNION_TYPE:
if (attributes != NULL)
{
/* Try harder not to create a new aggregate type. */
if (attribute_list_equal (TYPE_ATTRIBUTES (t1), attributes))
return t1;
if (attribute_list_equal (TYPE_ATTRIBUTES (t2), attributes))
return t2;
}
return build_type_attribute_variant (t1, attributes);
case FUNCTION_TYPE:
/* Function types: prefer the one that specified arg types.
If both do, merge the arg types. Also merge the return types. */
{
tree valtype = composite_type (TREE_TYPE (t1), TREE_TYPE (t2));
tree p1 = TYPE_ARG_TYPES (t1);
tree p2 = TYPE_ARG_TYPES (t2);
int len;
tree newargs, n;
int i;
/* Save space: see if the result is identical to one of the args. */
if (valtype == TREE_TYPE (t1) && !TYPE_ARG_TYPES (t2))
return build_type_attribute_variant (t1, attributes);
if (valtype == TREE_TYPE (t2) && !TYPE_ARG_TYPES (t1))
return build_type_attribute_variant (t2, attributes);
/* Simple way if one arg fails to specify argument types. */
if (TYPE_ARG_TYPES (t1) == 0)
{
t1 = build_function_type (valtype, TYPE_ARG_TYPES (t2));
t1 = build_type_attribute_variant (t1, attributes);
return qualify_type (t1, t2);
}
if (TYPE_ARG_TYPES (t2) == 0)
{
t1 = build_function_type (valtype, TYPE_ARG_TYPES (t1));
t1 = build_type_attribute_variant (t1, attributes);
return qualify_type (t1, t2);
}
/* If both args specify argument types, we must merge the two
lists, argument by argument. */
len = list_length (p1);
newargs = 0;
for (i = 0; i < len; i++)
newargs = tree_cons (NULL_TREE, NULL_TREE, newargs);
n = newargs;
for (; p1;
p1 = TREE_CHAIN (p1), p2 = TREE_CHAIN (p2), n = TREE_CHAIN (n))
{
/* A null type means arg type is not specified.
Take whatever the other function type has. */
if (TREE_VALUE (p1) == 0)
{
TREE_VALUE (n) = TREE_VALUE (p2);
goto parm_done;
}
if (TREE_VALUE (p2) == 0)
{
TREE_VALUE (n) = TREE_VALUE (p1);
goto parm_done;
}
/* Given wait (union {union wait *u; int *i} *)
and wait (union wait *),
prefer union wait * as type of parm. */
if (TREE_CODE (TREE_VALUE (p1)) == UNION_TYPE
&& TREE_VALUE (p1) != TREE_VALUE (p2))
{
tree memb;
tree mv2 = TREE_VALUE (p2);
if (mv2 && mv2 != error_mark_node
&& TREE_CODE (mv2) != ARRAY_TYPE)
mv2 = TYPE_MAIN_VARIANT (mv2);
for (memb = TYPE_FIELDS (TREE_VALUE (p1));
memb; memb = DECL_CHAIN (memb))
{
tree mv3 = TREE_TYPE (memb);
if (mv3 && mv3 != error_mark_node
&& TREE_CODE (mv3) != ARRAY_TYPE)
mv3 = TYPE_MAIN_VARIANT (mv3);
if (comptypes (mv3, mv2))
{
TREE_VALUE (n) = composite_type (TREE_TYPE (memb),
TREE_VALUE (p2));
pedwarn (input_location, OPT_Wpedantic,
"function types not truly compatible in ISO C");
goto parm_done;
}
}
}
if (TREE_CODE (TREE_VALUE (p2)) == UNION_TYPE
&& TREE_VALUE (p2) != TREE_VALUE (p1))
{
tree memb;
tree mv1 = TREE_VALUE (p1);
if (mv1 && mv1 != error_mark_node
&& TREE_CODE (mv1) != ARRAY_TYPE)
mv1 = TYPE_MAIN_VARIANT (mv1);
for (memb = TYPE_FIELDS (TREE_VALUE (p2));
memb; memb = DECL_CHAIN (memb))
{
tree mv3 = TREE_TYPE (memb);
if (mv3 && mv3 != error_mark_node
&& TREE_CODE (mv3) != ARRAY_TYPE)
mv3 = TYPE_MAIN_VARIANT (mv3);
if (comptypes (mv3, mv1))
{
TREE_VALUE (n) = composite_type (TREE_TYPE (memb),
TREE_VALUE (p1));
pedwarn (input_location, OPT_Wpedantic,
"function types not truly compatible in ISO C");
goto parm_done;
}
}
}
TREE_VALUE (n) = composite_type (TREE_VALUE (p1), TREE_VALUE (p2));
parm_done: ;
}
t1 = build_function_type (valtype, newargs);
t1 = qualify_type (t1, t2);
/* ... falls through ... */
}
default:
return build_type_attribute_variant (t1, attributes);
}
}
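/* Worked example (taken from the example in C11 6.2.7, for illustration):
   given the two declarations

     int f (int (*) (), double (*) [3]);
     int f (int (*) (char *), double (*) []);

   the composite type computed here for f is

     int f (int (*) (char *), double (*) [3]);

   i.e. parameter types are merged one by one and an array type takes its
   size from whichever operand supplies one. */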
/* Return the type of a conditional expression between pointers to
possibly differently qualified versions of compatible types.
We assume that comp_target_types has already been done and returned
nonzero; if that isn't so, this may crash. */
static tree
common_pointer_type (tree t1, tree t2)
{
tree attributes;
tree pointed_to_1, mv1;
tree pointed_to_2, mv2;
tree target;
unsigned target_quals;
addr_space_t as1, as2, as_common;
int quals1, quals2;
/* Save time if the two types are the same. */
if (t1 == t2) return t1;
/* If one type is nonsense, use the other. */
if (t1 == error_mark_node)
return t2;
if (t2 == error_mark_node)
return t1;
gcc_assert (TREE_CODE (t1) == POINTER_TYPE
&& TREE_CODE (t2) == POINTER_TYPE);
/* Merge the attributes. */
attributes = targetm.merge_type_attributes (t1, t2);
/* Find the composite type of the target types, and combine the
qualifiers of the two types' targets. Do not lose qualifiers on
array element types by taking the TYPE_MAIN_VARIANT. */
mv1 = pointed_to_1 = TREE_TYPE (t1);
mv2 = pointed_to_2 = TREE_TYPE (t2);
if (TREE_CODE (mv1) != ARRAY_TYPE)
mv1 = TYPE_MAIN_VARIANT (pointed_to_1);
if (TREE_CODE (mv2) != ARRAY_TYPE)
mv2 = TYPE_MAIN_VARIANT (pointed_to_2);
target = composite_type (mv1, mv2);
/* Strip array types to get the correct qualifiers for pointers to arrays. */
quals1 = TYPE_QUALS_NO_ADDR_SPACE (strip_array_types (pointed_to_1));
quals2 = TYPE_QUALS_NO_ADDR_SPACE (strip_array_types (pointed_to_2));
/* For function types do not merge const qualifiers, but drop them
if used inconsistently. The middle-end uses these to mark const
and noreturn functions. */
if (TREE_CODE (pointed_to_1) == FUNCTION_TYPE)
target_quals = (quals1 & quals2);
else
target_quals = (quals1 | quals2);
/* If the two named address spaces are different, determine the common
superset address space. This is guaranteed to exist due to the
assumption that comp_target_types returned non-zero. */
as1 = TYPE_ADDR_SPACE (pointed_to_1);
as2 = TYPE_ADDR_SPACE (pointed_to_2);
if (!addr_space_superset (as1, as2, &as_common))
gcc_unreachable ();
target_quals |= ENCODE_QUAL_ADDR_SPACE (as_common);
t1 = build_pointer_type (c_build_qualified_type (target, target_quals));
return build_type_attribute_variant (t1, attributes);
}
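/* Illustrative example (not part of GCC): for the conditional expression

     cond ? (const int *) p : (volatile int *) q

   the type computed here is "const volatile int *": the qualifiers of
   both pointer targets are combined on the composite target type (and
   are dropped instead for pointers to functions, as noted above). */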
/* Return the common type for two arithmetic types under the usual
arithmetic conversions. The default conversions have already been
applied, and enumerated types converted to their compatible integer
types. The resulting type is unqualified and has no attributes.
This is the type for the result of most arithmetic operations
if the operands have the given two types. */
static tree
c_common_type (tree t1, tree t2)
{
enum tree_code code1;
enum tree_code code2;
/* If one type is nonsense, use the other. */
if (t1 == error_mark_node)
return t2;
if (t2 == error_mark_node)
return t1;
if (TYPE_QUALS (t1) != TYPE_UNQUALIFIED)
t1 = TYPE_MAIN_VARIANT (t1);
if (TYPE_QUALS (t2) != TYPE_UNQUALIFIED)
t2 = TYPE_MAIN_VARIANT (t2);
if (TYPE_ATTRIBUTES (t1) != NULL_TREE)
t1 = build_type_attribute_variant (t1, NULL_TREE);
if (TYPE_ATTRIBUTES (t2) != NULL_TREE)
t2 = build_type_attribute_variant (t2, NULL_TREE);
/* Save time if the two types are the same. */
if (t1 == t2) return t1;
code1 = TREE_CODE (t1);
code2 = TREE_CODE (t2);
gcc_assert (code1 == VECTOR_TYPE || code1 == COMPLEX_TYPE
|| code1 == FIXED_POINT_TYPE || code1 == REAL_TYPE
|| code1 == INTEGER_TYPE);
gcc_assert (code2 == VECTOR_TYPE || code2 == COMPLEX_TYPE
|| code2 == FIXED_POINT_TYPE || code2 == REAL_TYPE
|| code2 == INTEGER_TYPE);
/* When one operand is a decimal float type, the other operand cannot be
a generic float type or a complex type. We also disallow vector types
here. */
if ((DECIMAL_FLOAT_TYPE_P (t1) || DECIMAL_FLOAT_TYPE_P (t2))
&& !(DECIMAL_FLOAT_TYPE_P (t1) && DECIMAL_FLOAT_TYPE_P (t2)))
{
if (code1 == VECTOR_TYPE || code2 == VECTOR_TYPE)
{
error ("can%'t mix operands of decimal float and vector types");
return error_mark_node;
}
if (code1 == COMPLEX_TYPE || code2 == COMPLEX_TYPE)
{
error ("can%'t mix operands of decimal float and complex types");
return error_mark_node;
}
if (code1 == REAL_TYPE && code2 == REAL_TYPE)
{
error ("can%'t mix operands of decimal float and other float types");
return error_mark_node;
}
}
/* If one type is a vector type, return that type. (How the usual
arithmetic conversions apply to the vector types extension is not
precisely specified.) */
if (code1 == VECTOR_TYPE)
return t1;
if (code2 == VECTOR_TYPE)
return t2;
/* If one type is complex, form the common type of the non-complex
components, then make that complex. Use T1 or T2 if it is the
required type. */
if (code1 == COMPLEX_TYPE || code2 == COMPLEX_TYPE)
{
tree subtype1 = code1 == COMPLEX_TYPE ? TREE_TYPE (t1) : t1;
tree subtype2 = code2 == COMPLEX_TYPE ? TREE_TYPE (t2) : t2;
tree subtype = c_common_type (subtype1, subtype2);
if (code1 == COMPLEX_TYPE && TREE_TYPE (t1) == subtype)
return t1;
else if (code2 == COMPLEX_TYPE && TREE_TYPE (t2) == subtype)
return t2;
else
return build_complex_type (subtype);
}
/* If only one is real, use it as the result. */
if (code1 == REAL_TYPE && code2 != REAL_TYPE)
return t1;
if (code2 == REAL_TYPE && code1 != REAL_TYPE)
return t2;
/* If both are real and either are decimal floating point types, use
the decimal floating point type with the greater precision. */
if (code1 == REAL_TYPE && code2 == REAL_TYPE)
{
if (TYPE_MAIN_VARIANT (t1) == dfloat128_type_node
|| TYPE_MAIN_VARIANT (t2) == dfloat128_type_node)
return dfloat128_type_node;
else if (TYPE_MAIN_VARIANT (t1) == dfloat64_type_node
|| TYPE_MAIN_VARIANT (t2) == dfloat64_type_node)
return dfloat64_type_node;
else if (TYPE_MAIN_VARIANT (t1) == dfloat32_type_node
|| TYPE_MAIN_VARIANT (t2) == dfloat32_type_node)
return dfloat32_type_node;
}
/* Deal with fixed-point types. */
if (code1 == FIXED_POINT_TYPE || code2 == FIXED_POINT_TYPE)
{
unsigned int unsignedp = 0, satp = 0;
machine_mode m1, m2;
unsigned int fbit1, ibit1, fbit2, ibit2, max_fbit, max_ibit;
m1 = TYPE_MODE (t1);
m2 = TYPE_MODE (t2);
/* If one input type is saturating, the result type is saturating. */
if (TYPE_SATURATING (t1) || TYPE_SATURATING (t2))
satp = 1;
/* If both fixed-point types are unsigned, the result type is unsigned.
When mixing fixed-point and integer types, follow the sign of the
fixed-point type.
Otherwise, the result type is signed. */
if ((TYPE_UNSIGNED (t1) && TYPE_UNSIGNED (t2)
&& code1 == FIXED_POINT_TYPE && code2 == FIXED_POINT_TYPE)
|| (code1 == FIXED_POINT_TYPE && code2 != FIXED_POINT_TYPE
&& TYPE_UNSIGNED (t1))
|| (code1 != FIXED_POINT_TYPE && code2 == FIXED_POINT_TYPE
&& TYPE_UNSIGNED (t2)))
unsignedp = 1;
/* The result type is signed. */
if (unsignedp == 0)
{
/* If the input type is unsigned, we need to convert to the
signed type. */
if (code1 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t1))
{
enum mode_class mclass = (enum mode_class) 0;
if (GET_MODE_CLASS (m1) == MODE_UFRACT)
mclass = MODE_FRACT;
else if (GET_MODE_CLASS (m1) == MODE_UACCUM)
mclass = MODE_ACCUM;
else
gcc_unreachable ();
m1 = mode_for_size (GET_MODE_PRECISION (m1), mclass, 0);
}
if (code2 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t2))
{
enum mode_class mclass = (enum mode_class) 0;
if (GET_MODE_CLASS (m2) == MODE_UFRACT)
mclass = MODE_FRACT;
else if (GET_MODE_CLASS (m2) == MODE_UACCUM)
mclass = MODE_ACCUM;
else
gcc_unreachable ();
m2 = mode_for_size (GET_MODE_PRECISION (m2), mclass, 0);
}
}
if (code1 == FIXED_POINT_TYPE)
{
fbit1 = GET_MODE_FBIT (m1);
ibit1 = GET_MODE_IBIT (m1);
}
else
{
fbit1 = 0;
/* Signed integers need to subtract one sign bit. */
ibit1 = TYPE_PRECISION (t1) - (!TYPE_UNSIGNED (t1));
}
if (code2 == FIXED_POINT_TYPE)
{
fbit2 = GET_MODE_FBIT (m2);
ibit2 = GET_MODE_IBIT (m2);
}
else
{
fbit2 = 0;
/* Signed integers need to subtract one sign bit. */
ibit2 = TYPE_PRECISION (t2) - (!TYPE_UNSIGNED (t2));
}
max_ibit = ibit1 >= ibit2 ? ibit1 : ibit2;
max_fbit = fbit1 >= fbit2 ? fbit1 : fbit2;
return c_common_fixed_point_type_for_size (max_ibit, max_fbit, unsignedp,
satp);
}
/* Both real or both integers; use the one with greater precision. */
if (TYPE_PRECISION (t1) > TYPE_PRECISION (t2))
return t1;
else if (TYPE_PRECISION (t2) > TYPE_PRECISION (t1))
return t2;
/* Same precision. Prefer long long to long to int, following the C99
rules on integer type rank (which are equivalent to the C90 rules
for C90 types). */
if (TYPE_MAIN_VARIANT (t1) == long_long_unsigned_type_node
|| TYPE_MAIN_VARIANT (t2) == long_long_unsigned_type_node)
return long_long_unsigned_type_node;
if (TYPE_MAIN_VARIANT (t1) == long_long_integer_type_node
|| TYPE_MAIN_VARIANT (t2) == long_long_integer_type_node)
{
if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2))
return long_long_unsigned_type_node;
else
return long_long_integer_type_node;
}
if (TYPE_MAIN_VARIANT (t1) == long_unsigned_type_node
|| TYPE_MAIN_VARIANT (t2) == long_unsigned_type_node)
return long_unsigned_type_node;
if (TYPE_MAIN_VARIANT (t1) == long_integer_type_node
|| TYPE_MAIN_VARIANT (t2) == long_integer_type_node)
{
/* But preserve unsignedness from the other type,
since long cannot hold all the values of an unsigned int. */
if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2))
return long_unsigned_type_node;
else
return long_integer_type_node;
}
/* Likewise, prefer long double to double even if same size. */
if (TYPE_MAIN_VARIANT (t1) == long_double_type_node
|| TYPE_MAIN_VARIANT (t2) == long_double_type_node)
return long_double_type_node;
/* Likewise, prefer double to float even if same size.
We got a couple of embedded targets with 32 bit doubles, and the
pdp11 might have 64 bit floats. */
if (TYPE_MAIN_VARIANT (t1) == double_type_node
|| TYPE_MAIN_VARIANT (t2) == double_type_node)
return double_type_node;
/* Otherwise prefer the unsigned one. */
if (TYPE_UNSIGNED (t1))
return t1;
else
return t2;
}
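/* Illustrative examples of the resulting common type (assuming a typical
   target where float is narrower than double; not part of GCC):

     int   and unsigned int    -> unsigned int
     int   and float           -> float
     float and double          -> double
     long  and unsigned int    -> long           (LP64: long is wider)
                                -> unsigned long  (ILP32: same width, so
                                                   unsignedness is preserved)
     _Complex float and double -> _Complex double  */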
/* Wrapper around c_common_type that is used by c-common.c and other
front end optimizations that remove promotions. ENUMERAL_TYPEs
are allowed here and are converted to their compatible integer types.
BOOLEAN_TYPEs are allowed here and return either boolean_type_node or
preferably a non-Boolean type as the common type. */
tree
common_type (tree t1, tree t2)
{
if (TREE_CODE (t1) == ENUMERAL_TYPE)
t1 = c_common_type_for_size (TYPE_PRECISION (t1), 1);
if (TREE_CODE (t2) == ENUMERAL_TYPE)
t2 = c_common_type_for_size (TYPE_PRECISION (t2), 1);
/* If both types are BOOLEAN_TYPE, then return boolean_type_node. */
if (TREE_CODE (t1) == BOOLEAN_TYPE
&& TREE_CODE (t2) == BOOLEAN_TYPE)
return boolean_type_node;
/* If either type is BOOLEAN_TYPE, then return the other. */
if (TREE_CODE (t1) == BOOLEAN_TYPE)
return t2;
if (TREE_CODE (t2) == BOOLEAN_TYPE)
return t1;
return c_common_type (t1, t2);
}
/* Return 1 if TYPE1 and TYPE2 are compatible types for assignment
or various other operations. Return 2 if they are compatible
but a warning may be needed if you use them together. */
int
comptypes (tree type1, tree type2)
{
const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base;
int val;
val = comptypes_internal (type1, type2, NULL, NULL);
free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1);
return val;
}
/* Like comptypes, but if it returns non-zero because enum and int are
compatible, it sets *ENUM_AND_INT_P to true. */
static int
comptypes_check_enum_int (tree type1, tree type2, bool *enum_and_int_p)
{
const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base;
int val;
val = comptypes_internal (type1, type2, enum_and_int_p, NULL);
free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1);
return val;
}
/* Like comptypes, but if it returns nonzero for different types, it
sets *DIFFERENT_TYPES_P to true. */
int
comptypes_check_different_types (tree type1, tree type2,
bool *different_types_p)
{
const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base;
int val;
val = comptypes_internal (type1, type2, NULL, different_types_p);
free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1);
return val;
}
/* Return 1 if TYPE1 and TYPE2 are compatible types for assignment
or various other operations. Return 2 if they are compatible
but a warning may be needed if you use them together. If
ENUM_AND_INT_P is not NULL, and one type is an enum and the other a
compatible integer type, then this sets *ENUM_AND_INT_P to true;
*ENUM_AND_INT_P is never set to false. If DIFFERENT_TYPES_P is not
NULL, and the types are compatible but different enough not to be
permitted in C11 typedef redeclarations, then this sets
*DIFFERENT_TYPES_P to true; *DIFFERENT_TYPES_P is never set to
false, but may or may not be set if the types are incompatible.
This differs from comptypes, in that we don't free the seen
types. */
static int
comptypes_internal (const_tree type1, const_tree type2, bool *enum_and_int_p,
bool *different_types_p)
{
const_tree t1 = type1;
const_tree t2 = type2;
int attrval, val;
/* Suppress errors caused by previously reported errors. */
if (t1 == t2 || !t1 || !t2
|| TREE_CODE (t1) == ERROR_MARK || TREE_CODE (t2) == ERROR_MARK)
return 1;
/* Enumerated types are compatible with integer types, but this is
not transitive: two enumerated types in the same translation unit
are compatible with each other only if they are the same type. */
if (TREE_CODE (t1) == ENUMERAL_TYPE && TREE_CODE (t2) != ENUMERAL_TYPE)
{
t1 = c_common_type_for_size (TYPE_PRECISION (t1), TYPE_UNSIGNED (t1));
if (TREE_CODE (t2) != VOID_TYPE)
{
if (enum_and_int_p != NULL)
*enum_and_int_p = true;
if (different_types_p != NULL)
*different_types_p = true;
}
}
else if (TREE_CODE (t2) == ENUMERAL_TYPE && TREE_CODE (t1) != ENUMERAL_TYPE)
{
t2 = c_common_type_for_size (TYPE_PRECISION (t2), TYPE_UNSIGNED (t2));
if (TREE_CODE (t1) != VOID_TYPE)
{
if (enum_and_int_p != NULL)
*enum_and_int_p = true;
if (different_types_p != NULL)
*different_types_p = true;
}
}
if (t1 == t2)
return 1;
/* Different classes of types can't be compatible. */
if (TREE_CODE (t1) != TREE_CODE (t2))
return 0;
/* Qualifiers must match. C99 6.7.3p9 */
if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
return 0;
/* Allow for two different type nodes which have essentially the same
definition. Note that we already checked for equality of the type
qualifiers (just above). */
if (TREE_CODE (t1) != ARRAY_TYPE
&& TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
return 1;
/* 1 if no need for warning yet, 2 if warning cause has been seen. */
if (!(attrval = comp_type_attributes (t1, t2)))
return 0;
/* 1 if no need for warning yet, 2 if warning cause has been seen. */
val = 0;
switch (TREE_CODE (t1))
{
case POINTER_TYPE:
/* Do not remove mode or aliasing information. */
if (TYPE_MODE (t1) != TYPE_MODE (t2)
|| TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
break;
val = (TREE_TYPE (t1) == TREE_TYPE (t2)
? 1 : comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2),
enum_and_int_p, different_types_p));
break;
case FUNCTION_TYPE:
val = function_types_compatible_p (t1, t2, enum_and_int_p,
different_types_p);
break;
case ARRAY_TYPE:
{
tree d1 = TYPE_DOMAIN (t1);
tree d2 = TYPE_DOMAIN (t2);
bool d1_variable, d2_variable;
bool d1_zero, d2_zero;
val = 1;
/* Target types must match incl. qualifiers. */
if (TREE_TYPE (t1) != TREE_TYPE (t2)
&& 0 == (val = comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2),
enum_and_int_p,
different_types_p)))
return 0;
if (different_types_p != NULL
&& (d1 == 0) != (d2 == 0))
*different_types_p = true;
/* Sizes must match unless one is missing or variable. */
if (d1 == 0 || d2 == 0 || d1 == d2)
break;
d1_zero = !TYPE_MAX_VALUE (d1);
d2_zero = !TYPE_MAX_VALUE (d2);
d1_variable = (!d1_zero
&& (TREE_CODE (TYPE_MIN_VALUE (d1)) != INTEGER_CST
|| TREE_CODE (TYPE_MAX_VALUE (d1)) != INTEGER_CST));
d2_variable = (!d2_zero
&& (TREE_CODE (TYPE_MIN_VALUE (d2)) != INTEGER_CST
|| TREE_CODE (TYPE_MAX_VALUE (d2)) != INTEGER_CST));
d1_variable = d1_variable || (d1_zero && c_vla_type_p (t1));
d2_variable = d2_variable || (d2_zero && c_vla_type_p (t2));
if (different_types_p != NULL
&& d1_variable != d2_variable)
*different_types_p = true;
if (d1_variable || d2_variable)
break;
if (d1_zero && d2_zero)
break;
if (d1_zero || d2_zero
|| !tree_int_cst_equal (TYPE_MIN_VALUE (d1), TYPE_MIN_VALUE (d2))
|| !tree_int_cst_equal (TYPE_MAX_VALUE (d1), TYPE_MAX_VALUE (d2)))
val = 0;
break;
}
case ENUMERAL_TYPE:
case RECORD_TYPE:
case UNION_TYPE:
if (val != 1 && !same_translation_unit_p (t1, t2))
{
tree a1 = TYPE_ATTRIBUTES (t1);
tree a2 = TYPE_ATTRIBUTES (t2);
if (! attribute_list_contained (a1, a2)
&& ! attribute_list_contained (a2, a1))
break;
if (attrval != 2)
return tagged_types_tu_compatible_p (t1, t2, enum_and_int_p,
different_types_p);
val = tagged_types_tu_compatible_p (t1, t2, enum_and_int_p,
different_types_p);
}
break;
case VECTOR_TYPE:
val = (TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
&& comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2),
enum_and_int_p, different_types_p));
break;
default:
break;
}
return attrval == 2 && val == 1 ? 2 : val;
}
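/* Illustrative examples (not part of GCC): comptypes_internal considers

     int[]  and int[5]     compatible (an unspecified bound matches any),
     int[4] and int[5]     incompatible,
     int    and const int  incompatible (qualifiers must match),
     enum e and its compatible integer type
                           compatible, with *ENUM_AND_INT_P set.  */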
/* Return 1 if TTL and TTR are pointers to types that are equivalent, ignoring
their qualifiers, except for named address spaces. If the pointers point to
different named address spaces, then we must determine if one address space is a
subset of the other. */
static int
comp_target_types (location_t location, tree ttl, tree ttr)
{
int val;
int val_ped;
tree mvl = TREE_TYPE (ttl);
tree mvr = TREE_TYPE (ttr);
addr_space_t asl = TYPE_ADDR_SPACE (mvl);
addr_space_t asr = TYPE_ADDR_SPACE (mvr);
addr_space_t as_common;
bool enum_and_int_p;
/* Fail if pointers point to incompatible address spaces. */
if (!addr_space_superset (asl, asr, &as_common))
return 0;
/* For pedantic warnings, record the result of comptypes on arrays before
losing qualifiers on the element type below. */
val_ped = 1;
if (TREE_CODE (mvl) == ARRAY_TYPE
&& TREE_CODE (mvr) == ARRAY_TYPE)
val_ped = comptypes (mvl, mvr);
/* Qualifiers on element types of array types that are
pointer targets are lost by taking their TYPE_MAIN_VARIANT. */
mvl = (TYPE_ATOMIC (strip_array_types (mvl))
? c_build_qualified_type (TYPE_MAIN_VARIANT (mvl), TYPE_QUAL_ATOMIC)
: TYPE_MAIN_VARIANT (mvl));
mvr = (TYPE_ATOMIC (strip_array_types (mvr))
? c_build_qualified_type (TYPE_MAIN_VARIANT (mvr), TYPE_QUAL_ATOMIC)
: TYPE_MAIN_VARIANT (mvr));
enum_and_int_p = false;
val = comptypes_check_enum_int (mvl, mvr, &enum_and_int_p);
if (val == 1 && val_ped != 1)
pedwarn (location, OPT_Wpedantic, "pointers to arrays with different qualifiers "
"are incompatible in ISO C");
if (val == 2)
pedwarn (location, OPT_Wpedantic, "types are not quite compatible");
if (val == 1 && enum_and_int_p && warn_cxx_compat)
warning_at (location, OPT_Wc___compat,
"pointer target types incompatible in C++");
return val;
}
/* Subroutines of `comptypes'. */
/* Determine whether two trees derive from the same translation unit.
If the CONTEXT chain ends in a null, that tree's context is still
being parsed, so if two trees have context chains ending in null,
they're in the same translation unit. */
int
same_translation_unit_p (const_tree t1, const_tree t2)
{
while (t1 && TREE_CODE (t1) != TRANSLATION_UNIT_DECL)
switch (TREE_CODE_CLASS (TREE_CODE (t1)))
{
case tcc_declaration:
t1 = DECL_CONTEXT (t1); break;
case tcc_type:
t1 = TYPE_CONTEXT (t1); break;
case tcc_exceptional:
t1 = BLOCK_SUPERCONTEXT (t1); break; /* assume block */
default: gcc_unreachable ();
}
while (t2 && TREE_CODE (t2) != TRANSLATION_UNIT_DECL)
switch (TREE_CODE_CLASS (TREE_CODE (t2)))
{
case tcc_declaration:
t2 = DECL_CONTEXT (t2); break;
case tcc_type:
t2 = TYPE_CONTEXT (t2); break;
case tcc_exceptional:
t2 = BLOCK_SUPERCONTEXT (t2); break; /* assume block */
default: gcc_unreachable ();
}
return t1 == t2;
}
/* Allocate a cache entry for the two types T1 and T2 just seen, assuming
   that they are compatible. */
static struct tagged_tu_seen_cache *
alloc_tagged_tu_seen_cache (const_tree t1, const_tree t2)
{
struct tagged_tu_seen_cache *tu = XNEW (struct tagged_tu_seen_cache);
tu->next = tagged_tu_seen_base;
tu->t1 = t1;
tu->t2 = t2;
tagged_tu_seen_base = tu;
/* The C standard says that two structures in different translation
units are compatible with each other only if the types of their
fields are compatible (among other things). We assume that they
are compatible until proven otherwise when building the cache.
An example where this can occur is:
struct a
{
struct a *next;
};
If we are comparing this against a similar struct in another TU,
and did not assume they were compatible, we end up with an infinite
loop. */
tu->val = 1;
return tu;
}
/* Free the seen types until we get to TU_TIL. */
static void
free_all_tagged_tu_seen_up_to (const struct tagged_tu_seen_cache *tu_til)
{
const struct tagged_tu_seen_cache *tu = tagged_tu_seen_base;
while (tu != tu_til)
{
const struct tagged_tu_seen_cache *const tu1
= (const struct tagged_tu_seen_cache *) tu;
tu = tu1->next;
free (CONST_CAST (struct tagged_tu_seen_cache *, tu1));
}
tagged_tu_seen_base = tu_til;
}
/* Return 1 if two 'struct', 'union', or 'enum' types T1 and T2 are
compatible. If the two types are not the same (which has been
checked earlier), this can only happen when multiple translation
units are being compiled. See C99 6.2.7 paragraph 1 for the exact
rules. ENUM_AND_INT_P and DIFFERENT_TYPES_P are as in
comptypes_internal. */
static int
tagged_types_tu_compatible_p (const_tree t1, const_tree t2,
bool *enum_and_int_p, bool *different_types_p)
{
tree s1, s2;
bool needs_warning = false;
/* We have to verify that the tags of the types are the same. This
is harder than it looks because this may be a typedef, so we have
to go look at the original type. It may even be a typedef of a
typedef...
In the case of compiler-created builtin structs the TYPE_DECL
may be a dummy, with no DECL_ORIGINAL_TYPE. Don't fault. */
while (TYPE_NAME (t1)
&& TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (TYPE_NAME (t1)))
t1 = DECL_ORIGINAL_TYPE (TYPE_NAME (t1));
while (TYPE_NAME (t2)
&& TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (TYPE_NAME (t2)))
t2 = DECL_ORIGINAL_TYPE (TYPE_NAME (t2));
/* C90 didn't have the requirement that the two tags be the same. */
if (flag_isoc99 && TYPE_NAME (t1) != TYPE_NAME (t2))
return 0;
/* C90 didn't say what happened if one or both of the types were
incomplete; we choose to follow C99 rules here, which is that they
are compatible. */
if (TYPE_SIZE (t1) == NULL
|| TYPE_SIZE (t2) == NULL)
return 1;
{
const struct tagged_tu_seen_cache * tts_i;
for (tts_i = tagged_tu_seen_base; tts_i != NULL; tts_i = tts_i->next)
if (tts_i->t1 == t1 && tts_i->t2 == t2)
return tts_i->val;
}
switch (TREE_CODE (t1))
{
case ENUMERAL_TYPE:
{
struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
/* Speed up the case where the type values are in the same order. */
tree tv1 = TYPE_VALUES (t1);
tree tv2 = TYPE_VALUES (t2);
if (tv1 == tv2)
{
return 1;
}
for (;tv1 && tv2; tv1 = TREE_CHAIN (tv1), tv2 = TREE_CHAIN (tv2))
{
if (TREE_PURPOSE (tv1) != TREE_PURPOSE (tv2))
break;
if (simple_cst_equal (TREE_VALUE (tv1), TREE_VALUE (tv2)) != 1)
{
tu->val = 0;
return 0;
}
}
if (tv1 == NULL_TREE && tv2 == NULL_TREE)
{
return 1;
}
if (tv1 == NULL_TREE || tv2 == NULL_TREE)
{
tu->val = 0;
return 0;
}
if (list_length (TYPE_VALUES (t1)) != list_length (TYPE_VALUES (t2)))
{
tu->val = 0;
return 0;
}
for (s1 = TYPE_VALUES (t1); s1; s1 = TREE_CHAIN (s1))
{
s2 = purpose_member (TREE_PURPOSE (s1), TYPE_VALUES (t2));
if (s2 == NULL
|| simple_cst_equal (TREE_VALUE (s1), TREE_VALUE (s2)) != 1)
{
tu->val = 0;
return 0;
}
}
return 1;
}
case UNION_TYPE:
{
struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
if (list_length (TYPE_FIELDS (t1)) != list_length (TYPE_FIELDS (t2)))
{
tu->val = 0;
return 0;
}
/* Speed up the common case where the fields are in the same order. */
for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2); s1 && s2;
s1 = DECL_CHAIN (s1), s2 = DECL_CHAIN (s2))
{
int result;
if (DECL_NAME (s1) != DECL_NAME (s2))
break;
result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2),
enum_and_int_p, different_types_p);
if (result != 1 && !DECL_NAME (s1))
break;
if (result == 0)
{
tu->val = 0;
return 0;
}
if (result == 2)
needs_warning = true;
if (TREE_CODE (s1) == FIELD_DECL
&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
DECL_FIELD_BIT_OFFSET (s2)) != 1)
{
tu->val = 0;
return 0;
}
}
if (!s1 && !s2)
{
tu->val = needs_warning ? 2 : 1;
return tu->val;
}
for (s1 = TYPE_FIELDS (t1); s1; s1 = DECL_CHAIN (s1))
{
bool ok = false;
for (s2 = TYPE_FIELDS (t2); s2; s2 = DECL_CHAIN (s2))
if (DECL_NAME (s1) == DECL_NAME (s2))
{
int result;
result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2),
enum_and_int_p,
different_types_p);
if (result != 1 && !DECL_NAME (s1))
continue;
if (result == 0)
{
tu->val = 0;
return 0;
}
if (result == 2)
needs_warning = true;
if (TREE_CODE (s1) == FIELD_DECL
&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
DECL_FIELD_BIT_OFFSET (s2)) != 1)
break;
ok = true;
break;
}
if (!ok)
{
tu->val = 0;
return 0;
}
}
tu->val = needs_warning ? 2 : 10;
return tu->val;
}
case RECORD_TYPE:
{
struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2);
s1 && s2;
s1 = DECL_CHAIN (s1), s2 = DECL_CHAIN (s2))
{
int result;
if (TREE_CODE (s1) != TREE_CODE (s2)
|| DECL_NAME (s1) != DECL_NAME (s2))
break;
result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2),
enum_and_int_p, different_types_p);
if (result == 0)
break;
if (result == 2)
needs_warning = true;
if (TREE_CODE (s1) == FIELD_DECL
&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
DECL_FIELD_BIT_OFFSET (s2)) != 1)
break;
}
if (s1 && s2)
tu->val = 0;
else
tu->val = needs_warning ? 2 : 1;
return tu->val;
}
default:
gcc_unreachable ();
}
}
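/* Worked example (for illustration): if translation unit 1 declares

     struct s { int i; double d; };

   and translation unit 2 declares the same "struct s { int i; double d; }",
   the two types are compatible under the rules above (same tag, members
   with the same names and compatible types in the same order).  Had the
   second unit declared "struct s { int i; float d; }", the types would
   be incompatible. */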
/* Return 1 if two function types F1 and F2 are compatible.
If either type specifies no argument types,
the other must specify a fixed number of self-promoting arg types.
Otherwise, if one type specifies only the number of arguments,
the other must specify that number of self-promoting arg types.
Otherwise, the argument types must match.
ENUM_AND_INT_P and DIFFERENT_TYPES_P are as in comptypes_internal. */
static int
function_types_compatible_p (const_tree f1, const_tree f2,
bool *enum_and_int_p, bool *different_types_p)
{
tree args1, args2;
/* 1 if no need for warning yet, 2 if warning cause has been seen. */
int val = 1;
int val1;
tree ret1, ret2;
ret1 = TREE_TYPE (f1);
ret2 = TREE_TYPE (f2);
/* 'volatile' qualifiers on a function's return type used to mean
the function is noreturn. */
if (TYPE_VOLATILE (ret1) != TYPE_VOLATILE (ret2))
pedwarn (input_location, 0, "function return types not compatible due to %<volatile%>");
if (TYPE_VOLATILE (ret1))
ret1 = build_qualified_type (TYPE_MAIN_VARIANT (ret1),
TYPE_QUALS (ret1) & ~TYPE_QUAL_VOLATILE);
if (TYPE_VOLATILE (ret2))
ret2 = build_qualified_type (TYPE_MAIN_VARIANT (ret2),
TYPE_QUALS (ret2) & ~TYPE_QUAL_VOLATILE);
val = comptypes_internal (ret1, ret2, enum_and_int_p, different_types_p);
if (val == 0)
return 0;
args1 = TYPE_ARG_TYPES (f1);
args2 = TYPE_ARG_TYPES (f2);
if (different_types_p != NULL
&& (args1 == 0) != (args2 == 0))
*different_types_p = true;
/* An unspecified parmlist matches any specified parmlist
whose argument types don't need default promotions. */
if (args1 == 0)
{
if (!self_promoting_args_p (args2))
return 0;
/* If one of these types comes from a non-prototype fn definition,
compare that with the other type's arglist.
If they don't match, ask for a warning (but no error). */
if (TYPE_ACTUAL_ARG_TYPES (f1)
&& 1 != type_lists_compatible_p (args2, TYPE_ACTUAL_ARG_TYPES (f1),
enum_and_int_p, different_types_p))
val = 2;
return val;
}
if (args2 == 0)
{
if (!self_promoting_args_p (args1))
return 0;
if (TYPE_ACTUAL_ARG_TYPES (f2)
&& 1 != type_lists_compatible_p (args1, TYPE_ACTUAL_ARG_TYPES (f2),
enum_and_int_p, different_types_p))
val = 2;
return val;
}
/* Both types have argument lists: compare them and propagate results. */
val1 = type_lists_compatible_p (args1, args2, enum_and_int_p,
different_types_p);
return val1 != 1 ? val1 : val;
}
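/* Illustrative examples (not part of GCC):

     int f ();  and  int f (int);   are compatible, since int is
                                    self-promoting;
     int g ();  and  int g (char);  are not, since char is not
                                    self-promoting (it would promote to
                                    int under the default argument
                                    promotions). */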
/* Check two lists of types for compatibility, returning 0 for
incompatible, 1 for compatible, or 2 for compatible with
warning. ENUM_AND_INT_P and DIFFERENT_TYPES_P are as in
comptypes_internal. */
static int
type_lists_compatible_p (const_tree args1, const_tree args2,
bool *enum_and_int_p, bool *different_types_p)
{
/* 1 if no need for warning yet, 2 if warning cause has been seen. */
int val = 1;
int newval = 0;
while (1)
{
tree a1, mv1, a2, mv2;
if (args1 == 0 && args2 == 0)
return val;
/* If one list is shorter than the other,
they fail to match. */
if (args1 == 0 || args2 == 0)
return 0;
mv1 = a1 = TREE_VALUE (args1);
mv2 = a2 = TREE_VALUE (args2);
if (mv1 && mv1 != error_mark_node && TREE_CODE (mv1) != ARRAY_TYPE)
mv1 = (TYPE_ATOMIC (mv1)
? c_build_qualified_type (TYPE_MAIN_VARIANT (mv1),
TYPE_QUAL_ATOMIC)
: TYPE_MAIN_VARIANT (mv1));
if (mv2 && mv2 != error_mark_node && TREE_CODE (mv2) != ARRAY_TYPE)
mv2 = (TYPE_ATOMIC (mv2)
? c_build_qualified_type (TYPE_MAIN_VARIANT (mv2),
TYPE_QUAL_ATOMIC)
: TYPE_MAIN_VARIANT (mv2));
/* A null pointer instead of a type
means there is supposed to be an argument
but nothing is specified about what type it has.
So match anything that self-promotes. */
if (different_types_p != NULL
&& (a1 == 0) != (a2 == 0))
*different_types_p = true;
if (a1 == 0)
{
if (c_type_promotes_to (a2) != a2)
return 0;
}
else if (a2 == 0)
{
if (c_type_promotes_to (a1) != a1)
return 0;
}
/* If one of the lists has an error marker, ignore this arg. */
else if (TREE_CODE (a1) == ERROR_MARK
|| TREE_CODE (a2) == ERROR_MARK)
;
else if (!(newval = comptypes_internal (mv1, mv2, enum_and_int_p,
different_types_p)))
{
if (different_types_p != NULL)
*different_types_p = true;
/* Allow wait (union {union wait *u; int *i} *)
and wait (union wait *) to be compatible. */
if (TREE_CODE (a1) == UNION_TYPE
&& (TYPE_NAME (a1) == 0
|| TYPE_TRANSPARENT_AGGR (a1))
&& TREE_CODE (TYPE_SIZE (a1)) == INTEGER_CST
&& tree_int_cst_equal (TYPE_SIZE (a1),
TYPE_SIZE (a2)))
{
tree memb;
for (memb = TYPE_FIELDS (a1);
memb; memb = DECL_CHAIN (memb))
{
tree mv3 = TREE_TYPE (memb);
if (mv3 && mv3 != error_mark_node
&& TREE_CODE (mv3) != ARRAY_TYPE)
mv3 = (TYPE_ATOMIC (mv3)
? c_build_qualified_type (TYPE_MAIN_VARIANT (mv3),
TYPE_QUAL_ATOMIC)
: TYPE_MAIN_VARIANT (mv3));
if (comptypes_internal (mv3, mv2, enum_and_int_p,
different_types_p))
break;
}
if (memb == 0)
return 0;
}
else if (TREE_CODE (a2) == UNION_TYPE
&& (TYPE_NAME (a2) == 0
|| TYPE_TRANSPARENT_AGGR (a2))
&& TREE_CODE (TYPE_SIZE (a2)) == INTEGER_CST
&& tree_int_cst_equal (TYPE_SIZE (a2),
TYPE_SIZE (a1)))
{
tree memb;
for (memb = TYPE_FIELDS (a2);
memb; memb = DECL_CHAIN (memb))
{
tree mv3 = TREE_TYPE (memb);
if (mv3 && mv3 != error_mark_node
&& TREE_CODE (mv3) != ARRAY_TYPE)
mv3 = (TYPE_ATOMIC (mv3)
? c_build_qualified_type (TYPE_MAIN_VARIANT (mv3),
TYPE_QUAL_ATOMIC)
: TYPE_MAIN_VARIANT (mv3));
if (comptypes_internal (mv3, mv1, enum_and_int_p,
different_types_p))
break;
}
if (memb == 0)
return 0;
}
else
return 0;
}
/* comptypes said ok, but record if it said to warn. */
if (newval > val)
val = newval;
args1 = TREE_CHAIN (args1);
args2 = TREE_CHAIN (args2);
}
}
/* Compute the size to increment a pointer by. When a function type or void
type or incomplete type is passed, size_one_node is returned.
This function does not emit any diagnostics; the caller is responsible
for that. */
static tree
c_size_in_bytes (const_tree type)
{
enum tree_code code = TREE_CODE (type);
if (code == FUNCTION_TYPE || code == VOID_TYPE || code == ERROR_MARK
|| !COMPLETE_TYPE_P (type))
return size_one_node;
/* Convert in case a char is more than one unit. */
return size_binop_loc (input_location, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
size_int (TYPE_PRECISION (char_type_node)
/ BITS_PER_UNIT));
}
/* Return either DECL or its known constant value (if it has one). */
tree
decl_constant_value (tree decl)
{
if (/* Don't change a variable array bound or initial value to a constant
in a place where a variable is invalid. Note that DECL_INITIAL
isn't valid for a PARM_DECL. */
current_function_decl != 0
&& TREE_CODE (decl) != PARM_DECL
&& !TREE_THIS_VOLATILE (decl)
&& TREE_READONLY (decl)
&& DECL_INITIAL (decl) != 0
&& TREE_CODE (DECL_INITIAL (decl)) != ERROR_MARK
/* This is invalid if initial value is not constant.
If it has either a function call, a memory reference,
or a variable, then re-evaluating it could give different results. */
&& TREE_CONSTANT (DECL_INITIAL (decl))
/* Check for cases where this is sub-optimal, even though valid. */
&& TREE_CODE (DECL_INITIAL (decl)) != CONSTRUCTOR)
return DECL_INITIAL (decl);
return decl;
}
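/* Illustrative example (not part of GCC): inside a function, given

     const int n = 10;
     return n + 1;

   the use of n may be replaced by 10 by this routine.  This is purely an
   optimization; a const-qualified variable is still not an integer
   constant expression in C. */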
/* Convert the array expression EXP to a pointer. */
static tree
array_to_pointer_conversion (location_t loc, tree exp)
{
tree orig_exp = exp;
tree type = TREE_TYPE (exp);
tree adr;
tree restype = TREE_TYPE (type);
tree ptrtype;
gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
STRIP_TYPE_NOPS (exp);
if (TREE_NO_WARNING (orig_exp))
TREE_NO_WARNING (exp) = 1;
ptrtype = build_pointer_type (restype);
if (TREE_CODE (exp) == INDIRECT_REF)
return convert (ptrtype, TREE_OPERAND (exp, 0));
/* In C++ array compound literals are temporary objects unless they are
const or appear in namespace scope, so they are destroyed too soon
to use them for much of anything (c++/53220). */
if (warn_cxx_compat && TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
{
tree decl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
if (!TREE_READONLY (decl) && !TREE_STATIC (decl))
warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wc___compat,
"converting an array compound literal to a pointer "
"is ill-formed in C++");
}
adr = build_unary_op (loc, ADDR_EXPR, exp, 1);
return convert (ptrtype, adr);
}
/* Convert the function expression EXP to a pointer. */
static tree
function_to_pointer_conversion (location_t loc, tree exp)
{
tree orig_exp = exp;
gcc_assert (TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE);
STRIP_TYPE_NOPS (exp);
if (TREE_NO_WARNING (orig_exp))
TREE_NO_WARNING (exp) = 1;
return build_unary_op (loc, ADDR_EXPR, exp, 0);
}
/* Mark EXP as read, not just set, for the purposes of the
set-but-not-used -Wunused warnings. */
void
mark_exp_read (tree exp)
{
switch (TREE_CODE (exp))
{
case VAR_DECL:
case PARM_DECL:
DECL_READ_P (exp) = 1;
break;
case ARRAY_REF:
case COMPONENT_REF:
case MODIFY_EXPR:
case REALPART_EXPR:
case IMAGPART_EXPR:
CASE_CONVERT:
case ADDR_EXPR:
mark_exp_read (TREE_OPERAND (exp, 0));
break;
case COMPOUND_EXPR:
case C_MAYBE_CONST_EXPR:
mark_exp_read (TREE_OPERAND (exp, 1));
break;
default:
break;
}
}
/* Perform the default conversion of arrays and functions to pointers.
Return the result of converting EXP. For any other expression, just
return EXP.
LOC is the location of the expression. */
struct c_expr
default_function_array_conversion (location_t loc, struct c_expr exp)
{
tree orig_exp = exp.value;
tree type = TREE_TYPE (exp.value);
enum tree_code code = TREE_CODE (type);
switch (code)
{
case ARRAY_TYPE:
{
bool not_lvalue = false;
bool lvalue_array_p;
while ((TREE_CODE (exp.value) == NON_LVALUE_EXPR
|| CONVERT_EXPR_P (exp.value))
&& TREE_TYPE (TREE_OPERAND (exp.value, 0)) == type)
{
if (TREE_CODE (exp.value) == NON_LVALUE_EXPR)
not_lvalue = true;
exp.value = TREE_OPERAND (exp.value, 0);
}
if (TREE_NO_WARNING (orig_exp))
TREE_NO_WARNING (exp.value) = 1;
lvalue_array_p = !not_lvalue && lvalue_p (exp.value);
if (!flag_isoc99 && !lvalue_array_p)
{
/* Before C99, non-lvalue arrays do not decay to pointers.
Normally, using such an array would be invalid; but it can
be used correctly inside sizeof or as a statement expression.
Thus, do not give an error here; an error will result later. */
return exp;
}
exp.value = array_to_pointer_conversion (loc, exp.value);
}
break;
case FUNCTION_TYPE:
exp.value = function_to_pointer_conversion (loc, exp.value);
break;
default:
break;
}
return exp;
}
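/* Illustrative example (not part of GCC): given

     struct s { int a[4]; };
     struct s f (void);

   the expression f ().a is a non-lvalue array.  In C90 it does not decay
   to a pointer here (most uses of it are diagnosed later); in C99 and
   later it decays to "int *" like any other array. */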
struct c_expr
default_function_array_read_conversion (location_t loc, struct c_expr exp)
{
mark_exp_read (exp.value);
return default_function_array_conversion (loc, exp);
}
/* Return whether EXPR should be treated as an atomic lvalue for the
purposes of load and store handling. */
static bool
really_atomic_lvalue (tree expr)
{
if (error_operand_p (expr))
return false;
if (!TYPE_ATOMIC (TREE_TYPE (expr)))
return false;
if (!lvalue_p (expr))
return false;
/* Ignore _Atomic on register variables, since their addresses can't
be taken so (a) atomicity is irrelevant and (b) the normal atomic
sequences wouldn't work. Ignore _Atomic on structures containing
bit-fields, since accessing elements of atomic structures or
unions is undefined behavior (C11 6.5.2.3#5), but it's unclear if
it's undefined at translation time or execution time, and the
normal atomic sequences again wouldn't work. */
while (handled_component_p (expr))
{
if (TREE_CODE (expr) == COMPONENT_REF
&& DECL_C_BIT_FIELD (TREE_OPERAND (expr, 1)))
return false;
expr = TREE_OPERAND (expr, 0);
}
if (DECL_P (expr) && C_DECL_REGISTER (expr))
return false;
return true;
}
/* Convert expression EXP (location LOC) from lvalue to rvalue,
including converting functions and arrays to pointers if CONVERT_P.
If READ_P, also mark the expression as having been read. */
struct c_expr
convert_lvalue_to_rvalue (location_t loc, struct c_expr exp,
bool convert_p, bool read_p)
{
if (read_p)
mark_exp_read (exp.value);
if (convert_p)
exp = default_function_array_conversion (loc, exp);
if (really_atomic_lvalue (exp.value))
{
vec<tree, va_gc> *params;
tree nonatomic_type, tmp, tmp_addr, fndecl, func_call;
tree expr_type = TREE_TYPE (exp.value);
tree expr_addr = build_unary_op (loc, ADDR_EXPR, exp.value, 0);
tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
gcc_assert (TYPE_ATOMIC (expr_type));
/* Expansion of a generic atomic load may require an additional
element, so allocate enough to prevent a resize. */
vec_alloc (params, 4);
/* Remove the qualifiers for the rest of the expressions and
create the temporary TMP to hold the loaded value. */
nonatomic_type = build_qualified_type (expr_type, TYPE_UNQUALIFIED);
tmp = create_tmp_var (nonatomic_type);
tmp_addr = build_unary_op (loc, ADDR_EXPR, tmp, 0);
TREE_ADDRESSABLE (tmp) = 1;
TREE_NO_WARNING (tmp) = 1;
/* Issue __atomic_load (&expr, &tmp, SEQ_CST); */
fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_LOAD);
params->quick_push (expr_addr);
params->quick_push (tmp_addr);
params->quick_push (seq_cst);
func_call = c_build_function_call_vec (loc, vNULL, fndecl, params, NULL);
/* EXPR is always read. */
mark_exp_read (exp.value);
/* Return tmp which contains the value loaded. */
exp.value = build2 (COMPOUND_EXPR, nonatomic_type, func_call, tmp);
}
return exp;
}
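/* Illustrative sketch (not part of GCC): for

     _Atomic int x;
     int y = x;

   the read of x is rewritten above roughly as

     ({ int tmp;
        __atomic_load (&x, &tmp, __ATOMIC_SEQ_CST);
        tmp; })

   i.e. a call to the generic __atomic_load into a temporary, combined
   with the temporary's value via a COMPOUND_EXPR. */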
/* EXP is an expression of integer type. Apply the integer promotions
to it and return the promoted value. */
tree
perform_integral_promotions (tree exp)
{
tree type = TREE_TYPE (exp);
enum tree_code code = TREE_CODE (type);
gcc_assert (INTEGRAL_TYPE_P (type));
/* Normally convert enums to int,
but convert wide enums to something wider. */
if (code == ENUMERAL_TYPE)
{
type = c_common_type_for_size (MAX (TYPE_PRECISION (type),
TYPE_PRECISION (integer_type_node)),
((TYPE_PRECISION (type)
>= TYPE_PRECISION (integer_type_node))
&& TYPE_UNSIGNED (type)));
return convert (type, exp);
}
/* ??? This should no longer be needed now that bit-fields have their
proper types. */
if (TREE_CODE (exp) == COMPONENT_REF
&& DECL_C_BIT_FIELD (TREE_OPERAND (exp, 1))
/* If it's narrower than an int, promote it the way a
c_promoting_integer_type_p type is promoted; otherwise leave it alone. */
&& 0 > compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)),
TYPE_PRECISION (integer_type_node)))
return convert (integer_type_node, exp);
if (c_promoting_integer_type_p (type))
{
/* Preserve unsignedness if not really getting any wider. */
if (TYPE_UNSIGNED (type)
&& TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))
return convert (unsigned_type_node, exp);
return convert (integer_type_node, exp);
}
return exp;
}
/* Perform default promotions for C data used in expressions.
Enumeral types or short or char are converted to int.
In addition, manifest constant symbols are replaced by their values. */
tree
default_conversion (tree exp)
{
tree orig_exp;
tree type = TREE_TYPE (exp);
enum tree_code code = TREE_CODE (type);
tree promoted_type;
mark_exp_read (exp);
/* Functions and arrays have been converted during parsing. */
gcc_assert (code != FUNCTION_TYPE);
if (code == ARRAY_TYPE)
return exp;
/* Constants can be used directly, but a CONST_DECL is not itself
loadable, so replace it with its value. */
if (TREE_CODE (exp) == CONST_DECL)
exp = DECL_INITIAL (exp);
/* Strip no-op conversions. */
orig_exp = exp;
STRIP_TYPE_NOPS (exp);
if (TREE_NO_WARNING (orig_exp))
TREE_NO_WARNING (exp) = 1;
if (code == VOID_TYPE)
{
error_at (EXPR_LOC_OR_LOC (exp, input_location),
"void value not ignored as it ought to be");
return error_mark_node;
}
exp = require_complete_type (exp);
if (exp == error_mark_node)
return error_mark_node;
promoted_type = targetm.promoted_type (type);
if (promoted_type)
return convert (promoted_type, exp);
if (INTEGRAL_TYPE_P (type))
return perform_integral_promotions (exp);
return exp;
}
/* Look up COMPONENT in a structure or union TYPE.
If the component name is not found, returns NULL_TREE. Otherwise,
the return value is a TREE_LIST, with each TREE_VALUE a FIELD_DECL
stepping down the chain to the component, which is in the last
TREE_VALUE of the list. Normally the list is of length one, but if
the component is embedded within (nested) anonymous structures or
unions, the list steps down the chain to the component. */
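/* For example, given
     struct s { struct { int i; }; } x;
   looking up "i" yields a two-element list: first the FIELD_DECL of
   the anonymous inner struct, then the FIELD_DECL of "i"
   (illustrative).  */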
static tree
lookup_field (tree type, tree component)
{
tree field;
/* If TYPE_LANG_SPECIFIC is set, then it is a sorted array of pointers
to the field elements. Use a binary search on this array to quickly
find the element. Otherwise, do a linear search. TYPE_LANG_SPECIFIC
will always be set for structures which have many elements. */
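/* Note that DECL_NAME values are IDENTIFIER_NODE pointers, so the
   sorted array is ordered, and searched below, by pointer value
   rather than alphabetically.  */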
if (TYPE_LANG_SPECIFIC (type) && TYPE_LANG_SPECIFIC (type)->s)
{
int bot, top, half;
tree *field_array = &TYPE_LANG_SPECIFIC (type)->s->elts[0];
field = TYPE_FIELDS (type);
bot = 0;
top = TYPE_LANG_SPECIFIC (type)->s->len;
while (top - bot > 1)
{
half = (top - bot + 1) >> 1;
field = field_array[bot+half];
if (DECL_NAME (field) == NULL_TREE)
{
/* Step through all anon unions in linear fashion. */
while (DECL_NAME (field_array[bot]) == NULL_TREE)
{
field = field_array[bot++];
if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
{
tree anon = lookup_field (TREE_TYPE (field), component);
if (anon)
return tree_cons (NULL_TREE, field, anon);
/* The Plan 9 compiler permits referring
directly to an anonymous struct/union field
using a typedef name. */
if (flag_plan9_extensions
&& TYPE_NAME (TREE_TYPE (field)) != NULL_TREE
&& (TREE_CODE (TYPE_NAME (TREE_TYPE (field)))
== TYPE_DECL)
&& (DECL_NAME (TYPE_NAME (TREE_TYPE (field)))
== component))
break;
}
}
/* Entire record is only anon unions. */
if (bot > top)
return NULL_TREE;
/* Restart the binary search, with new lower bound. */
continue;
}
if (DECL_NAME (field) == component)
break;
if (DECL_NAME (field) < component)
bot += half;
else
top = bot + half;
}
if (DECL_NAME (field_array[bot]) == component)
field = field_array[bot];
else if (DECL_NAME (field) != component)
return NULL_TREE;
}
else
{
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
{
if (DECL_NAME (field) == NULL_TREE
&& (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
{
tree anon = lookup_field (TREE_TYPE (field), component);
if (anon)
return tree_cons (NULL_TREE, field, anon);
/* The Plan 9 compiler permits referring directly to an
anonymous struct/union field using a typedef
name. */
if (flag_plan9_extensions
&& TYPE_NAME (TREE_TYPE (field)) != NULL_TREE
&& TREE_CODE (TYPE_NAME (TREE_TYPE (field))) == TYPE_DECL
&& (DECL_NAME (TYPE_NAME (TREE_TYPE (field)))
== component))
break;
}
if (DECL_NAME (field) == component)
break;
}
if (field == NULL_TREE)
return NULL_TREE;
}
return tree_cons (NULL_TREE, field, NULL_TREE);
}
/* Make an expression to refer to the COMPONENT field of structure or
union value DATUM. COMPONENT is an IDENTIFIER_NODE. LOC is the
location of the COMPONENT_REF. */
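/* For example, for "const struct s x;" the reference "x.i" built
   below carries the union of the member's and the datum's qualifiers,
   so its type is "const int"; when the datum is not an lvalue, array
   members deliberately do not pick up the datum's qualifiers (see the
   comments in the loop below).  Illustrative only.  */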
tree
build_component_ref (location_t loc, tree datum, tree component)
{
tree type = TREE_TYPE (datum);
enum tree_code code = TREE_CODE (type);
tree field = NULL;
tree ref;
bool datum_lvalue = lvalue_p (datum);
if (!objc_is_public (datum, component))
return error_mark_node;
/* Detect Objective-C property syntax object.property. */
if (c_dialect_objc ()
&& (ref = objc_maybe_build_component_ref (datum, component)))
return ref;
/* See if there is a field or component with name COMPONENT. */
if (code == RECORD_TYPE || code == UNION_TYPE)
{
if (!COMPLETE_TYPE_P (type))
{
c_incomplete_type_error (NULL_TREE, type);
return error_mark_node;
}
field = lookup_field (type, component);
if (!field)
{
error_at (loc, "%qT has no member named %qE", type, component);
return error_mark_node;
}
/* Chain the COMPONENT_REFs if necessary down to the FIELD.
This might be better solved in the future the way the C++ front
end does it - by giving the anonymous entities each a
separate name and type, and then have build_component_ref
recursively call itself. We can't do that here. */
do
{
tree subdatum = TREE_VALUE (field);
int quals;
tree subtype;
bool use_datum_quals;
if (TREE_TYPE (subdatum) == error_mark_node)
return error_mark_node;
/* If this is an rvalue, it does not have qualifiers in C
standard terms and we must avoid propagating such
qualifiers down to a non-lvalue array that is then
converted to a pointer. */
use_datum_quals = (datum_lvalue
|| TREE_CODE (TREE_TYPE (subdatum)) != ARRAY_TYPE);
quals = TYPE_QUALS (strip_array_types (TREE_TYPE (subdatum)));
if (use_datum_quals)
quals |= TYPE_QUALS (TREE_TYPE (datum));
subtype = c_build_qualified_type (TREE_TYPE (subdatum), quals);
ref = build3 (COMPONENT_REF, subtype, datum, subdatum,
NULL_TREE);
SET_EXPR_LOCATION (ref, loc);
if (TREE_READONLY (subdatum)
|| (use_datum_quals && TREE_READONLY (datum)))
TREE_READONLY (ref) = 1;
if (TREE_THIS_VOLATILE (subdatum)
|| (use_datum_quals && TREE_THIS_VOLATILE (datum)))
TREE_THIS_VOLATILE (ref) = 1;
if (TREE_DEPRECATED (subdatum))
warn_deprecated_use (subdatum, NULL_TREE);
datum = ref;
field = TREE_CHAIN (field);
}
while (field);
return ref;
}
else if (code != ERROR_MARK)
error_at (loc,
"request for member %qE in something not a structure or union",
component);
return error_mark_node;
}
/* Given an expression PTR for a pointer, return an expression
for the value pointed to.
ERRSTRING names the operator to appear in error messages.
LOC is the location to use for the generated tree. */
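/* Note that "*&x" is folded straight back to "x" by the ADDR_EXPR
   case below, so no INDIRECT_REF is built for it (illustrative).  */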
tree
build_indirect_ref (location_t loc, tree ptr, ref_operator errstring)
{
tree pointer = default_conversion (ptr);
tree type = TREE_TYPE (pointer);
tree ref;
if (TREE_CODE (type) == POINTER_TYPE)
{
if (CONVERT_EXPR_P (pointer)
|| TREE_CODE (pointer) == VIEW_CONVERT_EXPR)
{
/* If a warning is issued, mark it to avoid duplicates from
the backend. This only needs to be done at
warn_strict_aliasing > 2. */
if (warn_strict_aliasing > 2)
if (strict_aliasing_warning (TREE_TYPE (TREE_OPERAND (pointer, 0)),
type, TREE_OPERAND (pointer, 0)))
TREE_NO_WARNING (pointer) = 1;
}
if (TREE_CODE (pointer) == ADDR_EXPR
&& (TREE_TYPE (TREE_OPERAND (pointer, 0))
== TREE_TYPE (type)))
{
ref = TREE_OPERAND (pointer, 0);
protected_set_expr_location (ref, loc);
return ref;
}
else
{
tree t = TREE_TYPE (type);
ref = build1 (INDIRECT_REF, t, pointer);
if (!COMPLETE_OR_VOID_TYPE_P (t) && TREE_CODE (t) != ARRAY_TYPE)
{
if (!C_TYPE_ERROR_REPORTED (TREE_TYPE (ptr)))
{
error_at (loc, "dereferencing pointer to incomplete type "
"%qT", t);
C_TYPE_ERROR_REPORTED (TREE_TYPE (ptr)) = 1;
}
return error_mark_node;
}
if (VOID_TYPE_P (t) && c_inhibit_evaluation_warnings == 0)
warning_at (loc, 0, "dereferencing %<void *%> pointer");
/* We *must* set TREE_READONLY when dereferencing a pointer to const,
so that we get the proper error message if the result is used
to assign to. Also, &* is supposed to be a no-op.
And ANSI C seems to specify that the type of the result
should be the const type. */
/* A de-reference of a pointer to const is not a const. It is valid
to change it via some other pointer. */
TREE_READONLY (ref) = TYPE_READONLY (t);
TREE_SIDE_EFFECTS (ref)
= TYPE_VOLATILE (t) || TREE_SIDE_EFFECTS (pointer);
TREE_THIS_VOLATILE (ref) = TYPE_VOLATILE (t);
protected_set_expr_location (ref, loc);
return ref;
}
}
else if (TREE_CODE (pointer) != ERROR_MARK)
invalid_indirection_error (loc, type, errstring);
return error_mark_node;
}
/* This handles expressions of the form "a[i]", which denotes
an array reference.
This is logically equivalent in C to *(a+i), but we may do it differently.
If A is a variable or a member, we generate a primitive ARRAY_REF.
This avoids forcing the array out of registers, and can work on
arrays that are not lvalues (for example, members of structures returned
by functions).
For vector types, allow vector[i] but not i[vector], and create
*(((type*)&vectortype) + i) for the expression.
LOC is the location to use for the returned expression. */
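/* For example, "3[arr]" is accepted by swapping the operands below,
   since "a[i]" and "i[a]" both denote "*(a + i)"; vectors are the one
   exception, as only vector[index] is allowed (illustrative).  */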
tree
build_array_ref (location_t loc, tree array, tree index)
{
tree ret;
bool swapped = false;
if (TREE_TYPE (array) == error_mark_node
|| TREE_TYPE (index) == error_mark_node)
return error_mark_node;
if (flag_cilkplus && contains_array_notation_expr (index))
{
size_t rank = 0;
if (!find_rank (loc, index, index, true, &rank))
return error_mark_node;
if (rank > 1)
{
error_at (loc, "rank of the array's index is greater than 1");
return error_mark_node;
}
}
if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE
&& TREE_CODE (TREE_TYPE (array)) != POINTER_TYPE
/* Allow vector[index] but not index[vector]. */
&& TREE_CODE (TREE_TYPE (array)) != VECTOR_TYPE)
{
tree temp;
if (TREE_CODE (TREE_TYPE (index)) != ARRAY_TYPE
&& TREE_CODE (TREE_TYPE (index)) != POINTER_TYPE)
{
error_at (loc,
"subscripted value is neither array nor pointer nor vector");
return error_mark_node;
}
temp = array;
array = index;
index = temp;
swapped = true;
}
if (!INTEGRAL_TYPE_P (TREE_TYPE (index)))
{
error_at (loc, "array subscript is not an integer");
return error_mark_node;
}
if (TREE_CODE (TREE_TYPE (TREE_TYPE (array))) == FUNCTION_TYPE)
{
error_at (loc, "subscripted value is pointer to function");
return error_mark_node;
}
/* ??? Existing practice has been to warn only when the char
index is syntactically the index, not for char[array]. */
if (!swapped)
warn_array_subscript_with_type_char (loc, index);
/* Apply default promotions *after* noticing character types. */
index = default_conversion (index);
if (index == error_mark_node)
return error_mark_node;
gcc_assert (TREE_CODE (TREE_TYPE (index)) == INTEGER_TYPE);
bool non_lvalue
= convert_vector_to_pointer_for_subscript (loc, &array, index);
if (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE)
{
tree rval, type;
/* An array that is indexed by a non-constant
cannot be stored in a register; we must be able to do
address arithmetic on its address.
Likewise an array of elements of variable size. */
if (TREE_CODE (index) != INTEGER_CST
|| (COMPLETE_TYPE_P (TREE_TYPE (TREE_TYPE (array)))
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (array)))) != INTEGER_CST))
{
if (!c_mark_addressable (array))
return error_mark_node;
}
/* An array that is indexed by a constant value which is not within
the array bounds cannot be stored in a register either; because we
would get a crash in store_bit_field/extract_bit_field when trying
to access a non-existent part of the register. */
if (TREE_CODE (index) == INTEGER_CST
&& TYPE_DOMAIN (TREE_TYPE (array))
&& !int_fits_type_p (index, TYPE_DOMAIN (TREE_TYPE (array))))
{
if (!c_mark_addressable (array))
return error_mark_node;
}
if (pedantic || warn_c90_c99_compat)
{
tree foo = array;
while (TREE_CODE (foo) == COMPONENT_REF)
foo = TREE_OPERAND (foo, 0);
if (TREE_CODE (foo) == VAR_DECL && C_DECL_REGISTER (foo))
pedwarn (loc, OPT_Wpedantic,
"ISO C forbids subscripting %<register%> array");
else if (!lvalue_p (foo))
pedwarn_c90 (loc, OPT_Wpedantic,
"ISO C90 forbids subscripting non-lvalue "
"array");
}
type = TREE_TYPE (TREE_TYPE (array));
rval = build4 (ARRAY_REF, type, array, index, NULL_TREE, NULL_TREE);
/* Array ref is const/volatile if the array elements are
or if the array is. */
TREE_READONLY (rval)
|= (TYPE_READONLY (TREE_TYPE (TREE_TYPE (array)))
| TREE_READONLY (array));
TREE_SIDE_EFFECTS (rval)
|= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (array)))
| TREE_SIDE_EFFECTS (array));
TREE_THIS_VOLATILE (rval)
|= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (array)))
/* This was added by rms on 16 Nov 91.
It fixes vol struct foo *a; a->elts[1]
in an inline function.
Hope it doesn't break something else. */
| TREE_THIS_VOLATILE (array));
ret = require_complete_type (rval);
protected_set_expr_location (ret, loc);
if (non_lvalue)
ret = non_lvalue_loc (loc, ret);
return ret;
}
else
{
tree ar = default_conversion (array);
if (ar == error_mark_node)
return ar;
gcc_assert (TREE_CODE (TREE_TYPE (ar)) == POINTER_TYPE);
gcc_assert (TREE_CODE (TREE_TYPE (TREE_TYPE (ar))) != FUNCTION_TYPE);
ret = build_indirect_ref (loc, build_binary_op (loc, PLUS_EXPR, ar,
index, 0),
RO_ARRAY_INDEXING);
if (non_lvalue)
ret = non_lvalue_loc (loc, ret);
return ret;
}
}
/* Build an external reference to identifier ID. FUN indicates
whether this will be used for a function call. LOC is the source
location of the identifier. This sets *TYPE to the type of the
identifier, which is not the same as the type of the returned value
for CONST_DECLs defined as enum constants. If the type of the
identifier is not available, *TYPE is set to NULL. */
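/* For example, for "enum e { RED = 2 };" a reference to RED returns
   the constant's value (an INTEGER_CST) rather than the CONST_DECL
   itself, with *TYPE set to "enum e" (illustrative).  */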
tree
build_external_ref (location_t loc, tree id, int fun, tree *type)
{
tree ref;
tree decl = lookup_name (id);
/* In Objective-C, an instance variable (ivar) may be preferred to
whatever lookup_name() found. */
decl = objc_lookup_ivar (decl, id);
*type = NULL;
if (decl && decl != error_mark_node)
{
ref = decl;
*type = TREE_TYPE (ref);
}
else if (fun)
/* Implicit function declaration. */
ref = implicitly_declare (loc, id);
else if (decl == error_mark_node)
/* Don't complain about something that's already been
complained about. */
return error_mark_node;
else
{
undeclared_variable (loc, id);
return error_mark_node;
}
if (TREE_TYPE (ref) == error_mark_node)
return error_mark_node;
if (TREE_DEPRECATED (ref))
warn_deprecated_use (ref, NULL_TREE);
/* Recursive call does not count as usage. */
if (ref != current_function_decl)
{
TREE_USED (ref) = 1;
}
if (TREE_CODE (ref) == FUNCTION_DECL && !in_alignof)
{
if (!in_sizeof && !in_typeof)
C_DECL_USED (ref) = 1;
else if (DECL_INITIAL (ref) == 0
&& DECL_EXTERNAL (ref)
&& !TREE_PUBLIC (ref))
record_maybe_used_decl (ref);
}
if (TREE_CODE (ref) == CONST_DECL)
{
used_types_insert (TREE_TYPE (ref));
if (warn_cxx_compat
&& TREE_CODE (TREE_TYPE (ref)) == ENUMERAL_TYPE
&& C_TYPE_DEFINED_IN_STRUCT (TREE_TYPE (ref)))
{
warning_at (loc, OPT_Wc___compat,
("enum constant defined in struct or union "
"is not visible in C++"));
inform (DECL_SOURCE_LOCATION (ref), "enum constant defined here");
}
ref = DECL_INITIAL (ref);
TREE_CONSTANT (ref) = 1;
}
else if (current_function_decl != 0
&& !DECL_FILE_SCOPE_P (current_function_decl)
&& (TREE_CODE (ref) == VAR_DECL
|| TREE_CODE (ref) == PARM_DECL
|| TREE_CODE (ref) == FUNCTION_DECL))
{
tree context = decl_function_context (ref);
if (context != 0 && context != current_function_decl)
DECL_NONLOCAL (ref) = 1;
}
/* C99 6.7.4p3: An inline definition of a function with external
linkage ... shall not contain a reference to an identifier with
internal linkage. */
else if (current_function_decl != 0
&& DECL_DECLARED_INLINE_P (current_function_decl)
&& DECL_EXTERNAL (current_function_decl)
&& VAR_OR_FUNCTION_DECL_P (ref)
&& (TREE_CODE (ref) != VAR_DECL || TREE_STATIC (ref))
&& ! TREE_PUBLIC (ref)
&& DECL_CONTEXT (ref) != current_function_decl)
record_inline_static (loc, current_function_decl, ref,
csi_internal);
return ref;
}
/* Record details of decls possibly used inside sizeof or typeof. */
struct maybe_used_decl
{
/* The decl. */
tree decl;
/* The value of (in_sizeof + in_typeof) when the decl was seen. */
int level;
/* The next one at this level or above, or NULL. */
struct maybe_used_decl *next;
};
static struct maybe_used_decl *maybe_used_decls;
/* Record that DECL, an undefined static function reference seen
inside sizeof or typeof, might be used if the operand of sizeof is
a VLA type or the operand of typeof is a variably modified
type. */
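/* For example, given "static int f (void);" with no definition, the
   call in "sizeof (int [f ()])" really is evaluated because the
   operand is a VLA (C99 6.5.3.4p2), so f must be marked as used; had
   the operand not been variably sized, the reference could simply be
   discarded (illustrative).  */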
static void
record_maybe_used_decl (tree decl)
{
struct maybe_used_decl *t = XOBNEW (&parser_obstack, struct maybe_used_decl);
t->decl = decl;
t->level = in_sizeof + in_typeof;
t->next = maybe_used_decls;
maybe_used_decls = t;
}
/* Pop the stack of decls possibly used inside sizeof or typeof. If
USED is false, just discard them. If it is true, mark them used
(if no longer inside sizeof or typeof) or move them to the next
level up (if still inside sizeof or typeof). */
void
pop_maybe_used (bool used)
{
struct maybe_used_decl *p = maybe_used_decls;
int cur_level = in_sizeof + in_typeof;
while (p && p->level > cur_level)
{
if (used)
{
if (cur_level == 0)
C_DECL_USED (p->decl) = 1;
else
p->level = cur_level;
}
p = p->next;
}
if (!used || cur_level == 0)
maybe_used_decls = p;
}
/* Return the result of sizeof applied to EXPR. */
struct c_expr
c_expr_sizeof_expr (location_t loc, struct c_expr expr)
{
struct c_expr ret;
if (expr.value == error_mark_node)
{
ret.value = error_mark_node;
ret.original_code = ERROR_MARK;
ret.original_type = NULL;
pop_maybe_used (false);
}
else
{
bool expr_const_operands = true;
if (TREE_CODE (expr.value) == PARM_DECL
&& C_ARRAY_PARAMETER (expr.value))
{
if (warning_at (loc, OPT_Wsizeof_array_argument,
"%<sizeof%> on array function parameter %qE will "
"return size of %qT", expr.value,
expr.original_type))
inform (DECL_SOURCE_LOCATION (expr.value), "declared here");
}
tree folded_expr = c_fully_fold (expr.value, require_constant_value,
&expr_const_operands);
ret.value = c_sizeof (loc, TREE_TYPE (folded_expr));
c_last_sizeof_arg = expr.value;
ret.original_code = SIZEOF_EXPR;
ret.original_type = NULL;
if (c_vla_type_p (TREE_TYPE (folded_expr)))
{
/* sizeof is evaluated when given a vla (C99 6.5.3.4p2). */
ret.value = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret.value),
folded_expr, ret.value);
C_MAYBE_CONST_EXPR_NON_CONST (ret.value) = !expr_const_operands;
SET_EXPR_LOCATION (ret.value, loc);
}
pop_maybe_used (C_TYPE_VARIABLE_SIZE (TREE_TYPE (folded_expr)));
}
return ret;
}
/* Return the result of sizeof applied to T, a structure for the type
name passed to sizeof (rather than the type itself). LOC is the
location of the original expression. */
struct c_expr
c_expr_sizeof_type (location_t loc, struct c_type_name *t)
{
tree type;
struct c_expr ret;
tree type_expr = NULL_TREE;
bool type_expr_const = true;
type = groktypename (t, &type_expr, &type_expr_const);
ret.value = c_sizeof (loc, type);
c_last_sizeof_arg = type;
ret.original_code = SIZEOF_EXPR;
ret.original_type = NULL;
if ((type_expr || TREE_CODE (ret.value) == INTEGER_CST)
&& c_vla_type_p (type))
{
/* If the type is a [*] array, it is a VLA but is represented as
having a size of zero. In such a case we must ensure that
the result of sizeof does not get folded to a constant by
c_fully_fold, because if the size is evaluated the result is
not constant and so constraints on zero or negative size
arrays must not be applied when this sizeof call is inside
another array declarator. */
if (!type_expr)
type_expr = integer_zero_node;
ret.value = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret.value),
type_expr, ret.value);
C_MAYBE_CONST_EXPR_NON_CONST (ret.value) = !type_expr_const;
}
pop_maybe_used (type != error_mark_node
? C_TYPE_VARIABLE_SIZE (type) : false);
return ret;
}
/* Build a function call to function FUNCTION with parameters PARAMS.
The function call is at LOC.
PARAMS is a list--a chain of TREE_LIST nodes--in which the
TREE_VALUE of each node is a parameter-expression.
FUNCTION's data type may be a function type or a pointer-to-function. */
tree
build_function_call (location_t loc, tree function, tree params)
{
vec<tree, va_gc> *v;
tree ret;
vec_alloc (v, list_length (params));
for (; params; params = TREE_CHAIN (params))
v->quick_push (TREE_VALUE (params));
ret = c_build_function_call_vec (loc, vNULL, function, v, NULL);
vec_free (v);
return ret;
}
/* Give a note about the location of the declaration of DECL. */
static void
inform_declaration (tree decl)
{
if (decl && (TREE_CODE (decl) != FUNCTION_DECL || !DECL_BUILT_IN (decl)))
inform (DECL_SOURCE_LOCATION (decl), "declared here");
}
/* Build a function call to function FUNCTION with parameters PARAMS.
ORIGTYPES, if not NULL, is a vector of types; each element is
either NULL or the original type of the corresponding element in
PARAMS. The original type may differ from TREE_TYPE of the
parameter for enums. FUNCTION's data type may be a function type
or pointer-to-function. This function changes the elements of
PARAMS. */
tree
build_function_call_vec (location_t loc, vec<location_t> arg_loc,
tree function, vec<tree, va_gc> *params,
vec<tree, va_gc> *origtypes)
{
tree fntype, fundecl = 0;
tree name = NULL_TREE, result;
tree tem;
int nargs;
tree *argarray;
/* Strip NON_LVALUE_EXPRs, etc., since we aren't using the expression
as an lvalue. */
STRIP_TYPE_NOPS (function);
/* Convert anything with function type to a pointer-to-function. */
if (TREE_CODE (function) == FUNCTION_DECL)
{
name = DECL_NAME (function);
if (flag_tm)
tm_malloc_replacement (function);
fundecl = function;
/* Atomic functions have type checking/casting already done. They are
often rewritten and don't match the original parameter list. */
if (name && !strncmp (IDENTIFIER_POINTER (name), "__atomic_", 9))
origtypes = NULL;
if (flag_cilkplus
&& is_cilkplus_reduce_builtin (function))
origtypes = NULL;
}
if (TREE_CODE (TREE_TYPE (function)) == FUNCTION_TYPE)
function = function_to_pointer_conversion (loc, function);
/* For Objective-C, convert any calls via a cast to OBJ_TYPE_REF
expressions, like those used for ObjC messenger dispatches. */
if (params && !params->is_empty ())
function = objc_rewrite_function_call (function, (*params)[0]);
function = c_fully_fold (function, false, NULL);
fntype = TREE_TYPE (function);
if (TREE_CODE (fntype) == ERROR_MARK)
return error_mark_node;
if (!(TREE_CODE (fntype) == POINTER_TYPE
&& TREE_CODE (TREE_TYPE (fntype)) == FUNCTION_TYPE))
{
if (!flag_diagnostics_show_caret)
error_at (loc,
"called object %qE is not a function or function pointer",
function);
else if (DECL_P (function))
{
error_at (loc,
"called object %qD is not a function or function pointer",
function);
inform_declaration (function);
}
else
error_at (loc,
"called object is not a function or function pointer");
return error_mark_node;
}
if (fundecl && TREE_THIS_VOLATILE (fundecl))
current_function_returns_abnormally = 1;
/* fntype now gets the type of function pointed to. */
fntype = TREE_TYPE (fntype);
/* Convert the parameters to the types declared in the
function prototype, or apply default promotions. */
nargs = convert_arguments (loc, arg_loc, TYPE_ARG_TYPES (fntype), params,
origtypes, function, fundecl);
if (nargs < 0)
return error_mark_node;
/* Check that the function is called through a compatible prototype.
If it is not, warn. */
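/* E.g. "void f (int); ((void (*) (double)) f) (0.0);" reaches this
   point and draws the warning below (illustrative).  */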
if (CONVERT_EXPR_P (function)
&& TREE_CODE (tem = TREE_OPERAND (function, 0)) == ADDR_EXPR
&& TREE_CODE (tem = TREE_OPERAND (tem, 0)) == FUNCTION_DECL
&& !comptypes (fntype, TREE_TYPE (tem)))
{
tree return_type = TREE_TYPE (fntype);
/* This situation leads to run-time undefined behavior. We can't,
therefore, simply error unless we can prove that all possible
executions of the program must execute the code. */
warning_at (loc, 0, "function called through a non-compatible type");
if (VOID_TYPE_P (return_type)
&& TYPE_QUALS (return_type) != TYPE_UNQUALIFIED)
pedwarn (loc, 0,
"function with qualified void return type called");
}
argarray = vec_safe_address (params);
/* Check that arguments to builtin functions match the expectations. */
if (fundecl
&& DECL_BUILT_IN (fundecl)
&& DECL_BUILT_IN_CLASS (fundecl) == BUILT_IN_NORMAL
&& !check_builtin_function_arguments (fundecl, nargs, argarray))
return error_mark_node;
/* Check that the arguments to the function are valid. */
check_function_arguments (fntype, nargs, argarray);
if (name != NULL_TREE
&& !strncmp (IDENTIFIER_POINTER (name), "__builtin_", 10))
{
if (require_constant_value)
result =
fold_build_call_array_initializer_loc (loc, TREE_TYPE (fntype),
function, nargs, argarray);
else
result = fold_build_call_array_loc (loc, TREE_TYPE (fntype),
function, nargs, argarray);
if (TREE_CODE (result) == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (result, 0)) == INTEGER_CST)
STRIP_TYPE_NOPS (result);
}
else
result = build_call_array_loc (loc, TREE_TYPE (fntype),
function, nargs, argarray);
if (VOID_TYPE_P (TREE_TYPE (result)))
{
if (TYPE_QUALS (TREE_TYPE (result)) != TYPE_UNQUALIFIED)
pedwarn (loc, 0,
"function with qualified void return type called");
return result;
}
return require_complete_type (result);
}
/* Like build_function_call_vec, but call also resolve_overloaded_builtin. */
tree
c_build_function_call_vec (location_t loc, vec<location_t> arg_loc,
tree function, vec<tree, va_gc> *params,
vec<tree, va_gc> *origtypes)
{
/* Strip NON_LVALUE_EXPRs, etc., since we aren't using the expression
as an lvalue. */
STRIP_TYPE_NOPS (function);
/* Convert anything with function type to a pointer-to-function. */
if (TREE_CODE (function) == FUNCTION_DECL)
{
/* Implement type-directed function overloading for builtins.
resolve_overloaded_builtin and targetm.resolve_overloaded_builtin
handle all the type checking. The result is a complete expression
that implements this function call. */
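/* E.g. a call to "__atomic_load_n (p, __ATOMIC_SEQ_CST)" is resolved
   here into a size-specific built-in chosen from the type of *p
   (illustrative; resolve_overloaded_builtin is defined in
   c-family/c-common.c).  */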
tree tem = resolve_overloaded_builtin (loc, function, params);
if (tem)
return tem;
}
return build_function_call_vec (loc, arg_loc, function, params, origtypes);
}
/* Convert the argument expressions in the vector VALUES
to the types in the list TYPELIST.
If TYPELIST is exhausted, or an element of it is NULL, perform the
default conversions.
ORIGTYPES holds the original types of the expressions in VALUES. This
holds the type of enum values which have been converted to integral
types. It may be NULL.
FUNCTION is a tree for the called function. It is used only for
error messages, where it is formatted with %qE.
This is also where warnings about wrong number of args are generated.
ARG_LOC are locations of function arguments (if any).
Returns the actual number of arguments processed (which may be less
than the length of VALUES in some error situations), or -1 on
failure. */
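/* For example, arguments matching the "..." of a prototype, or all
   arguments of an unprototyped function, receive the default argument
   promotions: a "float" is passed as "double" and a "short" as "int"
   (illustrative).  */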
static int
convert_arguments (location_t loc, vec<location_t> arg_loc, tree typelist,
vec<tree, va_gc> *values, vec<tree, va_gc> *origtypes,
tree function, tree fundecl)
{
tree typetail, val;
unsigned int parmnum;
bool error_args = false;
const bool type_generic = fundecl
&& lookup_attribute ("type generic", TYPE_ATTRIBUTES (TREE_TYPE (fundecl)));
bool type_generic_remove_excess_precision = false;
tree selector;
/* Change pointer to function to the function itself for
diagnostics. */
if (TREE_CODE (function) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (function, 0)) == FUNCTION_DECL)
function = TREE_OPERAND (function, 0);
/* Handle an ObjC selector specially for diagnostics. */
selector = objc_message_selector ();
/* For type-generic built-in functions, determine whether excess
precision should be removed (classification) or not
(comparison). */
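/* E.g. "__builtin_isnan (x)" has any excess precision stripped from
   its argument, while type-generic comparisons such as
   "__builtin_isgreater" keep it, per the switch below
   (illustrative).  */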
if (type_generic
&& DECL_BUILT_IN (fundecl)
&& DECL_BUILT_IN_CLASS (fundecl) == BUILT_IN_NORMAL)
{
switch (DECL_FUNCTION_CODE (fundecl))
{
case BUILT_IN_ISFINITE:
case BUILT_IN_ISINF:
case BUILT_IN_ISINF_SIGN:
case BUILT_IN_ISNAN:
case BUILT_IN_ISNORMAL:
case BUILT_IN_FPCLASSIFY:
type_generic_remove_excess_precision = true;
break;
default:
type_generic_remove_excess_precision = false;
break;
}
}
if (flag_cilkplus && fundecl && is_cilkplus_reduce_builtin (fundecl))
return vec_safe_length (values);
/* Scan the given expressions and types, producing individual
converted arguments. */
for (typetail = typelist, parmnum = 0;
values && values->iterate (parmnum, &val);
++parmnum)
{