| /* Definitions for C++ name lookup routines. |
| Copyright (C) 2003-2021 Free Software Foundation, Inc. |
| Contributed by Gabriel Dos Reis <gdr@integrable-solutions.net> |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License as published by |
| the Free Software Foundation; either version 3, or (at your option) |
| any later version. |
| |
| GCC is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #define INCLUDE_UNIQUE_PTR |
| #include "system.h" |
| #include "coretypes.h" |
| #include "cp-tree.h" |
| #include "timevar.h" |
| #include "stringpool.h" |
| #include "print-tree.h" |
| #include "attribs.h" |
| #include "debug.h" |
| #include "c-family/c-pragma.h" |
| #include "gcc-rich-location.h" |
| #include "spellcheck-tree.h" |
| #include "parser.h" |
| #include "c-family/name-hint.h" |
| #include "c-family/known-headers.h" |
| #include "c-family/c-spellcheck.h" |
| #include "bitmap.h" |
| |
/* Forward declarations of helpers defined later in this file.  */

static cxx_binding *cxx_binding_make (tree value, tree type);
static cp_binding_level *innermost_nonclass_level (void);
static tree do_pushdecl (tree decl, bool hiding);
static void set_identifier_type_value_with_scope (tree id, tree decl,
						  cp_binding_level *b);
static name_hint maybe_suggest_missing_std_header (location_t location,
						   tree name);
static name_hint suggest_alternatives_for_1 (location_t location, tree name,
					     bool suggest_misspellings);
| |
/* Slots in BINDING_VECTOR.  The first cluster of a binding vector
   holds these fixed slots; imported-module slots follow.  */
enum binding_slots
{
  BINDING_SLOT_CURRENT,		/* Slot for current TU.  */
  BINDING_SLOT_GLOBAL,		/* Slot for merged global module.  */
  BINDING_SLOT_PARTITION,	/* Slot for merged partition entities
				   (optional).  */

  /* Number of always-allocated slots.  */
  BINDING_SLOTS_FIXED = BINDING_SLOT_GLOBAL + 1
};
| |
/* Create an overload suitable for recording an artificial TYPE_DECL
   and another decl.  We use this mechanism to implement the struct
   stat hack.  */

#define STAT_HACK_P(N) ((N) && TREE_CODE (N) == OVERLOAD && OVL_LOOKUP_P (N))
#define STAT_TYPE_VISIBLE_P(N) TREE_USED (OVERLOAD_CHECK (N))
#define STAT_TYPE(N) TREE_TYPE (N)
#define STAT_DECL(N) OVL_FUNCTION (N)
#define STAT_VISIBLE(N) OVL_CHAIN (N)
#define MAYBE_STAT_DECL(N) (STAT_HACK_P (N) ? STAT_DECL (N) : N)
#define MAYBE_STAT_TYPE(N) (STAT_HACK_P (N) ? STAT_TYPE (N) : NULL_TREE)

/* When a STAT_HACK_P is true, OVL_USING_P and OVL_EXPORT_P are valid
   and apply to the hacked type.  */

/* For regular (maybe) overloaded functions, we have OVL_HIDDEN_P.
   But we also need to indicate hiddenness on implicit type decls
   (injected friend classes), and (coming soon) decls injected from
   block-scope externs.  It is too awkward to press the existing
   overload marking for that.  If we have a hidden non-function, we
   always create a STAT_HACK, and use these two markers as needed.  */
#define STAT_TYPE_HIDDEN_P(N) OVL_HIDDEN_P (N)
#define STAT_DECL_HIDDEN_P(N) OVL_DEDUP_P (N)
| |
| /* Create a STAT_HACK node with DECL as the value binding and TYPE as |
| the type binding. */ |
| |
| static tree |
| stat_hack (tree decl = NULL_TREE, tree type = NULL_TREE) |
| { |
| tree result = make_node (OVERLOAD); |
| |
| /* Mark this as a lookup, so we can tell this is a stat hack. */ |
| OVL_LOOKUP_P (result) = true; |
| STAT_DECL (result) = decl; |
| STAT_TYPE (result) = type; |
| return result; |
| } |
| |
| /* Create a local binding level for NAME. */ |
| |
| static cxx_binding * |
| create_local_binding (cp_binding_level *level, tree name) |
| { |
| cxx_binding *binding = cxx_binding_make (NULL, NULL); |
| |
| LOCAL_BINDING_P (binding) = true; |
| binding->scope = level; |
| binding->previous = IDENTIFIER_BINDING (name); |
| |
| IDENTIFIER_BINDING (name) = binding; |
| |
| return binding; |
| } |
| |
| /* Find the binding for NAME in namespace NS. If CREATE_P is true, |
| make an empty binding if there wasn't one. */ |
| |
| static tree * |
| find_namespace_slot (tree ns, tree name, bool create_p = false) |
| { |
| tree *slot = DECL_NAMESPACE_BINDINGS (ns) |
| ->find_slot_with_hash (name, name ? IDENTIFIER_HASH_VALUE (name) : 0, |
| create_p ? INSERT : NO_INSERT); |
| return slot; |
| } |
| |
| static tree |
| find_namespace_value (tree ns, tree name) |
| { |
| tree *b = find_namespace_slot (ns, name); |
| |
| return b ? MAYBE_STAT_DECL (*b) : NULL_TREE; |
| } |
| |
/* Look in *SLOT for the binding of NAME in imported module IX.
   Returns pointer to binding's slot, or NULL if not found.  Does a
   binary search, as this is mainly used for random access during
   importing.  Do not use for the fixed slots.  */

static binding_slot *
search_imported_binding_slot (tree *slot, unsigned ix)
{
  /* IX zero is the current TU, a fixed slot -- not handled here.  */
  gcc_assert (ix);

  if (!*slot)
    return NULL;

  /* A non-vector slot only holds the current TU's binding.  */
  if (TREE_CODE (*slot) != BINDING_VECTOR)
    return NULL;

  unsigned clusters = BINDING_VECTOR_NUM_CLUSTERS (*slot);
  binding_cluster *cluster = BINDING_VECTOR_CLUSTER_BASE (*slot);

  /* If the fixed slots fill their cluster exactly, that first cluster
     can never contain an import -- skip it.  */
  if (BINDING_VECTOR_SLOTS_PER_CLUSTER == BINDING_SLOTS_FIXED)
    {
      clusters--;
      cluster++;
    }

  /* Binary search for the cluster whose first index range could
     contain IX (cluster bases are monotonically increasing).  */
  while (clusters > 1)
    {
      unsigned half = clusters / 2;
      gcc_checking_assert (cluster[half].indices[0].span);
      if (cluster[half].indices[0].base > ix)
	clusters = half;
      else
	{
	  clusters -= half;
	  cluster += half;
	}
    }

  if (clusters)
    /* Is it in this cluster?  Scan each occupied [base, base+span)
       index range.  */
    for (unsigned off = 0; off != BINDING_VECTOR_SLOTS_PER_CLUSTER; off++)
      {
	if (!cluster->indices[off].span)
	  break;
	if (cluster->indices[off].base > ix)
	  break;

	if (cluster->indices[off].base + cluster->indices[off].span > ix)
	  return &cluster->slots[off];
      }

  return NULL;
}
| |
/* DECL is being propagated into a newly-created binding vector whose
   fixed slots live in CLUSTER.  Add it to the GLOBAL slot if it is
   not attached to the module purview, otherwise to the PARTITION
   slot, chaining it onto any existing contents of that slot.  */

static void
init_global_partition (binding_cluster *cluster, tree decl)
{
  bool purview = true;

  if (header_module_p ())
    purview = false;
  else if (TREE_PUBLIC (decl)
	   && TREE_CODE (decl) == NAMESPACE_DECL
	   && !DECL_NAMESPACE_ALIAS (decl))
    /* Public non-alias namespaces are not purview-attached.  */
    purview = false;
  else if (!get_originating_module (decl))
    purview = false;

  binding_slot *mslot;
  if (!purview)
    mslot = &cluster[0].slots[BINDING_SLOT_GLOBAL];
  else
    /* The partition slot may lie in a later cluster; locate it.  */
    mslot = &cluster[BINDING_SLOT_PARTITION
		     / BINDING_VECTOR_SLOTS_PER_CLUSTER]
      .slots[BINDING_SLOT_PARTITION
	     % BINDING_VECTOR_SLOTS_PER_CLUSTER];

  /* Chain onto any existing contents of the slot.  */
  if (*mslot)
    decl = ovl_make (decl, *mslot);
  *mslot = decl;

  if (TREE_CODE (decl) == CONST_DECL)
    {
      tree type = TREE_TYPE (decl);
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  && IDENTIFIER_ANON_P (DECL_NAME (TYPE_NAME (type)))
	  && decl == TREE_VALUE (TYPE_VALUES (type)))
	/* Anonymous enums are keyed by their first enumerator, put
	   the TYPE_DECL here too.  */
	*mslot = ovl_make (TYPE_NAME (type), *mslot);
    }
}
| |
/* Get the fixed binding slot IX.  Creating the vector if CREATE is
   non-zero.  If CREATE is < 0, make sure there is at least 1 spare
   slot for an import.  (It is an error for CREATE < 0 and the slot to
   already exist.)  */

static tree *
get_fixed_binding_slot (tree *slot, tree name, unsigned ix, int create)
{
  gcc_checking_assert (ix <= BINDING_SLOT_PARTITION);

  /* An assumption is that the fixed slots all reside in one cluster.  */
  gcc_checking_assert (BINDING_VECTOR_SLOTS_PER_CLUSTER >= BINDING_SLOTS_FIXED);

  if (!*slot || TREE_CODE (*slot) != BINDING_VECTOR)
    {
      if (ix == BINDING_SLOT_CURRENT)
	/* The current TU can just use slot directly.  */
	return slot;

      if (!create)
	return NULL;

      /* The partition slot is only needed when we know we're a named
	 module.  */
      bool partition_slot = named_module_p ();
      /* Number of clusters needed for the fixed slots, the optional
	 partition slot, and (for CREATE < 0) one spare import slot.  */
      unsigned want = ((BINDING_SLOTS_FIXED + partition_slot + (create < 0)
			+ BINDING_VECTOR_SLOTS_PER_CLUSTER - 1)
		       / BINDING_VECTOR_SLOTS_PER_CLUSTER);
      tree new_vec = make_binding_vec (name, want);
      BINDING_VECTOR_NUM_CLUSTERS (new_vec) = want;
      binding_cluster *cluster = BINDING_VECTOR_CLUSTER_BASE (new_vec);

      /* Initialize the fixed slots.  */
      for (unsigned jx = BINDING_SLOTS_FIXED; jx--;)
	{
	  cluster[0].indices[jx].base = 0;
	  cluster[0].indices[jx].span = 1;
	  cluster[0].slots[jx] = NULL_TREE;
	}

      if (partition_slot)
	{
	  unsigned off = BINDING_SLOT_PARTITION % BINDING_VECTOR_SLOTS_PER_CLUSTER;
	  unsigned ind = BINDING_SLOT_PARTITION / BINDING_VECTOR_SLOTS_PER_CLUSTER;
	  cluster[ind].indices[off].base = 0;
	  cluster[ind].indices[off].span = 1;
	  cluster[ind].slots[off] = NULL_TREE;
	}

      if (tree orig = *slot)
	{
	  /* Propagate existing value to current slot.  */

	  /* Propagate global & module entities to the global and
	     partition slots.  */
	  if (tree type = MAYBE_STAT_TYPE (orig))
	    init_global_partition (cluster, type);

	  for (ovl_iterator iter (MAYBE_STAT_DECL (orig)); iter; ++iter)
	    {
	      tree decl = *iter;

	      /* Propagate the entity to the global or partition slot
		 as appropriate (internal-linkage entities are not
		 de-duplicatable).  */
	      init_global_partition (cluster, decl);
	    }

	  if (cluster[0].slots[BINDING_SLOT_GLOBAL]
	      && !(TREE_CODE (orig) == NAMESPACE_DECL
		   && !DECL_NAMESPACE_ALIAS (orig)))
	    {
	      /* Note that we had some GMF entries.  */
	      if (!STAT_HACK_P (orig))
		orig = stat_hack (orig);

	      MODULE_BINDING_GLOBAL_P (orig) = true;
	    }

	  cluster[0].slots[BINDING_SLOT_CURRENT] = orig;
	}

      *slot = new_vec;
    }
  else
    gcc_checking_assert (create >= 0);

  unsigned off = ix % BINDING_VECTOR_SLOTS_PER_CLUSTER;
  binding_cluster &cluster
    = BINDING_VECTOR_CLUSTER (*slot, ix / BINDING_VECTOR_SLOTS_PER_CLUSTER);

  /* There must always be slots for these indices.  */
  gcc_checking_assert (cluster.indices[off].span == 1
		       && !cluster.indices[off].base
		       && !cluster.slots[off].is_lazy ());

  return reinterpret_cast<tree *> (&cluster.slots[off]);
}
| |
/* *SLOT is a namespace binding slot.  Append a slot for imported
   module IX.  Module indices must be appended in increasing order.  */

static binding_slot *
append_imported_binding_slot (tree *slot, tree name, unsigned ix)
{
  /* IX zero is the current TU, which is never appended.  */
  gcc_checking_assert (ix);

  if (!*slot ||  TREE_CODE (*slot) != BINDING_VECTOR)
    /* Make an initial module vector.  */
    get_fixed_binding_slot (slot, name, BINDING_SLOT_GLOBAL, -1);
  else if (!BINDING_VECTOR_CLUSTER_LAST (*slot)
	   ->indices[BINDING_VECTOR_SLOTS_PER_CLUSTER - 1].span)
    /* There is space in the last cluster.  */;
  else if (BINDING_VECTOR_NUM_CLUSTERS (*slot)
	   != BINDING_VECTOR_ALLOC_CLUSTERS (*slot))
    /* There is space in the vector.  */
    BINDING_VECTOR_NUM_CLUSTERS (*slot)++;
  else
    {
      /* Extend the vector by ~1.5x (capped at the maximum cluster
	 count representable in the vector's fields).  */
      unsigned have = BINDING_VECTOR_NUM_CLUSTERS (*slot);
      unsigned want = (have * 3 + 1) / 2;

      if (want > (unsigned short)~0)
	want = (unsigned short)~0;

      tree new_vec = make_binding_vec (name, want);
      BINDING_VECTOR_NUM_CLUSTERS (new_vec) = have + 1;
      memcpy (BINDING_VECTOR_CLUSTER_BASE (new_vec),
	      BINDING_VECTOR_CLUSTER_BASE (*slot),
	      have * sizeof (binding_cluster));
      *slot = new_vec;
    }

  binding_cluster *last = BINDING_VECTOR_CLUSTER_LAST (*slot);
  for (unsigned off = 0; off != BINDING_VECTOR_SLOTS_PER_CLUSTER; off++)
    if (!last->indices[off].span)
      {
	/* Fill the free slot of the cluster.  */
	last->indices[off].base = ix;
	last->indices[off].span = 1;
	last->slots[off] = NULL_TREE;
	/* Check monotonicity against the previous slot (which may be
	   in the previous cluster).  */
	gcc_checking_assert (last[off ? 0 : -1]
			     .indices[off ? off - 1
				      : BINDING_VECTOR_SLOTS_PER_CLUSTER - 1]
			     .base < ix);
	return &last->slots[off];
      }

  gcc_unreachable ();
}
| |
| /* Add DECL to the list of things declared in binding level B. */ |
| |
| static void |
| add_decl_to_level (cp_binding_level *b, tree decl) |
| { |
| gcc_assert (b->kind != sk_class); |
| |
| /* Make sure we don't create a circular list. xref_tag can end |
| up pushing the same artificial decl more than once. We |
| should have already detected that in update_binding. (This isn't a |
| complete verification of non-circularity.) */ |
| gcc_assert (b->names != decl); |
| |
| /* We build up the list in reverse order, and reverse it later if |
| necessary. */ |
| TREE_CHAIN (decl) = b->names; |
| b->names = decl; |
| |
| /* If appropriate, add decl to separate list of statics. We include |
| extern variables because they might turn out to be static later. |
| It's OK for this list to contain a few false positives. */ |
| if (b->kind == sk_namespace |
| && ((VAR_P (decl) && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))) |
| || (TREE_CODE (decl) == FUNCTION_DECL |
| && (!TREE_PUBLIC (decl) |
| || decl_anon_ns_mem_p (decl) |
| || DECL_DECLARED_INLINE_P (decl))))) |
| vec_safe_push (static_decls, decl); |
| } |
| |
| /* Find the binding for NAME in the local binding level B. */ |
| |
| static cxx_binding * |
| find_local_binding (cp_binding_level *b, tree name) |
| { |
| if (cxx_binding *binding = IDENTIFIER_BINDING (name)) |
| for (;; b = b->level_chain) |
| { |
| if (binding->scope == b) |
| return binding; |
| |
| /* Cleanup contours are transparent to the language. */ |
| if (b->kind != sk_cleanup) |
| break; |
| } |
| return NULL; |
| } |
| |
/* A name-lookup instance: accumulates the result of looking NAME up
   in one or more scopes, tracking which scopes have been visited via
   on-tree marker flags that are undone on destruction.  */

class name_lookup
{
public:
  typedef std::pair<tree, tree> using_pair;
  typedef vec<using_pair, va_heap, vl_embed> using_queue;

public:
  tree name;	/* The identifier being looked for.  */

  /* Usually we just add things to the VALUE binding, but we record
     (hidden) IMPLICIT_TYPEDEFs on the type binding, which is used for
     using-decl resolution.  */
  tree value;	/* A (possibly ambiguous) set of things found.  */
  tree type;	/* A type that has been found.  */

  LOOK_want want;  /* What kind of entity we want.  */

  bool deduping; /* Full deduping is needed because using declarations
		    are in play.  */
  vec<tree, va_heap, vl_embed> *scopes;	/* Marked scopes (to unmark).  */
  name_lookup *previous; /* Previously active lookup.  */

protected:
  /* Marked scope stack for outermost name lookup.  */
  static vec<tree, va_heap, vl_embed> *shared_scopes;
  /* Currently active lookup.  */
  static name_lookup *active;

public:
  name_lookup (tree n, LOOK_want w = LOOK_want::NORMAL)
  : name (n), value (NULL_TREE), type (NULL_TREE),
    want (w),
    deduping (false), scopes (NULL), previous (NULL)
  {
    preserve_state ();
  }
  ~name_lookup ()
  {
    /* Deduping must have been disengaged before destruction.  */
    gcc_checking_assert (!deduping);
    restore_state ();
  }

private: /* Uncopyable, unmovable, unassignable.  I am a rock.  */
  name_lookup (const name_lookup &);
  name_lookup &operator= (const name_lookup &);

public:
  /* Turn on or off deduping mode, (un)marking the current value.  */
  void dedup (bool state)
  {
    if (deduping != state)
      {
	deduping = state;
	lookup_mark (value, state);
      }
  }

protected:
  static bool seen_p (tree scope)
  {
    return LOOKUP_SEEN_P (scope);
  }
  static bool found_p (tree scope)
  {
    return LOOKUP_FOUND_P (scope);
  }

  void mark_seen (tree scope); /* Mark and add to scope vector.  */
  static void mark_found (tree scope)
  {
    gcc_checking_assert (seen_p (scope));
    LOOKUP_FOUND_P (scope) = true;
  }
  /* Mark SCOPE seen; return whether it had already been seen.  */
  bool see_and_mark (tree scope)
  {
    bool ret = seen_p (scope);
    if (!ret)
      mark_seen (scope);
    return ret;
  }
  bool find_and_mark (tree scope);

private:
  void preserve_state ();
  void restore_state ();

private:
  static tree ambiguous (tree thing, tree current);
  void add_overload (tree fns);
  void add_value (tree new_val);
  void add_type (tree new_type);
  bool process_binding (tree val_bind, tree type_bind);
  unsigned process_module_binding (tree val_bind, tree type_bind, unsigned);
  /* Look in only namespace.  */
  bool search_namespace_only (tree scope);
  /* Look in namespace and its (recursive) inlines. Ignore using
     directives.  Return true if something found (inc dups).  */
  bool search_namespace (tree scope);
  /* Look in the using directives of namespace + inlines using
     qualified lookup rules.  */
  bool search_usings (tree scope);

private:
  using_queue *queue_namespace (using_queue *queue, int depth, tree scope);
  using_queue *do_queue_usings (using_queue *queue, int depth,
				vec<tree, va_gc> *usings);
  using_queue *queue_usings (using_queue *queue, int depth,
			     vec<tree, va_gc> *usings)
  {
    if (usings)
      queue = do_queue_usings (queue, depth, usings);
    return queue;
  }

private:
  void add_fns (tree);

private:
  void adl_expr (tree);
  void adl_type (tree);
  void adl_template_arg (tree);
  void adl_class (tree);
  void adl_enum (tree);
  void adl_bases (tree);
  void adl_class_only (tree);
  void adl_namespace (tree);
  void adl_class_fns (tree);
  void adl_namespace_fns (tree, bitmap);

public:
  /* Search namespace + inlines + maybe usings as qualified lookup.  */
  bool search_qualified (tree scope, bool usings = true);

  /* Search namespace + inlines + usings as unqualified lookup.  */
  bool search_unqualified (tree scope, cp_binding_level *);

  /* ADL lookup of ARGS.  */
  tree search_adl (tree fns, vec<tree, va_gc> *args);
};
| |
/* Scope stack shared by all outermost lookups.  This avoids us
   allocating and freeing on every single lookup.  */
vec<tree, va_heap, vl_embed> *name_lookup::shared_scopes;

/* Currently active lookup.  */
name_lookup *name_lookup::active;
| |
/* Name lookup is recursive, because ADL can cause template
   instantiation.  This is of course a rare event, so we optimize for
   it not happening.  When we discover an active name-lookup, which
   must be an ADL lookup, we need to unmark the marked scopes and also
   unmark the lookup we might have been accumulating.  */

void
name_lookup::preserve_state ()
{
  previous = active;
  if (previous)
    {
      /* Reserve headroom so the FOUND_P scopes can be pushed below
	 without reallocation invalidating our iteration.  */
      unsigned length = vec_safe_length (previous->scopes);
      vec_safe_reserve (previous->scopes, length * 2);
      for (unsigned ix = length; ix--;)
	{
	  tree decl = (*previous->scopes)[ix];

	  gcc_checking_assert (LOOKUP_SEEN_P (decl));
	  LOOKUP_SEEN_P (decl) = false;

	  /* Preserve the FOUND_P state on the interrupted lookup's
	     stack.  */
	  if (LOOKUP_FOUND_P (decl))
	    {
	      LOOKUP_FOUND_P (decl) = false;
	      previous->scopes->quick_push (decl);
	    }
	}

      /* Unmark the outer partial lookup.  */
      if (previous->deduping)
	lookup_mark (previous->value, false);
    }
  else
    scopes = shared_scopes;
  active = this;
}
| |
/* Restore the marking state of a lookup we interrupted.  */

void
name_lookup::restore_state ()
{
  gcc_checking_assert (!deduping);

  /* Unmark and empty this lookup's scope stack.  */
  for (unsigned ix = vec_safe_length (scopes); ix--;)
    {
      tree decl = scopes->pop ();
      gcc_checking_assert (LOOKUP_SEEN_P (decl));
      LOOKUP_SEEN_P (decl) = false;
      LOOKUP_FOUND_P (decl) = false;
    }

  active = previous;
  if (previous)
    {
      free (scopes);

      /* Re-mark the interrupted lookup's scopes.  preserve_state left
	 the plain SEEN_P entries first, followed by the entries that
	 were also FOUND_P.  */
      unsigned length = vec_safe_length (previous->scopes);
      for (unsigned ix = 0; ix != length; ix++)
	{
	  tree decl = (*previous->scopes)[ix];
	  if (LOOKUP_SEEN_P (decl))
	    {
	      /* The remainder of the scope stack must be recording
		 FOUND_P decls, which we want to pop off.  */
	      do
		{
		  tree decl = previous->scopes->pop ();
		  gcc_checking_assert (LOOKUP_SEEN_P (decl)
				       && !LOOKUP_FOUND_P (decl));
		  LOOKUP_FOUND_P (decl) = true;
		}
	      while (++ix != length);
	      break;
	    }

	  gcc_checking_assert (!LOOKUP_FOUND_P (decl));
	  LOOKUP_SEEN_P (decl) = true;
	}

      /* Remark the outer partial lookup.  */
      if (previous->deduping)
	lookup_mark (previous->value, true);
    }
  else
    shared_scopes = scopes;
}
| |
| void |
| name_lookup::mark_seen (tree scope) |
| { |
| gcc_checking_assert (!seen_p (scope)); |
| LOOKUP_SEEN_P (scope) = true; |
| vec_safe_push (scopes, scope); |
| } |
| |
| bool |
| name_lookup::find_and_mark (tree scope) |
| { |
| bool result = LOOKUP_FOUND_P (scope); |
| if (!result) |
| { |
| LOOKUP_FOUND_P (scope) = true; |
| if (!LOOKUP_SEEN_P (scope)) |
| vec_safe_push (scopes, scope); |
| } |
| |
| return result; |
| } |
| |
| /* THING and CURRENT are ambiguous, concatenate them. */ |
| |
| tree |
| name_lookup::ambiguous (tree thing, tree current) |
| { |
| if (TREE_CODE (current) != TREE_LIST) |
| { |
| current = build_tree_list (NULL_TREE, current); |
| TREE_TYPE (current) = error_mark_node; |
| } |
| current = tree_cons (NULL_TREE, thing, current); |
| TREE_TYPE (current) = error_mark_node; |
| |
| return current; |
| } |
| |
| /* FNS is a new overload set to add to the exising set. */ |
| |
| void |
| name_lookup::add_overload (tree fns) |
| { |
| if (!deduping && TREE_CODE (fns) == OVERLOAD) |
| { |
| tree probe = fns; |
| if (!bool (want & LOOK_want::HIDDEN_FRIEND)) |
| probe = ovl_skip_hidden (probe); |
| if (probe && TREE_CODE (probe) == OVERLOAD |
| && OVL_DEDUP_P (probe)) |
| /* We're about to add something found by multiple paths, so need to |
| engage deduping mode. */ |
| dedup (true); |
| } |
| |
| value = lookup_maybe_add (fns, value, deduping); |
| } |
| |
| /* Add a NEW_VAL, a found value binding into the current value binding. */ |
| |
| void |
| name_lookup::add_value (tree new_val) |
| { |
| if (OVL_P (new_val) && (!value || OVL_P (value))) |
| add_overload (new_val); |
| else if (!value) |
| value = new_val; |
| else if (value == new_val) |
| ; |
| else if ((TREE_CODE (value) == TYPE_DECL |
| && TREE_CODE (new_val) == TYPE_DECL |
| && same_type_p (TREE_TYPE (value), TREE_TYPE (new_val)))) |
| /* Typedefs to the same type. */; |
| else if (TREE_CODE (value) == NAMESPACE_DECL |
| && TREE_CODE (new_val) == NAMESPACE_DECL |
| && ORIGINAL_NAMESPACE (value) == ORIGINAL_NAMESPACE (new_val)) |
| /* Namespace (possibly aliased) to the same namespace. Locate |
| the namespace*/ |
| value = ORIGINAL_NAMESPACE (value); |
| else |
| { |
| /* Disengage deduping mode. */ |
| dedup (false); |
| value = ambiguous (new_val, value); |
| } |
| } |
| |
| /* Add a NEW_TYPE, a found type binding into the current type binding. */ |
| |
| void |
| name_lookup::add_type (tree new_type) |
| { |
| if (!type) |
| type = new_type; |
| else if (TREE_CODE (type) == TREE_LIST |
| || !same_type_p (TREE_TYPE (type), TREE_TYPE (new_type))) |
| type = ambiguous (new_type, type); |
| } |
| |
/* Process a found binding containing NEW_VAL and NEW_TYPE.  Returns
   true if we actually found something noteworthy.  Hiddenness has
   already been handled in the caller.  */

bool
name_lookup::process_binding (tree new_val, tree new_type)
{
  /* Did we really see a type?  A namespace-only lookup never wants
     the type binding.  */
  if (new_type
      && (want & LOOK_want::TYPE_NAMESPACE) == LOOK_want::NAMESPACE)
    new_type = NULL_TREE;

  /* Do we really see a value?  Filter by the kind of entity wanted.  */
  if (new_val)
    switch (TREE_CODE (new_val))
      {
      case TEMPLATE_DECL:
	/* If we expect types or namespaces, and not templates,
	   or this is not a template class.  */
	if (bool (want & LOOK_want::TYPE_NAMESPACE)
	    && !DECL_TYPE_TEMPLATE_P (new_val))
	  new_val = NULL_TREE;
	break;
      case TYPE_DECL:
	/* Drop the value's type when namespaces-only, or when the
	   type binding already supplies a type.  */
	if ((want & LOOK_want::TYPE_NAMESPACE) == LOOK_want::NAMESPACE
	    || (new_type && bool (want & LOOK_want::TYPE)))
	  new_val = NULL_TREE;
	break;
      case NAMESPACE_DECL:
	if ((want & LOOK_want::TYPE_NAMESPACE) == LOOK_want::TYPE)
	  new_val = NULL_TREE;
	break;
      default:
	/* Plain entities don't satisfy a type-or-namespace lookup.  */
	if (bool (want & LOOK_want::TYPE_NAMESPACE))
	  new_val = NULL_TREE;
      }

  /* With no surviving value, promote the type to be the value.  */
  if (!new_val)
    {
      new_val = new_type;
      new_type = NULL_TREE;
    }

  /* Merge into the lookup  */
  if (new_val)
    add_value (new_val);
  if (new_type)
    add_type (new_type);

  return new_val != NULL_TREE;
}
| |
| /* If we're importing a module containing this binding, add it to the |
| lookup set. The trickiness is with namespaces, we only want to |
| find it once. */ |
| |
| unsigned |
| name_lookup::process_module_binding (tree new_val, tree new_type, |
| unsigned marker) |
| { |
| /* Optimize for (re-)finding a public namespace. We only need to |
| look once. */ |
| if (new_val && !new_type |
| && TREE_CODE (new_val) == NAMESPACE_DECL |
| && TREE_PUBLIC (new_val) |
| && !DECL_NAMESPACE_ALIAS (new_val)) |
| { |
| if (marker & 2) |
| return marker; |
| marker |= 2; |
| } |
| |
| if (new_type || new_val) |
| marker |= process_binding (new_val, new_type); |
| |
| return marker; |
| } |
| |
/* Look in exactly namespace SCOPE.  Returns true if something was
   found.  Results accumulate into this lookup's value/type.  */

bool
name_lookup::search_namespace_only (tree scope)
{
  bool found = false;
  if (tree *binding = find_namespace_slot (scope, name))
    {
      tree val = *binding;
      if (TREE_CODE (val) == BINDING_VECTOR)
	{
	  /* I presume the binding list is going to be sparser than
	     the import bitmap.  Hence iterate over the former
	     checking for bits set in the bitmap.  */
	  bitmap imports = get_import_bitmap ();
	  binding_cluster *cluster = BINDING_VECTOR_CLUSTER_BASE (val);
	  int marker = 0;
	  int dup_detect = 0;

	  /* First, the current TU's own binding.  */
	  if (tree bind = cluster->slots[BINDING_SLOT_CURRENT])
	    {
	      if (!deduping)
		{
		  /* Record which merged slots (1 = global, 2 =
		     partition) this binding may duplicate.  */
		  if (named_module_purview_p ())
		    {
		      dup_detect |= 2;

		      if (STAT_HACK_P (bind) && MODULE_BINDING_GLOBAL_P (bind))
			dup_detect |= 1;
		    }
		  else
		    dup_detect |= 1;
		}
	      tree type = NULL_TREE;
	      tree value = bind;

	      if (STAT_HACK_P (bind))
		{
		  type = STAT_TYPE (bind);
		  value = STAT_DECL (bind);

		  /* Drop hidden entities unless explicitly wanted.  */
		  if (!bool (want & LOOK_want::HIDDEN_FRIEND))
		    {
		      if (STAT_TYPE_HIDDEN_P (bind))
			type = NULL_TREE;
		      if (STAT_DECL_HIDDEN_P (bind))
			value = NULL_TREE;
		      else
			value = ovl_skip_hidden (value);
		    }
		}
	      else if (!bool (want & LOOK_want::HIDDEN_FRIEND))
		value = ovl_skip_hidden (value);

	      marker = process_module_binding (value, type, marker);
	    }

	  /* Scan the imported bindings.  */
	  unsigned ix = BINDING_VECTOR_NUM_CLUSTERS (val);
	  if (BINDING_VECTOR_SLOTS_PER_CLUSTER == BINDING_SLOTS_FIXED)
	    {
	      ix--;
	      cluster++;
	    }

	  /* Do this in forward order, so we load modules in an order
	     the user expects.  */
	  for (; ix--; cluster++)
	    for (unsigned jx = 0; jx != BINDING_VECTOR_SLOTS_PER_CLUSTER; jx++)
	      {
		/* Are we importing this module?  */
		if (unsigned base = cluster->indices[jx].base)
		  if (unsigned span = cluster->indices[jx].span)
		    do
		      if (bitmap_bit_p (imports, base))
			goto found;
		    while (++base, --span);
		continue;

	      found:;
		/* Is it loaded?  */
		if (cluster->slots[jx].is_lazy ())
		  {
		    gcc_assert (cluster->indices[jx].span == 1);
		    lazy_load_binding (cluster->indices[jx].base,
				       scope, name, &cluster->slots[jx]);
		  }
		tree bind = cluster->slots[jx];
		if (!bind)
		  /* Load errors could mean there's nothing here.  */
		  continue;

		/* Extract what we can see from here.  If there's no
		   stat_hack, then everything was exported.  */
		tree type = NULL_TREE;


		/* If STAT_HACK_P is false, everything is visible, and
		   there's no duplication possibilities.  */
		if (STAT_HACK_P (bind))
		  {
		    if (!deduping)
		      {
			/* Do we need to engage deduplication?  */
			int dup = 0;
			if (MODULE_BINDING_GLOBAL_P (bind))
			  dup = 1;
			else if (MODULE_BINDING_PARTITION_P (bind))
			  dup = 2;
			if (unsigned hit = dup_detect & dup)
			  {
			    if ((hit & 1 && BINDING_VECTOR_GLOBAL_DUPS_P (val))
				|| (hit & 2
				    && BINDING_VECTOR_PARTITION_DUPS_P (val)))
			      dedup (true);
			  }
			dup_detect |= dup;
		      }

		    if (STAT_TYPE_VISIBLE_P (bind))
		      type = STAT_TYPE (bind);
		    bind = STAT_VISIBLE (bind);
		  }

		/* And process it.  */
		marker = process_module_binding (bind, type, marker);
	      }
	  found |= marker & 1;
	}
      else
	{
	  /* Only a current module binding, visible from the current module.  */
	  tree bind = *binding;
	  tree value = bind, type = NULL_TREE;

	  if (STAT_HACK_P (bind))
	    {
	      type = STAT_TYPE (bind);
	      value = STAT_DECL (bind);

	      /* Drop hidden entities unless explicitly wanted.  */
	      if (!bool (want & LOOK_want::HIDDEN_FRIEND))
		{
		  if (STAT_TYPE_HIDDEN_P (bind))
		    type = NULL_TREE;
		  if (STAT_DECL_HIDDEN_P (bind))
		    value = NULL_TREE;
		  else
		    value = ovl_skip_hidden (value);
		}
	    }
	  else if (!bool (want & LOOK_want::HIDDEN_FRIEND))
	    value = ovl_skip_hidden (value);

	  found |= process_binding (value, type);
	}
    }

  return found;
}
| |
| /* Conditionally look in namespace SCOPE and inline children. */ |
| |
| bool |
| name_lookup::search_namespace (tree scope) |
| { |
| if (see_and_mark (scope)) |
| /* We've visited this scope before. Return what we found then. */ |
| return found_p (scope); |
| |
| /* Look in exactly namespace. */ |
| bool found = search_namespace_only (scope); |
| |
| /* Don't look into inline children, if we're looking for an |
| anonymous name -- it must be in the current scope, if anywhere. */ |
| if (name) |
| /* Recursively look in its inline children. */ |
| if (vec<tree, va_gc> *inlinees = DECL_NAMESPACE_INLINEES (scope)) |
| for (unsigned ix = inlinees->length (); ix--;) |
| found |= search_namespace ((*inlinees)[ix]); |
| |
| if (found) |
| mark_found (scope); |
| |
| return found; |
| } |
| |
| /* Recursively follow using directives of SCOPE & its inline children. |
| Such following is essentially a flood-fill algorithm. */ |
| |
| bool |
| name_lookup::search_usings (tree scope) |
| { |
| /* We do not check seen_p here, as that was already set during the |
| namespace_only walk. */ |
| if (found_p (scope)) |
| return true; |
| |
| bool found = false; |
| if (vec<tree, va_gc> *usings = NAMESPACE_LEVEL (scope)->using_directives) |
| for (unsigned ix = usings->length (); ix--;) |
| found |= search_qualified ((*usings)[ix], true); |
| |
| /* Look in its inline children. */ |
| if (vec<tree, va_gc> *inlinees = DECL_NAMESPACE_INLINEES (scope)) |
| for (unsigned ix = inlinees->length (); ix--;) |
| found |= search_usings ((*inlinees)[ix]); |
| |
| if (found) |
| mark_found (scope); |
| |
| return found; |
| } |
| |
| /* Qualified namespace lookup in SCOPE. |
| 1) Look in SCOPE (+inlines). If found, we're done. |
| 2) Otherwise, if USINGS is true, |
| recurse for every using directive of SCOPE (+inlines). |
| |
| Trickiness is (a) loops and (b) multiple paths to same namespace. |
| In both cases we want to not repeat any lookups, and know whether |
| to stop the caller's step #2. Do this via the FOUND_P marker. */ |
| |
| bool |
| name_lookup::search_qualified (tree scope, bool usings) |
| { |
| bool found = false; |
| |
| if (seen_p (scope)) |
| found = found_p (scope); |
| else |
| { |
| found = search_namespace (scope); |
| if (!found && usings) |
| found = search_usings (scope); |
| } |
| |
| dedup (false); |
| |
| return found; |
| } |
| |
| /* Add SCOPE to the unqualified search queue, recursively add its |
| inlines and those via using directives. */ |
| |
| name_lookup::using_queue * |
| name_lookup::queue_namespace (using_queue *queue, int depth, tree scope) |
| { |
| if (see_and_mark (scope)) |
| return queue; |
| |
| /* Record it. */ |
| tree common = scope; |
| while (SCOPE_DEPTH (common) > depth) |
| common = CP_DECL_CONTEXT (common); |
| vec_safe_push (queue, using_pair (common, scope)); |
| |
| /* Queue its inline children. */ |
| if (vec<tree, va_gc> *inlinees = DECL_NAMESPACE_INLINEES (scope)) |
| for (unsigned ix = inlinees->length (); ix--;) |
| queue = queue_namespace (queue, depth, (*inlinees)[ix]); |
| |
| /* Queue its using targets. */ |
| queue = queue_usings (queue, depth, NAMESPACE_LEVEL (scope)->using_directives); |
| |
| return queue; |
| } |
| |
| /* Add the namespaces in USINGS to the unqualified search queue. */ |
| |
| name_lookup::using_queue * |
| name_lookup::do_queue_usings (using_queue *queue, int depth, |
| vec<tree, va_gc> *usings) |
| { |
| for (unsigned ix = usings->length (); ix--;) |
| queue = queue_namespace (queue, depth, (*usings)[ix]); |
| |
| return queue; |
| } |
| |
/* Unqualified namespace lookup in SCOPE.
   1) add scope+inlines to worklist.
   2) recursively add target of every using directive
   3) for each worklist item where SCOPE is common ancestor, search it
   4) if nothing found, scope=parent, goto 1.  */
| |
bool
name_lookup::search_unqualified (tree scope, cp_binding_level *level)
{
  /* Make static to avoid continual reallocation.  We're not
     recursive.  */
  static using_queue *queue = NULL;
  bool found = false;
  /* Remember the incoming length: the static queue may already hold
     entries belonging to an outer caller; we must not disturb them.  */
  int length = vec_safe_length (queue);

  /* Queue local using-directives.  */
  for (; level->kind != sk_namespace; level = level->level_chain)
    queue = queue_usings (queue, SCOPE_DEPTH (scope), level->using_directives);

  /* Walk outwards from SCOPE towards (and including) the global
     namespace, stopping as soon as something is found.  */
  for (; !found; scope = CP_DECL_CONTEXT (scope))
    {
      gcc_assert (!DECL_NAMESPACE_ALIAS (scope));
      int depth = SCOPE_DEPTH (scope);

      /* Queue namespaces reachable from SCOPE.  */
      queue = queue_namespace (queue, depth, scope);

      /* Search every queued namespace where SCOPE is the common
	 ancestor.  Adjust the others.  */
      unsigned ix = length;
      do
	{
	  using_pair &pair = (*queue)[ix];
	  while (pair.first == scope)
	    {
	      found |= search_namespace_only (pair.second);
	      /* Remove this entry by overwriting it with the queue's
		 popped last element, then re-examine slot IX.  */
	      pair = queue->pop ();
	      if (ix == queue->length ())
		goto done;
	    }
	  /* The depth is the same as SCOPE, find the parent scope.  */
	  if (SCOPE_DEPTH (pair.first) == depth)
	    pair.first = CP_DECL_CONTEXT (pair.first);
	  ix++;
	}
      while (ix < queue->length ());
    done:;
      if (scope == global_namespace)
	break;

      /* If looking for hidden friends, we only look in the innermost
	 namespace scope.  [namespace.memdef]/3 If a friend
	 declaration in a non-local class first declares a class,
	 function, class template or function template the friend is a
	 member of the innermost enclosing namespace.  See also
	 [basic.lookup.unqual]/7 */
      if (bool (want & LOOK_want::HIDDEN_FRIEND))
	break;
    }

  dedup (false);

  /* Restore to incoming length.  */
  vec_safe_truncate (queue, length);

  return found;
}
| |
| /* FNS is a value binding. If it is a (set of overloaded) functions, |
| add them into the current value. */ |
| |
| void |
| name_lookup::add_fns (tree fns) |
| { |
| if (!fns) |
| return; |
| else if (TREE_CODE (fns) == OVERLOAD) |
| { |
| if (TREE_TYPE (fns) != unknown_type_node) |
| fns = OVL_FUNCTION (fns); |
| } |
| else if (!DECL_DECLARES_FUNCTION_P (fns)) |
| return; |
| |
| add_overload (fns); |
| } |
| |
| /* Add the overloaded fns of SCOPE. */ |
| |
void
name_lookup::adl_namespace_fns (tree scope, bitmap imports)
{
  if (tree *binding = find_namespace_slot (scope, name))
    {
      tree val = *binding;
      if (TREE_CODE (val) != BINDING_VECTOR)
	/* Plain binding (no modules involved): just add the visible
	   functions.  */
	add_fns (ovl_skip_hidden (MAYBE_STAT_DECL (val)));
      else
	{
	  /* I presume the binding list is going to be sparser than
	     the import bitmap.  Hence iterate over the former
	     checking for bits set in the bitmap.  */
	  binding_cluster *cluster = BINDING_VECTOR_CLUSTER_BASE (val);
	  /* Tracks what we've already added: bit 1 for global-module
	     bindings, bit 2 for partition bindings.  Used below to
	     decide when deduplication must be engaged.  */
	  int dup_detect = 0;

	  if (tree bind = cluster->slots[BINDING_SLOT_CURRENT])
	    {
	      /* The current TU's bindings must be visible, we don't
		 need to check the bitmaps.  */

	      if (!deduping)
		{
		  if (named_module_purview_p ())
		    {
		      dup_detect |= 2;

		      if (STAT_HACK_P (bind) && MODULE_BINDING_GLOBAL_P (bind))
			dup_detect |= 1;
		    }
		  else
		    dup_detect |= 1;
		}

	      add_fns (ovl_skip_hidden (MAYBE_STAT_DECL (bind)));
	    }

	  /* Scan the imported bindings.  */
	  unsigned ix = BINDING_VECTOR_NUM_CLUSTERS (val);
	  if (BINDING_VECTOR_SLOTS_PER_CLUSTER == BINDING_SLOTS_FIXED)
	    /* The first cluster holds only the fixed slots (handled
	       above); step over it.  */
	    {
	      ix--;
	      cluster++;
	    }

	  /* Do this in forward order, so we load modules in an order
	     the user expects.  */
	  for (; ix--; cluster++)
	    for (unsigned jx = 0; jx != BINDING_VECTOR_SLOTS_PER_CLUSTER; jx++)
	      {
		/* Functions are never on merged slots.  */
		if (!cluster->indices[jx].base
		    || cluster->indices[jx].span != 1)
		  continue;

		/* Is this slot visible?  */
		if (!bitmap_bit_p (imports, cluster->indices[jx].base))
		  continue;

		/* Is it loaded.  */
		if (cluster->slots[jx].is_lazy ())
		  lazy_load_binding (cluster->indices[jx].base,
				     scope, name, &cluster->slots[jx]);

		tree bind = cluster->slots[jx];
		if (!bind)
		  /* Load errors could mean there's nothing here.  */
		  continue;

		if (STAT_HACK_P (bind))
		  {
		    if (!deduping)
		      {
			/* Do we need to engage deduplication?  */
			int dup = 0;
			if (MODULE_BINDING_GLOBAL_P (bind))
			  dup = 1;
			else if (MODULE_BINDING_PARTITION_P (bind))
			  dup = 2;
			if (unsigned hit = dup_detect & dup)
			  if ((hit & 1 && BINDING_VECTOR_GLOBAL_DUPS_P (val))
			      || (hit & 2
				  && BINDING_VECTOR_PARTITION_DUPS_P (val)))
			    dedup (true);
			dup_detect |= dup;
		      }

		    bind = STAT_VISIBLE (bind);
		  }

		add_fns (bind);
	      }
	}
    }
}
| |
/* Add the hidden friends of TYPE.  */
| |
void
name_lookup::adl_class_fns (tree type)
{
  /* Add friends.  */
  for (tree list = DECL_FRIENDLIST (TYPE_MAIN_DECL (type));
       list; list = TREE_CHAIN (list))
    if (name == FRIEND_NAME (list))
      {
	tree context = NULL_TREE; /* Lazily computed.  */
	for (tree friends = FRIEND_DECLS (list); friends;
	     friends = TREE_CHAIN (friends))
	  {
	    tree fn = TREE_VALUE (friends);

	    /* Only interested in global functions with potentially hidden
	       (i.e. unqualified) declarations.  */
	    if (!context)
	      context = decl_namespace_context (type);
	    if (CP_DECL_CONTEXT (fn) != context)
	      continue;

	    /* Engage deduplication: this friend may also have been
	       found by the namespace walk.  */
	    dedup (true);

	    /* Template specializations are never found by name lookup.
	       (Templates themselves can be found, but not template
	       specializations.)  */
	    if (TREE_CODE (fn) == FUNCTION_DECL && DECL_USE_TEMPLATE (fn))
	      continue;

	    add_fns (fn);
	  }
      }
}
| |
| /* Find the containing non-inlined namespace, add it and all its |
| inlinees. */ |
| |
| void |
| name_lookup::adl_namespace (tree scope) |
| { |
| if (see_and_mark (scope)) |
| return; |
| |
| /* Look down into inline namespaces. */ |
| if (vec<tree, va_gc> *inlinees = DECL_NAMESPACE_INLINEES (scope)) |
| for (unsigned ix = inlinees->length (); ix--;) |
| adl_namespace ((*inlinees)[ix]); |
| |
| if (DECL_NAMESPACE_INLINE_P (scope)) |
| /* Mark parent. */ |
| adl_namespace (CP_DECL_CONTEXT (scope)); |
| } |
| |
| /* Adds the class and its friends to the lookup structure. */ |
| |
| void |
| name_lookup::adl_class_only (tree type) |
| { |
| /* Backend-built structures, such as __builtin_va_list, aren't |
| affected by all this. */ |
| if (!CLASS_TYPE_P (type)) |
| return; |
| |
| type = TYPE_MAIN_VARIANT (type); |
| |
| if (see_and_mark (type)) |
| return; |
| |
| tree context = decl_namespace_context (type); |
| adl_namespace (context); |
| } |
| |
/* Adds the class and its direct and indirect bases to the lookup
   structure.  */
| |
| void |
| name_lookup::adl_bases (tree type) |
| { |
| adl_class_only (type); |
| |
| /* Process baseclasses. */ |
| if (tree binfo = TYPE_BINFO (type)) |
| { |
| tree base_binfo; |
| int i; |
| |
| for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) |
| adl_bases (BINFO_TYPE (base_binfo)); |
| } |
| } |
| |
| /* Adds everything associated with a class argument type to the lookup |
| structure. |
| |
| If T is a class type (including unions), its associated classes are: the |
| class itself; the class of which it is a member, if any; and its direct |
| and indirect base classes. Its associated namespaces are the namespaces |
| of which its associated classes are members. Furthermore, if T is a |
| class template specialization, its associated namespaces and classes |
| also include: the namespaces and classes associated with the types of |
| the template arguments provided for template type parameters (excluding |
| template template parameters); the namespaces of which any template |
| template arguments are members; and the classes of which any member |
| templates used as template template arguments are members. [ Note: |
| non-type template arguments do not contribute to the set of associated |
| namespaces. --end note] */ |
| |
void
name_lookup::adl_class (tree type)
{
  /* Backend build structures, such as __builtin_va_list, aren't
     affected by all this.  */
  if (!CLASS_TYPE_P (type))
    return;

  type = TYPE_MAIN_VARIANT (type);

  /* We don't set found here because we have to have set seen first,
     which is done in the adl_bases walk.  */
  if (found_p (type))
    return;

  /* Ensure the type is complete before walking its bases.  */
  complete_type (type);
  adl_bases (type);
  mark_found (type);

  /* A nested class also associates its enclosing class.  */
  if (TYPE_CLASS_SCOPE_P (type))
    adl_class_only (TYPE_CONTEXT (type));

  /* Process template arguments.  */
  if (CLASSTYPE_TEMPLATE_INFO (type)
      && PRIMARY_TEMPLATE_P (CLASSTYPE_TI_TEMPLATE (type)))
    {
      tree list = INNERMOST_TEMPLATE_ARGS (CLASSTYPE_TI_ARGS (type));
      for (int i = 0; i < TREE_VEC_LENGTH (list); ++i)
	adl_template_arg (TREE_VEC_ELT (list, i));
    }
}
| |
| void |
| name_lookup::adl_enum (tree type) |
| { |
| type = TYPE_MAIN_VARIANT (type); |
| if (see_and_mark (type)) |
| return; |
| |
| if (TYPE_CLASS_SCOPE_P (type)) |
| adl_class_only (TYPE_CONTEXT (type)); |
| else |
| adl_namespace (decl_namespace_context (type)); |
| } |
| |
void
name_lookup::adl_expr (tree expr)
{
  if (!expr)
    return;

  gcc_assert (!TYPE_P (expr));

  /* An expression of known type contributes through its type.  */
  if (TREE_TYPE (expr) != unknown_type_node)
    {
      adl_type (unlowered_expr_type (expr));
      return;
    }

  /* Otherwise the expression has unknown type; strip wrappers to
     reach the underlying function set.  */
  if (TREE_CODE (expr) == ADDR_EXPR)
    expr = TREE_OPERAND (expr, 0);
  if (TREE_CODE (expr) == COMPONENT_REF
      || TREE_CODE (expr) == OFFSET_REF)
    expr = TREE_OPERAND (expr, 1);
  expr = MAYBE_BASELINK_FUNCTIONS (expr);

  if (OVL_P (expr))
    /* Associate the type of each overload member.  */
    for (lkp_iterator iter (expr); iter; ++iter)
      adl_type (TREE_TYPE (*iter));
  else if (TREE_CODE (expr) == TEMPLATE_ID_EXPR)
    {
      /* The working paper doesn't currently say how to handle
	 template-id arguments.  The sensible thing would seem to be
	 to handle the list of template candidates like a normal
	 overload set, and handle the template arguments like we do
	 for class template specializations.  */

      /* First the templates.  */
      adl_expr (TREE_OPERAND (expr, 0));

      /* Now the arguments.  */
      if (tree args = TREE_OPERAND (expr, 1))
	for (int ix = TREE_VEC_LENGTH (args); ix--;)
	  adl_template_arg (TREE_VEC_ELT (args, ix));
    }
}
| |
void
name_lookup::adl_type (tree type)
{
  if (!type)
    return;

  if (TYPE_PTRDATAMEM_P (type))
    {
      /* Pointer to member: associate class type and value type.  */
      adl_type (TYPE_PTRMEM_CLASS_TYPE (type));
      adl_type (TYPE_PTRMEM_POINTED_TO_TYPE (type));
      return;
    }

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (type))
	{
	  /* Pointer to member function: recurse on the underlying
	     function type.  */
	  adl_type (TYPE_PTRMEMFUNC_FN_TYPE (type));
	  return;
	}
      /* FALLTHRU */
    case UNION_TYPE:
      adl_class (type);
      return;

    case METHOD_TYPE:
      /* The basetype is referenced in the first arg type, so just
	 fall through.  */
    case FUNCTION_TYPE:
      /* Associate the parameter types.  */
      for (tree args = TYPE_ARG_TYPES (type); args; args = TREE_CHAIN (args))
	adl_type (TREE_VALUE (args));
      /* FALLTHROUGH */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case ARRAY_TYPE:
      /* Associate the pointed-to/referred-to/element (or, for
	 functions, return) type.  */
      adl_type (TREE_TYPE (type));
      return;

    case ENUMERAL_TYPE:
      adl_enum (type);
      return;

    case LANG_TYPE:
      gcc_assert (type == unknown_type_node
		  || type == init_list_type_node);
      return;

    case TYPE_PACK_EXPANSION:
      adl_type (PACK_EXPANSION_PATTERN (type));
      return;

    default:
      /* Other types (e.g. scalars) contribute no associated
	 entities.  */
      break;
    }
}
| |
| /* Adds everything associated with a template argument to the lookup |
| structure. */ |
| |
| void |
| name_lookup::adl_template_arg (tree arg) |
| { |
| /* [basic.lookup.koenig] |
| |
| If T is a template-id, its associated namespaces and classes are |
| ... the namespaces and classes associated with the types of the |
| template arguments provided for template type parameters |
| (excluding template template parameters); the namespaces in which |
| any template template arguments are defined; and the classes in |
| which any member templates used as template template arguments |
| are defined. [Note: non-type template arguments do not |
| contribute to the set of associated namespaces. ] */ |
| |
| /* Consider first template template arguments. */ |
| if (TREE_CODE (arg) == TEMPLATE_TEMPLATE_PARM |
| || TREE_CODE (arg) == UNBOUND_CLASS_TEMPLATE) |
| ; |
| else if (TREE_CODE (arg) == TEMPLATE_DECL) |
| { |
| tree ctx = CP_DECL_CONTEXT (arg); |
| |
| /* It's not a member template. */ |
| if (TREE_CODE (ctx) == NAMESPACE_DECL) |
| adl_namespace (ctx); |
| /* Otherwise, it must be member template. */ |
| else |
| adl_class_only (ctx); |
| } |
| /* It's an argument pack; handle it recursively. */ |
| else if (ARGUMENT_PACK_P (arg)) |
| { |
| tree args = ARGUMENT_PACK_ARGS (arg); |
| int i, len = TREE_VEC_LENGTH (args); |
| for (i = 0; i < len; ++i) |
| adl_template_arg (TREE_VEC_ELT (args, i)); |
| } |
| /* It's not a template template argument, but it is a type template |
| argument. */ |
| else if (TYPE_P (arg)) |
| adl_type (arg); |
| } |
| |
| /* Perform ADL lookup. FNS is the existing lookup result and ARGS are |
| the call arguments. */ |
| |
tree
name_lookup::search_adl (tree fns, vec<tree, va_gc> *args)
{
  gcc_checking_assert (!vec_safe_length (scopes));

  /* Gather each associated entity onto the lookup's scope list.  */
  unsigned ix;
  tree arg;

  FOR_EACH_VEC_ELT_REVERSE (*args, ix, arg)
    /* OMP reduction operators put an ADL-significant type as the
       first arg. */
    if (TYPE_P (arg))
      adl_type (arg);
    else
      adl_expr (arg);

  if (vec_safe_length (scopes))
    {
      /* Now do the lookups.  Seed the accumulator with the incoming
	 lookup result so we do not add duplicates of it.  */
      value = fns;
      if (fns)
	dedup (true);

      /* INST_PATH will be NULL, if this is /not/ 2nd-phase ADL.  */
      bitmap inst_path = NULL;
      /* VISIBLE is the regular import bitmap.  */
      bitmap visible = visible_instantiation_path (&inst_path);

      for (unsigned ix = scopes->length (); ix--;)
	{
	  tree scope = (*scopes)[ix];
	  if (TREE_CODE (scope) == NAMESPACE_DECL)
	    adl_namespace_fns (scope, visible);
	  else
	    {
	      if (RECORD_OR_UNION_TYPE_P (scope))
		adl_class_fns (scope);

	      /* During 2nd phase ADL: Any exported declaration D in N
		 declared within the purview of a named module M
		 (10.2) is visible if there is an associated entity
		 attached to M with the same innermost enclosing
		 non-inline namespace as D.
		 [basic.lookup.argdep]/4.4 */

	      if (!inst_path)
		/* Not 2nd phase.  */
		continue;

	      tree ctx = CP_DECL_CONTEXT (TYPE_NAME (scope));
	      if (TREE_CODE (ctx) != NAMESPACE_DECL)
		/* Not namespace-scope class.  */
		continue;

	      tree origin = get_originating_module_decl (TYPE_NAME (scope));
	      tree not_tmpl = STRIP_TEMPLATE (origin);
	      if (!DECL_LANG_SPECIFIC (not_tmpl)
		  || !DECL_MODULE_IMPORT_P (not_tmpl))
		/* Not imported.  */
		continue;

	      unsigned module = get_importing_module (origin);

	      if (!bitmap_bit_p (inst_path, module))
		/* Not on path of instantiation.  */
		continue;

	      if (bitmap_bit_p (visible, module))
		/* If the module was in the visible set, we'll look at
		   its namespace partition anyway.  */
		continue;

	      /* Find the module's binding slot for this name in the
		 enclosing namespace, loading it if still lazy.  */
	      if (tree *slot = find_namespace_slot (ctx, name, false))
		if (binding_slot *mslot = search_imported_binding_slot (slot, module))
		  {
		    if (mslot->is_lazy ())
		      lazy_load_binding (module, ctx, name, mslot);

		    if (tree bind = *mslot)
		      {
			/* We must turn on deduping, because some other class
			   from this module might also be in this namespace.  */
			dedup (true);

			/* Add the exported fns  */
			if (STAT_HACK_P (bind))
			  add_fns (STAT_VISIBLE (bind));
		      }
		  }
	    }
	}

      /* Retrieve the accumulated result.  */
      fns = value;
      dedup (false);
    }

  return fns;
}
| |
| static bool qualified_namespace_lookup (tree, name_lookup *); |
| static void consider_binding_level (tree name, |
| best_match <tree, const char *> &bm, |
| cp_binding_level *lvl, |
| bool look_within_fields, |
| enum lookup_name_fuzzy_kind kind); |
| static void diagnose_name_conflict (tree, tree); |
| |
| /* ADL lookup of NAME. FNS is the result of regular lookup, and we |
| don't add duplicates to it. ARGS is the vector of call |
| arguments (which will not be empty). */ |
| |
| tree |
| lookup_arg_dependent (tree name, tree fns, vec<tree, va_gc> *args) |
| { |
| bool subtime = timevar_cond_start (TV_NAME_LOOKUP); |
| name_lookup lookup (name); |
| fns = lookup.search_adl (fns, args); |
| timevar_cond_stop (TV_NAME_LOOKUP, subtime); |
| return fns; |
| } |
| |
| /* FNS is an overload set of conversion functions. Return the |
| overloads converting to TYPE. */ |
| |
| static tree |
| extract_conversion_operator (tree fns, tree type) |
| { |
| tree convs = NULL_TREE; |
| tree tpls = NULL_TREE; |
| |
| for (ovl_iterator iter (fns); iter; ++iter) |
| { |
| if (same_type_p (DECL_CONV_FN_TYPE (*iter), type)) |
| convs = lookup_add (*iter, convs); |
| |
| if (TREE_CODE (*iter) == TEMPLATE_DECL) |
| tpls = lookup_add (*iter, tpls); |
| } |
| |
| if (!convs) |
| convs = tpls; |
| |
| return convs; |
| } |
| |
| /* Binary search of (ordered) MEMBER_VEC for NAME. */ |
| |
| static tree |
| member_vec_binary_search (vec<tree, va_gc> *member_vec, tree name) |
| { |
| for (unsigned lo = 0, hi = member_vec->length (); lo < hi;) |
| { |
| unsigned mid = (lo + hi) / 2; |
| tree binding = (*member_vec)[mid]; |
| tree binding_name = OVL_NAME (binding); |
| |
| if (binding_name > name) |
| hi = mid; |
| else if (binding_name < name) |
| lo = mid + 1; |
| else |
| return binding; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| /* Linear search of (unordered) MEMBER_VEC for NAME. */ |
| |
| static tree |
| member_vec_linear_search (vec<tree, va_gc> *member_vec, tree name) |
| { |
| for (int ix = member_vec->length (); ix--;) |
| if (tree binding = (*member_vec)[ix]) |
| if (OVL_NAME (binding) == name) |
| return binding; |
| |
| return NULL_TREE; |
| } |
| |
| /* Linear search of (partially ordered) fields of KLASS for NAME. */ |
| |
| static tree |
| fields_linear_search (tree klass, tree name, bool want_type) |
| { |
| for (tree fields = TYPE_FIELDS (klass); fields; fields = DECL_CHAIN (fields)) |
| { |
| tree decl = fields; |
| |
| if (TREE_CODE (decl) == FIELD_DECL |
| && ANON_AGGR_TYPE_P (TREE_TYPE (decl))) |
| { |
| if (tree temp = search_anon_aggr (TREE_TYPE (decl), name, want_type)) |
| return temp; |
| } |
| |
| if (DECL_NAME (decl) != name) |
| continue; |
| |
| if (TREE_CODE (decl) == USING_DECL) |
| { |
| decl = strip_using_decl (decl); |
| if (is_overloaded_fn (decl)) |
| continue; |
| } |
| |
| if (DECL_DECLARES_FUNCTION_P (decl)) |
| /* Functions are found separately. */ |
| continue; |
| |
| if (!want_type || DECL_DECLARES_TYPE_P (decl)) |
| return decl; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| /* Look for NAME member inside of anonymous aggregate ANON. Although |
| such things should only contain FIELD_DECLs, we check that too |
| late, and would give very confusing errors if we weren't |
| permissive here. */ |
| |
| tree |
| search_anon_aggr (tree anon, tree name, bool want_type) |
| { |
| gcc_assert (COMPLETE_TYPE_P (anon)); |
| tree ret = get_class_binding_direct (anon, name, want_type); |
| return ret; |
| } |
| |
/* Look for NAME as an immediate member of KLASS (including
   anon-members or unscoped enum member).  WANT_TYPE is false for a
   regular search, true to get (just) the type binding, if there is
   one.

   Use this if you do not want lazy member creation.  */
| |
tree
get_class_binding_direct (tree klass, tree name, bool want_type)
{
  gcc_checking_assert (RECORD_OR_UNION_TYPE_P (klass));

  /* Conversion operators can only be found by the marker conversion
     operator name.  */
  bool conv_op = IDENTIFIER_CONV_OP_P (name);
  tree lookup = conv_op ? conv_op_identifier : name;
  tree val = NULL_TREE;
  vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);

  if (COMPLETE_TYPE_P (klass) && member_vec)
    {
      /* A complete class's member vector is sorted: binary search.  */
      val = member_vec_binary_search (member_vec, lookup);
      if (!val)
	;
      else if (STAT_HACK_P (val))
	/* A stat-hack entry carries both a type and a non-type
	   binding; pick the requested one.  */
	val = want_type ? STAT_TYPE (val) : STAT_DECL (val);
      else if (want_type && !DECL_DECLARES_TYPE_P (val))
	val = NULL_TREE;
    }
  else
    {
      /* Incomplete class: the member vector (if any) is unsorted and
	 holds only functions; fields are searched separately.  */
      if (member_vec && !want_type)
	val = member_vec_linear_search (member_vec, lookup);

      if (!val || (TREE_CODE (val) == OVERLOAD && OVL_DEDUP_P (val)))
	/* Dependent using declarations are a 'field', make sure we
	   return that even if we saw an overload already.  */
	if (tree field_val = fields_linear_search (klass, lookup, want_type))
	  {
	    if (!val)
	      val = field_val;
	    else if (TREE_CODE (field_val) == USING_DECL)
	      val = ovl_make (field_val, val);
	  }
    }

  /* Extract the conversion operators asked for, unless the general
     conversion operator was requested.  */
  if (val && conv_op)
    {
      gcc_checking_assert (OVL_FUNCTION (val) == conv_op_marker);
      val = OVL_CHAIN (val);
      if (tree type = TREE_TYPE (name))
	val = extract_conversion_operator (val, type);
    }

  return val;
}
| |
| /* We're about to lookup NAME in KLASS. Make sure any lazily declared |
| members are now declared. */ |
| |
static void
maybe_lazily_declare (tree klass, tree name)
{
  /* See big comment about module_state::write_pendings regarding adding a check
     bit.  */
  if (modules_p ())
    lazy_load_pendings (TYPE_NAME (klass));

  /* Lazily declare functions, if we're going to search these.  */
  if (IDENTIFIER_CTOR_P (name))
    {
      /* Looking up a constructor name: synthesize any still-lazy
	 default/copy/move constructors first.  */
      if (CLASSTYPE_LAZY_DEFAULT_CTOR (klass))
	lazily_declare_fn (sfk_constructor, klass);
      if (CLASSTYPE_LAZY_COPY_CTOR (klass))
	lazily_declare_fn (sfk_copy_constructor, klass);
      if (CLASSTYPE_LAZY_MOVE_CTOR (klass))
	lazily_declare_fn (sfk_move_constructor, klass);
    }
  else if (IDENTIFIER_DTOR_P (name))
    {
      if (CLASSTYPE_LAZY_DESTRUCTOR (klass))
	lazily_declare_fn (sfk_destructor, klass);
    }
  else if (name == assign_op_identifier)
    {
      /* Looking up operator=: synthesize lazy copy/move assignment.  */
      if (CLASSTYPE_LAZY_COPY_ASSIGN (klass))
	lazily_declare_fn (sfk_copy_assignment, klass);
      if (CLASSTYPE_LAZY_MOVE_ASSIGN (klass))
	lazily_declare_fn (sfk_move_assignment, klass);
    }
}
| |
| /* Look for NAME's binding in exactly KLASS. See |
| get_class_binding_direct for argument description. Does lazy |
| special function creation as necessary. */ |
| |
| tree |
| get_class_binding (tree klass, tree name, bool want_type /*=false*/) |
| { |
| klass = complete_type (klass); |
| |
| if (COMPLETE_TYPE_P (klass)) |
| maybe_lazily_declare (klass, name); |
| |
| return get_class_binding_direct (klass, name, want_type); |
| } |
| |
| /* Find the slot containing overloads called 'NAME'. If there is no |
| such slot and the class is complete, create an empty one, at the |
| correct point in the sorted member vector. Otherwise return NULL. |
| Deals with conv_op marker handling. */ |
| |
tree *
find_member_slot (tree klass, tree name)
{
  bool complete_p = COMPLETE_TYPE_P (klass);

  vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);
  if (!member_vec)
    {
      vec_alloc (member_vec, 8);
      CLASSTYPE_MEMBER_VEC (klass) = member_vec;
      if (complete_p)
	/* If the class is complete but had no member_vec, we need to
	   add the TYPE_FIELDS into it.  We're also most likely to be
	   adding ctors & dtors, so ask for 6 spare slots (the
	   abstract cdtors and their clones).  */
	member_vec = set_class_bindings (klass, 6);
    }

  /* All conversion operators live under the single marker name.  */
  if (IDENTIFIER_CONV_OP_P (name))
    name = conv_op_identifier;

  unsigned ix, length = member_vec->length ();
  for (ix = 0; ix < length; ix++)
    {
      tree *slot = &(*member_vec)[ix];
      tree fn_name = OVL_NAME (*slot);

      if (fn_name == name)
	{
	  /* If we found an existing slot, it must be a function set.
	     Even with insertion after completion, because those only
	     happen with artificial fns that have unspellable names.
	     This means we do not have to deal with the stat hack
	     either.  */
	  gcc_checking_assert (OVL_P (*slot));
	  if (name == conv_op_identifier)
	    {
	      gcc_checking_assert (OVL_FUNCTION (*slot) == conv_op_marker);
	      /* Skip the conv-op marker. */
	      slot = &OVL_CHAIN (*slot);
	    }
	  return slot;
	}

      /* A complete class's vector is sorted, so we can stop at the
	 insertion point.  */
      if (complete_p && fn_name > name)
	break;
    }

  /* No slot found, add one if the class is complete.  */
  if (complete_p)
    {
      /* Do exact allocation, as we don't expect to add many.  */
      gcc_assert (name != conv_op_identifier);
      vec_safe_reserve_exact (member_vec, 1);
      CLASSTYPE_MEMBER_VEC (klass) = member_vec;
      member_vec->quick_insert (ix, NULL_TREE);
      return &(*member_vec)[ix];
    }

  return NULL;
}
| |
| /* KLASS is an incomplete class to which we're adding a method NAME. |
| Add a slot and deal with conv_op marker handling. */ |
| |
tree *
add_member_slot (tree klass, tree name)
{
  gcc_assert (!COMPLETE_TYPE_P (klass));

  /* Push an empty slot onto the (unsorted, incomplete-class) vector.  */
  vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);
  vec_safe_push (member_vec, NULL_TREE);
  CLASSTYPE_MEMBER_VEC (klass) = member_vec;

  tree *slot = &member_vec->last ();
  if (IDENTIFIER_CONV_OP_P (name))
    {
      /* Install the marker prefix.  */
      *slot = ovl_make (conv_op_marker, NULL_TREE);
      slot = &OVL_CHAIN (*slot);
    }

  return slot;
}
| |
| /* Comparison function to compare two MEMBER_VEC entries by name. |
| Because we can have duplicates during insertion of TYPE_FIELDS, we |
| do extra checking so deduping doesn't have to deal with so many |
| cases. */ |
| |
static int
member_name_cmp (const void *a_p, const void *b_p)
{
  tree a = *(const tree *)a_p;
  tree b = *(const tree *)b_p;
  /* The primary sort key: the member's name (identifier pointer).  */
  tree name_a = DECL_NAME (TREE_CODE (a) == OVERLOAD ? OVL_FUNCTION (a) : a);
  tree name_b = DECL_NAME (TREE_CODE (b) == OVERLOAD ? OVL_FUNCTION (b) : b);

  gcc_checking_assert (name_a && name_b);
  if (name_a != name_b)
    return name_a < name_b ? -1 : +1;

  if (name_a == conv_op_identifier)
    {
      /* Strip the conv-op markers. */
      gcc_checking_assert (OVL_FUNCTION (a) == conv_op_marker
			   && OVL_FUNCTION (b) == conv_op_marker);
      a = OVL_CHAIN (a);
      b = OVL_CHAIN (b);
    }

  if (TREE_CODE (a) == OVERLOAD)
    a = OVL_FUNCTION (a);
  if (TREE_CODE (b) == OVERLOAD)
    b = OVL_FUNCTION (b);

  /* We're in STAT_HACK or USING_DECL territory (or possibly error-land). */
  if (TREE_CODE (a) != TREE_CODE (b))
    {
      /* If one of them is a TYPE_DECL, it loses.  */
      if (TREE_CODE (a) == TYPE_DECL)
	return +1;
      else if (TREE_CODE (b) == TYPE_DECL)
	return -1;

      /* If one of them is a USING_DECL, it loses.  */
      if (TREE_CODE (a) == USING_DECL)
	return +1;
      else if (TREE_CODE (b) == USING_DECL)
	return -1;

      /* There are no other cases with different kinds of decls, as
	 duplicate detection should have kicked in earlier.  However,
	 some erroneous cases get through. */
      gcc_assert (errorcount);
    }

  /* Using source location would be the best thing here, but we can
     get identically-located decls in the following circumstances:

     1) duplicate artificial type-decls for the same type.

     2) pack expansions of using-decls.

     We should not be doing #1, but in either case it doesn't matter
     how we order these.  Use UID as a proxy for source ordering, so
     that identically-located decls still have a well-defined stable
     ordering.  */
  if (DECL_UID (a) != DECL_UID (b))
    return DECL_UID (a) < DECL_UID (b) ? -1 : +1;
  gcc_assert (a == b);
  return 0;
}
| |
/* Communication channel from resort_type_member_vec to the qsort
   comparator resort_member_name_cmp: NEW_VALUE remaps identifier
   pointers and COOKIE is its closure argument.  */
static struct {
  gt_pointer_operator new_value;
  void *cookie;
} resort_data;
| |
| /* This routine compares two fields like member_name_cmp but using the |
| pointer operator in resort_field_decl_data. We don't have to deal |
| with duplicates here. */ |
| |
| static int |
| resort_member_name_cmp (const void *a_p, const void *b_p) |
| { |
| tree a = *(const tree *)a_p; |
| tree b = *(const tree *)b_p; |
| tree name_a = OVL_NAME (a); |
| tree name_b = OVL_NAME (b); |
| |
| resort_data.new_value (&name_a, resort_data.cookie); |
| resort_data.new_value (&name_b, resort_data.cookie); |
| |
| gcc_checking_assert (name_a != name_b); |
| |
| return name_a < name_b ? -1 : +1; |
| } |
| |
| /* Resort CLASSTYPE_MEMBER_VEC because pointers have been reordered. */ |
| |
| void |
| resort_type_member_vec (void *obj, void */*orig_obj*/, |
| gt_pointer_operator new_value, void* cookie) |
| { |
| if (vec<tree, va_gc> *member_vec = (vec<tree, va_gc> *) obj) |
| { |
| resort_data.new_value = new_value; |
| resort_data.cookie = cookie; |
| member_vec->qsort (resort_member_name_cmp); |
| } |
| } |
| |
| /* Recursively count the number of fields in KLASS, including anonymous |
| union members. */ |
| |
| static unsigned |
| count_class_fields (tree klass) |
| { |
| unsigned n_fields = 0; |
| |
| for (tree fields = TYPE_FIELDS (klass); fields; fields = DECL_CHAIN (fields)) |
| if (DECL_DECLARES_FUNCTION_P (fields)) |
| /* Functions are dealt with separately. */; |
| else if (TREE_CODE (fields) == FIELD_DECL |
| && ANON_AGGR_TYPE_P (TREE_TYPE (fields))) |
| n_fields += count_class_fields (TREE_TYPE (fields)); |
| else if (DECL_NAME (fields)) |
| n_fields += 1; |
| |
| return n_fields; |
| } |
| |
| /* Append all the nonfunction members fields of KLASS to MEMBER_VEC. |
| Recurse for anonymous members. MEMBER_VEC must have space. */ |
| |
| static void |
| member_vec_append_class_fields (vec<tree, va_gc> *member_vec, tree klass) |
| { |
| for (tree fields = TYPE_FIELDS (klass); fields; fields = DECL_CHAIN (fields)) |
| if (DECL_DECLARES_FUNCTION_P (fields)) |
| /* Functions are handled separately. */; |
| else if (TREE_CODE (fields) == FIELD_DECL |
| && ANON_AGGR_TYPE_P (TREE_TYPE (fields))) |
| member_vec_append_class_fields (member_vec, TREE_TYPE (fields)); |
| else if (DECL_NAME (fields)) |
| { |
| tree field = fields; |
| /* Mark a conv-op USING_DECL with the conv-op-marker. */ |
| if (TREE_CODE (field) == USING_DECL |
| && IDENTIFIER_CONV_OP_P (DECL_NAME (field))) |
| field = ovl_make (conv_op_marker, field); |
| member_vec->quick_push (field); |
| } |
| } |
| |
| /* Append all of the enum values of ENUMTYPE to MEMBER_VEC. |
| MEMBER_VEC must have space. */ |
| |
| static void |
| member_vec_append_enum_values (vec<tree, va_gc> *member_vec, tree enumtype) |
| { |
| for (tree values = TYPE_VALUES (enumtype); |
| values; values = TREE_CHAIN (values)) |
| member_vec->quick_push (TREE_VALUE (values)); |
| } |
| |
| /* MEMBER_VEC has just had new DECLs added to it, but is sorted. |
| DeDup adjacent DECLS of the same name. We already dealt with |
| conflict resolution when adding the fields or methods themselves. |
| There are three cases (which could all be combined): |
| 1) a TYPE_DECL and non TYPE_DECL. Deploy STAT_HACK as appropriate. |
| 2) a USING_DECL and an overload. If the USING_DECL is dependent, |
| it wins. Otherwise the OVERLOAD does. |
| 3) two USING_DECLS. ... |
| |
| member_name_cmp will have ordered duplicates as |
| <fns><using><type> */ |
| |
static void
member_vec_dedup (vec<tree, va_gc> *member_vec)
{
  unsigned len = member_vec->length ();
  unsigned store = 0;	/* Next slot to write a merged entry into.  */

  if (!len)
    return;

  /* [IX, JX) delimits a run of adjacent entries with the same name
     (the vector is sorted, so duplicates are adjacent).  */
  tree name = OVL_NAME ((*member_vec)[0]);
  for (unsigned jx, ix = 0; ix < len; ix = jx)
    {
      tree current = NULL_TREE;	  /* Winning non-type, non-using entry.  */
      tree to_type = NULL_TREE;	  /* TYPE_DECL to merge via stat hack.  */
      tree to_using = NULL_TREE;  /* Dependent USING_DECL to keep.  */
      tree marker = NULL_TREE;	  /* conv-op-marker wrapper, if any.  */

      for (jx = ix; jx < len; jx++)
	{
	  tree next = (*member_vec)[jx];
	  if (jx != ix)
	    {
	      /* End of the current run of same-named entries?  */
	      tree next_name = OVL_NAME (next);
	      if (next_name != name)
		{
		  name = next_name;
		  break;
		}
	    }

	  if (IDENTIFIER_CONV_OP_P (name))
	    {
	      /* Conversion operators were wrapped with the
		 conv-op-marker when pushed; remember the wrapper and
		 inspect the payload.  */
	      marker = next;
	      next = OVL_CHAIN (next);
	    }

	  if (TREE_CODE (next) == USING_DECL)
	    {
	      if (IDENTIFIER_CTOR_P (name))
		/* Dependent inherited ctor. */
		continue;

	      next = strip_using_decl (next);
	      if (TREE_CODE (next) == USING_DECL)
		{
		  /* Still a USING_DECL after stripping: it is
		     dependent, and wins over an overload (case 2 of
		     the function comment).  */
		  to_using = next;
		  continue;
		}

	      /* A non-dependent using of functions loses to the
		 OVERLOAD in the same run.  */
	      if (is_overloaded_fn (next))
		continue;
	    }

	  if (DECL_DECLARES_TYPE_P (next))
	    {
	      to_type = next;
	      continue;
	    }

	  if (!current)
	    current = next;
	}

      /* A kept dependent USING_DECL is overloaded onto CURRENT.  */
      if (to_using)
	{
	  if (!current)
	    current = to_using;
	  else
	    current = ovl_make (to_using, current);
	}

      /* A type sharing the name is tucked away via the stat hack
	 (case 1 of the function comment).  */
      if (to_type)
	{
	  if (!current)
	    current = to_type;
	  else
	    current = stat_hack (current, to_type);
	}

      if (current)
	{
	  /* Re-wrap a conversion-op result in its marker.  */
	  if (marker)
	    {
	      OVL_CHAIN (marker) = current;
	      current = marker;
	    }
	  (*member_vec)[store++] = current;
	}
    }

  /* Trim the vector down to the deduplicated length.  */
  while (store++ < len)
    member_vec->pop ();
}
| |
| /* Add the non-function members to CLASSTYPE_MEMBER_VEC. If there is |
| no existing MEMBER_VEC and fewer than 8 fields, do nothing. We |
| know there must be at least 1 field -- the self-reference |
| TYPE_DECL, except for anon aggregates, which will have at least |
| one field anyway. If EXTRA < 0, always create the vector. */ |
| |
vec<tree, va_gc> *
set_class_bindings (tree klass, int extra)
{
  unsigned n_fields = count_class_fields (klass);
  vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);

  /* Only build/extend the vector when one already exists, the class
     is big enough, or the caller forces it with EXTRA < 0.  */
  if (member_vec || n_fields >= 8 || extra < 0)
    {
      /* Append the new fields.  NOTE: vec_safe_reserve_exact may
	 reallocate, updating MEMBER_VEC in place.  */
      vec_safe_reserve_exact (member_vec, n_fields + (extra >= 0 ? extra : 0));
      member_vec_append_class_fields (member_vec, klass);
    }

  if (member_vec)
    {
      /* Store the (possibly reallocated) vector back, then sort it by
	 name and merge same-named entries.  */
      CLASSTYPE_MEMBER_VEC (klass) = member_vec;
      member_vec->qsort (member_name_cmp);
      member_vec_dedup (member_vec);
    }

  return member_vec;
}
| |
| /* Insert lately defined enum ENUMTYPE into KLASS for the sorted case. */ |
| |
void
insert_late_enum_def_bindings (tree klass, tree enumtype)
{
  int n_fields;
  vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);

  /* The enum bindings will already be on the TYPE_FIELDS, so don't
     count them twice.  */
  if (!member_vec)
    n_fields = count_class_fields (klass);
  else
    n_fields = list_length (TYPE_VALUES (enumtype));

  if (member_vec || n_fields >= 8)
    {
      /* May reallocate; MEMBER_VEC is updated in place.  */
      vec_safe_reserve_exact (member_vec, n_fields);
      if (CLASSTYPE_MEMBER_VEC (klass))
	/* A vector already existed: only the new enumerators need
	   appending.  */
	member_vec_append_enum_values (member_vec, enumtype);
      else
	/* Creating the vector afresh: gather all the fields, which
	   include the enumerators (see comment above).  */
	member_vec_append_class_fields (member_vec, klass);
      CLASSTYPE_MEMBER_VEC (klass) = member_vec;
      member_vec->qsort (member_name_cmp);
      member_vec_dedup (member_vec);
    }
}
| |
| /* The binding oracle; see cp-tree.h. */ |
| |
| cp_binding_oracle_function *cp_binding_oracle; |
| |
| /* If we have a binding oracle, ask it for all namespace-scoped |
| definitions of NAME. */ |
| |
| static inline void |
| query_oracle (tree name) |
| { |
| if (!cp_binding_oracle) |
| return; |
| |
| /* LOOKED_UP holds the set of identifiers that we have already |
| looked up with the oracle. */ |
| static hash_set<tree> looked_up; |
| if (looked_up.add (name)) |
| return; |
| |
| cp_binding_oracle (CP_ORACLE_IDENTIFIER, name); |
| } |
| |
| #ifndef ENABLE_SCOPE_CHECKING |
| # define ENABLE_SCOPE_CHECKING 0 |
| #else |
| # define ENABLE_SCOPE_CHECKING 1 |
| #endif |
| |
| /* A free list of "cxx_binding"s, connected by their PREVIOUS. */ |
| |
| static GTY((deletable)) cxx_binding *free_bindings; |
| |
| /* Initialize VALUE and TYPE field for BINDING, and set the PREVIOUS |
| field to NULL. */ |
| |
| static inline void |
| cxx_binding_init (cxx_binding *binding, tree value, tree type) |
| { |
| binding->value = value; |
| binding->type = type; |
| binding->previous = NULL; |
| } |
| |
| /* (GC)-allocate a binding object with VALUE and TYPE member initialized. */ |
| |
| static cxx_binding * |
| cxx_binding_make (tree value, tree type) |
| { |
| cxx_binding *binding = free_bindings; |
| |
| if (binding) |
| free_bindings = binding->previous; |
| else |
| binding = ggc_alloc<cxx_binding> (); |
| |
| /* Clear flags by default. */ |
| LOCAL_BINDING_P (binding) = false; |
| INHERITED_VALUE_BINDING_P (binding) = false; |
| HIDDEN_TYPE_BINDING_P (binding) = false; |
| |
| cxx_binding_init (binding, value, type); |
| |
| return binding; |
| } |
| |
| /* Put BINDING back on the free list. */ |
| |
| static inline void |
| cxx_binding_free (cxx_binding *binding) |
| { |
| binding->scope = NULL; |
| binding->previous = free_bindings; |
| free_bindings = binding; |
| } |
| |
| /* Create a new binding for NAME (with the indicated VALUE and TYPE |
| bindings) in the class scope indicated by SCOPE. */ |
| |
| static cxx_binding * |
| new_class_binding (tree name, tree value, tree type, cp_binding_level *scope) |
| { |
| cp_class_binding cb = {cxx_binding_make (value, type), name}; |
| cxx_binding *binding = cb.base; |
| vec_safe_push (scope->class_shadowed, cb); |
| binding->scope = scope; |
| return binding; |
| } |
| |
| /* Make DECL the innermost binding for ID. The LEVEL is the binding |
| level at which this declaration is being bound. */ |
| |
| void |
| push_binding (tree id, tree decl, cp_binding_level* level) |
| { |
| cxx_binding *binding; |
| |
| if (level != class_binding_level) |
| { |
| binding = cxx_binding_make (decl, NULL_TREE); |
| binding->scope = level; |
| } |
| else |
| binding = new_class_binding (id, decl, /*type=*/NULL_TREE, level); |
| |
| /* Now, fill in the binding information. */ |
| binding->previous = IDENTIFIER_BINDING (id); |
| LOCAL_BINDING_P (binding) = (level != class_binding_level); |
| |
| /* And put it on the front of the list of bindings for ID. */ |
| IDENTIFIER_BINDING (id) = binding; |
| } |
| |
| /* Remove the binding for DECL which should be the innermost binding |
| for ID. */ |
| |
| void |
| pop_local_binding (tree id, tree decl) |
| { |
| if (!id || IDENTIFIER_ANON_P (id)) |
| /* It's easiest to write the loops that call this function without |
| checking whether or not the entities involved have names. We |
| get here for such an entity. */ |
| return; |
| |
| /* Get the innermost binding for ID. */ |
| cxx_binding *binding = IDENTIFIER_BINDING (id); |
| |
| /* The name should be bound. */ |
| gcc_assert (binding != NULL); |
| |
| /* The DECL will be either the ordinary binding or the type binding |
| for this identifier. Remove that binding. We don't have to |
| clear HIDDEN_TYPE_BINDING_P, as the whole binding will be going |
| away. */ |
| if (binding->value == decl) |
| binding->value = NULL_TREE; |
| else |
| { |
| gcc_checking_assert (binding->type == decl); |
| binding->type = NULL_TREE; |
| } |
| |
| if (!binding->value && !binding->type) |
| { |
| /* We're completely done with the innermost binding for this |
| identifier. Unhook it from the list of bindings. */ |
| IDENTIFIER_BINDING (id) = binding->previous; |
| |
| /* Add it to the free list. */ |
| cxx_binding_free (binding); |
| } |
| } |
| |
| /* Remove the bindings for the decls of the current level and leave |
| the current scope. */ |
| |
| void |
| pop_bindings_and_leave_scope (void) |
| { |
| for (tree t = get_local_decls (); t; t = DECL_CHAIN (t)) |
| { |
| tree decl = TREE_CODE (t) == TREE_LIST ? TREE_VALUE (t) : t; |
| tree name = OVL_NAME (decl); |
| |
| pop_local_binding (name, decl); |
| } |
| |
| leave_scope (); |
| } |
| |
| /* Strip non dependent using declarations. If DECL is dependent, |
| surreptitiously create a typename_type and return it. */ |
| |
| tree |
| strip_using_decl (tree decl) |
| { |
| if (decl == NULL_TREE) |
| return NULL_TREE; |
| |
| while (TREE_CODE (decl) == USING_DECL && !DECL_DEPENDENT_P (decl)) |
| decl = USING_DECL_DECLS (decl); |
| |
| if (TREE_CODE (decl) == USING_DECL && DECL_DEPENDENT_P (decl) |
| && USING_DECL_TYPENAME_P (decl)) |
| { |
| /* We have found a type introduced by a using |
| declaration at class scope that refers to a dependent |
| type. |
| |
| using typename :: [opt] nested-name-specifier unqualified-id ; |
| */ |
| decl = make_typename_type (USING_DECL_SCOPE (decl), |
| DECL_NAME (decl), |
| typename_type, tf_error); |
| if (decl != error_mark_node) |
| decl = TYPE_NAME (decl); |
| } |
| |
| return decl; |
| } |
| |
| /* Return true if OVL is an overload for an anticipated builtin. */ |
| |
| static bool |
| anticipated_builtin_p (tree ovl) |
| { |
| return (TREE_CODE (ovl) == OVERLOAD |
| && OVL_HIDDEN_P (ovl) |
| && DECL_IS_UNDECLARED_BUILTIN (OVL_FUNCTION (ovl))); |
| } |
| |
| /* BINDING records an existing declaration for a name in the current scope. |
| But, DECL is another declaration for that same identifier in the |
| same scope. This is the `struct stat' hack whereby a non-typedef |
| class name or enum-name can be bound at the same level as some other |
| kind of entity. |
| 3.3.7/1 |
| |
| A class name (9.1) or enumeration name (7.2) can be hidden by the |
| name of an object, function, or enumerator declared in the same scope. |
| If a class or enumeration name and an object, function, or enumerator |
| are declared in the same scope (in any order) with the same name, the |
| class or enumeration name is hidden wherever the object, function, or |
| enumerator name is visible. |
| |
| It's the responsibility of the caller to check that |
| inserting this name is valid here. Returns nonzero if the new binding |
| was successful. */ |
| |
static bool
supplement_binding_1 (cxx_binding *binding, tree decl)
{
  tree bval = binding->value;
  bool ok = true;
  /* Look through using declarations to compare the underlying
     entities.  */
  tree target_bval = strip_using_decl (bval);
  tree target_decl = strip_using_decl (decl);

  if (TREE_CODE (target_decl) == TYPE_DECL && DECL_ARTIFICIAL (target_decl)
      && target_decl != target_bval
      && (TREE_CODE (target_bval) != TYPE_DECL
	  /* We allow pushing an enum multiple times in a class
	     template in order to handle late matching of underlying
	     type on an opaque-enum-declaration followed by an
	     enum-specifier.  */
	  || (processing_template_decl
	      && TREE_CODE (TREE_TYPE (target_decl)) == ENUMERAL_TYPE
	      && TREE_CODE (TREE_TYPE (target_bval)) == ENUMERAL_TYPE
	      && (dependent_type_p (ENUM_UNDERLYING_TYPE
				    (TREE_TYPE (target_decl)))
		  || dependent_type_p (ENUM_UNDERLYING_TYPE
				      (TREE_TYPE (target_bval)))))))
    /* The new name is the type name.  */
    binding->type = decl;
  else if (/* TARGET_BVAL is null when push_class_level_binding moves
	      an inherited type-binding out of the way to make room
	      for a new value binding.  */
	   !target_bval
	   /* TARGET_BVAL is error_mark_node when TARGET_DECL's name
	      has been used in a non-class scope prior declaration.
	      In that case, we should have already issued a
	      diagnostic; for graceful error recovery purpose, pretend
	      this was the intended declaration for that name.  */
	   || target_bval == error_mark_node
	   /* If TARGET_BVAL is anticipated but has not yet been
	      declared, pretend it is not there at all.  */
	   || anticipated_builtin_p (target_bval))
    binding->value = decl;
  else if (TREE_CODE (target_bval) == TYPE_DECL
	   && DECL_ARTIFICIAL (target_bval)
	   && target_decl != target_bval
	   && (TREE_CODE (target_decl) != TYPE_DECL
	       || same_type_p (TREE_TYPE (target_decl),
			       TREE_TYPE (target_bval))))
    {
      /* The old binding was a type name.  It was placed in
	 VALUE field because it was thought, at the point it was
	 declared, to be the only entity with such a name.  Move the
	 type name into the type slot; it is now hidden by the new
	 binding.  */
      binding->type = bval;
      binding->value = decl;
      binding->value_is_inherited = false;
    }
  else if (TREE_CODE (target_bval) == TYPE_DECL
	   && TREE_CODE (target_decl) == TYPE_DECL
	   && DECL_NAME (target_decl) == DECL_NAME (target_bval)
	   && binding->scope->kind != sk_class
	   && (same_type_p (TREE_TYPE (target_decl), TREE_TYPE (target_bval))
	       /* If either type involves template parameters, we must
		  wait until instantiation.  */
	       || uses_template_parms (TREE_TYPE (target_decl))
	       || uses_template_parms (TREE_TYPE (target_bval))))
    /* We have two typedef-names, both naming the same type to have
       the same name.  In general, this is OK because of:

	 [dcl.typedef]

	 In a given scope, a typedef specifier can be used to redefine
	 the name of any type declared in that scope to refer to the
	 type to which it already refers.

       However, in class scopes, this rule does not apply due to the
       stricter language in [class.mem] prohibiting redeclarations of
       members.  */
    ok = false;
  /* There can be two block-scope declarations of the same variable,
     so long as they are `extern' declarations.  However, there cannot
     be two declarations of the same static data member:

       [class.mem]

       A member shall not be declared twice in the
       member-specification.  */
  else if (VAR_P (target_decl)
	   && VAR_P (target_bval)
	   && DECL_EXTERNAL (target_decl) && DECL_EXTERNAL (target_bval)
	   && !DECL_CLASS_SCOPE_P (target_decl))
    {
      /* Merge the two extern declarations; the binding stays as-is.  */
      duplicate_decls (decl, binding->value);
      ok = false;
    }
  else if (TREE_CODE (decl) == NAMESPACE_DECL
	   && TREE_CODE (bval) == NAMESPACE_DECL
	   && DECL_NAMESPACE_ALIAS (decl)
	   && DECL_NAMESPACE_ALIAS (bval)
	   && ORIGINAL_NAMESPACE (bval) == ORIGINAL_NAMESPACE (decl))
    /* [namespace.alias]

       In a declarative region, a namespace-alias-definition can be
       used to redefine a namespace-alias declared in that declarative
       region to refer only to the namespace to which it already
       refers.  */
    ok = false;
  else if (TREE_CODE (bval) == USING_DECL
	   && CONST_DECL_USING_P (decl))
    /* Let the clone hide the using-decl that introduced it.  */
    binding->value = decl;
  else
    {
      /* Anything else is a genuine conflict.  */
      if (!error_operand_p (bval))
	diagnose_name_conflict (decl, bval);
      ok = false;
    }

  return ok;
}
| |
| /* Diagnose a name conflict between DECL and BVAL. */ |
| |
| static void |
| diagnose_name_conflict (tree decl, tree bval) |
| { |
| if (TREE_CODE (decl) == TREE_CODE (bval) |
| && TREE_CODE (decl) != NAMESPACE_DECL |
| && !DECL_DECLARES_FUNCTION_P (decl) |
| && (TREE_CODE (decl) != TYPE_DECL |
| || DECL_ARTIFICIAL (decl) == DECL_ARTIFICIAL (bval)) |
| && CP_DECL_CONTEXT (decl) == CP_DECL_CONTEXT (bval)) |
| { |
| if (concept_definition_p (decl)) |
| error ("redeclaration of %q#D with different template parameters", |
| decl); |
| else |
| error ("redeclaration of %q#D", decl); |
| } |
| else |
| error ("%q#D conflicts with a previous declaration", decl); |
| |
| inform (location_of (bval), "previous declaration %q#D", bval); |
| } |
| |
| /* Wrapper for supplement_binding_1. */ |
| |
| static bool |
| supplement_binding (cxx_binding *binding, tree decl) |
| { |
| bool ret; |
| bool subtime = timevar_cond_start (TV_NAME_LOOKUP); |
| ret = supplement_binding_1 (binding, decl); |
| timevar_cond_stop (TV_NAME_LOOKUP, subtime); |
| return ret; |
| } |
| |
| /* Replace BINDING's current value on its scope's name list with |
| NEWVAL. */ |
| |
static void
update_local_overload (cxx_binding *binding, tree newval)
{
  tree *d;

  /* Scan the scope's names list for the current value, keeping a
     pointer-to-pointer so we can splice in place.  */
  for (d = &binding->scope->names; ; d = &TREE_CHAIN (*d))
    if (*d == binding->value)
      {
	/* Stitch new list node in.  */
	*d = tree_cons (DECL_NAME (*d), NULL_TREE, TREE_CHAIN (*d));
	break;
      }
    else if (TREE_CODE (*d) == TREE_LIST && TREE_VALUE (*d) == binding->value)
      /* Already wrapped in a TREE_LIST node; reuse it.  */
      break;

  /* Either way, *D is now a TREE_LIST node whose value we replace.  */
  TREE_VALUE (*d) = newval;
}
| |
| /* Compares the parameter-type-lists of ONE and TWO and |
| returns false if they are different. If the DECLs are template |
| functions, the return types and the template parameter lists are |
| compared too (DR 565). */ |
| |
| static bool |
| matching_fn_p (tree one, tree two) |
| { |
| if (TREE_CODE (one) != TREE_CODE (two)) |
| return false; |
| |
| if (!compparms (TYPE_ARG_TYPES (TREE_TYPE (one)), |
| TYPE_ARG_TYPES (TREE_TYPE (two)))) |
| return false; |
| |
| if (TREE_CODE (one) == TEMPLATE_DECL) |
| { |
| /* Compare template parms. */ |
| if (!comp_template_parms (DECL_TEMPLATE_PARMS (one), |
| DECL_TEMPLATE_PARMS (two))) |
| return false; |
| |
| /* And return type. */ |
| if (!same_type_p (TREE_TYPE (TREE_TYPE (one)), |
| TREE_TYPE (TREE_TYPE (two)))) |
| return false; |
| } |
| |
| if (!equivalently_constrained (one, two)) |
| return false; |
| |
| return true; |
| } |
| |
| /* Push DECL into nonclass LEVEL BINDING or SLOT. OLD is the current |
| binding value (possibly with anticipated builtins stripped). |
| Diagnose conflicts and return updated decl. */ |
| |
static tree
update_binding (cp_binding_level *level, cxx_binding *binding, tree *slot,
		tree old, tree decl, bool hiding = false)
{
  tree old_type = NULL_TREE;
  bool hide_type = false;
  bool hide_value = false;

  /* Pull the existing type binding and hiddenness flags out of
     whichever representation we were given: a cxx_binding (local
     scopes) or a namespace-table slot.  */
  if (!slot)
    {
      old_type = binding->type;
      hide_type = HIDDEN_TYPE_BINDING_P (binding);
      if (!old_type)
	hide_value = hide_type, hide_type = false;
    }
  else if (STAT_HACK_P (*slot))
    {
      old_type = STAT_TYPE (*slot);
      hide_type = STAT_TYPE_HIDDEN_P (*slot);
      hide_value = STAT_DECL_HIDDEN_P (*slot);
    }

  /* TO_VAL/TO_TYPE are what we will write back at the end.  */
  tree to_val = decl;
  tree to_type = old_type;
  bool local_overload = false;

  gcc_assert (!level || level->kind == sk_namespace ? !binding
	      : level->kind != sk_class && !slot);

  if (old == error_mark_node)
    old = NULL_TREE;

  if (DECL_IMPLICIT_TYPEDEF_P (decl))
    {
      /* Pushing an artificial decl.  We should not find another
	 artificial decl here already -- lookup_elaborated_type will
	 have already found it.  */
      gcc_checking_assert (!to_type
			   && !(old && DECL_IMPLICIT_TYPEDEF_P (old)));

      if (old)
	{
	  /* Put DECL into the type slot.  */
	  gcc_checking_assert (!to_type);
	  hide_type = hiding;
	  to_type = decl;
	  to_val = old;
	}
      else
	hide_value = hiding;

      goto done;
    }

  if (old && DECL_IMPLICIT_TYPEDEF_P (old))
    {
      /* OLD is an implicit typedef.  Move it to to_type.  */
      gcc_checking_assert (!to_type);

      to_type = old;
      hide_type = hide_value;
      old = NULL_TREE;
      hide_value = false;
    }

  if (DECL_DECLARES_FUNCTION_P (decl))
    {
      if (!old)
	;
      else if (OVL_P (old))
	{
	  /* Check the new function against each one already bound.  */
	  for (ovl_iterator iter (old); iter; ++iter)
	    {
	      tree fn = *iter;

	      if (iter.using_p () && matching_fn_p (fn, decl))
		{
		  gcc_checking_assert (!iter.hidden_p ());
		  /* If a function declaration in namespace scope or
		     block scope has the same name and the same
		     parameter-type- list (8.3.5) as a function
		     introduced by a using-declaration, and the
		     declarations do not declare the same function,
		     the program is ill-formed.  [namespace.udecl]/14 */
		  if (tree match = duplicate_decls (decl, fn, hiding))
		    return match;
		  else
		    /* FIXME: To preserve existing error behavior, we
		       still push the decl.  This might change.  */
		    diagnose_name_conflict (decl, fn);
		}
	    }
	}
      else
	goto conflict;

      /* At this point TO_TYPE != OLD_TYPE only if an implicit typedef
	 was just moved into TO_TYPE above; the new function hides that
	 class's constructor name.  */
      if (to_type != old_type
	  && warn_shadow
	  && MAYBE_CLASS_TYPE_P (TREE_TYPE (to_type))
	  && !(DECL_IN_SYSTEM_HEADER (decl)
	       && DECL_IN_SYSTEM_HEADER (to_type)))
	warning (OPT_Wshadow, "%q#D hides constructor for %q#D",
		 decl, to_type);

      local_overload = old && level && level->kind != sk_namespace;
      to_val = ovl_insert (decl, old, -int (hiding));
    }
  else if (old)
    {
      if (TREE_CODE (old) != TREE_CODE (decl))
	/* Different kinds of decls conflict.  */
	goto conflict;
      else if (TREE_CODE (old) == TYPE_DECL)
	{
	  if (same_type_p (TREE_TYPE (old), TREE_TYPE (decl)))
	    /* Two type decls to the same type.  Do nothing.  */
	    return old;
	  else
	    goto conflict;
	}
      else if (TREE_CODE (old) == NAMESPACE_DECL)
	{
	  /* Two maybe-aliased namespaces.  If they're to the same target
	     namespace, that's ok.  */
	  if (ORIGINAL_NAMESPACE (old) != ORIGINAL_NAMESPACE (decl))
	    goto conflict;

	  /* The new one must be an alias at this point.  */
	  gcc_assert (DECL_NAMESPACE_ALIAS (decl));
	  return old;
	}
      else if (TREE_CODE (old) == VAR_DECL)
	{
	  /* There can be two block-scope declarations of the same
	     variable, so long as they are `extern' declarations.  */
	  if (!DECL_EXTERNAL (old) || !DECL_EXTERNAL (decl))
	    goto conflict;
	  else if (tree match = duplicate_decls (decl, old))
	    {
	      gcc_checking_assert (!hide_value && !hiding);
	      return match;
	    }
	  else
	    goto conflict;
	}
      else
	{
	conflict:
	  diagnose_name_conflict (decl, old);
	  to_val = NULL_TREE;
	}
    }
  else if (hiding)
    hide_value = true;

 done:
  /* TO_VAL is null only on a diagnosed conflict, in which case the
     binding is left untouched and DECL is returned unchanged.  */
  if (to_val)
    {
      if (local_overload)
	{
	  gcc_checking_assert (binding->value && OVL_P (binding->value));
	  update_local_overload (binding, to_val);
	}
      else if (level
	       && !(TREE_CODE (decl) == NAMESPACE_DECL
		    && !DECL_NAMESPACE_ALIAS (decl)))
	/* Don't add namespaces here.  They're done in
	   push_namespace.  */
	add_decl_to_level (level, decl);

      /* Write the results back, preserving the slot/binding
	 representation we were handed.  */
      if (slot)
	{
	  if (STAT_HACK_P (*slot))
	    {
	      STAT_TYPE (*slot) = to_type;
	      STAT_DECL (*slot) = to_val;
	      STAT_TYPE_HIDDEN_P (*slot) = hide_type;
	      STAT_DECL_HIDDEN_P (*slot) = hide_value;
	    }
	  else if (to_type || hide_value)
	    {
	      /* A type or a hidden value needs the stat-hack
		 wrapper.  */
	      *slot = stat_hack (to_val, to_type);
	      STAT_TYPE_HIDDEN_P (*slot) = hide_type;
	      STAT_DECL_HIDDEN_P (*slot) = hide_value;
	    }
	  else
	    {
	      gcc_checking_assert (!hide_type);
	      *slot = to_val;
	    }
	}
      else
	{
	  binding->type = to_type;
	  binding->value = to_val;
	  HIDDEN_TYPE_BINDING_P (binding) = hide_type || hide_value;
	}
    }

  return decl;
}
| |
| /* Table of identifiers to extern C declarations (or LISTS thereof). */ |
| |
| static GTY(()) hash_table<named_decl_hash> *extern_c_decls; |
| |
| /* DECL has C linkage. If we have an existing instance, make sure the |
| new one is compatible. Make sure it has the same exception |
| specification [7.5, 7.6]. Add DECL to the map. */ |
| |
static void
check_extern_c_conflict (tree decl)
{
  /* Ignore artificial or system header decls.  */
  if (DECL_ARTIFICIAL (decl) || DECL_IN_SYSTEM_HEADER (decl))
    return;

  /* This only applies to decls at namespace scope.  */
  if (!DECL_NAMESPACE_SCOPE_P (decl))
    return;

  /* Lazily create the name -> extern "C" decl map.  */
  if (!extern_c_decls)
    extern_c_decls = hash_table<named_decl_hash>::create_ggc (127);

  tree *slot = extern_c_decls
    ->find_slot_with_hash (DECL_NAME (decl),
			   IDENTIFIER_HASH_VALUE (DECL_NAME (decl)), INSERT);
  if (tree old = *slot)
    {
      /* The slot holds either a single decl or an OVERLOAD whose
	 OVL_FUNCTION is the representative decl.  */
      if (TREE_CODE (old) == OVERLOAD)
	old = OVL_FUNCTION (old);

      /* MISMATCH: 0 = compatible, 1 = type mismatch,
	 -1 = exception-specification mismatch.  */
      int mismatch = 0;
      if (DECL_CONTEXT (old) == DECL_CONTEXT (decl))
	; /* If they're in the same context, we'll have already complained
	     about a (possible) mismatch, when inserting the decl.  */
      else if (!decls_match (decl, old))
	mismatch = 1;
      else if (TREE_CODE (decl) == FUNCTION_DECL
	       && !comp_except_specs (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (old)),
				      TYPE_RAISES_EXCEPTIONS (TREE_TYPE (decl)),
				      ce_normal))
	mismatch = -1;
      else if (DECL_ASSEMBLER_NAME_SET_P (old))
	SET_DECL_ASSEMBLER_NAME (decl, DECL_ASSEMBLER_NAME (old));

      if (mismatch)
	{
	  auto_diagnostic_group d;
	  pedwarn (DECL_SOURCE_LOCATION (decl), 0,
		   "conflicting C language linkage declaration %q#D", decl);
	  inform (DECL_SOURCE_LOCATION (old),
		  "previous declaration %q#D", old);
	  if (mismatch < 0)
	    inform (DECL_SOURCE_LOCATION (decl),
		    "due to different exception specifications");
	}
      else
	{
	  if (old == *slot)
	    /* The hash table expects OVERLOADS, so construct one with
	       OLD as both the function and the chain.  This allocate
	       an excess OVERLOAD node, but it's rare to have multiple
	       extern "C" decls of the same name.  And we save
	       complicating the hash table logic (which is used
	       elsewhere).  */
	    *slot = ovl_make (old, old);

	  /* Prepend DECL to the TREE_LIST hanging off the OVERLOAD's
	     chain.  */
	  slot = &OVL_CHAIN (*slot);

	  /* Chain it on for c_linkage_binding's use.  */
	  *slot = tree_cons (NULL_TREE, decl, *slot);
	}
    }
  else
    /* First extern "C" decl with this name; store it directly.  */
    *slot = decl;
}
| |
| /* Returns a list of C-linkage decls with the name NAME. Used in |
| c-family/c-pragma.c to implement redefine_extname pragma. */ |
| |
| tree |
| c_linkage_bindings (tree name) |
| { |
| if (extern_c_decls) |
| if (tree *slot = extern_c_decls |
| ->find_slot_with_hash (name, IDENTIFIER_HASH_VALUE (name), NO_INSERT)) |
| { |
| tree result = *slot; |
| if (TREE_CODE (result) == OVERLOAD) |
| result = OVL_CHAIN (result); |
| return result; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| /* Subroutine of check_local_shadow. */ |
| |
| static void |
| inform_shadowed (tree shadowed) |
| { |
| inform (DECL_SOURCE_LOCATION (shadowed), |
| "shadowed declaration is here"); |
| } |
| |
| /* DECL is being declared at a local scope. Emit suitable shadow |
| warnings. */ |
| |
| static void |
| check_local_shadow (tree decl) |
| { |
| /* Don't complain about the parms we push and then pop |
| while tentatively parsing a function declarator. */ |
| if (TREE_CODE (decl) == PARM_DECL && !DECL_CONTEXT (decl)) |
| return; |
| |
| /* External decls are something else. */ |
| if (DECL_EXTERNAL (decl)) |
| return; |
| |
| tree old = NULL_TREE; |
| cp_binding_level *old_scope = NULL; |
| if (cxx_binding *binding = outer_binding (DECL_NAME (decl), NULL, true)) |
| { |
| old = binding->value; |
| old_scope = binding->scope; |
| } |
| |
| if (old |
| && (TREE_CODE (old) == PARM_DECL |
| || VAR_P (old) |
| || (TREE_CODE (old) == TYPE_DECL |
| && (!DECL_ARTIFICIAL (old) |
| || TREE_CODE (decl) == TYPE_DECL))) |
| && DECL_FUNCTION_SCOPE_P (old) |
| && (!DECL_ARTIFICIAL (decl) |
| || is_capture_proxy (decl) |
| || DECL_IMPLICIT_TYPEDEF_P (decl) |
| || (VAR_P (decl) && DECL_ANON_UNION_VAR_P (decl)))) |
| { |
| /* DECL shadows a local thing possibly of interest. */ |
| |
| /* DR 2211: check that captures and parameters |
| do not have the same name. */ |
| if (is_capture_proxy (decl)) |
| { |
| if (current_lambda_expr () |
| && DECL_CONTEXT (old) == lambda_function (current_lambda_expr ()) |
| && TREE_CODE (old) == PARM_DECL |
| && DECL_NAME (decl) != this_identifier) |
| { |
| error_at (DECL_SOURCE_LOCATION (old), |
| "lambda parameter %qD " |
| "previously declared as a capture", old); |
| } |
| return; |
| } |
| /* Don't complain if it's from an enclosing function. */ |
| else if (DECL_CONTEXT (old) == current_function_decl |
| && TREE_CODE (decl) != PARM_DECL |
| && TREE_CODE (old) == PARM_DECL) |
| { |
| /* Go to where the parms should be and see if we find |
| them there. */ |
| cp_binding_level *b = current_binding_level->level_chain; |
| |
| if (FUNCTION_NEEDS_BODY_BLOCK (current_function_decl)) |
| /* Skip the ctor/dtor cleanup level. */ |
| b = b->level_chain; |
| |
| /* [basic.scope.param] A parameter name shall not be redeclared |
| in the outermost block of the function definition. */ |
| if (b->kind == sk_function_parms) |
| { |
| error_at (DECL_SOURCE_LOCATION (decl), |
| "declaration of %q#D shadows a parameter", decl); |
| inform (DECL_SOURCE_LOCATION (old), |
| "%q#D previously declared here", old); |
| return; |
| } |
| } |
| |
| /* The local structure or class can't use parameters of |
| the containing function anyway. */ |
| if (DECL_CONTEXT (old) != current_function_decl) |
| { |
| for (cp_binding_level *scope = current_binding_level; |
| scope != old_scope; scope = scope->level_chain) |
| if (scope->kind == sk_class |
| && !LAMBDA_TYPE_P (scope->this_entity)) |
| return; |
| } |
| /* Error if redeclaring a local declared in a |
| init-statement or in the condition of an if or |
| switch statement when the new declaration is in the |
| outermost block of the controlled statement. |
| Redeclaring a variable from a for or while condition is |
| detected elsewhere. */ |
| else if (VAR_P (old) |
| && old_scope == current_binding_level->level_chain |
| && (old_scope->kind == sk_cond || old_scope->kind == sk_for)) |
| { |
| auto_diagnostic_group d; |
| error_at (DECL_SOURCE_LOCATION (decl), |
| "redeclaration of %q#D", decl); |
| inform (DECL_SOURCE_LOCATION (old), |
| "%q#D previously declared here", old); |
| return; |
| } |
| /* C++11: |
| 3.3.3/3: The name declared in an exception-declaration (...) |
| shall not be redeclared in the outermost block of the handler. |
| 3.3.3/2: A parameter name shall not be redeclared (...) in |
| the outermost block of any handler associated with a |
| function-try-block. |
| 3.4.1/15: The function parameter names shall not be redeclared |
| in the exception-declaration nor in the outermost block of a |
| handler for the function-try-block. */ |
| else if ((TREE_CODE (old) == VAR_DECL |
| && old_scope == current_binding_level->level_chain |
| && old_scope->kind == sk_catch) |
| || (TREE_CODE (old) == PARM_DECL |
| && (current_binding_level->kind == sk_catch |
| || current_binding_level->level_chain->kind == sk_catch) |
| && in_function_try_handler)) |
| { |
| auto_diagnostic_group d; |
| if (permerror (DECL_SOURCE_LOCATION (decl), |
| "redeclaration of %q#D", decl)) |
| inform (DECL_SOURCE_LOCATION (old), |
| "%q#D previously declared here", old); |
| return; |
| } |
| |
| /* If '-Wshadow=compatible-local' is specified without other |
| -Wshadow= flags, we will warn only when the type of the |
|