cp-tree.h (OVL_ARG_DEPENDENT): Delete.
	* cp-tree.h (OVL_ARG_DEPENDENT): Delete.
	(OVL_USED_P): New.
	(lookup_keep): Declare.
	* name-lookup.c (add_function): Don't set OVL_ARG_DEPENDENT.
	* pt.c (tsubst_copy): Assert lookup is persistent.
	* semantics.c (finish_call_expr): Use lkp_iterator, call
	lookup_keep.
	* tree.c (ovl_copy): New.
	(ovl_insert, ovl_iterator::remove_node): Copy immutable nodes.
	(lookup_keep): New.

From-SVN: r248250
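The scheme the ChangeLog describes: OVERLOAD nodes built while assembling a lookup (OVL_LOOKUP_P) are either pinned as persistent (OVL_USED_P) when the enclosing call expression must survive until template instantiation, or handed back to a free list for reuse. The code below is not GCC code; it is a minimal standalone sketch with hypothetical names (ovl_node, and a lookup_keep over that toy type), assuming a single-threaded free list.

    /* Toy stand-in for an OVERLOAD node: a singly linked chain of
       functions.  Nodes created while assembling a lookup are flagged
       lookup_p; used_p marks the ones that must outlive the lookup.  */
    struct ovl_node
    {
      void *fn;          /* the function this node wraps */
      ovl_node *chain;   /* rest of the overload chain */
      bool lookup_p;     /* created during lookup */
      bool used_p;       /* pinned for a later instantiation */
    };

    static ovl_node *ovl_cache; /* free list of recyclable lookup nodes */

    /* Walk the lookup-created prefix of LOOKUP.  If KEEP, pin each node;
       otherwise push it onto the free list (here the chain field doubles
       as the free-list link).  */
    void
    lookup_keep (ovl_node *lookup, bool keep)
    {
      while (lookup && lookup->lookup_p && !lookup->used_p)
        {
          ovl_node *next = lookup->chain;
          if (keep)
            lookup->used_p = true;
          else
            {
              lookup->chain = ovl_cache;
              ovl_cache = lookup;
            }
          lookup = next;
        }
    }

A caller would invoke lookup_keep (fns, true) when saving the call inside a template and lookup_keep (fns, false) once the overload set has served its purpose, which is the pattern finish_call_expr adopts in the diff below.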
--- a/gcc/cp/ChangeLog
+++ b/gcc/cp/ChangeLog
@@ -1,5 +1,16 @@
 2017-05-18  Nathan Sidwell  <nathan@acm.org>
 
+	* cp-tree.h (OVL_ARG_DEPENDENT): Delete.
+	(OVL_USED_P): New.
+	(lookup_keep): Declare.
+	* name-lookup.c (add_function): Don't set OVL_ARG_DEPENDENT.
+	* pt.c (tsubst_copy): Assert lookup is persistent.
+	* semantics.c (finish_call_expr): Use lkp_iterator, call
+	lookup_keep.
+	* tree.c (ovl_copy): New.
+	(ovl_insert, ovl_iterator::remove_node): Copy immutable nodes.
+	(lookup_keep): New.
+
 	* cp-tree.h (OVL_USED): Replace with ...
 	(OVL_USING_P): ... this.
 	(ovl_iterator::using_p): Adjust.
--- a/gcc/cp/cp-tree.h
+++ b/gcc/cp/cp-tree.h
@@ -323,7 +323,6 @@ extern GTY(()) tree cp_global_trees[CPTI_MAX];
       IMPLICIT_CONV_EXPR_DIRECT_INIT (in IMPLICIT_CONV_EXPR)
       TRANSACTION_EXPR_IS_STMT (in TRANSACTION_EXPR)
       CONVERT_EXPR_VBASE_PATH (in CONVERT_EXPR)
-      OVL_ARG_DEPENDENT (in OVERLOAD)
       PACK_EXPANSION_LOCAL_P (in *_PACK_EXPANSION)
       TINFO_HAS_ACCESS_ERRORS (in TEMPLATE_INFO)
       SIZEOF_EXPR_TYPE_P (in SIZEOF_EXPR)
@@ -641,9 +640,6 @@ typedef struct ptrmem_cst * ptrmem_cst_t;
   ((TREE_CODE (NODE) == OVERLOAD) ? OVL_FUNCTION (NODE) : (NODE))
 #define OVL_NEXT(NODE) \
   ((TREE_CODE (NODE) == OVERLOAD) ? TREE_CHAIN (NODE) : NULL_TREE)
-/* If set, this OVERLOAD was created for argument-dependent lookup
-   and can be freed afterward.  */
-#define OVL_ARG_DEPENDENT(NODE) TREE_LANG_FLAG_0 (OVERLOAD_CHECK (NODE))
 
 /* If set, this was imported in a using declaration.  */
 #define OVL_USING_P(NODE) TREE_LANG_FLAG_1 (OVERLOAD_CHECK (NODE))
@@ -651,6 +647,8 @@ typedef struct ptrmem_cst * ptrmem_cst_t;
 #define OVL_NESTED_P(NODE) TREE_LANG_FLAG_3 (OVERLOAD_CHECK (NODE))
 /* If set, this overload was constructed during lookup.  */
 #define OVL_LOOKUP_P(NODE) TREE_LANG_FLAG_4 (OVERLOAD_CHECK (NODE))
+/* If set, this is a persistent lookup.  */
+#define OVL_USED_P(NODE) TREE_USED (OVERLOAD_CHECK (NODE))
 
 /* The first decl of an overload.  */
 #define OVL_FIRST(NODE) ovl_first (NODE)
@@ -6809,6 +6807,7 @@ extern tree ovl_make (tree fn,
 extern tree ovl_insert (tree fn, tree maybe_ovl,
 			bool using_p = false);
 extern tree lookup_add (tree fns, tree lookup);
+extern void lookup_keep (tree lookup, bool keep);
 extern int is_overloaded_fn (tree);
 extern tree dependent_name (tree);
 extern tree get_fns (tree) ATTRIBUTE_PURE;
--- a/gcc/cp/name-lookup.c
+++ b/gcc/cp/name-lookup.c
@@ -159,11 +159,7 @@ add_function (struct arg_lookup *k, tree fn)
   else if (fn == k->functions)
     ;
   else
-    {
-      k->functions = lookup_add (fn, k->functions);
-      if (TREE_CODE (k->functions) == OVERLOAD)
-	OVL_ARG_DEPENDENT (k->functions) = true;
-    }
+    k->functions = lookup_add (fn, k->functions);
 
   return false;
 }
--- a/gcc/cp/pt.c
+++ b/gcc/cp/pt.c
@@ -14565,6 +14565,8 @@ tsubst_copy (tree t, tree args, tsubst_flags_t complain, tree in_decl)
 	 overload set from function scope will just be represented with an
 	 IDENTIFIER_NODE, and from class scope with a BASELINK.  */
       gcc_assert (!uses_template_parms (t));
+      /* We must have marked any lookups as persistent.  */
+      gcc_assert (!OVL_LOOKUP_P (t) || OVL_USED_P (t));
       return t;
 
     case BASELINK:
--- a/gcc/cp/semantics.c
+++ b/gcc/cp/semantics.c
@@ -2304,18 +2304,28 @@ finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
       result = build_nt_call_vec (fn, *args);
       SET_EXPR_LOCATION (result, EXPR_LOC_OR_LOC (fn, input_location));
       KOENIG_LOOKUP_P (result) = koenig_p;
+      if (is_overloaded_fn (fn))
+	{
+	  fn = get_fns (fn);
+	  lookup_keep (fn, true);
+	}
+
       if (cfun)
 	{
-	  do
+	  bool abnormal = true;
+	  for (lkp_iterator iter (fn); abnormal && iter; ++iter)
 	    {
-	      tree fndecl = OVL_CURRENT (fn);
+	      tree fndecl = *iter;
 	      if (TREE_CODE (fndecl) != FUNCTION_DECL
 		  || !TREE_THIS_VOLATILE (fndecl))
-		break;
-	      fn = OVL_NEXT (fn);
+		abnormal = false;
 	    }
-	  while (fn);
-	  if (!fn)
+	  /* FIXME: Stop warning about falling off end of non-void
+	     function.   But this is wrong.  Even if we only see
+	     no-return fns at this point, we could select a
+	     future-defined return fn during instantiation.  Or
+	     vice-versa.  */
+	  if (abnormal)
 	    current_function_returns_abnormally = 1;
 	}
       return result;
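The rewritten loop above no longer destructively walks the chain with OVL_NEXT; it iterates with lkp_iterator and treats the call as non-returning only if every candidate is a noreturn FUNCTION_DECL. A standalone sketch of that predicate, using a plain container and hypothetical field names rather than the real tree accessors:

    #include <vector>

    struct candidate
    {
      bool is_function_decl; /* stands in for TREE_CODE (...) == FUNCTION_DECL */
      bool noreturn_p;       /* stands in for TREE_THIS_VOLATILE */
    };

    /* True only when the (non-empty) overload set consists entirely of
       noreturn functions; that is the condition under which the patched
       code sets current_function_returns_abnormally.  */
    bool
    call_returns_abnormally (const std::vector<candidate> &fns)
    {
      bool abnormal = !fns.empty ();
      for (auto it = fns.begin (); abnormal && it != fns.end (); ++it)
        if (!it->is_function_decl || !it->noreturn_p)
          abnormal = false;
      return abnormal;
    }

As the FIXME in the hunk notes, this remains only a heuristic inside templates, since a later instantiation could select a returning candidate that is not visible yet.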
@@ -2469,24 +2479,9 @@ finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
 	  result = convert_from_reference (result);
 	}
 
-  if (koenig_p)
-    {
-      /* Free garbage OVERLOADs from arg-dependent lookup.  */
-      tree next = NULL_TREE;
-      for (fn = orig_fn;
-	   fn && TREE_CODE (fn) == OVERLOAD && OVL_ARG_DEPENDENT (fn);
-	   fn = next)
-	{
-	  if (processing_template_decl)
-	    /* In a template, we'll re-use them at instantiation time.  */
-	    OVL_ARG_DEPENDENT (fn) = false;
-	  else
-	    {
-	      next = OVL_CHAIN (fn);
-	      ggc_free (fn);
-	    }
-	}
-    }
+  /* Free or retain OVERLOADs from lookup.  */
+  if (is_overloaded_fn (orig_fn))
+    lookup_keep (get_fns (orig_fn), processing_template_decl);
 
   return result;
 }
--- a/gcc/cp/tree.c
+++ b/gcc/cp/tree.c
@@ -2124,6 +2124,30 @@ ovl_make (tree fn, tree next)
   return result;
 }
 
+static tree
+ovl_copy (tree ovl)
+{
+  tree result = ovl_cache;
+
+  if (result)
+    {
+      ovl_cache = OVL_FUNCTION (result);
+      /* Zap the flags.  */
+      memset (result, 0, sizeof (tree_base));
+      TREE_SET_CODE (result, OVERLOAD);
+    }
+  else
+    result = make_node (OVERLOAD);
+
+  gcc_assert (!OVL_NESTED_P (ovl) && !OVL_LOOKUP_P (ovl));
+  TREE_TYPE (result) = TREE_TYPE (ovl);
+  OVL_FUNCTION (result) = OVL_FUNCTION (ovl);
+  OVL_CHAIN (result) = OVL_CHAIN (ovl);
+  OVL_USING_P (result) = OVL_USING_P (ovl);
+
+  return result;
+}
+
 /* Add FN to the (potentially NULL) overload set OVL.  USING_P is
    true, if FN is via a using declaration.  Overloads are ordered as
    using, regular.  */
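ovl_copy above gets its result either by popping a node that lookup_keep returned to ovl_cache (zapping the stale flag bits) or by making a fresh node. The allocation half of that pattern, sketched over the same kind of toy node as in the earlier sketch; the names are again hypothetical:

    #include <cstring>

    struct ovl_node
    {
      void *fn;
      ovl_node *chain;
      bool lookup_p;
      bool used_p;
    };

    static ovl_node *ovl_cache; /* free list filled by the recycle path */

    /* Reuse a cached node if one is available, clearing whatever state
       it carried, otherwise allocate a fresh zero-initialized one.  */
    ovl_node *
    alloc_ovl_node ()
    {
      ovl_node *result = ovl_cache;
      if (result)
        {
          ovl_cache = result->chain;                /* unlink from free list */
          std::memset (result, 0, sizeof *result);  /* zap stale flags */
        }
      else
        result = new ovl_node ();                   /* value-initialized */
      return result;
    }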
@@ -2131,6 +2155,7 @@ ovl_make (tree fn, tree next)
 tree
 ovl_insert (tree fn, tree maybe_ovl, bool using_p)
 {
+  bool copying = false; /* Checking use only.  */
   int weight = using_p;
 
   tree result = NULL_TREE;
|
||||
while (maybe_ovl && TREE_CODE (maybe_ovl) == OVERLOAD
|
||||
&& (weight < OVL_USING_P (maybe_ovl)))
|
||||
{
|
||||
gcc_checking_assert (!OVL_LOOKUP_P (maybe_ovl)
|
||||
&& (!OVL_USED_P (maybe_ovl) || !copying));
|
||||
if (OVL_USED_P (maybe_ovl))
|
||||
{
|
||||
copying = true;
|
||||
maybe_ovl = ovl_copy (maybe_ovl);
|
||||
if (insert_after)
|
||||
OVL_CHAIN (insert_after) = maybe_ovl;
|
||||
}
|
||||
if (!result)
|
||||
result = maybe_ovl;
|
||||
insert_after = maybe_ovl;
|
||||
@@ -2156,7 +2190,7 @@ ovl_insert (tree fn, tree maybe_ovl, bool using_p)
 
   if (insert_after)
     {
-      TREE_CHAIN (insert_after) = trail;
+      OVL_CHAIN (insert_after) = trail;
       TREE_TYPE (insert_after) = unknown_type_node;
     }
   else
|
||||
return result;
|
||||
}
|
||||
|
||||
/* NODE is on the overloads of OVL. Remove it. */
|
||||
/* NODE is on the overloads of OVL. Remove it. If a predecessor is
|
||||
OVL_USED_P we must copy OVL nodes, because those are immutable.
|
||||
The removed node is unaltered and may continue to be iterated
|
||||
from (i.e. it is safe to remove a node from an overload one is
|
||||
currently iterating over). */
|
||||
|
||||
tree
|
||||
ovl_iterator::remove_node (tree overload, tree node)
|
||||
{
|
||||
bool copying = false; /* Checking use only. */
|
||||
|
||||
tree *slot = &overload;
|
||||
while (*slot != node)
|
||||
slot = &OVL_CHAIN (*slot);
|
||||
{
|
||||
tree probe = *slot;
|
||||
gcc_checking_assert (!OVL_LOOKUP_P (probe)
|
||||
&& (!OVL_USED_P (probe) || !copying));
|
||||
if (OVL_USED_P (probe))
|
||||
{
|
||||
copying = true;
|
||||
probe = ovl_copy (probe);
|
||||
*slot = probe;
|
||||
}
|
||||
|
||||
slot = &OVL_CHAIN (probe);
|
||||
}
|
||||
|
||||
/* Stitch out NODE. We don't have to worry about now making a
|
||||
singleton overload (and consequently maybe setting its type),
|
||||
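ovl_insert and remove_node now treat nodes marked OVL_USED_P as immutable: once the walk crosses such a node, the affected links are rebuilt on private copies, so the original chain that a pinned lookup still references is never written to. Below is a generic copy-on-write removal over a simple linked list, simplified from the patch (in particular, it copies everything downstream of the first shared node instead of asserting that the case cannot arise); the node type and names are hypothetical:

    struct node
    {
      int value;
      node *next;
      bool shared; /* analogue of OVL_USED_P: someone else may hold this node */
    };

    /* Remove TARGET, which must be on the list at *HEAD, without writing
       to any shared node or to anything reachable from one; private
       copies are spliced into our chain instead.  TARGET itself is left
       untouched, so an iterator positioned on it can still advance.  */
    void
    remove_copy_on_write (node **head, node *target)
    {
      bool copying = false; /* set once a shared node has been crossed */
      node **slot = head;
      while (*slot != target)
        {
          node *probe = *slot;
          if (probe->shared || copying)
            {
              copying = true;
              probe = new node (*probe); /* private copy */
              probe->shared = false;
              *slot = probe;
            }
          slot = &probe->next;
        }
      /* Stitch TARGET out of the (now private) chain.  */
      *slot = target->next;
    }

The checking-only copying flag in the patch plays a similar bookkeeping role, but as an assertion aid rather than a behavioural switch.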
@@ -2199,6 +2251,26 @@ lookup_add (tree fns, tree lookup)
   return lookup;
 }
 
+/* If KEEP is true, preserve the contents of a lookup so that it is
+   available for a later instantiation.  Otherwise release the LOOKUP
+   nodes for reuse.  */
+
+void
+lookup_keep (tree lookup, bool keep)
+{
+  for (;
+       lookup && TREE_CODE (lookup) == OVERLOAD
+	 && OVL_LOOKUP_P (lookup) && !OVL_USED_P (lookup);
+       lookup = OVL_CHAIN (lookup))
+    if (keep)
+      OVL_USED_P (lookup) = true;
+    else
+      {
+	OVL_FUNCTION (lookup) = ovl_cache;
+	ovl_cache = lookup;
+      }
+}
+
 /* Returns nonzero if X is an expression for a (possibly overloaded)
    function.  If "f" is a function or function template, "f", "c->f",
    "c.f", "C::f", and "f<int>" will all be considered possibly