gcc/cp/lambda.cc
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
6 Copyright (C) 1998-2023 Free Software Foundation, Inc.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34 #include "decl.h"
36 /* Constructor for a lambda expression. */
38 tree
39 build_lambda_expr (void)
41 tree lambda = make_node (LAMBDA_EXPR);
42 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
43 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
44 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
45 LAMBDA_EXPR_REGEN_INFO (lambda) = NULL_TREE;
46 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
47 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
48 return lambda;
51 /* Create the closure object for a LAMBDA_EXPR. */
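/* For illustration: given roughly

     int i = 0, j = 1;
     auto f = [i, &j] { return i + j; };

   evaluating the lambda-expression yields the closure object, which is
   built below as an aggregate-style direct-initialization of the unnamed
   closure type from the captured entities, conceptually { i, j }.  */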
53 tree
54 build_lambda_object (tree lambda_expr)
56 /* Build aggregate constructor call.
57 - cp_parser_braced_list
58 - cp_parser_functional_cast */
59 vec<constructor_elt, va_gc> *elts = NULL;
60 tree node, expr, type;
62 if (processing_template_decl || lambda_expr == error_mark_node)
63 return lambda_expr;
65 /* Make sure any error messages refer to the lambda-introducer. */
66 location_t loc = LAMBDA_EXPR_LOCATION (lambda_expr);
67 iloc_sentinel il (loc);
69 for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
70 node;
71 node = TREE_CHAIN (node))
73 tree field = TREE_PURPOSE (node);
74 tree val = TREE_VALUE (node);
76 if (field == error_mark_node)
78 expr = error_mark_node;
79 goto out;
82 if (TREE_CODE (val) == TREE_LIST)
83 val = build_x_compound_expr_from_list (val, ELK_INIT,
84 tf_warning_or_error);
86 if (DECL_P (val))
87 mark_used (val);
89 /* Mere mortals can't copy arrays with aggregate initialization, so
90 do some magic to make it work here. */
91 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
92 val = build_array_copy (val);
93 else if (DECL_NORMAL_CAPTURE_P (field)
94 && !DECL_VLA_CAPTURE_P (field)
95 && !TYPE_REF_P (TREE_TYPE (field)))
97 /* "the entities that are captured by copy are used to
98 direct-initialize each corresponding non-static data
99 member of the resulting closure object."
101 There's normally no way to express direct-initialization
102 from an element of a CONSTRUCTOR, so we build up a special
103 TARGET_EXPR to bypass the usual copy-initialization. */
104 val = force_rvalue (val, tf_warning_or_error);
105 if (TREE_CODE (val) == TARGET_EXPR)
106 TARGET_EXPR_DIRECT_INIT_P (val) = true;
109 CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
112 expr = build_constructor (init_list_type_node, elts);
113 CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
115 /* N2927: "[The closure] class type is not an aggregate."
116 But we briefly treat it as an aggregate to make this simpler. */
117 type = LAMBDA_EXPR_CLOSURE (lambda_expr);
118 CLASSTYPE_NON_AGGREGATE (type) = 0;
119 expr = finish_compound_literal (type, expr, tf_warning_or_error);
120 protected_set_expr_location (expr, loc);
121 CLASSTYPE_NON_AGGREGATE (type) = 1;
123 out:
124 return expr;
127 /* Return an initialized RECORD_TYPE for LAMBDA.
128 LAMBDA must have its explicit captures already. */
130 tree
131 begin_lambda_type (tree lambda)
133 /* Lambda names are nearly but not quite anonymous. */
134 tree name = make_anon_name ();
135 IDENTIFIER_LAMBDA_P (name) = true;
137 /* Create the new RECORD_TYPE for this lambda. */
138 tree type = xref_tag (/*tag_code=*/record_type, name);
139 if (type == error_mark_node)
140 return error_mark_node;
142 /* Designate it as a struct so that we can use aggregate initialization. */
143 CLASSTYPE_DECLARED_CLASS (type) = false;
145 /* Cross-reference the expression and the type. */
146 LAMBDA_EXPR_CLOSURE (lambda) = type;
147 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
149 /* In C++17, assume the closure is literal; we'll clear the flag later if
150 necessary. */
151 if (cxx_dialect >= cxx17)
152 CLASSTYPE_LITERAL_P (type) = true;
154 /* Clear base types. */
155 xref_basetypes (type, /*bases=*/NULL_TREE);
157 /* Start the class. */
158 type = begin_class_definition (type);
160 return type;
163 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
164 closure type. */
166 tree
167 lambda_function (tree lambda)
169 tree type;
170 if (TREE_CODE (lambda) == LAMBDA_EXPR)
171 type = LAMBDA_EXPR_CLOSURE (lambda);
172 else
173 type = lambda;
174 gcc_assert (LAMBDA_TYPE_P (type));
175 /* Don't let debug_tree cause instantiation. */
176 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
177 && !COMPLETE_OR_OPEN_TYPE_P (type))
178 return NULL_TREE;
179 lambda = lookup_member (type, call_op_identifier,
180 /*protect=*/0, /*want_type=*/false,
181 tf_warning_or_error);
182 if (lambda)
183 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
184 return lambda;
187 /* True if EXPR is an expression whose type can be used directly in lambda
188 capture. Not to be used for 'auto'. */
190 static bool
191 type_deducible_expression_p (tree expr)
193 if (!type_dependent_expression_p (expr))
194 return true;
195 if (BRACE_ENCLOSED_INITIALIZER_P (expr)
196 || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
197 return false;
198 tree t = non_reference (TREE_TYPE (expr));
199 return (t && TREE_CODE (t) != TYPE_PACK_EXPANSION
200 && !WILDCARD_TYPE_P (t) && !LAMBDA_TYPE_P (t)
201 && !array_of_unknown_bound_p (t)
202 && !type_uses_auto (t));
205 /* Returns the type to use for the FIELD_DECL corresponding to the
206 capture of EXPR. EXPLICIT_INIT_P indicates whether this is a
207 C++14 init capture, and BY_REFERENCE_P indicates whether we're
208 capturing by reference. */
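/* A rough sketch of the mapping computed here for the capture field:

     [x]         -> the (non-reference) type of x
     [&x]        -> reference to the type of x
     [y = expr]  -> C++14 init-capture: 'auto' (plus '&' when capturing by
                    reference) deduced from EXPR, left as 'auto' if EXPR
                    uses parameter packs
     a capture whose type cannot yet be deduced (in a template)
                 -> a DECLTYPE_TYPE placeholder resolved at instantiation.  */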
210 tree
211 lambda_capture_field_type (tree expr, bool explicit_init_p,
212 bool by_reference_p)
214 tree type;
215 bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
217 if (is_this)
218 type = TREE_TYPE (expr);
219 else if (explicit_init_p)
221 tree auto_node = make_auto ();
223 type = auto_node;
224 if (by_reference_p)
225 /* Add the reference now, so deduction doesn't lose
226 outermost CV qualifiers of EXPR. */
227 type = build_reference_type (type);
228 if (uses_parameter_packs (expr))
229 /* Stick with 'auto' even if the type could be deduced. */;
230 else
231 type = do_auto_deduction (type, expr, auto_node);
233 else if (!type_deducible_expression_p (expr))
235 type = cxx_make_type (DECLTYPE_TYPE);
236 DECLTYPE_TYPE_EXPR (type) = expr;
237 DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
238 DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
239 SET_TYPE_STRUCTURAL_EQUALITY (type);
241 else
243 STRIP_ANY_LOCATION_WRAPPER (expr);
245 if (!by_reference_p && is_capture_proxy (expr))
247 /* When capturing by-value another capture proxy from an enclosing
248 lambda, consider the type of the corresponding field instead,
 249	       as the proxy may be additionally const-qualified if the enclosing
250 lambda is non-mutable (PR94376). */
251 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (expr)) == COMPONENT_REF);
252 expr = TREE_OPERAND (DECL_VALUE_EXPR (expr), 1);
255 type = non_reference (unlowered_expr_type (expr));
257 if (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)
258 type = build_reference_type (type);
261 return type;
264 /* Returns true iff DECL is a lambda capture proxy variable created by
265 build_capture_proxy. */
267 bool
268 is_capture_proxy (tree decl)
270 /* Location wrappers should be stripped or otherwise handled by the
271 caller before using this predicate. */
272 gcc_checking_assert (!location_wrapper_p (decl));
274 return (VAR_P (decl)
275 && DECL_HAS_VALUE_EXPR_P (decl)
276 && !DECL_ANON_UNION_VAR_P (decl)
277 && !DECL_DECOMPOSITION_P (decl)
278 && !DECL_FNAME_P (decl)
279 && !(DECL_ARTIFICIAL (decl)
280 && DECL_LANG_SPECIFIC (decl)
281 && DECL_OMP_PRIVATIZED_MEMBER (decl))
282 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
285 /* Returns true iff DECL is a capture proxy for a normal capture
286 (i.e. without explicit initializer). */
288 bool
289 is_normal_capture_proxy (tree decl)
291 if (!is_capture_proxy (decl))
292 /* It's not a capture proxy. */
293 return false;
295 return (DECL_LANG_SPECIFIC (decl)
296 && DECL_CAPTURED_VARIABLE (decl));
299 /* Returns true iff DECL is a capture proxy for a normal capture
300 of a constant variable. */
302 bool
303 is_constant_capture_proxy (tree decl)
305 if (is_normal_capture_proxy (decl))
306 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
307 return false;
310 /* VAR is a capture proxy created by build_capture_proxy; add it to the
311 current function, which is the operator() for the appropriate lambda. */
313 void
314 insert_capture_proxy (tree var)
316 if (is_normal_capture_proxy (var))
318 tree cap = DECL_CAPTURED_VARIABLE (var);
319 if (CHECKING_P)
321 gcc_assert (!is_normal_capture_proxy (cap));
322 tree old = retrieve_local_specialization (cap);
323 if (old)
324 gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
326 register_local_specialization (var, cap);
329 /* Put the capture proxy in the extra body block so that it won't clash
330 with a later local variable. */
331 pushdecl_outermost_localscope (var);
333 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
334 var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
335 tree stmt_list = (*stmt_list_stack)[1];
336 gcc_assert (stmt_list);
337 append_to_statement_list_force (var, &stmt_list);
340 /* We've just finished processing a lambda; if the containing scope is also
341 a lambda, insert any capture proxies that were created while processing
342 the nested lambda. */
344 void
345 insert_pending_capture_proxies (void)
347 tree lam;
348 vec<tree, va_gc> *proxies;
349 unsigned i;
351 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
352 return;
354 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
355 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
356 for (i = 0; i < vec_safe_length (proxies); ++i)
358 tree var = (*proxies)[i];
359 insert_capture_proxy (var);
361 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
362 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
365 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
366 return the type we want the proxy to have: the type of the field itself,
367 with added const-qualification if the lambda isn't mutable and the
368 capture is by value. */
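/* For instance, with

     int x = 0;
     auto f = [x] { return x; };   // non-mutable, capture by copy

   uses of 'x' in the body go through a proxy of type 'const int', since
   the closure is accessed through a const 'this'; with 'mutable' or a
   by-reference capture no const qualification is added.  */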
370 tree
371 lambda_proxy_type (tree ref)
373 tree type;
374 if (ref == error_mark_node)
375 return error_mark_node;
376 if (REFERENCE_REF_P (ref))
377 ref = TREE_OPERAND (ref, 0);
378 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
379 type = TREE_TYPE (ref);
380 if (!type || WILDCARD_TYPE_P (non_reference (type)))
382 type = cxx_make_type (DECLTYPE_TYPE);
383 DECLTYPE_TYPE_EXPR (type) = ref;
384 DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
385 SET_TYPE_STRUCTURAL_EQUALITY (type);
387 if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
388 type = make_pack_expansion (type);
389 return type;
392 /* MEMBER is a capture field in a lambda closure class. Now that we're
393 inside the operator(), build a placeholder var for future lookups and
394 debugging. */
396 static tree
397 build_capture_proxy (tree member, tree init)
399 tree var, object, fn, closure, name, lam, type;
401 if (PACK_EXPANSION_P (member))
402 member = PACK_EXPANSION_PATTERN (member);
404 closure = DECL_CONTEXT (member);
405 fn = lambda_function (closure);
406 lam = CLASSTYPE_LAMBDA_EXPR (closure);
408 /* The proxy variable forwards to the capture field. */
409 object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
410 object = finish_non_static_data_member (member, object, NULL_TREE);
411 if (REFERENCE_REF_P (object))
412 object = TREE_OPERAND (object, 0);
414 /* Remove the __ inserted by add_capture. */
415 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
417 type = lambda_proxy_type (object);
419 if (name == this_identifier && !INDIRECT_TYPE_P (type))
421 type = build_pointer_type (type);
422 type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
423 object = build_fold_addr_expr_with_type (object, type);
426 if (DECL_VLA_CAPTURE_P (member))
428 /* Rebuild the VLA type from the pointer and maxindex. */
429 tree field = next_aggregate_field (TYPE_FIELDS (type));
430 tree ptr = build_simple_component_ref (object, field);
431 field = next_aggregate_field (DECL_CHAIN (field));
432 tree max = build_simple_component_ref (object, field);
433 type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
434 build_index_type (max));
435 type = build_reference_type (type);
436 object = convert (type, ptr);
439 complete_type (type);
441 var = build_decl (input_location, VAR_DECL, name, type);
442 SET_DECL_VALUE_EXPR (var, object);
443 DECL_HAS_VALUE_EXPR_P (var) = 1;
444 DECL_ARTIFICIAL (var) = 1;
445 TREE_USED (var) = 1;
446 DECL_CONTEXT (var) = fn;
448 if (DECL_NORMAL_CAPTURE_P (member))
450 if (DECL_VLA_CAPTURE_P (member))
452 init = CONSTRUCTOR_ELT (init, 0)->value;
453 init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
454 init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
456 else
458 if (PACK_EXPANSION_P (init))
459 init = PACK_EXPANSION_PATTERN (init);
462 if (INDIRECT_REF_P (init))
463 init = TREE_OPERAND (init, 0);
464 STRIP_NOPS (init);
466 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
467 while (is_normal_capture_proxy (init))
468 init = DECL_CAPTURED_VARIABLE (init);
469 retrofit_lang_decl (var);
470 DECL_CAPTURED_VARIABLE (var) = init;
473 if (name == this_identifier)
475 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
476 LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
479 if (fn == current_function_decl)
480 insert_capture_proxy (var);
481 else
482 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
484 return var;
487 static GTY(()) tree ptr_id;
488 static GTY(()) tree max_id;
490 /* Return a struct containing a pointer and a length for lambda capture of
491 an array of runtime length. */
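/* A minimal sketch of the intent: for

     void f (int n)
     {
       int a[n];                        // array of runtime bound
       auto g = [&a] { return a[0]; };
     }

   the capture stores the address of a[0] and the maximum index in a small
   struct of this shape, and build_capture_proxy reconstructs a
   reference-to-VLA type for the proxy from those two fields.  */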
493 static tree
494 vla_capture_type (tree array_type)
496 tree type = xref_tag (record_type, make_anon_name ());
497 xref_basetypes (type, NULL_TREE);
498 type = begin_class_definition (type);
499 if (!ptr_id)
501 ptr_id = get_identifier ("ptr");
502 max_id = get_identifier ("max");
504 tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
505 tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
506 finish_member_declaration (field);
507 field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
508 finish_member_declaration (field);
509 return finish_struct (type, NULL_TREE);
512 /* From an ID and INITIALIZER, create a capture (by reference if
513 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
514 and return it. If ID is `this', BY_REFERENCE_P says whether
515 `*this' is captured by reference. */
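/* A brief sketch of the forms handled here, following the comment above:

     [x]      capture by copy            (ID 'x',    BY_REFERENCE_P false)
     [&x]     capture by reference       (ID 'x',    BY_REFERENCE_P true)
     [x = e]  init-capture               (EXPLICIT_INIT_P true)
     [this]   capture *this by reference (ID 'this', BY_REFERENCE_P true)
     [*this]  capture *this by copy      (ID 'this', BY_REFERENCE_P false)

   ORIG_INIT is the expression the capture is initialized from: the
   variable itself for [x], the full initializer for [x = e].  */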
517 tree
518 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
519 bool explicit_init_p)
521 char *buf;
522 tree type, member, name;
523 bool vla = false;
524 bool variadic = false;
525 tree initializer = orig_init;
527 if (PACK_EXPANSION_P (initializer))
529 initializer = PACK_EXPANSION_PATTERN (initializer);
530 variadic = true;
533 if (TREE_CODE (initializer) == TREE_LIST
534 /* A pack expansion might end up with multiple elements. */
535 && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
536 initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
537 tf_warning_or_error);
538 type = TREE_TYPE (initializer);
539 if (type == error_mark_node)
540 return error_mark_node;
542 if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
544 vla = true;
545 if (!by_reference_p)
546 error ("array of runtime bound cannot be captured by copy, "
547 "only by reference");
549 /* For a VLA, we capture the address of the first element and the
550 maximum index, and then reconstruct the VLA for the proxy. */
551 tree elt = cp_build_array_ref (input_location, initializer,
552 integer_zero_node, tf_warning_or_error);
553 initializer = build_constructor_va (init_list_type_node, 2,
554 NULL_TREE, build_address (elt),
555 NULL_TREE, array_type_nelts (type));
556 type = vla_capture_type (type);
558 else if (!dependent_type_p (type)
559 && variably_modified_type_p (type, NULL_TREE))
561 sorry ("capture of variably-modified type %qT that is not an N3639 array "
562 "of runtime bound", type);
563 if (TREE_CODE (type) == ARRAY_TYPE
564 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
565 inform (input_location, "because the array element type %qT has "
566 "variable size", TREE_TYPE (type));
567 return error_mark_node;
569 else
571 type = lambda_capture_field_type (initializer, explicit_init_p,
572 by_reference_p);
573 if (type == error_mark_node)
574 return error_mark_node;
576 if (id == this_identifier && !by_reference_p)
578 gcc_assert (INDIRECT_TYPE_P (type));
579 type = TREE_TYPE (type);
580 initializer = cp_build_fold_indirect_ref (initializer);
583 if (dependent_type_p (type))
585 else if (id != this_identifier && by_reference_p)
587 if (!lvalue_p (initializer))
589 error ("cannot capture %qE by reference", initializer);
590 return error_mark_node;
593 else
595 /* Capture by copy requires a complete type. */
596 type = complete_type (type);
597 if (!COMPLETE_TYPE_P (type))
599 error ("capture by copy of incomplete type %qT", type);
600 cxx_incomplete_type_inform (type);
601 return error_mark_node;
603 else if (!verify_type_context (input_location,
604 TCTX_CAPTURE_BY_COPY, type))
605 return error_mark_node;
609 /* Add __ to the beginning of the field name so that user code
610 won't find the field with name lookup. We can't just leave the name
611 unset because template instantiation uses the name to find
612 instantiated fields. */
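/* So capturing 'foo' creates a field named '__foo'; build_capture_proxy
   strips those two characters again (the '+ 2' above) to recover the
   user-visible name for the proxy.  */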
613 buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
614 buf[1] = buf[0] = '_';
615 memcpy (buf + 2, IDENTIFIER_POINTER (id),
616 IDENTIFIER_LENGTH (id) + 1);
617 name = get_identifier (buf);
619 if (variadic)
621 type = make_pack_expansion (type);
622 if (explicit_init_p)
623 /* With an explicit initializer 'type' is auto, which isn't really a
624 parameter pack in this context. We will want as many fields as we
625 have elements in the expansion of the initializer, so use its packs
626 instead. */
628 PACK_EXPANSION_PARAMETER_PACKS (type)
629 = uses_parameter_packs (initializer);
630 PACK_EXPANSION_AUTO_P (type) = true;
634 /* Make member variable. */
635 member = build_decl (input_location, FIELD_DECL, name, type);
636 DECL_VLA_CAPTURE_P (member) = vla;
638 if (!explicit_init_p)
639 /* Normal captures are invisible to name lookup but uses are replaced
640 with references to the capture field; we implement this by only
641 really making them invisible in unevaluated context; see
642 qualify_lookup. For now, let's make explicitly initialized captures
643 always visible. */
644 DECL_NORMAL_CAPTURE_P (member) = true;
646 if (id == this_identifier)
647 LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
649 /* Add it to the appropriate closure class if we've started it. */
650 if (current_class_type
651 && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
653 if (COMPLETE_TYPE_P (current_class_type))
654 internal_error ("trying to capture %qD in instantiation of "
655 "generic lambda", id);
656 finish_member_declaration (member);
659 tree listmem = member;
660 if (variadic)
662 listmem = make_pack_expansion (member);
663 initializer = orig_init;
665 LAMBDA_EXPR_CAPTURE_LIST (lambda)
666 = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
668 if (LAMBDA_EXPR_CLOSURE (lambda))
669 return build_capture_proxy (member, initializer);
670 /* For explicit captures we haven't started the function yet, so we wait
671 and build the proxy from cp_parser_lambda_body. */
672 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
673 return NULL_TREE;
676 /* Register all the capture members on the list CAPTURES, which is the
677 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
679 void
680 register_capture_members (tree captures)
682 if (captures == NULL_TREE)
683 return;
685 register_capture_members (TREE_CHAIN (captures));
687 tree field = TREE_PURPOSE (captures);
688 if (PACK_EXPANSION_P (field))
689 field = PACK_EXPANSION_PATTERN (field);
691 finish_member_declaration (field);
694 /* Similar to add_capture, except this works on a stack of nested lambdas.
695 BY_REFERENCE_P in this case is derived from the default capture mode.
696 Returns the capture for the lambda at the bottom of the stack. */
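/* Roughly speaking, for

     int x = 0;
     auto f = [=] { return [=] { return x; }; };

   reaching 'x' from the innermost lambda walks the stack of enclosing
   default-capturing lambdas: the outer lambda captures 'x' first, and the
   inner lambda then captures the outer lambda's capture proxy, so the
   value is threaded through each closure in turn.  */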
698 tree
699 add_default_capture (tree lambda_stack, tree id, tree initializer)
701 bool this_capture_p = (id == this_identifier);
702 tree var = NULL_TREE;
703 tree saved_class_type = current_class_type;
705 for (tree node = lambda_stack;
706 node;
707 node = TREE_CHAIN (node))
709 tree lambda = TREE_VALUE (node);
711 current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
712 if (DECL_PACK_P (initializer))
713 initializer = make_pack_expansion (initializer);
714 var = add_capture (lambda,
716 initializer,
717 /*by_reference_p=*/
718 (this_capture_p
719 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
720 == CPLD_REFERENCE)),
721 /*explicit_init_p=*/false);
722 initializer = convert_from_reference (var);
724 /* Warn about deprecated implicit capture of this via [=]. */
725 if (cxx_dialect >= cxx20
726 && this_capture_p
727 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
729 if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
730 "implicit capture of %qE via %<[=]%> is deprecated "
731 "in C++20", this_identifier))
732 inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
733 "%<*this%> capture");
737 current_class_type = saved_class_type;
739 return var;
742 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
743 form of an INDIRECT_REF, possibly adding it through default
744 capturing, if ADD_CAPTURE_P is nonzero. If ADD_CAPTURE_P is negative,
745 try to capture but don't complain if we can't. */
747 tree
748 lambda_expr_this_capture (tree lambda, int add_capture_p)
750 tree result;
752 tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
754 /* In unevaluated context this isn't an odr-use, so don't capture. */
755 if (cp_unevaluated_operand)
756 add_capture_p = false;
758 /* Try to default capture 'this' if we can. */
759 if (!this_capture)
761 tree lambda_stack = NULL_TREE;
762 tree init = NULL_TREE;
763 bool saw_complete = false;
765 /* If we are in a lambda function, we can move out until we hit:
766 1. a non-lambda function or NSDMI,
767 2. a lambda function capturing 'this', or
768 3. a non-default capturing lambda function. */
769 for (tree tlambda = lambda; ;)
771 if (add_capture_p
772 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
773 /* tlambda won't let us capture 'this'. */
774 break;
776 if (add_capture_p)
777 lambda_stack = tree_cons (NULL_TREE,
778 tlambda,
779 lambda_stack);
781 tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
782 if (COMPLETE_TYPE_P (closure))
 783	  	    /* We're instantiating a generic lambda op(); the containing
 784	  	       scope may be gone.  */
785 saw_complete = true;
787 tree containing_function
788 = decl_function_context (TYPE_NAME (closure));
790 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
791 if (ex && TREE_CODE (ex) == FIELD_DECL)
793 /* Lambda in an NSDMI. We don't have a function to look up
794 'this' in, but we can find (or rebuild) the fake one from
795 inject_this_parameter. */
796 if (!containing_function && !saw_complete)
797 /* If we're parsing a lambda in a non-local class,
798 we can find the fake 'this' in scope_chain. */
799 init = scope_chain->x_current_class_ptr;
800 else
801 /* Otherwise it's either gone or buried in
802 function_context_stack, so make another. */
803 init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
804 TYPE_UNQUALIFIED);
805 gcc_checking_assert
806 (init && (TREE_TYPE (TREE_TYPE (init))
807 == current_nonlambda_class_type ()));
808 break;
811 if (containing_function == NULL_TREE)
812 /* We ran out of scopes; there's no 'this' to capture. */
813 break;
815 if (!LAMBDA_FUNCTION_P (containing_function))
817 /* We found a non-lambda function. */
818 if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
819 /* First parameter is 'this'. */
820 init = DECL_ARGUMENTS (containing_function);
821 break;
824 tlambda
825 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
827 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
829 /* An outer lambda has already captured 'this'. */
830 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
831 break;
835 if (init)
837 if (add_capture_p)
838 this_capture = add_default_capture (lambda_stack,
839 /*id=*/this_identifier,
840 init);
841 else
842 this_capture = init;
846 if (cp_unevaluated_operand)
847 result = this_capture;
848 else if (!this_capture)
850 if (add_capture_p == 1)
852 error ("%<this%> was not captured for this lambda function");
853 result = error_mark_node;
855 else
856 result = NULL_TREE;
858 else
860 /* To make sure that current_class_ref is for the lambda. */
861 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
862 == LAMBDA_EXPR_CLOSURE (lambda));
864 result = this_capture;
866 /* If 'this' is captured, each use of 'this' is transformed into an
867 access to the corresponding unnamed data member of the closure
868 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
869 ensures that the transformed expression is an rvalue. ] */
870 result = rvalue (result);
873 return result;
876 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
878 tree
879 current_lambda_expr (void)
881 tree type = current_class_type;
882 while (type && !LAMBDA_TYPE_P (type))
883 type = decl_type_context (TYPE_NAME (type));
884 if (type)
885 return CLASSTYPE_LAMBDA_EXPR (type);
886 else
887 return NULL_TREE;
890 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
 891	   object, or NULL_TREE otherwise.  */
893 static tree
894 resolvable_dummy_lambda (tree object)
896 if (!is_dummy_object (object))
897 return NULL_TREE;
899 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
900 gcc_assert (!TYPE_PTR_P (type));
902 if (type != current_class_type
903 && current_class_type
904 && LAMBDA_TYPE_P (current_class_type)
905 && lambda_function (current_class_type)
906 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
907 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
909 return NULL_TREE;
912 /* We don't want to capture 'this' until we know we need it, i.e. after
913 overload resolution has chosen a non-static member function. At that
914 point we call this function to turn a dummy object into a use of the
915 'this' capture. */
917 tree
918 maybe_resolve_dummy (tree object, bool add_capture_p)
920 if (tree lam = resolvable_dummy_lambda (object))
921 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
922 if (cap != error_mark_node)
923 object = build_fold_indirect_ref (cap);
925 return object;
928 /* When parsing a generic lambda containing an argument-dependent
929 member function call we defer overload resolution to instantiation
930 time. But we have to know now whether to capture this or not.
931 Do that if FNS contains any non-static fns.
932 The std doesn't anticipate this case, but I expect this to be the
933 outcome of discussion. */
935 void
936 maybe_generic_this_capture (tree object, tree fns)
938 if (tree lam = resolvable_dummy_lambda (object))
939 if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
941 /* We've not yet captured, so look at the function set of
942 interest. */
943 if (BASELINK_P (fns))
944 fns = BASELINK_FUNCTIONS (fns);
945 bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
946 if (id_expr)
947 fns = TREE_OPERAND (fns, 0);
949 for (lkp_iterator iter (fns); iter; ++iter)
950 if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
951 || TREE_CODE (*iter) == TEMPLATE_DECL)
952 && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
954 /* Found a non-static member. Capture this. */
955 lambda_expr_this_capture (lam, /*maybe*/-1);
956 break;
961 /* Returns the innermost non-lambda function. */
963 tree
964 current_nonlambda_function (void)
966 tree fn = current_function_decl;
967 while (fn && LAMBDA_FUNCTION_P (fn))
968 fn = decl_function_context (fn);
969 return fn;
972 /* Returns the method basetype of the innermost non-lambda function, including
973 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
975 tree
976 nonlambda_method_basetype (void)
978 if (!current_class_ref)
979 return NULL_TREE;
981 tree type = current_class_type;
982 if (!type || !LAMBDA_TYPE_P (type))
983 return type;
985 while (true)
987 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
988 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
989 if (ex && TREE_CODE (ex) == FIELD_DECL)
990 /* Lambda in an NSDMI. */
991 return DECL_CONTEXT (ex);
993 tree fn = TYPE_CONTEXT (type);
994 if (!fn || TREE_CODE (fn) != FUNCTION_DECL
995 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
996 /* No enclosing non-lambda method. */
997 return NULL_TREE;
998 if (!LAMBDA_FUNCTION_P (fn))
999 /* Found an enclosing non-lambda method. */
1000 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1001 type = DECL_CONTEXT (fn);
1005 /* Like current_scope, but looking through lambdas. */
1007 tree
1008 current_nonlambda_scope (void)
1010 tree scope = current_scope ();
1011 for (;;)
1013 if (TREE_CODE (scope) == FUNCTION_DECL
1014 && LAMBDA_FUNCTION_P (scope))
1016 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1017 continue;
1019 else if (LAMBDA_TYPE_P (scope))
1021 scope = CP_TYPE_CONTEXT (scope);
1022 continue;
1024 break;
1026 return scope;
1029 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1030 indicated FN and NARGS, but do not initialize the return type or any of the
1031 argument slots. */
1033 static tree
1034 prepare_op_call (tree fn, int nargs)
1036 tree t;
1038 t = build_vl_exp (CALL_EXPR, nargs + 3);
1039 CALL_EXPR_FN (t) = fn;
1040 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1042 return t;
1045 /* Return true iff CALLOP is the op() for a generic lambda. */
1047 bool
1048 generic_lambda_fn_p (tree callop)
1050 return (LAMBDA_FUNCTION_P (callop)
1051 && DECL_TEMPLATE_INFO (callop)
1052 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1055 /* If the closure TYPE has a static op(), also add a conversion to function
1056 pointer. */
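/* A sketch of the user-visible effect: a capture-less lambda such as

     auto f = [] (int i) { return i + 1; };
     int (*fp) (int) = f;   // uses the conversion op added here

   gets a conversion operator returning a pointer to a static member
   function "_FUN" that forwards to the call operator; when the call
   operator is itself static (C++23), the conversion simply returns its
   address and no separate thunk is needed.  */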
1058 void
1059 maybe_add_lambda_conv_op (tree type)
1061 bool nested = (cfun != NULL);
1062 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1063 tree callop = lambda_function (type);
1064 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1066 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1067 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1068 return;
1070 if (processing_template_decl)
1071 return;
1073 bool const generic_lambda_p = generic_lambda_fn_p (callop);
1075 if (!generic_lambda_p && undeduced_auto_decl (callop))
1077 /* If the op() wasn't deduced due to errors, give up. */
1078 gcc_assert (errorcount || sorrycount);
1079 return;
1082 /* Non-generic non-capturing lambdas only have a conversion function to
1083 pointer to function when the trailing requires-clause's constraints are
1084 satisfied. */
1085 if (!generic_lambda_p && !constraints_satisfied_p (callop))
1086 return;
1088 /* Non-template conversion operators are defined directly with build_call_a
1089 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1090 deferred and the CALL is built in-place. In the case of a deduced return
1091 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1092 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1093 the return expression may differ in flags from those in the body CALL. In
1094 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1095 the body CALL, but not in DECLTYPE_CALL. */
1097 vec<tree, va_gc> *direct_argvec = 0;
1098 tree decltype_call = 0, call = 0;
1099 tree optype = TREE_TYPE (callop);
1100 tree fn_result = TREE_TYPE (optype);
1102 tree thisarg = NULL_TREE;
1103 if (TREE_CODE (optype) == METHOD_TYPE)
1104 thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1105 if (generic_lambda_p)
1107 ++processing_template_decl;
1109 /* Prepare the dependent member call for the static member function
1110 '_FUN' and, potentially, prepare another call to be used in a decltype
1111 return expression for a deduced return call op to allow for simple
1112 implementation of the conversion operator. */
1114 tree objfn;
1115 int nargs = list_length (DECL_ARGUMENTS (callop));
1116 if (thisarg)
1118 tree instance = cp_build_fold_indirect_ref (thisarg);
1119 objfn = lookup_template_function (DECL_NAME (callop),
1120 DECL_TI_ARGS (callop));
1121 objfn = build_min (COMPONENT_REF, NULL_TREE,
1122 instance, objfn, NULL_TREE);
1123 --nargs;
1124 call = prepare_op_call (objfn, nargs);
1126 else
1127 objfn = callop;
1129 if (type_uses_auto (fn_result))
1130 decltype_call = prepare_op_call (objfn, nargs);
1132 else if (thisarg)
1134 direct_argvec = make_tree_vector ();
1135 direct_argvec->quick_push (thisarg);
1138 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1139 declare the static member function "_FUN" below. For each arg append to
1140 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1141 call args (for the template case). If a parameter pack is found, expand
1142 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1144 tree fn_args = NULL_TREE;
1146 int ix = 0;
1147 tree src = FUNCTION_FIRST_USER_PARM (callop);
1148 tree tgt = NULL;
1150 if (!thisarg && !decltype_call)
1151 src = NULL_TREE;
1152 while (src)
1154 tree new_node = copy_node (src);
1155 /* We set DECL_CONTEXT of NEW_NODE to the statfn below.
1156 Notice this is creating a recursive type! */
1158 /* Clear TREE_ADDRESSABLE on thunk arguments. */
1159 TREE_ADDRESSABLE (new_node) = 0;
1161 if (!fn_args)
1162 fn_args = tgt = new_node;
1163 else
1165 TREE_CHAIN (tgt) = new_node;
1166 tgt = new_node;
1169 mark_exp_read (tgt);
1171 if (generic_lambda_p)
1173 tree a = tgt;
1174 if (thisarg)
1176 if (DECL_PACK_P (tgt))
1178 a = make_pack_expansion (a);
1179 PACK_EXPANSION_LOCAL_P (a) = true;
1181 CALL_EXPR_ARG (call, ix) = a;
1184 if (decltype_call)
1186 /* Avoid capturing variables in this context. */
1187 ++cp_unevaluated_operand;
1188 CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1189 --cp_unevaluated_operand;
1192 ++ix;
1194 else
1195 vec_safe_push (direct_argvec, tgt);
1197 src = TREE_CHAIN (src);
1201 if (generic_lambda_p)
1203 if (decltype_call)
1205 fn_result = finish_decltype_type
1206 (decltype_call, /*id_expression_or_member_access_p=*/false,
1207 tf_warning_or_error);
1210 else if (thisarg)
1212 /* Don't warn on deprecated or unavailable lambda declarations, unless
1213 the lambda is actually called. */
1214 auto du = make_temp_override (deprecated_state,
1215 UNAVAILABLE_DEPRECATED_SUPPRESS);
1216 call = build_call_a (callop, direct_argvec->length (),
1217 direct_argvec->address ());
1220 if (thisarg)
1222 CALL_FROM_THUNK_P (call) = 1;
1223 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1226 tree stattype
1227 = build_function_type (fn_result, FUNCTION_FIRST_USER_PARMTYPE (callop));
1228 stattype = (cp_build_type_attribute_variant
1229 (stattype, TYPE_ATTRIBUTES (optype)));
1230 if (flag_noexcept_type
1231 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1232 stattype = build_exception_variant (stattype, noexcept_true_spec);
1234 if (generic_lambda_p)
1235 --processing_template_decl;
1237 /* First build up the conversion op. */
1239 tree rettype = build_pointer_type (stattype);
1240 tree name = make_conv_op_name (rettype);
1241 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1242 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1243 /* DR 1722: The conversion function should be noexcept. */
1244 fntype = build_exception_variant (fntype, noexcept_true_spec);
1245 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1246 SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1247 tree fn = convfn;
1248 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1249 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1250 grokclassfn (type, fn, NO_SPECIAL);
1251 set_linkage_according_to_type (type, fn);
1252 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1253 DECL_IN_AGGR_P (fn) = 1;
1254 DECL_ARTIFICIAL (fn) = 1;
1255 DECL_NOT_REALLY_EXTERN (fn) = 1;
1256 DECL_DECLARED_INLINE_P (fn) = 1;
1257 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1258 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1259 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1260 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1262 if (nested_def)
1263 DECL_INTERFACE_KNOWN (fn) = 1;
1265 if (generic_lambda_p)
1266 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1268 add_method (type, fn, false);
1270 if (thisarg == NULL_TREE)
1272 /* For static lambda, just return operator(). */
1273 if (nested)
1274 push_function_context ();
1275 else
1276 /* Still increment function_depth so that we don't GC in the
1277 middle of an expression. */
1278 ++function_depth;
1280 /* Generate the body of the conversion op. */
1282 start_preparsed_function (convfn, NULL_TREE,
1283 SF_PRE_PARSED | SF_INCLASS_INLINE);
1284 tree body = begin_function_body ();
1285 tree compound_stmt = begin_compound_stmt (0);
1287 /* decl_needed_p needs to see that it's used. */
1288 TREE_USED (callop) = 1;
1289 finish_return_stmt (decay_conversion (callop, tf_warning_or_error));
1291 finish_compound_stmt (compound_stmt);
1292 finish_function_body (body);
1294 fn = finish_function (/*inline_p=*/true);
1295 if (!generic_lambda_p)
1296 expand_or_defer_fn (fn);
1298 if (nested)
1299 pop_function_context ();
1300 else
1301 --function_depth;
1302 return;
1305 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1306 the conversion op is used. */
1307 if (varargs_function_p (callop))
1309 DECL_DELETED_FN (fn) = 1;
1310 return;
1313 /* Now build up the thunk to be returned. */
1315 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1316 SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1317 fn = statfn;
1318 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1319 grokclassfn (type, fn, NO_SPECIAL);
1320 set_linkage_according_to_type (type, fn);
1321 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1322 DECL_IN_AGGR_P (fn) = 1;
1323 DECL_ARTIFICIAL (fn) = 1;
1324 DECL_NOT_REALLY_EXTERN (fn) = 1;
1325 DECL_DECLARED_INLINE_P (fn) = 1;
1326 DECL_STATIC_FUNCTION_P (fn) = 1;
1327 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1328 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1329 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1330 DECL_ARGUMENTS (fn) = fn_args;
1331 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1333 /* Avoid duplicate -Wshadow warnings. */
1334 DECL_NAME (arg) = NULL_TREE;
1335 DECL_CONTEXT (arg) = fn;
1337 if (nested_def)
1338 DECL_INTERFACE_KNOWN (fn) = 1;
1340 if (generic_lambda_p)
1341 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1343 if (flag_sanitize & SANITIZE_NULL)
1344 /* Don't UBsan this function; we're deliberately calling op() with a null
1345 object argument. */
1346 add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1348 add_method (type, fn, false);
1350 if (nested)
1351 push_function_context ();
1352 else
1353 /* Still increment function_depth so that we don't GC in the
1354 middle of an expression. */
1355 ++function_depth;
1357 /* Generate the body of the thunk. */
1359 start_preparsed_function (statfn, NULL_TREE,
1360 SF_PRE_PARSED | SF_INCLASS_INLINE);
1361 tree body = begin_function_body ();
1362 tree compound_stmt = begin_compound_stmt (0);
1363 if (!generic_lambda_p)
1365 set_flags_from_callee (call);
1366 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1367 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1369 call = convert_from_reference (call);
1370 finish_return_stmt (call);
1372 finish_compound_stmt (compound_stmt);
1373 finish_function_body (body);
1375 fn = finish_function (/*inline_p=*/true);
1376 if (!generic_lambda_p)
1377 expand_or_defer_fn (fn);
1379 /* Generate the body of the conversion op. */
1381 start_preparsed_function (convfn, NULL_TREE,
1382 SF_PRE_PARSED | SF_INCLASS_INLINE);
1383 body = begin_function_body ();
1384 compound_stmt = begin_compound_stmt (0);
1386 /* decl_needed_p needs to see that it's used. */
1387 TREE_USED (statfn) = 1;
1388 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1390 finish_compound_stmt (compound_stmt);
1391 finish_function_body (body);
1393 fn = finish_function (/*inline_p=*/true);
1394 if (!generic_lambda_p)
1395 expand_or_defer_fn (fn);
1397 if (nested)
1398 pop_function_context ();
1399 else
1400 --function_depth;
1403 /* True if FN is the static function "_FUN" that gets returned from the lambda
1404 conversion operator. */
1406 bool
1407 lambda_static_thunk_p (tree fn)
1409 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1410 && DECL_ARTIFICIAL (fn)
1411 && DECL_STATIC_FUNCTION_P (fn)
1412 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1415 bool
1416 call_from_lambda_thunk_p (tree call)
1418 return (CALL_FROM_THUNK_P (call)
1419 && lambda_static_thunk_p (current_function_decl));
1422 /* Returns true iff VAL is a lambda-related declaration which should
1423 be ignored by unqualified lookup. */
1425 bool
1426 is_lambda_ignored_entity (tree val)
1428 /* Look past normal, non-VLA capture proxies. */
1429 if (is_normal_capture_proxy (val)
1430 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1431 return true;
1433 /* Always ignore lambda fields, their names are only for debugging. */
1434 if (TREE_CODE (val) == FIELD_DECL
1435 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1436 return true;
1438 /* None of the lookups that use qualify_lookup want the op() from the
1439 lambda; they want the one from the enclosing class. */
1440 if (tree fns = maybe_get_fns (val))
1441 if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
1442 return true;
1444 return false;
1447 /* Lambdas that appear in variable initializer or default argument
1448 scope get that in their mangling, so we need to record it. Also,
1449 multiple lambdas in the same scope may need a mangling
1450 discriminator. In ABI <= 17, there is a single per-scope sequence
1451 number. In ABI >= 18, there are per-scope per-signature sequence
1452 numbers. */
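/* For instance, the two closure types in

     inline auto p = std::make_pair ([] { return 1; },
                                     [] { return 2; });

   both have the scope of 'p' in their mangling and are told apart by a
   discriminator: a single per-scope counter for ABI <= 17, per-scope
   per-signature counters for ABI >= 18.  */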
1453 struct GTY(()) lambda_sig_count
1455 tree fn; // The lambda fn whose sig this is.
1456 unsigned count;
1458 struct GTY(()) lambda_discriminator
1460 tree scope;
1461 unsigned nesting; // Inside a function, VAR_DECLs get the function
1462 // as scope. This counts that nesting.
1463 unsigned count; // The per-scope counter.
1464 vec<lambda_sig_count, va_gc> *discriminators; // Per-signature counters
1466 // The current scope.
1467 static GTY(()) lambda_discriminator lambda_scope;
1468 // Stack of previous scopes.
1469 static GTY(()) vec<lambda_discriminator, va_gc> *lambda_scope_stack;
1471 // Push DECL as lambda extra scope, also new discriminator counters.
1473 void
1474 start_lambda_scope (tree decl)
1476 gcc_checking_assert (decl);
1477 if (current_function_decl && VAR_P (decl))
1478 // If we're inside a function, we ignore variable scope. Don't push.
1479 lambda_scope.nesting++;
1480 else
1482 vec_safe_push (lambda_scope_stack, lambda_scope);
1483 lambda_scope.scope = decl;
1484 lambda_scope.nesting = 0;
1485 lambda_scope.count = 0;
1486 lambda_scope.discriminators = nullptr;
1490 // Pop from the current lambda extra scope.
1492 void
1493 finish_lambda_scope (void)
1495 if (!lambda_scope.nesting--)
1497 lambda_scope = lambda_scope_stack->last ();
1498 lambda_scope_stack->pop ();
1502 // Record the current lambda scope into LAMBDA
1504 void
1505 record_lambda_scope (tree lambda)
1507 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope.scope;
1508 if (lambda_scope.scope)
1510 tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1511 gcc_checking_assert (closure);
1512 maybe_key_decl (lambda_scope.scope, TYPE_NAME (closure));
1516 // Compare lambda template heads TMPL_A and TMPL_B, used for both
1517 // templated lambdas, and template template parameters of said lambda.
1519 static bool
1520 compare_lambda_template_head (tree tmpl_a, tree tmpl_b)
1522 // We only need one level of template parms
1523 tree inner_a = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_a));
1524 tree inner_b = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_b));
1526 // We only compare explicit template parms, ignoring trailing
1527 // synthetic ones.
1528 int len_a = TREE_VEC_LENGTH (inner_a);
1529 int len_b = TREE_VEC_LENGTH (inner_b);
1531 for (int ix = 0, len = MAX (len_a, len_b); ix != len; ix++)
1533 tree parm_a = NULL_TREE;
1534 if (ix < len_a)
1536 parm_a = TREE_VEC_ELT (inner_a, ix);
1537 if (parm_a == error_mark_node)
1538 return false;
1539 parm_a = TREE_VALUE (parm_a);
1540 if (parm_a == error_mark_node)
1541 return false;
1542 if (DECL_VIRTUAL_P (parm_a))
1543 parm_a = NULL_TREE;
1546 tree parm_b = NULL_TREE;
1547 if (ix < len_b)
1549 parm_b = TREE_VEC_ELT (inner_b, ix);
1550 if (parm_b == error_mark_node)
1551 return false;
1552 parm_b = TREE_VALUE (parm_b);
1553 if (parm_b == error_mark_node)
1554 return false;
1555 if (DECL_VIRTUAL_P (parm_b))
1556 parm_b = NULL_TREE;
1559 if (!parm_a && !parm_b)
1560 // we're done
1561 break;
1563 if (!(parm_a && parm_b))
1564 return false;
1566 if (TREE_CODE (parm_a) != TREE_CODE (parm_b))
1567 return false;
1569 if (TREE_CODE (parm_a) == PARM_DECL)
1571 if (TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_a))
1572 != TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_b)))
1573 return false;
1575 if (!same_type_p (TREE_TYPE (parm_a), TREE_TYPE (parm_b)))
1576 return false;
1578 else
1580 if (TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_a))
1581 != TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_b)))
1582 return false;
1584 if (TREE_CODE (parm_a) != TEMPLATE_DECL)
1585 gcc_checking_assert (TREE_CODE (parm_a) == TYPE_DECL);
1586 else if (!compare_lambda_template_head (parm_a, parm_b))
1587 return false;
1591 return true;
1594 // Compare lambda signatures FN_A and FN_B, they may be TEMPLATE_DECLs too.
1596 static bool
1597 compare_lambda_sig (tree fn_a, tree fn_b)
1599 if (TREE_CODE (fn_a) == TEMPLATE_DECL
1600 && TREE_CODE (fn_b) == TEMPLATE_DECL)
1602 if (!compare_lambda_template_head (fn_a, fn_b))
1603 return false;
1604 fn_a = DECL_TEMPLATE_RESULT (fn_a);
1605 fn_b = DECL_TEMPLATE_RESULT (fn_b);
1607 else if (TREE_CODE (fn_a) == TEMPLATE_DECL
1608 || TREE_CODE (fn_b) == TEMPLATE_DECL)
1609 return false;
1611 if (fn_a == error_mark_node
1612 || fn_b == error_mark_node)
1613 return false;
1615 for (tree args_a = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_a))),
1616 args_b = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_b)));
1617 args_a || args_b;
1618 args_a = TREE_CHAIN (args_a), args_b = TREE_CHAIN (args_b))
1620 if (!args_a || !args_b)
1621 return false;
1622	      // This check also deals with differing variadicness
1623 if (!same_type_p (TREE_VALUE (args_a), TREE_VALUE (args_b)))
1624 return false;
1627 return true;
1630 // Record the per-scope discriminator of LAMBDA. If the extra scope
1631 // is empty, we must use the empty scope counter, which might not be
1632 // the live one.
1634 void
1635 record_lambda_scope_discriminator (tree lambda)
1637 auto *slot = (vec_safe_is_empty (lambda_scope_stack)
1638 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1639 ? &lambda_scope : lambda_scope_stack->begin ());
1640 LAMBDA_EXPR_SCOPE_ONLY_DISCRIMINATOR (lambda) = slot->count++;
1643 // Record the per-scope per-signature discriminator of LAMBDA. If the
1644 // extra scope is empty, we must use the empty scope counter, which
1645 // might not be the live one.
1647 void
1648 record_lambda_scope_sig_discriminator (tree lambda, tree fn)
1650 auto *slot = (vec_safe_is_empty (lambda_scope_stack)
1651 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1652 ? &lambda_scope : lambda_scope_stack->begin ());
1653 gcc_checking_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == slot->scope);
1655	  // A linear search; we're not expecting this to be a big list, and
1656	  // this avoids needing a signature hash function.
1657 lambda_sig_count *sig;
1658 if (unsigned ix = vec_safe_length (slot->discriminators))
1659 for (sig = slot->discriminators->begin (); ix--; sig++)
1660 if (compare_lambda_sig (fn, sig->fn))
1661 goto found;
1663 lambda_sig_count init = {fn, 0};
1664 sig = vec_safe_push (slot->discriminators, init);
1666 found:
1667 LAMBDA_EXPR_SCOPE_SIG_DISCRIMINATOR (lambda) = sig->count++;
1670 tree
1671 start_lambda_function (tree fco, tree lambda_expr)
1673 /* Let the front end know that we are going to be defining this
1674 function. */
1675 start_preparsed_function (fco,
1676 NULL_TREE,
1677 SF_PRE_PARSED | SF_INCLASS_INLINE);
1679 tree body = begin_function_body ();
1681 /* Push the proxies for any explicit captures. */
1682 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1683 cap = TREE_CHAIN (cap))
1684 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1686 return body;
1689 /* Subroutine of prune_lambda_captures: CAP is a node in
1690 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1691 might optimize away the capture, or NULL_TREE if there is no such
1692 variable. */
1694 static tree
1695 var_to_maybe_prune (tree cap)
1697 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1698 /* Don't prune explicit captures. */
1699 return NULL_TREE;
1701 tree mem = TREE_PURPOSE (cap);
1702 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1703 /* Packs and init-captures aren't captures of constant vars. */
1704 return NULL_TREE;
1706 tree init = TREE_VALUE (cap);
1707 if (is_normal_capture_proxy (init))
1708 init = DECL_CAPTURED_VARIABLE (init);
1709 if (decl_constant_var_p (init))
1710 return init;
1712 return NULL_TREE;
1715 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1716 for constant variables are actually used in the lambda body.
1718 There will always be a DECL_EXPR for the capture proxy; remember it when we
1719 see it, but replace it with any other use. */
1721 static tree
1722 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1724 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1726 tree var = NULL_TREE;
1727 if (TREE_CODE (*t) == DECL_EXPR)
1729 tree decl = DECL_EXPR_DECL (*t);
1730 if (is_constant_capture_proxy (decl))
1732 var = DECL_CAPTURED_VARIABLE (decl);
1733 *walk_subtrees = 0;
1736 else if (!location_wrapper_p (*t) /* is_capture_proxy dislikes them. */
1737 && is_constant_capture_proxy (*t))
1738 var = DECL_CAPTURED_VARIABLE (*t);
1740 if (var)
1742 tree *&slot = const_vars.get_or_insert (var);
1743 if (!slot || VAR_P (*t))
1744 slot = t;
1747 return NULL_TREE;
1750 /* We're at the end of processing a lambda; go back and remove any captures of
1751 constant variables for which we've folded away all uses. */
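/* A minimal sketch of the situation handled here:

     const int limit = 42;
     auto f = [=] { return limit; };

   every use of 'limit' in the body folds to the constant 42, so the
   implicit capture of 'limit' (and its closure field) can be removed,
   shrinking the closure.  Explicit captures are never pruned.  */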
1753 static void
1754 prune_lambda_captures (tree body)
1756 tree lam = current_lambda_expr ();
1757 if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1758 /* No uses were optimized away. */
1759 return;
1760 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1761 /* No default captures, and we don't prune explicit captures. */
1762 return;
1763 /* Don't bother pruning in a template, we'll prune at instantiation time. */
1764 if (dependent_type_p (TREE_TYPE (lam)))
1765 return;
1767 hash_map<tree,tree*> const_vars;
1769 cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1771 tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1772 for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1774 tree cap = *capp;
1775 if (tree var = var_to_maybe_prune (cap))
1777 tree **use = const_vars.get (var);
1778 if (use && TREE_CODE (**use) == DECL_EXPR)
1780 /* All uses of this capture were folded away, leaving only the
1781 proxy declaration. */
1783 /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST. */
1784 *capp = TREE_CHAIN (cap);
1786 /* And out of TYPE_FIELDS. */
1787 tree field = TREE_PURPOSE (cap);
1788 while (*fieldp != field)
1789 fieldp = &DECL_CHAIN (*fieldp);
1790 *fieldp = DECL_CHAIN (*fieldp);
1792 /* And remove the capture proxy declaration. */
1793 **use = void_node;
1794 continue;
1798 capp = &TREE_CHAIN (cap);
1802	// Finish the lambda function defined by BODY: prune any captures of
1803	// constant variables whose uses were all folded away, then finish the
1804	// function and expand it unless the call operator is a template.
1806 void
1807 finish_lambda_function (tree body)
1809 finish_function_body (body);
1811 prune_lambda_captures (body);
1813 /* Finish the function and generate code for it if necessary. */
1814 tree fn = finish_function (/*inline_p=*/true);
1816 /* Only expand if the call op is not a template. */
1817 if (!DECL_TEMPLATE_INFO (fn))
1818 expand_or_defer_fn (fn);
1821 #include "gt-cp-lambda.h"