gcc/cp/lambda.cc
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
6 Copyright (C) 1998-2024 Free Software Foundation, Inc.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34 #include "decl.h"
36 /* Constructor for a lambda expression. */
38 tree
39 build_lambda_expr (void)
41 tree lambda = make_node (LAMBDA_EXPR);
42 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
43 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
44 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
45 LAMBDA_EXPR_REGEN_INFO (lambda) = NULL_TREE;
46 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
47 return lambda;
50 /* Create the closure object for a LAMBDA_EXPR. */
52 tree
53 build_lambda_object (tree lambda_expr)
55 /* Build aggregate constructor call.
56 - cp_parser_braced_list
57 - cp_parser_functional_cast */
58 vec<constructor_elt, va_gc> *elts = NULL;
59 tree node, expr, type;
61 if (processing_template_decl || lambda_expr == error_mark_node)
62 return lambda_expr;
64 /* Make sure any error messages refer to the lambda-introducer. */
65 location_t loc = LAMBDA_EXPR_LOCATION (lambda_expr);
66 iloc_sentinel il (loc);
68 for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
69 node;
70 node = TREE_CHAIN (node))
72 tree field = TREE_PURPOSE (node);
73 tree val = TREE_VALUE (node);
75 if (field == error_mark_node)
77 expr = error_mark_node;
78 goto out;
81 if (TREE_CODE (val) == TREE_LIST)
82 val = build_x_compound_expr_from_list (val, ELK_INIT,
83 tf_warning_or_error);
85 if (DECL_P (val))
86 mark_used (val);
88 /* Mere mortals can't copy arrays with aggregate initialization, so
89 do some magic to make it work here. */
90 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
91 val = build_array_copy (val);
92 else if (DECL_NORMAL_CAPTURE_P (field)
93 && !DECL_VLA_CAPTURE_P (field)
94 && !TYPE_REF_P (TREE_TYPE (field)))
96 /* "the entities that are captured by copy are used to
97 direct-initialize each corresponding non-static data
98 member of the resulting closure object."
100 There's normally no way to express direct-initialization
101 from an element of a CONSTRUCTOR, so we build up a special
102 TARGET_EXPR to bypass the usual copy-initialization. */
103 val = force_rvalue (val, tf_warning_or_error);
104 if (TREE_CODE (val) == TARGET_EXPR)
105 TARGET_EXPR_DIRECT_INIT_P (val) = true;
108 CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
111 expr = build_constructor (init_list_type_node, elts);
112 CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
114 /* N2927: "[The closure] class type is not an aggregate."
115 But we briefly treat it as an aggregate to make this simpler. */
116 type = LAMBDA_EXPR_CLOSURE (lambda_expr);
117 CLASSTYPE_NON_AGGREGATE (type) = 0;
118 expr = finish_compound_literal (type, expr, tf_warning_or_error);
119 protected_set_expr_location (expr, loc);
120 CLASSTYPE_NON_AGGREGATE (type) = 1;
122 out:
123 return expr;
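/* Illustrative sketch (not part of this file): for a lambda such as

     int i = 0, j = 1;
     auto l = [i, &j] { return i + j; };

   the closure object built here is, conceptually, direct-initialized
   member-by-member from the captures, roughly as if the closure type
   were an aggregate:

     struct __closure_t { int __i; int &__j; };
     __closure_t l = { i, j };

   The "__closure_t", "__i" and "__j" names are only for exposition.  */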
126 /* Return an initialized RECORD_TYPE for LAMBDA.
127 LAMBDA must have its explicit captures already. */
129 tree
130 begin_lambda_type (tree lambda)
132 /* Lambda names are nearly but not quite anonymous. */
133 tree name = make_anon_name ();
134 IDENTIFIER_LAMBDA_P (name) = true;
136 /* Create the new RECORD_TYPE for this lambda. */
137 tree type = xref_tag (/*tag_code=*/record_type, name);
138 if (type == error_mark_node)
139 return error_mark_node;
141 /* Designate it as a struct so that we can use aggregate initialization. */
142 CLASSTYPE_DECLARED_CLASS (type) = false;
144 /* Cross-reference the expression and the type. */
145 LAMBDA_EXPR_CLOSURE (lambda) = type;
146 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
148 /* In C++17, assume the closure is literal; we'll clear the flag later if
149 necessary. */
150 if (cxx_dialect >= cxx17)
151 CLASSTYPE_LITERAL_P (type) = true;
153 /* Clear base types. */
154 xref_basetypes (type, /*bases=*/NULL_TREE);
156 /* Start the class. */
157 type = begin_class_definition (type);
159 return type;
162 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
163 closure type. */
165 tree
166 lambda_function (tree lambda)
168 tree type;
169 if (TREE_CODE (lambda) == LAMBDA_EXPR)
170 type = LAMBDA_EXPR_CLOSURE (lambda);
171 else
172 type = lambda;
173 gcc_assert (LAMBDA_TYPE_P (type));
174 /* Don't let debug_tree cause instantiation. */
175 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
176 && !COMPLETE_OR_OPEN_TYPE_P (type))
177 return NULL_TREE;
178 lambda = get_class_binding_direct (type, call_op_identifier);
179 if (lambda)
180 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
181 return lambda;
184 /* True if EXPR is an expression whose type can be used directly in lambda
185 capture. Not to be used for 'auto'. */
187 static bool
188 type_deducible_expression_p (tree expr)
190 if (!type_dependent_expression_p (expr))
191 return true;
192 if (BRACE_ENCLOSED_INITIALIZER_P (expr)
193 || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
194 return false;
195 tree t = non_reference (TREE_TYPE (expr));
196 return (t && TREE_CODE (t) != TYPE_PACK_EXPANSION
197 && !WILDCARD_TYPE_P (t) && !LAMBDA_TYPE_P (t)
198 && !array_of_unknown_bound_p (t)
199 && !type_uses_auto (t));
202 /* Returns the type to use for the FIELD_DECL corresponding to the
203 capture of EXPR. EXPLICIT_INIT_P indicates whether this is a
204 C++14 init capture, and BY_REFERENCE_P indicates whether we're
205 capturing by reference. */
207 tree
208 lambda_capture_field_type (tree expr, bool explicit_init_p,
209 bool by_reference_p)
211 tree type;
212 bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
214 if (is_this)
215 type = TREE_TYPE (expr);
216 else if (explicit_init_p)
218 tree auto_node = make_auto ();
220 type = auto_node;
221 if (by_reference_p)
222 /* Add the reference now, so deduction doesn't lose
223 outermost CV qualifiers of EXPR. */
224 type = build_reference_type (type);
225 if (uses_parameter_packs (expr))
226 /* Stick with 'auto' even if the type could be deduced. */;
227 else
228 type = do_auto_deduction (type, expr, auto_node);
230 else if (!type_deducible_expression_p (expr))
232 type = cxx_make_type (DECLTYPE_TYPE);
233 DECLTYPE_TYPE_EXPR (type) = expr;
234 DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
235 DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
236 SET_TYPE_STRUCTURAL_EQUALITY (type);
238 else
240 STRIP_ANY_LOCATION_WRAPPER (expr);
242 if (!by_reference_p && is_capture_proxy (expr))
244 /* When capturing by-value another capture proxy from an enclosing
245 lambda, consider the type of the corresponding field instead,
 246 as the proxy may be additionally const-qualified if the enclosing
247 lambda is non-mutable (PR94376). */
248 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (expr)) == COMPONENT_REF);
249 expr = TREE_OPERAND (DECL_VALUE_EXPR (expr), 1);
252 type = non_reference (unlowered_expr_type (expr));
254 if (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)
255 type = build_reference_type (type);
258 return type;
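/* Illustrative sketch (not part of this file) of the field types this
   function computes:

     int i = 0;
     const int ci = 0;
     auto l = [i, &ci, x = i + 1] { return i + ci + x; };
     // i  -> field type int         (capture by copy)
     // ci -> field type const int&  (capture by reference)
     // x  -> field type int         (init-capture, deduced as if by auto)

   For expressions whose type cannot be used directly (e.g. dependent
   expressions in a template), the type is instead deferred through the
   DECLTYPE_TYPE placeholder built above.  */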
261 /* Returns true iff DECL is a lambda capture proxy variable created by
262 build_capture_proxy. */
264 bool
265 is_capture_proxy (tree decl)
267 /* Location wrappers should be stripped or otherwise handled by the
268 caller before using this predicate. */
269 gcc_checking_assert (!location_wrapper_p (decl));
271 return (VAR_P (decl)
272 && DECL_HAS_VALUE_EXPR_P (decl)
273 && !DECL_ANON_UNION_VAR_P (decl)
274 && !DECL_DECOMPOSITION_P (decl)
275 && !DECL_FNAME_P (decl)
276 && !(DECL_ARTIFICIAL (decl)
277 && DECL_LANG_SPECIFIC (decl)
278 && DECL_OMP_PRIVATIZED_MEMBER (decl))
279 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
282 /* Returns true iff DECL is a capture proxy for a normal capture
283 (i.e. without explicit initializer). */
285 bool
286 is_normal_capture_proxy (tree decl)
288 if (!is_capture_proxy (decl))
289 /* It's not a capture proxy. */
290 return false;
292 return (DECL_LANG_SPECIFIC (decl)
293 && DECL_CAPTURED_VARIABLE (decl));
296 /* Returns true iff DECL is a capture proxy for a normal capture
297 of a constant variable. */
299 bool
300 is_constant_capture_proxy (tree decl)
302 if (is_normal_capture_proxy (decl))
303 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
304 return false;
307 /* VAR is a capture proxy created by build_capture_proxy; add it to the
308 current function, which is the operator() for the appropriate lambda. */
310 void
311 insert_capture_proxy (tree var)
313 if (is_normal_capture_proxy (var))
315 tree cap = DECL_CAPTURED_VARIABLE (var);
316 if (CHECKING_P)
318 gcc_assert (!is_normal_capture_proxy (cap));
319 tree old = retrieve_local_specialization (cap);
320 if (old)
321 gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
323 register_local_specialization (var, cap);
326 /* Put the capture proxy in the extra body block so that it won't clash
327 with a later local variable. */
328 pushdecl_outermost_localscope (var);
330 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
331 var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
332 tree stmt_list = (*stmt_list_stack)[1];
333 gcc_assert (stmt_list);
334 append_to_statement_list_force (var, &stmt_list);
337 /* We've just finished processing a lambda; if the containing scope is also
338 a lambda, insert any capture proxies that were created while processing
339 the nested lambda. */
341 void
342 insert_pending_capture_proxies (void)
344 tree lam;
345 vec<tree, va_gc> *proxies;
346 unsigned i;
348 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
349 return;
351 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
352 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
353 for (i = 0; i < vec_safe_length (proxies); ++i)
355 tree var = (*proxies)[i];
356 insert_capture_proxy (var);
358 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
359 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
362 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
363 return the type we want the proxy to have: the type of the field itself,
364 with added const-qualification if the lambda isn't mutable and the
365 capture is by value. */
367 tree
368 lambda_proxy_type (tree ref)
370 tree type;
371 if (ref == error_mark_node)
372 return error_mark_node;
373 if (REFERENCE_REF_P (ref))
374 ref = TREE_OPERAND (ref, 0);
375 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
376 type = TREE_TYPE (ref);
377 if (!type || WILDCARD_TYPE_P (non_reference (type)))
379 type = cxx_make_type (DECLTYPE_TYPE);
380 DECLTYPE_TYPE_EXPR (type) = ref;
381 DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
382 SET_TYPE_STRUCTURAL_EQUALITY (type);
384 if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
385 type = make_pack_expansion (type);
386 return type;
389 /* MEMBER is a capture field in a lambda closure class. Now that we're
390 inside the operator(), build a placeholder var for future lookups and
391 debugging. */
393 static tree
394 build_capture_proxy (tree member, tree init)
396 tree var, object, fn, closure, name, lam, type;
398 if (PACK_EXPANSION_P (member))
399 member = PACK_EXPANSION_PATTERN (member);
401 closure = DECL_CONTEXT (member);
402 fn = lambda_function (closure);
403 lam = CLASSTYPE_LAMBDA_EXPR (closure);
405 object = DECL_ARGUMENTS (fn);
406 /* The proxy variable forwards to the capture field. */
407 if (INDIRECT_TYPE_P (TREE_TYPE (object)))
408 object = build_fold_indirect_ref (object);
409 object = finish_non_static_data_member (member, object, NULL_TREE);
410 if (REFERENCE_REF_P (object))
411 object = TREE_OPERAND (object, 0);
413 /* Remove the __ inserted by add_capture. */
414 if (IDENTIFIER_POINTER (DECL_NAME (member))[2] == '_'
415 && IDENTIFIER_POINTER (DECL_NAME (member))[3] == '.')
416 name = get_identifier ("_");
417 else
418 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
420 type = lambda_proxy_type (object);
422 if (name == this_identifier && !INDIRECT_TYPE_P (type))
424 type = build_pointer_type (type);
425 type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
426 object = build_fold_addr_expr_with_type (object, type);
429 if (DECL_VLA_CAPTURE_P (member))
431 /* Rebuild the VLA type from the pointer and maxindex. */
432 tree field = next_aggregate_field (TYPE_FIELDS (type));
433 tree ptr = build_simple_component_ref (object, field);
434 field = next_aggregate_field (DECL_CHAIN (field));
435 tree max = build_simple_component_ref (object, field);
436 type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
437 build_index_type (max));
438 type = build_reference_type (type);
439 object = convert (type, ptr);
442 complete_type (type);
444 var = build_decl (input_location, VAR_DECL, name, type);
445 SET_DECL_VALUE_EXPR (var, object);
446 DECL_HAS_VALUE_EXPR_P (var) = 1;
447 DECL_ARTIFICIAL (var) = 1;
448 TREE_USED (var) = 1;
449 DECL_CONTEXT (var) = fn;
451 if (DECL_NORMAL_CAPTURE_P (member))
453 if (DECL_VLA_CAPTURE_P (member))
455 init = CONSTRUCTOR_ELT (init, 0)->value;
456 init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
457 init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
459 else
461 if (PACK_EXPANSION_P (init))
462 init = PACK_EXPANSION_PATTERN (init);
465 if (INDIRECT_REF_P (init))
466 init = TREE_OPERAND (init, 0);
467 STRIP_NOPS (init);
469 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
470 while (is_normal_capture_proxy (init))
471 init = DECL_CAPTURED_VARIABLE (init);
472 retrofit_lang_decl (var);
473 DECL_CAPTURED_VARIABLE (var) = init;
476 if (name == this_identifier)
478 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
479 LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
482 if (fn == current_function_decl)
483 insert_capture_proxy (var);
484 else
485 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
487 return var;
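/* Illustrative sketch (not part of this file): inside the body of

     int i = 0;
     auto l = [i] { return i; };

   the use of "i" resolves to the artificial proxy VAR_DECL built here,
   whose DECL_VALUE_EXPR is the COMPONENT_REF selecting the capture field
   from the closure object, so the body is effectively
   "return __closure->__i;".  The "__closure" parameter name is only for
   exposition.  */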
490 static GTY(()) tree ptr_id;
491 static GTY(()) tree max_id;
493 /* Return a struct containing a pointer and a length for lambda capture of
494 an array of runtime length. */
496 static tree
497 vla_capture_type (tree array_type)
499 tree type = xref_tag (record_type, make_anon_name ());
500 xref_basetypes (type, NULL_TREE);
501 type = begin_class_definition (type);
502 if (!ptr_id)
504 ptr_id = get_identifier ("ptr");
505 max_id = get_identifier ("max");
507 tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
508 tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
509 finish_member_declaration (field);
510 field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
511 finish_member_declaration (field);
512 return finish_struct (type, NULL_TREE);
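/* Illustrative sketch (not part of this file): for a reference capture of
   an N3639 array of runtime bound,

     void f (int n)
     {
       int a[n];
       auto l = [&a] { return a[0]; };
     }

   the closure stores, roughly, a struct { int *ptr; size_t max; } built
   from &a[0] and the maximum index, and build_capture_proxy reconstructs
   a reference to the VLA type from those two fields.  */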
515 /* From an ID and INITIALIZER, create a capture (by reference if
516 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
517 and return it. If ID is `this', BY_REFERENCE_P says whether
518 `*this' is captured by reference. */
520 tree
521 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
522 bool explicit_init_p, unsigned *name_independent_cnt)
524 char *buf;
525 tree type, member, name;
526 bool vla = false;
527 bool variadic = false;
528 tree initializer = orig_init;
530 if (PACK_EXPANSION_P (initializer))
532 initializer = PACK_EXPANSION_PATTERN (initializer);
533 variadic = true;
536 if (TREE_CODE (initializer) == TREE_LIST
537 /* A pack expansion might end up with multiple elements. */
538 && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
539 initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
540 tf_warning_or_error);
541 type = TREE_TYPE (initializer);
542 if (type == error_mark_node)
543 return error_mark_node;
545 if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
547 vla = true;
548 if (!by_reference_p)
549 error ("array of runtime bound cannot be captured by copy, "
550 "only by reference");
552 /* For a VLA, we capture the address of the first element and the
553 maximum index, and then reconstruct the VLA for the proxy. */
554 tree elt = cp_build_array_ref (input_location, initializer,
555 integer_zero_node, tf_warning_or_error);
556 initializer = build_constructor_va (init_list_type_node, 2,
557 NULL_TREE, build_address (elt),
558 NULL_TREE, array_type_nelts (type));
559 type = vla_capture_type (type);
561 else if (!dependent_type_p (type)
562 && variably_modified_type_p (type, NULL_TREE))
564 sorry ("capture of variably-modified type %qT that is not an N3639 array "
565 "of runtime bound", type);
566 if (TREE_CODE (type) == ARRAY_TYPE
567 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
568 inform (input_location, "because the array element type %qT has "
569 "variable size", TREE_TYPE (type));
570 return error_mark_node;
572 else
574 type = lambda_capture_field_type (initializer, explicit_init_p,
575 by_reference_p);
576 if (type == error_mark_node)
577 return error_mark_node;
579 if (id == this_identifier && !by_reference_p)
581 gcc_assert (INDIRECT_TYPE_P (type));
582 type = TREE_TYPE (type);
583 initializer = cp_build_fold_indirect_ref (initializer);
586 if (dependent_type_p (type))
588 else if (id != this_identifier && by_reference_p)
590 if (!lvalue_p (initializer))
592 error ("cannot capture %qE by reference", initializer);
593 return error_mark_node;
596 else
598 /* Capture by copy requires a complete type. */
599 type = complete_type (type);
600 if (!COMPLETE_TYPE_P (type))
602 error ("capture by copy of incomplete type %qT", type);
603 cxx_incomplete_type_inform (type);
604 return error_mark_node;
606 else if (!verify_type_context (input_location,
607 TCTX_CAPTURE_BY_COPY, type))
608 return error_mark_node;
612 /* Add __ to the beginning of the field name so that user code
613 won't find the field with name lookup. We can't just leave the name
614 unset because template instantiation uses the name to find
615 instantiated fields. */
616 if (id_equal (id, "_") && name_independent_cnt)
618 if (*name_independent_cnt == 0)
619 name = get_identifier ("___");
620 else
 622 /* For the second and later name-independent captures,
 623 use unique names. */
624 char buf2[5 + (HOST_BITS_PER_INT + 2) / 3];
625 sprintf (buf2, "___.%u", *name_independent_cnt);
626 name = get_identifier (buf2);
628 name_independent_cnt[0]++;
630 else
632 buf = XALLOCAVEC (char, IDENTIFIER_LENGTH (id) + 3);
633 buf[1] = buf[0] = '_';
634 memcpy (buf + 2, IDENTIFIER_POINTER (id),
635 IDENTIFIER_LENGTH (id) + 1);
636 name = get_identifier (buf);
639 if (variadic)
641 type = make_pack_expansion (type);
642 if (explicit_init_p)
643 /* With an explicit initializer 'type' is auto, which isn't really a
644 parameter pack in this context. We will want as many fields as we
645 have elements in the expansion of the initializer, so use its packs
646 instead. */
648 PACK_EXPANSION_PARAMETER_PACKS (type)
649 = uses_parameter_packs (initializer);
650 PACK_EXPANSION_AUTO_P (type) = true;
654 /* Make member variable. */
655 member = build_decl (input_location, FIELD_DECL, name, type);
656 DECL_VLA_CAPTURE_P (member) = vla;
658 if (!explicit_init_p)
659 /* Normal captures are invisible to name lookup but uses are replaced
660 with references to the capture field; we implement this by only
661 really making them invisible in unevaluated context; see
662 qualify_lookup. For now, let's make explicitly initialized captures
663 always visible. */
664 DECL_NORMAL_CAPTURE_P (member) = true;
666 if (id == this_identifier)
667 LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
669 /* Add it to the appropriate closure class if we've started it. */
670 if (current_class_type
671 && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
673 if (COMPLETE_TYPE_P (current_class_type))
674 internal_error ("trying to capture %qD in instantiation of "
675 "generic lambda", id);
676 finish_member_declaration (member);
679 tree listmem = member;
680 if (variadic)
682 listmem = make_pack_expansion (member);
683 initializer = orig_init;
685 LAMBDA_EXPR_CAPTURE_LIST (lambda)
686 = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
688 if (LAMBDA_EXPR_CLOSURE (lambda))
689 return build_capture_proxy (member, initializer);
690 /* For explicit captures we haven't started the function yet, so we wait
691 and build the proxy from cp_parser_lambda_body. */
692 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
693 return NULL_TREE;
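/* Illustrative sketch (not part of this file) of the field naming scheme
   used above, assuming C++26 name-independent ("_") init-captures:

     int x = 0;
     auto l = [x, _ = 1, _ = 2] { return x; };

   "x" becomes a FIELD_DECL named "__x"; the first name-independent
   capture is named "___" and later ones "___.1", "___.2", ..., keeping
   the fields invisible to name lookup while still giving template
   instantiation distinct names to find.  */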
696 /* Register all the capture members on the list CAPTURES, which is the
697 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
699 void
700 register_capture_members (tree captures)
702 if (captures == NULL_TREE)
703 return;
705 register_capture_members (TREE_CHAIN (captures));
707 tree field = TREE_PURPOSE (captures);
708 if (PACK_EXPANSION_P (field))
709 field = PACK_EXPANSION_PATTERN (field);
711 finish_member_declaration (field);
714 /* Similar to add_capture, except this works on a stack of nested lambdas.
715 BY_REFERENCE_P in this case is derived from the default capture mode.
716 Returns the capture for the lambda at the bottom of the stack. */
718 tree
719 add_default_capture (tree lambda_stack, tree id, tree initializer)
721 bool this_capture_p = (id == this_identifier);
722 tree var = NULL_TREE;
723 tree saved_class_type = current_class_type;
725 for (tree node = lambda_stack;
726 node;
727 node = TREE_CHAIN (node))
729 tree lambda = TREE_VALUE (node);
731 current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
732 if (DECL_PACK_P (initializer))
733 initializer = make_pack_expansion (initializer);
734 var = add_capture (lambda,
736 initializer,
737 /*by_reference_p=*/
738 (this_capture_p
739 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
740 == CPLD_REFERENCE)),
741 /*explicit_init_p=*/false, NULL);
742 initializer = convert_from_reference (var);
744 /* Warn about deprecated implicit capture of this via [=]. */
745 if (cxx_dialect >= cxx20
746 && this_capture_p
747 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
749 if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
750 "implicit capture of %qE via %<[=]%> is deprecated "
751 "in C++20", this_identifier))
752 inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
753 "%<*this%> capture");
757 current_class_type = saved_class_type;
759 return var;
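/* Illustrative sketch (not part of this file) of the C++20 deprecation
   diagnosed above:

     struct S {
       int m;
       void f () {
         auto a = [=] { return m; };        // warns: implicit 'this' via [=]
         auto b = [=, this] { return m; };  // OK since C++20
         auto c = [*this] { return m; };    // OK, captures a copy of *this
       }
     };
*/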
762 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
763 form of an INDIRECT_REF, possibly adding it through default
764 capturing, if ADD_CAPTURE_P is nonzero. If ADD_CAPTURE_P is negative,
765 try to capture but don't complain if we can't. */
767 tree
768 lambda_expr_this_capture (tree lambda, int add_capture_p)
770 tree result;
772 tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
774 /* In unevaluated context this isn't an odr-use, so don't capture. */
775 if (cp_unevaluated_operand)
776 add_capture_p = false;
778 /* Try to default capture 'this' if we can. */
779 if (!this_capture)
781 tree lambda_stack = NULL_TREE;
782 tree init = NULL_TREE;
783 bool saw_complete = false;
785 /* If we are in a lambda function, we can move out until we hit:
786 1. a non-lambda function or NSDMI,
787 2. a lambda function capturing 'this', or
788 3. a non-default capturing lambda function. */
789 for (tree tlambda = lambda; ;)
791 if (add_capture_p
792 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
793 /* tlambda won't let us capture 'this'. */
794 break;
796 if (add_capture_p)
797 lambda_stack = tree_cons (NULL_TREE,
798 tlambda,
799 lambda_stack);
801 tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
802 if (COMPLETE_TYPE_P (closure))
 803 /* We're instantiating a generic lambda op(); the containing
 804 scope may be gone. */
805 saw_complete = true;
807 tree containing_function
808 = decl_function_context (TYPE_NAME (closure));
810 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
811 if (ex && TREE_CODE (ex) == FIELD_DECL)
813 /* Lambda in an NSDMI. We don't have a function to look up
814 'this' in, but we can find (or rebuild) the fake one from
815 inject_this_parameter. */
816 if (!containing_function && !saw_complete)
817 /* If we're parsing a lambda in a non-local class,
818 we can find the fake 'this' in scope_chain. */
819 init = scope_chain->x_current_class_ptr;
820 else
821 /* Otherwise it's either gone or buried in
822 function_context_stack, so make another. */
823 init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
824 TYPE_UNQUALIFIED);
825 gcc_checking_assert
826 (init && (TREE_TYPE (TREE_TYPE (init))
827 == current_nonlambda_class_type ()));
828 break;
831 if (containing_function == NULL_TREE)
832 /* We ran out of scopes; there's no 'this' to capture. */
833 break;
835 if (!LAMBDA_FUNCTION_P (containing_function))
837 /* We found a non-lambda function.
838 There is no this pointer in xobj member functions. */
839 if (DECL_IOBJ_MEMBER_FUNCTION_P (containing_function))
840 /* First parameter is 'this'. */
841 init = DECL_ARGUMENTS (containing_function);
842 break;
845 tlambda
846 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
848 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
850 /* An outer lambda has already captured 'this'. */
851 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
852 break;
856 if (init)
858 if (add_capture_p)
859 this_capture = add_default_capture (lambda_stack,
860 /*id=*/this_identifier,
861 init);
862 else
863 this_capture = init;
867 if (cp_unevaluated_operand)
868 result = this_capture;
869 else if (!this_capture)
871 if (add_capture_p == 1)
873 error ("%<this%> was not captured for this lambda function");
874 result = error_mark_node;
876 else
877 result = NULL_TREE;
879 else
881 /* To make sure that current_class_ref is for the lambda. */
882 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
883 == LAMBDA_EXPR_CLOSURE (lambda));
885 result = this_capture;
887 /* If 'this' is captured, each use of 'this' is transformed into an
888 access to the corresponding unnamed data member of the closure
889 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
890 ensures that the transformed expression is an rvalue. ] */
891 result = rvalue (result);
894 return result;
897 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
899 tree
900 current_lambda_expr (void)
902 tree type = current_class_type;
903 while (type && !LAMBDA_TYPE_P (type))
904 type = decl_type_context (TYPE_NAME (type));
905 if (type)
906 return CLASSTYPE_LAMBDA_EXPR (type);
907 else
908 return NULL_TREE;
911 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
 912 object, or NULL_TREE otherwise. */
914 static tree
915 resolvable_dummy_lambda (tree object)
917 if (!is_dummy_object (object))
918 return NULL_TREE;
920 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
921 gcc_assert (!TYPE_PTR_P (type));
923 if (type != current_class_type
924 && current_class_type
925 && LAMBDA_TYPE_P (current_class_type)
926 && lambda_function (current_class_type)
927 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
928 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
930 return NULL_TREE;
933 /* We don't want to capture 'this' until we know we need it, i.e. after
934 overload resolution has chosen a non-static member function. At that
935 point we call this function to turn a dummy object into a use of the
936 'this' capture. */
938 tree
939 maybe_resolve_dummy (tree object, bool add_capture_p)
941 if (tree lam = resolvable_dummy_lambda (object))
942 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
943 if (cap != error_mark_node)
944 object = build_fold_indirect_ref (cap);
946 return object;
949 /* When parsing a generic lambda containing an argument-dependent
950 member function call we defer overload resolution to instantiation
951 time. But we have to know now whether to capture this or not.
952 Do that if FNS contains any non-static fns.
953 The std doesn't anticipate this case, but I expect this to be the
954 outcome of discussion. */
956 void
957 maybe_generic_this_capture (tree object, tree fns)
959 if (tree lam = resolvable_dummy_lambda (object))
960 if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
962 /* We've not yet captured, so look at the function set of
963 interest. */
964 if (BASELINK_P (fns))
965 fns = BASELINK_FUNCTIONS (fns);
966 bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
967 if (id_expr)
968 fns = TREE_OPERAND (fns, 0);
970 for (lkp_iterator iter (fns); iter; ++iter)
971 if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
972 || TREE_CODE (*iter) == TEMPLATE_DECL)
973 && DECL_IOBJ_MEMBER_FUNCTION_P (*iter))
975 /* Found a non-static member. Capture this. */
976 lambda_expr_this_capture (lam, /*maybe*/-1);
977 break;
982 /* Returns the innermost non-lambda function. */
984 tree
985 current_nonlambda_function (void)
987 tree fn = current_function_decl;
988 while (fn && LAMBDA_FUNCTION_P (fn))
989 fn = decl_function_context (fn);
990 return fn;
993 /* Returns the method basetype of the innermost non-lambda function, including
994 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
996 tree
997 nonlambda_method_basetype (void)
999 if (!current_class_ref)
1000 return NULL_TREE;
1002 tree type = current_class_type;
1003 if (!type || !LAMBDA_TYPE_P (type))
1004 return type;
1006 while (true)
1008 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1009 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
1010 if (ex && TREE_CODE (ex) == FIELD_DECL)
1011 /* Lambda in an NSDMI. */
1012 return DECL_CONTEXT (ex);
1014 tree fn = TYPE_CONTEXT (type);
1015 if (!fn || TREE_CODE (fn) != FUNCTION_DECL
1016 || !DECL_IOBJ_MEMBER_FUNCTION_P (fn))
1017 /* No enclosing non-lambda method. */
1018 return NULL_TREE;
1019 if (!LAMBDA_FUNCTION_P (fn))
1020 /* Found an enclosing non-lambda method. */
1021 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1022 type = DECL_CONTEXT (fn);
1026 /* Like current_scope, but looking through lambdas. */
1028 tree
1029 current_nonlambda_scope (void)
1031 tree scope = current_scope ();
1032 for (;;)
1034 if (TREE_CODE (scope) == FUNCTION_DECL
1035 && LAMBDA_FUNCTION_P (scope))
1037 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1038 continue;
1040 else if (LAMBDA_TYPE_P (scope))
1042 scope = CP_TYPE_CONTEXT (scope);
1043 continue;
1045 break;
1047 return scope;
1050 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1051 indicated FN and NARGS, but do not initialize the return type or any of the
1052 argument slots. */
1054 static tree
1055 prepare_op_call (tree fn, int nargs)
1057 tree t;
1059 t = build_vl_exp (CALL_EXPR, nargs + 3);
1060 CALL_EXPR_FN (t) = fn;
1061 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1063 return t;
1066 /* Return true iff CALLOP is the op() for a generic lambda. */
1068 bool
1069 generic_lambda_fn_p (tree callop)
1071 return (LAMBDA_FUNCTION_P (callop)
1072 && DECL_TEMPLATE_INFO (callop)
1073 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
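/* Illustrative sketch (not part of this file): the call operator of

     auto l = [] (auto x, auto y) { return x + y; };

   is a member function template, so generic_lambda_fn_p is true for it,
   whereas a non-generic lambda such as [] (int x) { return x; } has an
   ordinary, non-template operator().  */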
1076 /* If the closure TYPE has a static op(), also add a conversion to function
1077 pointer. */
1079 void
1080 maybe_add_lambda_conv_op (tree type)
1082 bool nested = (cfun != NULL);
1083 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1084 tree callop = lambda_function (type);
1085 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1087 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1088 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1089 return;
1091 if (processing_template_decl)
1092 return;
1094 bool const generic_lambda_p = generic_lambda_fn_p (callop);
1096 if (!generic_lambda_p && undeduced_auto_decl (callop))
1098 /* If the op() wasn't deduced due to errors, give up. */
1099 gcc_assert (errorcount || sorrycount);
1100 return;
1103 /* Non-generic non-capturing lambdas only have a conversion function to
1104 pointer to function when the trailing requires-clause's constraints are
1105 satisfied. */
1106 if (!generic_lambda_p && !constraints_satisfied_p (callop))
1107 return;
1109 /* Non-template conversion operators are defined directly with build_call_a
1110 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1111 deferred and the CALL is built in-place. In the case of a deduced return
1112 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1113 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1114 the return expression may differ in flags from those in the body CALL. In
1115 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1116 the body CALL, but not in DECLTYPE_CALL. */
1118 vec<tree, va_gc> *direct_argvec = 0;
1119 tree decltype_call = 0, call = 0;
1120 tree optype = TREE_TYPE (callop);
1121 tree fn_result = TREE_TYPE (optype);
1123 tree thisarg = NULL_TREE;
1124 if (TREE_CODE (optype) == METHOD_TYPE)
1125 thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1126 if (generic_lambda_p)
1128 ++processing_template_decl;
1130 /* Prepare the dependent member call for the static member function
1131 '_FUN' and, potentially, prepare another call to be used in a decltype
1132 return expression for a deduced return call op to allow for simple
1133 implementation of the conversion operator. */
1135 tree objfn;
1136 int nargs = list_length (DECL_ARGUMENTS (callop));
1137 if (thisarg)
1139 tree instance = cp_build_fold_indirect_ref (thisarg);
1140 objfn = lookup_template_function (DECL_NAME (callop),
1141 DECL_TI_ARGS (callop));
1142 objfn = build_min (COMPONENT_REF, NULL_TREE,
1143 instance, objfn, NULL_TREE);
1144 --nargs;
1145 call = prepare_op_call (objfn, nargs);
1147 else
1148 objfn = callop;
1150 if (type_uses_auto (fn_result))
1151 decltype_call = prepare_op_call (objfn, nargs);
1153 else if (thisarg)
1155 direct_argvec = make_tree_vector ();
1156 direct_argvec->quick_push (thisarg);
1159 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1160 declare the static member function "_FUN" below. For each arg append to
1161 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1162 call args (for the template case). If a parameter pack is found, expand
1163 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1165 tree fn_args = NULL_TREE;
1167 int ix = 0;
1168 tree src = FUNCTION_FIRST_USER_PARM (callop);
1169 tree tgt = NULL;
1171 if (!thisarg && !decltype_call)
1172 src = NULL_TREE;
1173 while (src)
1175 tree new_node = copy_node (src);
1176 /* We set DECL_CONTEXT of NEW_NODE to the statfn below.
1177 Notice this is creating a recursive type! */
1179 /* Clear TREE_ADDRESSABLE on thunk arguments. */
1180 TREE_ADDRESSABLE (new_node) = 0;
1182 if (!fn_args)
1183 fn_args = tgt = new_node;
1184 else
1186 TREE_CHAIN (tgt) = new_node;
1187 tgt = new_node;
1190 mark_exp_read (tgt);
1192 if (generic_lambda_p)
1194 tree a = tgt;
1195 if (thisarg)
1197 if (DECL_PACK_P (tgt))
1199 a = make_pack_expansion (a);
1200 PACK_EXPANSION_LOCAL_P (a) = true;
1202 CALL_EXPR_ARG (call, ix) = a;
1205 if (decltype_call)
1207 /* Avoid capturing variables in this context. */
1208 ++cp_unevaluated_operand;
1209 CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1210 --cp_unevaluated_operand;
1213 ++ix;
1215 else
1216 vec_safe_push (direct_argvec, tgt);
1218 src = TREE_CHAIN (src);
1222 if (generic_lambda_p)
1224 if (decltype_call)
1226 fn_result = finish_decltype_type
1227 (decltype_call, /*id_expression_or_member_access_p=*/false,
1228 tf_warning_or_error);
1231 else if (thisarg)
1233 /* Don't warn on deprecated or unavailable lambda declarations, unless
1234 the lambda is actually called. */
1235 auto du = make_temp_override (deprecated_state,
1236 UNAVAILABLE_DEPRECATED_SUPPRESS);
1237 call = build_call_a (callop, direct_argvec->length (),
1238 direct_argvec->address ());
1241 if (thisarg)
1243 CALL_FROM_THUNK_P (call) = 1;
1244 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1247 tree stattype
1248 = build_function_type (fn_result, FUNCTION_FIRST_USER_PARMTYPE (callop));
1249 stattype = (cp_build_type_attribute_variant
1250 (stattype, TYPE_ATTRIBUTES (optype)));
1251 if (flag_noexcept_type
1252 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1253 stattype = build_exception_variant (stattype, noexcept_true_spec);
1255 if (generic_lambda_p)
1256 --processing_template_decl;
1258 /* First build up the conversion op. */
1260 tree rettype = build_pointer_type (stattype);
1261 tree name = make_conv_op_name (rettype);
1262 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1263 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1264 /* DR 1722: The conversion function should be noexcept. */
1265 fntype = build_exception_variant (fntype, noexcept_true_spec);
1266 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1267 SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1268 tree fn = convfn;
1269 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1270 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1271 grokclassfn (type, fn, NO_SPECIAL);
1272 set_linkage_according_to_type (type, fn);
1273 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1274 DECL_IN_AGGR_P (fn) = 1;
1275 DECL_ARTIFICIAL (fn) = 1;
1276 DECL_NOT_REALLY_EXTERN (fn) = 1;
1277 DECL_DECLARED_INLINE_P (fn) = 1;
1278 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1279 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1280 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1281 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1283 if (nested_def)
1284 DECL_INTERFACE_KNOWN (fn) = 1;
1286 if (generic_lambda_p)
1287 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1289 add_method (type, fn, false);
1291 if (thisarg == NULL_TREE)
1293 /* For static lambda, just return operator(). */
1294 if (nested)
1295 push_function_context ();
1296 else
1297 /* Still increment function_depth so that we don't GC in the
1298 middle of an expression. */
1299 ++function_depth;
1301 /* Generate the body of the conversion op. */
1303 start_preparsed_function (convfn, NULL_TREE,
1304 SF_PRE_PARSED | SF_INCLASS_INLINE);
1305 tree body = begin_function_body ();
1306 tree compound_stmt = begin_compound_stmt (0);
1308 /* decl_needed_p needs to see that it's used. */
1309 TREE_USED (callop) = 1;
1310 finish_return_stmt (decay_conversion (callop, tf_warning_or_error));
1312 finish_compound_stmt (compound_stmt);
1313 finish_function_body (body);
1315 fn = finish_function (/*inline_p=*/true);
1316 if (!generic_lambda_p)
1317 expand_or_defer_fn (fn);
1319 if (nested)
1320 pop_function_context ();
1321 else
1322 --function_depth;
1323 return;
1326 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1327 the conversion op is used. */
1328 if (varargs_function_p (callop))
1330 DECL_DELETED_FN (fn) = 1;
1331 return;
1334 /* Now build up the thunk to be returned. */
1336 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1337 SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1338 fn = statfn;
1339 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1340 grokclassfn (type, fn, NO_SPECIAL);
1341 set_linkage_according_to_type (type, fn);
1342 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1343 DECL_IN_AGGR_P (fn) = 1;
1344 DECL_ARTIFICIAL (fn) = 1;
1345 DECL_NOT_REALLY_EXTERN (fn) = 1;
1346 DECL_DECLARED_INLINE_P (fn) = 1;
1347 DECL_STATIC_FUNCTION_P (fn) = 1;
1348 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1349 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1350 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1351 DECL_ARGUMENTS (fn) = fn_args;
1352 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1354 /* Avoid duplicate -Wshadow warnings. */
1355 DECL_NAME (arg) = NULL_TREE;
1356 DECL_CONTEXT (arg) = fn;
1358 if (nested_def)
1359 DECL_INTERFACE_KNOWN (fn) = 1;
1361 if (generic_lambda_p)
1362 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1364 if (flag_sanitize & SANITIZE_NULL)
1365 /* Don't UBsan this function; we're deliberately calling op() with a null
1366 object argument. */
1367 add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1369 add_method (type, fn, false);
1371 if (nested)
1372 push_function_context ();
1373 else
1374 /* Still increment function_depth so that we don't GC in the
1375 middle of an expression. */
1376 ++function_depth;
1378 /* Generate the body of the thunk. */
1380 start_preparsed_function (statfn, NULL_TREE,
1381 SF_PRE_PARSED | SF_INCLASS_INLINE);
1382 tree body = begin_function_body ();
1383 tree compound_stmt = begin_compound_stmt (0);
1384 if (!generic_lambda_p)
1386 set_flags_from_callee (call);
1387 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1388 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1390 call = convert_from_reference (call);
1391 finish_return_stmt (call);
1393 finish_compound_stmt (compound_stmt);
1394 finish_function_body (body);
1396 fn = finish_function (/*inline_p=*/true);
1397 if (!generic_lambda_p)
1398 expand_or_defer_fn (fn);
1400 /* Generate the body of the conversion op. */
1402 start_preparsed_function (convfn, NULL_TREE,
1403 SF_PRE_PARSED | SF_INCLASS_INLINE);
1404 body = begin_function_body ();
1405 compound_stmt = begin_compound_stmt (0);
1407 /* decl_needed_p needs to see that it's used. */
1408 TREE_USED (statfn) = 1;
1409 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1411 finish_compound_stmt (compound_stmt);
1412 finish_function_body (body);
1414 fn = finish_function (/*inline_p=*/true);
1415 if (!generic_lambda_p)
1416 expand_or_defer_fn (fn);
1418 if (nested)
1419 pop_function_context ();
1420 else
1421 --function_depth;
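/* Illustrative sketch (not part of this file) of what the conversion
   built above provides for a capture-less lambda:

     auto l = [] (int x) { return x + 1; };
     int (*fp) (int) = l;     // uses the conversion to function pointer
     int r = fp (1);          // goes through the static "_FUN" thunk

   A capturing lambda gets no such conversion, and for a generic lambda
   the conversion is itself a template:

     auto g = [] (auto x) { return x; };
     long (*gp) (long) = g;
*/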
1424 /* True if FN is the static function "_FUN" that gets returned from the lambda
1425 conversion operator. */
1427 bool
1428 lambda_static_thunk_p (tree fn)
1430 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1431 && DECL_ARTIFICIAL (fn)
1432 && DECL_STATIC_FUNCTION_P (fn)
1433 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1436 bool
1437 call_from_lambda_thunk_p (tree call)
1439 return (CALL_FROM_THUNK_P (call)
1440 && lambda_static_thunk_p (current_function_decl));
1443 /* Returns true iff VAL is a lambda-related declaration which should
1444 be ignored by unqualified lookup. */
1446 bool
1447 is_lambda_ignored_entity (tree val)
1449 /* Look past normal, non-VLA capture proxies. */
1450 if (is_normal_capture_proxy (val)
1451 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1452 return true;
1454 /* Always ignore lambda fields, their names are only for debugging. */
1455 if (TREE_CODE (val) == FIELD_DECL
1456 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1457 return true;
1459 /* None of the lookups that use qualify_lookup want the op() from the
1460 lambda; they want the one from the enclosing class. */
1461 if (tree fns = maybe_get_fns (val))
1462 if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
1463 return true;
1465 return false;
1468 /* Lambdas that appear in variable initializer or default argument
1469 scope get that in their mangling, so we need to record it. Also,
1470 multiple lambdas in the same scope may need a mangling
1471 discriminator. In ABI <= 17, there is a single per-scope sequence
1472 number. In ABI >= 18, there are per-scope per-signature sequence
1473 numbers. */
1474 struct GTY(()) lambda_sig_count
1476 tree fn; // The lambda fn whose sig this is.
1477 unsigned count;
1479 struct GTY(()) lambda_discriminator
1481 tree scope;
1482 unsigned nesting; // Inside a function, VAR_DECLs get the function
1483 // as scope. This counts that nesting.
1484 unsigned count; // The per-scope counter.
1485 vec<lambda_sig_count, va_gc> *discriminators; // Per-signature counters
1487 // The current scope.
1488 static GTY(()) lambda_discriminator lambda_scope;
1489 // Stack of previous scopes.
1490 static GTY(()) vec<lambda_discriminator, va_gc> *lambda_scope_stack;
1492 // Push DECL as lambda extra scope, also new discriminator counters.
1494 void
1495 start_lambda_scope (tree decl)
1497 gcc_checking_assert (decl);
1498 if (current_function_decl && VAR_P (decl))
1499 // If we're inside a function, we ignore variable scope. Don't push.
1500 lambda_scope.nesting++;
1501 else
1503 vec_safe_push (lambda_scope_stack, lambda_scope);
1504 lambda_scope.scope = decl;
1505 lambda_scope.nesting = 0;
1506 lambda_scope.count = 0;
1507 lambda_scope.discriminators = nullptr;
1511 // Pop from the current lambda extra scope.
1513 void
1514 finish_lambda_scope (void)
1516 if (!lambda_scope.nesting--)
1518 lambda_scope = lambda_scope_stack->last ();
1519 lambda_scope_stack->pop ();
1523 // Record the current lambda scope into LAMBDA
1525 void
1526 record_lambda_scope (tree lambda)
1528 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope.scope;
1529 if (lambda_scope.scope)
1531 tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1532 gcc_checking_assert (closure);
1533 maybe_key_decl (lambda_scope.scope, TYPE_NAME (closure));
1537 // Compare lambda template heads TMPL_A and TMPL_B, used for both
1538 // templated lambdas, and template template parameters of said lambda.
1540 static bool
1541 compare_lambda_template_head (tree tmpl_a, tree tmpl_b)
1543 // We only need one level of template parms
1544 tree inner_a = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_a));
1545 tree inner_b = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_b));
1547 // We only compare explicit template parms, ignoring trailing
1548 // synthetic ones.
1549 int len_a = TREE_VEC_LENGTH (inner_a);
1550 int len_b = TREE_VEC_LENGTH (inner_b);
1552 for (int ix = 0, len = MAX (len_a, len_b); ix != len; ix++)
1554 tree parm_a = NULL_TREE;
1555 if (ix < len_a)
1557 parm_a = TREE_VEC_ELT (inner_a, ix);
1558 if (parm_a == error_mark_node)
1559 return false;
1560 parm_a = TREE_VALUE (parm_a);
1561 if (parm_a == error_mark_node)
1562 return false;
1563 if (DECL_VIRTUAL_P (parm_a))
1564 parm_a = NULL_TREE;
1567 tree parm_b = NULL_TREE;
1568 if (ix < len_b)
1570 parm_b = TREE_VEC_ELT (inner_b, ix);
1571 if (parm_b == error_mark_node)
1572 return false;
1573 parm_b = TREE_VALUE (parm_b);
1574 if (parm_b == error_mark_node)
1575 return false;
1576 if (DECL_VIRTUAL_P (parm_b))
1577 parm_b = NULL_TREE;
1580 if (!parm_a && !parm_b)
1581 // we're done
1582 break;
1584 if (!(parm_a && parm_b))
1585 return false;
1587 if (TREE_CODE (parm_a) != TREE_CODE (parm_b))
1588 return false;
1590 if (TREE_CODE (parm_a) == PARM_DECL)
1592 if (TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_a))
1593 != TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_b)))
1594 return false;
1596 if (!same_type_p (TREE_TYPE (parm_a), TREE_TYPE (parm_b)))
1597 return false;
1599 else
1601 if (TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_a))
1602 != TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_b)))
1603 return false;
1605 if (TREE_CODE (parm_a) != TEMPLATE_DECL)
1606 gcc_checking_assert (TREE_CODE (parm_a) == TYPE_DECL);
1607 else if (!compare_lambda_template_head (parm_a, parm_b))
1608 return false;
1612 return true;
1615 // Compare lambda signatures FN_A and FN_B, they may be TEMPLATE_DECLs too.
1617 static bool
1618 compare_lambda_sig (tree fn_a, tree fn_b)
1620 if (TREE_CODE (fn_a) == TEMPLATE_DECL
1621 && TREE_CODE (fn_b) == TEMPLATE_DECL)
1623 if (!compare_lambda_template_head (fn_a, fn_b))
1624 return false;
1625 fn_a = DECL_TEMPLATE_RESULT (fn_a);
1626 fn_b = DECL_TEMPLATE_RESULT (fn_b);
1628 else if (TREE_CODE (fn_a) == TEMPLATE_DECL
1629 || TREE_CODE (fn_b) == TEMPLATE_DECL)
1630 return false;
1632 if (fn_a == error_mark_node
1633 || fn_b == error_mark_node)
1634 return false;
1636 for (tree args_a = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_a))),
1637 args_b = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_b)));
1638 args_a || args_b;
1639 args_a = TREE_CHAIN (args_a), args_b = TREE_CHAIN (args_b))
1641 if (!args_a || !args_b)
1642 return false;
1643 // This check also deals with differing variadicness
1644 if (!same_type_p (TREE_VALUE (args_a), TREE_VALUE (args_b)))
1645 return false;
1648 return true;
1651 // Record the per-scope discriminator of LAMBDA. If the extra scope
1652 // is empty, we must use the empty scope counter, which might not be
1653 // the live one.
1655 void
1656 record_lambda_scope_discriminator (tree lambda)
1658 auto *slot = (vec_safe_is_empty (lambda_scope_stack)
1659 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1660 ? &lambda_scope : lambda_scope_stack->begin ());
1661 LAMBDA_EXPR_SCOPE_ONLY_DISCRIMINATOR (lambda) = slot->count++;
1664 // Record the per-scope per-signature discriminator of LAMBDA. If the
1665 // extra scope is empty, we must use the empty scope counter, which
1666 // might not be the live one.
1668 void
1669 record_lambda_scope_sig_discriminator (tree lambda, tree fn)
1671 auto *slot = (vec_safe_is_empty (lambda_scope_stack)
1672 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1673 ? &lambda_scope : lambda_scope_stack->begin ());
1674 gcc_checking_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == slot->scope);
1676 // A linear search, we're not expecting this to be a big list, and
1677 // this avoids needing a signature hash function.
1678 lambda_sig_count *sig;
1679 if (unsigned ix = vec_safe_length (slot->discriminators))
1680 for (sig = slot->discriminators->begin (); ix--; sig++)
1681 if (compare_lambda_sig (fn, sig->fn))
1682 goto found;
1684 lambda_sig_count init = {fn, 0};
1685 sig = vec_safe_push (slot->discriminators, init);
1687 found:
1688 LAMBDA_EXPR_SCOPE_SIG_DISCRIMINATOR (lambda) = sig->count++;
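/* Illustrative sketch (not part of this file) of the per-signature
   counters recorded above (lambda mangling ABI >= 18):

     inline void h ()
     {
       auto a = [] (int) {};    // signature (int)  -> discriminator 0
       auto b = [] (int) {};    // signature (int)  -> discriminator 1
       auto c = [] (long) {};   // signature (long) -> discriminator 0
     }

   With ABI <= 17 a single per-scope counter is used instead, so the
   three lambdas would simply get 0, 1 and 2.  */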
1691 tree
1692 start_lambda_function (tree fco, tree lambda_expr)
1694 /* Let the front end know that we are going to be defining this
1695 function. */
1696 start_preparsed_function (fco,
1697 NULL_TREE,
1698 SF_PRE_PARSED | SF_INCLASS_INLINE);
1700 tree body = begin_function_body ();
1702 /* Push the proxies for any explicit captures. */
1703 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1704 cap = TREE_CHAIN (cap))
1705 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1707 return body;
1710 /* Subroutine of prune_lambda_captures: CAP is a node in
1711 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1712 might optimize away the capture, or NULL_TREE if there is no such
1713 variable. */
1715 static tree
1716 var_to_maybe_prune (tree cap)
1718 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1719 /* Don't prune explicit captures. */
1720 return NULL_TREE;
1722 tree mem = TREE_PURPOSE (cap);
1723 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1724 /* Packs and init-captures aren't captures of constant vars. */
1725 return NULL_TREE;
1727 tree init = TREE_VALUE (cap);
1728 if (is_normal_capture_proxy (init))
1729 init = DECL_CAPTURED_VARIABLE (init);
1730 if (decl_constant_var_p (init))
1731 return init;
1733 return NULL_TREE;
1736 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1737 for constant variables are actually used in the lambda body.
1739 There will always be a DECL_EXPR for the capture proxy; remember it when we
1740 see it, but replace it with any other use. */
1742 static tree
1743 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1745 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1747 tree var = NULL_TREE;
1748 if (TREE_CODE (*t) == DECL_EXPR)
1750 tree decl = DECL_EXPR_DECL (*t);
1751 if (is_constant_capture_proxy (decl))
1753 var = DECL_CAPTURED_VARIABLE (decl);
1754 *walk_subtrees = 0;
1757 else if (!location_wrapper_p (*t) /* is_capture_proxy dislikes them. */
1758 && is_constant_capture_proxy (*t))
1759 var = DECL_CAPTURED_VARIABLE (*t);
1761 if (var)
1763 tree *&slot = const_vars.get_or_insert (var);
1764 if (!slot || VAR_P (*t))
1765 slot = t;
1768 return NULL_TREE;
1771 /* We're at the end of processing a lambda; go back and remove any captures of
1772 constant variables for which we've folded away all uses. */
1774 static void
1775 prune_lambda_captures (tree body)
1777 tree lam = current_lambda_expr ();
1778 if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1779 /* No uses were optimized away. */
1780 return;
1781 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1782 /* No default captures, and we don't prune explicit captures. */
1783 return;
1784 /* Don't bother pruning in a template, we'll prune at instantiation time. */
1785 if (dependent_type_p (TREE_TYPE (lam)))
1786 return;
1788 hash_map<tree,tree*> const_vars;
1790 cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1792 tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1793 for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1795 tree cap = *capp;
1796 if (tree var = var_to_maybe_prune (cap))
1798 tree **use = const_vars.get (var);
1799 if (use && TREE_CODE (**use) == DECL_EXPR)
1801 /* All uses of this capture were folded away, leaving only the
1802 proxy declaration. */
1804 /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST. */
1805 *capp = TREE_CHAIN (cap);
1807 /* And out of TYPE_FIELDS. */
1808 tree field = TREE_PURPOSE (cap);
1809 while (*fieldp != field)
1810 fieldp = &DECL_CHAIN (*fieldp);
1811 *fieldp = DECL_CHAIN (*fieldp);
1813 /* And remove the capture proxy declaration. */
1814 **use = void_node;
1815 continue;
1819 capp = &TREE_CHAIN (cap);
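/* Illustrative sketch (not part of this file) of a capture the code
   above can remove:

     void g ()
     {
       const int n = 42;
       auto l = [=] { return n; };
     }

   n is captured while the body is parsed, but every use of it is then
   folded to the constant 42, leaving only the proxy's DECL_EXPR; the
   capture field and the proxy declaration are therefore spliced out.  */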
 1823 // Finish the lambda BODY: prune captures of constant variables
 1824 // whose uses were all folded away, then finish the call operator
 1825 // and expand it unless it is a template.
1827 void
1828 finish_lambda_function (tree body)
1830 finish_function_body (body);
1832 prune_lambda_captures (body);
1834 /* Finish the function and generate code for it if necessary. */
1835 tree fn = finish_function (/*inline_p=*/true);
1837 /* Only expand if the call op is not a template. */
1838 if (!DECL_TEMPLATE_INFO (fn))
1839 expand_or_defer_fn (fn);
1842 #include "gt-cp-lambda.h"