1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
6 Copyright (C) 1998-2021 Free Software Foundation, Inc.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
35 /* Constructor for a lambda expression. */
37 tree
38 build_lambda_expr (void)
40 tree lambda = make_node (LAMBDA_EXPR);
41 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
43 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
44 LAMBDA_EXPR_REGEN_INFO (lambda) = NULL_TREE;
45 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
46 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
47 return lambda;
50 /* Create the closure object for a LAMBDA_EXPR. */
52 tree
53 build_lambda_object (tree lambda_expr)
55 /* Build aggregate constructor call.
56 - cp_parser_braced_list
57 - cp_parser_functional_cast */
58 vec<constructor_elt, va_gc> *elts = NULL;
59 tree node, expr, type;
60 location_t saved_loc;
62 if (processing_template_decl || lambda_expr == error_mark_node)
63 return lambda_expr;
65 /* Make sure any error messages refer to the lambda-introducer. */
66 saved_loc = input_location;
67 input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
69 for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
70 node;
71 node = TREE_CHAIN (node))
73 tree field = TREE_PURPOSE (node);
74 tree val = TREE_VALUE (node);
76 if (field == error_mark_node)
78 expr = error_mark_node;
79 goto out;
82 if (TREE_CODE (val) == TREE_LIST)
83 val = build_x_compound_expr_from_list (val, ELK_INIT,
84 tf_warning_or_error);
86 if (DECL_P (val))
87 mark_used (val);
89 /* Mere mortals can't copy arrays with aggregate initialization, so
90 do some magic to make it work here. */
91 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
92 val = build_array_copy (val);
93 else if (DECL_NORMAL_CAPTURE_P (field)
94 && !DECL_VLA_CAPTURE_P (field)
95 && !TYPE_REF_P (TREE_TYPE (field)))
97 /* "the entities that are captured by copy are used to
98 direct-initialize each corresponding non-static data
99 member of the resulting closure object."
101 There's normally no way to express direct-initialization
102 from an element of a CONSTRUCTOR, so we build up a special
103 TARGET_EXPR to bypass the usual copy-initialization. */
104 val = force_rvalue (val, tf_warning_or_error);
105 if (TREE_CODE (val) == TARGET_EXPR)
106 TARGET_EXPR_DIRECT_INIT_P (val) = true;
109 CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
112 expr = build_constructor (init_list_type_node, elts);
113 CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
115 /* N2927: "[The closure] class type is not an aggregate."
116 But we briefly treat it as an aggregate to make this simpler. */
117 type = LAMBDA_EXPR_CLOSURE (lambda_expr);
118 CLASSTYPE_NON_AGGREGATE (type) = 0;
119 expr = finish_compound_literal (type, expr, tf_warning_or_error);
120 CLASSTYPE_NON_AGGREGATE (type) = 1;
122 out:
123 input_location = saved_loc;
124 return expr;
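/* For illustration: given

     int i = 1, j = 2;
     auto l = [i, &j] { return i + j; };

   the closure object is built roughly as if by the aggregate
   initialization  closure_type{i, j},  with each element
   direct-initializing the corresponding capture field; the CONSTRUCTOR
   and TARGET_EXPR handling above is what arranges that.  */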
127 /* Return an initialized RECORD_TYPE for LAMBDA.
128 LAMBDA must have its explicit captures already. */
130 tree
131 begin_lambda_type (tree lambda)
133 /* Lambda names are nearly but not quite anonymous. */
134 tree name = make_anon_name ();
135 IDENTIFIER_LAMBDA_P (name) = true;
137 /* Create the new RECORD_TYPE for this lambda. */
138 tree type = xref_tag (/*tag_code=*/record_type, name);
139 if (type == error_mark_node)
140 return error_mark_node;
142 /* Designate it as a struct so that we can use aggregate initialization. */
143 CLASSTYPE_DECLARED_CLASS (type) = false;
145 /* Cross-reference the expression and the type. */
146 LAMBDA_EXPR_CLOSURE (lambda) = type;
147 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
149 /* In C++17, assume the closure is literal; we'll clear the flag later if
150 necessary. */
151 if (cxx_dialect >= cxx17)
152 CLASSTYPE_LITERAL_P (type) = true;
154 /* Clear base types. */
155 xref_basetypes (type, /*bases=*/NULL_TREE);
157 /* Start the class. */
158 type = begin_class_definition (type);
160 return type;
163 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
164 closure type. */
166 tree
167 lambda_function (tree lambda)
169 tree type;
170 if (TREE_CODE (lambda) == LAMBDA_EXPR)
171 type = LAMBDA_EXPR_CLOSURE (lambda);
172 else
173 type = lambda;
174 gcc_assert (LAMBDA_TYPE_P (type));
175 /* Don't let debug_tree cause instantiation. */
176 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
177 && !COMPLETE_OR_OPEN_TYPE_P (type))
178 return NULL_TREE;
179 lambda = lookup_member (type, call_op_identifier,
180 /*protect=*/0, /*want_type=*/false,
181 tf_warning_or_error);
182 if (lambda)
183 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
184 return lambda;
187 /* Returns the type to use for the FIELD_DECL corresponding to the
188 capture of EXPR. EXPLICIT_INIT_P indicates whether this is a
189 C++14 init capture, and BY_REFERENCE_P indicates whether we're
190 capturing by reference. */
192 tree
193 lambda_capture_field_type (tree expr, bool explicit_init_p,
194 bool by_reference_p)
196 tree type;
197 bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
199 if (!is_this && explicit_init_p)
201 tree auto_node = make_auto ();
203 type = auto_node;
204 if (by_reference_p)
205 /* Add the reference now, so deduction doesn't lose
206 outermost CV qualifiers of EXPR. */
207 type = build_reference_type (type);
208 if (uses_parameter_packs (expr))
209 /* Stick with 'auto' even if the type could be deduced. */;
210 else
211 type = do_auto_deduction (type, expr, auto_node);
213 else if (!is_this && type_dependent_expression_p (expr))
215 type = cxx_make_type (DECLTYPE_TYPE);
216 DECLTYPE_TYPE_EXPR (type) = expr;
217 DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
218 DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
219 SET_TYPE_STRUCTURAL_EQUALITY (type);
221 else
223 type = non_reference (unlowered_expr_type (expr));
225 if (!is_this
226 && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
227 type = build_reference_type (type);
230 return type;
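/* Rough examples of the capture field types chosen above:

     double d = 0;
     [d]      { };   // field type: double (plain copy)
     [&d]     { };   // field type: double&
     [x = d]  { };   // init-capture: 'auto' deduced to double
     [&x = d] { };   // init-capture: 'auto&' deduced to double&

   Dependent expressions and packs instead keep a DECLTYPE_TYPE or an
   undeduced 'auto' until instantiation, as the code above shows.  */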
233 /* Returns true iff DECL is a lambda capture proxy variable created by
234 build_capture_proxy. */
236 bool
237 is_capture_proxy (tree decl)
239 return (VAR_P (decl)
240 && DECL_HAS_VALUE_EXPR_P (decl)
241 && !DECL_ANON_UNION_VAR_P (decl)
242 && !DECL_DECOMPOSITION_P (decl)
243 && !DECL_FNAME_P (decl)
244 && !(DECL_ARTIFICIAL (decl)
245 && DECL_LANG_SPECIFIC (decl)
246 && DECL_OMP_PRIVATIZED_MEMBER (decl))
247 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
250 /* Returns true iff DECL is a capture proxy for a normal capture
251 (i.e. without explicit initializer). */
253 bool
254 is_normal_capture_proxy (tree decl)
256 if (!is_capture_proxy (decl))
257 /* It's not a capture proxy. */
258 return false;
260 return (DECL_LANG_SPECIFIC (decl)
261 && DECL_CAPTURED_VARIABLE (decl));
264 /* Returns true iff DECL is a capture proxy for a normal capture
265 of a constant variable. */
267 bool
268 is_constant_capture_proxy (tree decl)
270 if (is_normal_capture_proxy (decl))
271 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
272 return false;
275 /* VAR is a capture proxy created by build_capture_proxy; add it to the
276 current function, which is the operator() for the appropriate lambda. */
278 void
279 insert_capture_proxy (tree var)
281 if (is_normal_capture_proxy (var))
283 tree cap = DECL_CAPTURED_VARIABLE (var);
284 if (CHECKING_P)
286 gcc_assert (!is_normal_capture_proxy (cap));
287 tree old = retrieve_local_specialization (cap);
288 if (old)
289 gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
291 register_local_specialization (var, cap);
294 /* Put the capture proxy in the extra body block so that it won't clash
295 with a later local variable. */
296 pushdecl_outermost_localscope (var);
298 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
299 var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
300 tree stmt_list = (*stmt_list_stack)[1];
301 gcc_assert (stmt_list);
302 append_to_statement_list_force (var, &stmt_list);
305 /* We've just finished processing a lambda; if the containing scope is also
306 a lambda, insert any capture proxies that were created while processing
307 the nested lambda. */
309 void
310 insert_pending_capture_proxies (void)
312 tree lam;
313 vec<tree, va_gc> *proxies;
314 unsigned i;
316 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
317 return;
319 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
320 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
321 for (i = 0; i < vec_safe_length (proxies); ++i)
323 tree var = (*proxies)[i];
324 insert_capture_proxy (var);
326 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
327 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
330 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
331 return the type we want the proxy to have: the type of the field itself,
332 with added const-qualification if the lambda isn't mutable and the
333 capture is by value. */
335 tree
336 lambda_proxy_type (tree ref)
338 tree type;
339 if (ref == error_mark_node)
340 return error_mark_node;
341 if (REFERENCE_REF_P (ref))
342 ref = TREE_OPERAND (ref, 0);
343 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
344 type = TREE_TYPE (ref);
345 if (!type || WILDCARD_TYPE_P (non_reference (type)))
347 type = cxx_make_type (DECLTYPE_TYPE);
348 DECLTYPE_TYPE_EXPR (type) = ref;
349 DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
350 SET_TYPE_STRUCTURAL_EQUALITY (type);
352 if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
353 type = make_pack_expansion (type);
354 return type;
357 /* MEMBER is a capture field in a lambda closure class. Now that we're
358 inside the operator(), build a placeholder var for future lookups and
359 debugging. */
361 static tree
362 build_capture_proxy (tree member, tree init)
364 tree var, object, fn, closure, name, lam, type;
366 if (PACK_EXPANSION_P (member))
367 member = PACK_EXPANSION_PATTERN (member);
369 closure = DECL_CONTEXT (member);
370 fn = lambda_function (closure);
371 lam = CLASSTYPE_LAMBDA_EXPR (closure);
373 /* The proxy variable forwards to the capture field. */
374 object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
375 object = finish_non_static_data_member (member, object, NULL_TREE);
376 if (REFERENCE_REF_P (object))
377 object = TREE_OPERAND (object, 0);
379 /* Remove the __ inserted by add_capture. */
380 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
382 type = lambda_proxy_type (object);
384 if (name == this_identifier && !INDIRECT_TYPE_P (type))
386 type = build_pointer_type (type);
387 type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
388 object = build_fold_addr_expr_with_type (object, type);
391 if (DECL_VLA_CAPTURE_P (member))
393 /* Rebuild the VLA type from the pointer and maxindex. */
394 tree field = next_initializable_field (TYPE_FIELDS (type));
395 tree ptr = build_simple_component_ref (object, field);
396 field = next_initializable_field (DECL_CHAIN (field));
397 tree max = build_simple_component_ref (object, field);
398 type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
399 build_index_type (max));
400 type = build_reference_type (type);
401 object = convert (type, ptr);
404 complete_type (type);
406 var = build_decl (input_location, VAR_DECL, name, type);
407 SET_DECL_VALUE_EXPR (var, object);
408 DECL_HAS_VALUE_EXPR_P (var) = 1;
409 DECL_ARTIFICIAL (var) = 1;
410 TREE_USED (var) = 1;
411 DECL_CONTEXT (var) = fn;
413 if (DECL_NORMAL_CAPTURE_P (member))
415 if (DECL_VLA_CAPTURE_P (member))
417 init = CONSTRUCTOR_ELT (init, 0)->value;
418 init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
419 init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
421 else
423 if (PACK_EXPANSION_P (init))
424 init = PACK_EXPANSION_PATTERN (init);
427 if (INDIRECT_REF_P (init))
428 init = TREE_OPERAND (init, 0);
429 STRIP_NOPS (init);
431 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
432 while (is_normal_capture_proxy (init))
433 init = DECL_CAPTURED_VARIABLE (init);
434 retrofit_lang_decl (var);
435 DECL_CAPTURED_VARIABLE (var) = init;
438 if (name == this_identifier)
440 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
441 LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
444 if (fn == current_function_decl)
445 insert_capture_proxy (var);
446 else
447 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
449 return var;
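/* For illustration: for

     int i = 0;
     auto l = [i] { return i; };

   the capture field is named "__i", and inside the operator() the name
   'i' resolves to a proxy VAR_DECL whose DECL_VALUE_EXPR is the
   COMPONENT_REF  (*this).__i,  so uses of 'i' in the body read the
   member directly rather than any separate storage.  */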
452 static GTY(()) tree ptr_id;
453 static GTY(()) tree max_id;
455 /* Return a struct containing a pointer and a length for lambda capture of
456 an array of runtime length. */
458 static tree
459 vla_capture_type (tree array_type)
461 tree type = xref_tag (record_type, make_anon_name ());
462 xref_basetypes (type, NULL_TREE);
463 type = begin_class_definition (type);
464 if (!ptr_id)
466 ptr_id = get_identifier ("ptr");
467 max_id = get_identifier ("max");
469 tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
470 tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
471 finish_member_declaration (field);
472 field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
473 finish_member_declaration (field);
474 return finish_struct (type, NULL_TREE);
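/* Sketch of the record built above for an element type T (the field
   names "ptr" and "max" are the ones created here):

     struct {
       T *ptr;     // address of the first element
       size_t max; // maximum index, i.e. array_type_nelts
     };

   add_capture stores the element address and maximum index in it, and
   build_capture_proxy rebuilds a reference to T[max+1] from the two
   fields.  */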
477 /* From an ID and INITIALIZER, create a capture (by reference if
478 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
479 and return it. If ID is `this', BY_REFERENCE_P says whether
480 `*this' is captured by reference. */
482 tree
483 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
484 bool explicit_init_p)
486 char *buf;
487 tree type, member, name;
488 bool vla = false;
489 bool variadic = false;
490 tree initializer = orig_init;
492 if (PACK_EXPANSION_P (initializer))
494 initializer = PACK_EXPANSION_PATTERN (initializer);
495 variadic = true;
498 if (TREE_CODE (initializer) == TREE_LIST
499 /* A pack expansion might end up with multiple elements. */
500 && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
501 initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
502 tf_warning_or_error);
503 type = TREE_TYPE (initializer);
504 if (type == error_mark_node)
505 return error_mark_node;
507 if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
509 vla = true;
510 if (!by_reference_p)
511 error ("array of runtime bound cannot be captured by copy, "
512 "only by reference");
514 /* For a VLA, we capture the address of the first element and the
515 maximum index, and then reconstruct the VLA for the proxy. */
516 tree elt = cp_build_array_ref (input_location, initializer,
517 integer_zero_node, tf_warning_or_error);
518 initializer = build_constructor_va (init_list_type_node, 2,
519 NULL_TREE, build_address (elt),
520 NULL_TREE, array_type_nelts (type));
521 type = vla_capture_type (type);
523 else if (!dependent_type_p (type)
524 && variably_modified_type_p (type, NULL_TREE))
526 sorry ("capture of variably-modified type %qT that is not an N3639 array "
527 "of runtime bound", type);
528 if (TREE_CODE (type) == ARRAY_TYPE
529 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
530 inform (input_location, "because the array element type %qT has "
531 "variable size", TREE_TYPE (type));
532 return error_mark_node;
534 else
536 type = lambda_capture_field_type (initializer, explicit_init_p,
537 by_reference_p);
538 if (type == error_mark_node)
539 return error_mark_node;
541 if (id == this_identifier && !by_reference_p)
543 gcc_assert (INDIRECT_TYPE_P (type));
544 type = TREE_TYPE (type);
545 initializer = cp_build_fold_indirect_ref (initializer);
548 if (dependent_type_p (type))
550 else if (id != this_identifier && by_reference_p)
552 if (!lvalue_p (initializer))
554 error ("cannot capture %qE by reference", initializer);
555 return error_mark_node;
558 else
560 /* Capture by copy requires a complete type. */
561 type = complete_type (type);
562 if (!COMPLETE_TYPE_P (type))
564 error ("capture by copy of incomplete type %qT", type);
565 cxx_incomplete_type_inform (type);
566 return error_mark_node;
568 else if (!verify_type_context (input_location,
569 TCTX_CAPTURE_BY_COPY, type))
570 return error_mark_node;
574 /* Add __ to the beginning of the field name so that user code
575 won't find the field with name lookup. We can't just leave the name
576 unset because template instantiation uses the name to find
577 instantiated fields. */
578 buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
579 buf[1] = buf[0] = '_';
580 memcpy (buf + 2, IDENTIFIER_POINTER (id),
581 IDENTIFIER_LENGTH (id) + 1);
582 name = get_identifier (buf);
584 if (variadic)
586 type = make_pack_expansion (type);
587 if (explicit_init_p)
588 /* With an explicit initializer 'type' is auto, which isn't really a
589 parameter pack in this context. We will want as many fields as we
590 have elements in the expansion of the initializer, so use its packs
591 instead. */
593 PACK_EXPANSION_PARAMETER_PACKS (type)
594 = uses_parameter_packs (initializer);
595 PACK_EXPANSION_AUTO_P (type) = true;
599 /* Make member variable. */
600 member = build_decl (input_location, FIELD_DECL, name, type);
601 DECL_VLA_CAPTURE_P (member) = vla;
603 if (!explicit_init_p)
604 /* Normal captures are invisible to name lookup but uses are replaced
605 with references to the capture field; we implement this by only
606 really making them invisible in unevaluated context; see
607 qualify_lookup. For now, let's make explicitly initialized captures
608 always visible. */
609 DECL_NORMAL_CAPTURE_P (member) = true;
611 if (id == this_identifier)
612 LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
614 /* Add it to the appropriate closure class if we've started it. */
615 if (current_class_type
616 && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
618 if (COMPLETE_TYPE_P (current_class_type))
619 internal_error ("trying to capture %qD in instantiation of "
620 "generic lambda", id);
621 finish_member_declaration (member);
624 tree listmem = member;
625 if (variadic)
627 listmem = make_pack_expansion (member);
628 initializer = orig_init;
630 LAMBDA_EXPR_CAPTURE_LIST (lambda)
631 = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
633 if (LAMBDA_EXPR_CLOSURE (lambda))
634 return build_capture_proxy (member, initializer);
635 /* For explicit captures we haven't started the function yet, so we wait
636 and build the proxy from cp_parser_lambda_body. */
637 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
638 return NULL_TREE;
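/* Example of the field naming above: a capture list such as

     int x = 0;
     [x, &r = x] { }

   produces closure fields "__x" and "__r".  The "__" prefix keeps user
   name lookup from finding the fields themselves, while the capture
   proxies built later reintroduce the plain names inside the body.  */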
641 /* Register all the capture members on the list CAPTURES, which is the
642 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
644 void
645 register_capture_members (tree captures)
647 if (captures == NULL_TREE)
648 return;
650 register_capture_members (TREE_CHAIN (captures));
652 tree field = TREE_PURPOSE (captures);
653 if (PACK_EXPANSION_P (field))
654 field = PACK_EXPANSION_PATTERN (field);
656 finish_member_declaration (field);
659 /* Similar to add_capture, except this works on a stack of nested lambdas.
660 BY_REFERENCE_P in this case is derived from the default capture mode.
661 Returns the capture for the lambda at the bottom of the stack. */
663 tree
664 add_default_capture (tree lambda_stack, tree id, tree initializer)
666 bool this_capture_p = (id == this_identifier);
667 tree var = NULL_TREE;
668 tree saved_class_type = current_class_type;
670 for (tree node = lambda_stack;
671 node;
672 node = TREE_CHAIN (node))
674 tree lambda = TREE_VALUE (node);
676 current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
677 if (DECL_PACK_P (initializer))
678 initializer = make_pack_expansion (initializer);
679 var = add_capture (lambda,
681 initializer,
682 /*by_reference_p=*/
683 (this_capture_p
684 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
685 == CPLD_REFERENCE)),
686 /*explicit_init_p=*/false);
687 initializer = convert_from_reference (var);
689 /* Warn about deprecated implicit capture of this via [=]. */
690 if (cxx_dialect >= cxx20
691 && this_capture_p
692 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
694 if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
695 "implicit capture of %qE via %<[=]%> is deprecated "
696 "in C++20", this_identifier))
697 inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
698 "%<*this%> capture");
702 current_class_type = saved_class_type;
704 return var;
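/* For example, with nested default captures

     int i = 0;
     [=] { [=] { return i; }; };

   the use of 'i' in the inner lambda walks the lambda_stack and adds an
   implicit copy capture of 'i' to the outer lambda as well as the inner
   one, each initialized from the capture in the enclosing scope.  */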
707 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
708 form of an INDIRECT_REF, possibly adding it through default
709 capturing, if ADD_CAPTURE_P is nonzero. If ADD_CAPTURE_P is negative,
710 try to capture but don't complain if we can't. */
712 tree
713 lambda_expr_this_capture (tree lambda, int add_capture_p)
715 tree result;
717 tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
719 /* In unevaluated context this isn't an odr-use, so don't capture. */
720 if (cp_unevaluated_operand)
721 add_capture_p = false;
723 /* Try to default capture 'this' if we can. */
724 if (!this_capture)
726 tree lambda_stack = NULL_TREE;
727 tree init = NULL_TREE;
729 /* If we are in a lambda function, we can move out until we hit:
730 1. a non-lambda function or NSDMI,
731 2. a lambda function capturing 'this', or
732 3. a non-default capturing lambda function. */
733 for (tree tlambda = lambda; ;)
735 if (add_capture_p
736 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
737 /* tlambda won't let us capture 'this'. */
738 break;
740 if (add_capture_p)
741 lambda_stack = tree_cons (NULL_TREE,
742 tlambda,
743 lambda_stack);
745 tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
746 tree containing_function
747 = decl_function_context (TYPE_NAME (closure));
749 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
750 if (ex && TREE_CODE (ex) == FIELD_DECL)
752 /* Lambda in an NSDMI. We don't have a function to look up
753 'this' in, but we can find (or rebuild) the fake one from
754 inject_this_parameter. */
755 if (!containing_function && !COMPLETE_TYPE_P (closure))
756 /* If we're parsing a lambda in a non-local class,
757 we can find the fake 'this' in scope_chain. */
758 init = scope_chain->x_current_class_ptr;
759 else
760 /* Otherwise it's either gone or buried in
761 function_context_stack, so make another. */
762 init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
763 TYPE_UNQUALIFIED);
764 gcc_checking_assert
765 (init && (TREE_TYPE (TREE_TYPE (init))
766 == current_nonlambda_class_type ()));
767 break;
770 if (containing_function == NULL_TREE)
771 /* We ran out of scopes; there's no 'this' to capture. */
772 break;
774 if (!LAMBDA_FUNCTION_P (containing_function))
776 /* We found a non-lambda function. */
777 if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
778 /* First parameter is 'this'. */
779 init = DECL_ARGUMENTS (containing_function);
780 break;
783 tlambda
784 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
786 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
788 /* An outer lambda has already captured 'this'. */
789 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
790 break;
794 if (init)
796 if (add_capture_p)
797 this_capture = add_default_capture (lambda_stack,
798 /*id=*/this_identifier,
799 init);
800 else
801 this_capture = init;
805 if (cp_unevaluated_operand)
806 result = this_capture;
807 else if (!this_capture)
809 if (add_capture_p == 1)
811 error ("%<this%> was not captured for this lambda function");
812 result = error_mark_node;
814 else
815 result = NULL_TREE;
817 else
819 /* To make sure that current_class_ref is for the lambda. */
820 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
821 == LAMBDA_EXPR_CLOSURE (lambda));
823 result = this_capture;
825 /* If 'this' is captured, each use of 'this' is transformed into an
826 access to the corresponding unnamed data member of the closure
827 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
828 ensures that the transformed expression is an rvalue. ] */
829 result = rvalue (result);
832 return result;
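/* Illustrative example of the walk above:

     struct A {
       int i;
       void f () {
         [=] { [&] { return i; }; };
       }
     };

   the use of the member 'i' in the innermost lambda needs 'this'; it is
   not captured there yet, so we walk out through the enclosing [=]
   lambda (whose capture-default allows capturing 'this') until we reach
   f, whose first parameter supplies the initializer; add_default_capture
   then adds the capture to each lambda on the stack.  */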
835 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
837 tree
838 current_lambda_expr (void)
840 tree type = current_class_type;
841 while (type && !LAMBDA_TYPE_P (type))
842 type = decl_type_context (TYPE_NAME (type));
843 if (type)
844 return CLASSTYPE_LAMBDA_EXPR (type);
845 else
846 return NULL_TREE;
849 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
850    object, or NULL_TREE otherwise.  */

852 static tree
853 resolvable_dummy_lambda (tree object)
855 if (!is_dummy_object (object))
856 return NULL_TREE;
858 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
859 gcc_assert (!TYPE_PTR_P (type));
861 if (type != current_class_type
862 && current_class_type
863 && LAMBDA_TYPE_P (current_class_type)
864 && lambda_function (current_class_type)
865 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
866 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
868 return NULL_TREE;
871 /* We don't want to capture 'this' until we know we need it, i.e. after
872 overload resolution has chosen a non-static member function. At that
873 point we call this function to turn a dummy object into a use of the
874 'this' capture. */
876 tree
877 maybe_resolve_dummy (tree object, bool add_capture_p)
879 if (tree lam = resolvable_dummy_lambda (object))
880 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
881 if (cap != error_mark_node)
882 object = build_fold_indirect_ref (cap);
884 return object;
887 /* When parsing a generic lambda containing an argument-dependent
888 member function call we defer overload resolution to instantiation
889 time. But we have to know now whether to capture this or not.
890 Do that if FNS contains any non-static fns.
891 The std doesn't anticipate this case, but I expect this to be the
892 outcome of discussion. */
894 void
895 maybe_generic_this_capture (tree object, tree fns)
897 if (tree lam = resolvable_dummy_lambda (object))
898 if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
900 /* We've not yet captured, so look at the function set of
901 interest. */
902 if (BASELINK_P (fns))
903 fns = BASELINK_FUNCTIONS (fns);
904 bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
905 if (id_expr)
906 fns = TREE_OPERAND (fns, 0);
908 for (lkp_iterator iter (fns); iter; ++iter)
909 if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
910 || TREE_CODE (*iter) == TEMPLATE_DECL)
911 && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
913 /* Found a non-static member. Capture this. */
914 lambda_expr_this_capture (lam, /*maybe*/-1);
915 break;
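/* For illustration: in a generic lambda such as

     struct A {
       int g (int);
       void f () {
         [=] (auto x) { return g (x); };
       }
     };

   overload resolution for g(x) is deferred until instantiation, but
   whether 'this' is captured must be decided now, so we capture as soon
   as FNS contains any non-static member function.  */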
920 /* Returns the innermost non-lambda function. */
922 tree
923 current_nonlambda_function (void)
925 tree fn = current_function_decl;
926 while (fn && LAMBDA_FUNCTION_P (fn))
927 fn = decl_function_context (fn);
928 return fn;
931 /* Returns the method basetype of the innermost non-lambda function, including
932 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
934 tree
935 nonlambda_method_basetype (void)
937 if (!current_class_ref)
938 return NULL_TREE;
940 tree type = current_class_type;
941 if (!type || !LAMBDA_TYPE_P (type))
942 return type;
944 while (true)
946 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
947 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
948 if (ex && TREE_CODE (ex) == FIELD_DECL)
949 /* Lambda in an NSDMI. */
950 return DECL_CONTEXT (ex);
952 tree fn = TYPE_CONTEXT (type);
953 if (!fn || TREE_CODE (fn) != FUNCTION_DECL
954 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
955 /* No enclosing non-lambda method. */
956 return NULL_TREE;
957 if (!LAMBDA_FUNCTION_P (fn))
958 /* Found an enclosing non-lambda method. */
959 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
960 type = DECL_CONTEXT (fn);
964 /* Like current_scope, but looking through lambdas. */
966 tree
967 current_nonlambda_scope (void)
969 tree scope = current_scope ();
970 for (;;)
972 if (TREE_CODE (scope) == FUNCTION_DECL
973 && LAMBDA_FUNCTION_P (scope))
975 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
976 continue;
978 else if (LAMBDA_TYPE_P (scope))
980 scope = CP_TYPE_CONTEXT (scope);
981 continue;
983 break;
985 return scope;
988 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
989 indicated FN and NARGS, but do not initialize the return type or any of the
990 argument slots. */
992 static tree
993 prepare_op_call (tree fn, int nargs)
995 tree t;
997 t = build_vl_exp (CALL_EXPR, nargs + 3);
998 CALL_EXPR_FN (t) = fn;
999 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1001 return t;
1004 /* Return true iff CALLOP is the op() for a generic lambda. */
1006 bool
1007 generic_lambda_fn_p (tree callop)
1009 return (LAMBDA_FUNCTION_P (callop)
1010 && DECL_TEMPLATE_INFO (callop)
1011 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1014 /* If the closure TYPE has a static op(), also add a conversion to function
1015 pointer. */
1017 void
1018 maybe_add_lambda_conv_op (tree type)
1020 bool nested = (cfun != NULL);
1021 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1022 tree callop = lambda_function (type);
1023 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1025 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1026 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1027 return;
1029 if (processing_template_decl)
1030 return;
1032 bool const generic_lambda_p = generic_lambda_fn_p (callop);
1034 if (!generic_lambda_p && undeduced_auto_decl (callop))
1036 /* If the op() wasn't deduced due to errors, give up. */
1037 gcc_assert (errorcount || sorrycount);
1038 return;
1041 /* Non-generic non-capturing lambdas only have a conversion function to
1042 pointer to function when the trailing requires-clause's constraints are
1043 satisfied. */
1044 if (!generic_lambda_p && !constraints_satisfied_p (callop))
1045 return;
1047 /* Non-template conversion operators are defined directly with build_call_a
1048 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1049 deferred and the CALL is built in-place. In the case of a deduced return
1050 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1051 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1052 the return expression may differ in flags from those in the body CALL. In
1053 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1054 the body CALL, but not in DECLTYPE_CALL. */
1056 vec<tree, va_gc> *direct_argvec = 0;
1057 tree decltype_call = 0, call = 0;
1058 tree optype = TREE_TYPE (callop);
1059 tree fn_result = TREE_TYPE (optype);
1061 tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1062 if (generic_lambda_p)
1064 ++processing_template_decl;
1066 /* Prepare the dependent member call for the static member function
1067 '_FUN' and, potentially, prepare another call to be used in a decltype
1068 return expression for a deduced return call op to allow for simple
1069 implementation of the conversion operator. */
1071 tree instance = cp_build_fold_indirect_ref (thisarg);
1072 tree objfn = lookup_template_function (DECL_NAME (callop),
1073 DECL_TI_ARGS (callop));
1074 objfn = build_min (COMPONENT_REF, NULL_TREE,
1075 instance, objfn, NULL_TREE);
1076 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1078 call = prepare_op_call (objfn, nargs);
1079 if (type_uses_auto (fn_result))
1080 decltype_call = prepare_op_call (objfn, nargs);
1082 else
1084 direct_argvec = make_tree_vector ();
1085 direct_argvec->quick_push (thisarg);
1088 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1089 declare the static member function "_FUN" below. For each arg append to
1090 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1091 call args (for the template case). If a parameter pack is found, expand
1092 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1094 tree fn_args = NULL_TREE;
1096 int ix = 0;
1097 tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1098 tree tgt = NULL;
1100 while (src)
1102 tree new_node = copy_node (src);
1103 /* We set DECL_CONTEXT of NEW_NODE to the statfn below.
1104 Notice this is creating a recursive type! */
1106 /* Clear TREE_ADDRESSABLE on thunk arguments. */
1107 TREE_ADDRESSABLE (new_node) = 0;
1109 if (!fn_args)
1110 fn_args = tgt = new_node;
1111 else
1113 TREE_CHAIN (tgt) = new_node;
1114 tgt = new_node;
1117 mark_exp_read (tgt);
1119 if (generic_lambda_p)
1121 tree a = tgt;
1122 if (DECL_PACK_P (tgt))
1124 a = make_pack_expansion (a);
1125 PACK_EXPANSION_LOCAL_P (a) = true;
1127 CALL_EXPR_ARG (call, ix) = a;
1129 if (decltype_call)
1131 /* Avoid capturing variables in this context. */
1132 ++cp_unevaluated_operand;
1133 CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1134 --cp_unevaluated_operand;
1137 ++ix;
1139 else
1140 vec_safe_push (direct_argvec, tgt);
1142 src = TREE_CHAIN (src);
1146 if (generic_lambda_p)
1148 if (decltype_call)
1150 fn_result = finish_decltype_type
1151 (decltype_call, /*id_expression_or_member_access_p=*/false,
1152 tf_warning_or_error);
1155 else
1156 call = build_call_a (callop,
1157 direct_argvec->length (),
1158 direct_argvec->address ());
1160 CALL_FROM_THUNK_P (call) = 1;
1161 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1163 tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1164 stattype = (cp_build_type_attribute_variant
1165 (stattype, TYPE_ATTRIBUTES (optype)));
1166 if (flag_noexcept_type
1167 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1168 stattype = build_exception_variant (stattype, noexcept_true_spec);
1170 if (generic_lambda_p)
1171 --processing_template_decl;
1173 /* First build up the conversion op. */
1175 tree rettype = build_pointer_type (stattype);
1176 tree name = make_conv_op_name (rettype);
1177 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1178 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1179 /* DR 1722: The conversion function should be noexcept. */
1180 fntype = build_exception_variant (fntype, noexcept_true_spec);
1181 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1182 SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1183 tree fn = convfn;
1184 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1185 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1186 grokclassfn (type, fn, NO_SPECIAL);
1187 set_linkage_according_to_type (type, fn);
1188 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1189 DECL_IN_AGGR_P (fn) = 1;
1190 DECL_ARTIFICIAL (fn) = 1;
1191 DECL_NOT_REALLY_EXTERN (fn) = 1;
1192 DECL_DECLARED_INLINE_P (fn) = 1;
1193 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1194 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1195 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1196 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1198 if (nested_def)
1199 DECL_INTERFACE_KNOWN (fn) = 1;
1201 if (generic_lambda_p)
1202 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1204 add_method (type, fn, false);
1206 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1207 the conversion op is used. */
1208 if (varargs_function_p (callop))
1210 DECL_DELETED_FN (fn) = 1;
1211 return;
1214 /* Now build up the thunk to be returned. */
1216 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1217 SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1218 fn = statfn;
1219 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1220 grokclassfn (type, fn, NO_SPECIAL);
1221 set_linkage_according_to_type (type, fn);
1222 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1223 DECL_IN_AGGR_P (fn) = 1;
1224 DECL_ARTIFICIAL (fn) = 1;
1225 DECL_NOT_REALLY_EXTERN (fn) = 1;
1226 DECL_DECLARED_INLINE_P (fn) = 1;
1227 DECL_STATIC_FUNCTION_P (fn) = 1;
1228 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1229 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1230 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1231 DECL_ARGUMENTS (fn) = fn_args;
1232 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1234 /* Avoid duplicate -Wshadow warnings. */
1235 DECL_NAME (arg) = NULL_TREE;
1236 DECL_CONTEXT (arg) = fn;
1238 if (nested_def)
1239 DECL_INTERFACE_KNOWN (fn) = 1;
1241 if (generic_lambda_p)
1242 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1244 if (flag_sanitize & SANITIZE_NULL)
1245 /* Don't UBsan this function; we're deliberately calling op() with a null
1246 object argument. */
1247 add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1249 add_method (type, fn, false);
1251 if (nested)
1252 push_function_context ();
1253 else
1254 /* Still increment function_depth so that we don't GC in the
1255 middle of an expression. */
1256 ++function_depth;
1258 /* Generate the body of the thunk. */
1260 start_preparsed_function (statfn, NULL_TREE,
1261 SF_PRE_PARSED | SF_INCLASS_INLINE);
1262 tree body = begin_function_body ();
1263 tree compound_stmt = begin_compound_stmt (0);
1264 if (!generic_lambda_p)
1266 set_flags_from_callee (call);
1267 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1268 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1270 call = convert_from_reference (call);
1271 finish_return_stmt (call);
1273 finish_compound_stmt (compound_stmt);
1274 finish_function_body (body);
1276 fn = finish_function (/*inline_p=*/true);
1277 if (!generic_lambda_p)
1278 expand_or_defer_fn (fn);
1280 /* Generate the body of the conversion op. */
1282 start_preparsed_function (convfn, NULL_TREE,
1283 SF_PRE_PARSED | SF_INCLASS_INLINE);
1284 body = begin_function_body ();
1285 compound_stmt = begin_compound_stmt (0);
1287 /* decl_needed_p needs to see that it's used. */
1288 TREE_USED (statfn) = 1;
1289 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1291 finish_compound_stmt (compound_stmt);
1292 finish_function_body (body);
1294 fn = finish_function (/*inline_p=*/true);
1295 if (!generic_lambda_p)
1296 expand_or_defer_fn (fn);
1298 if (nested)
1299 pop_function_context ();
1300 else
1301 --function_depth;
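/* Rough picture of what this function emits: for a capture-less lambda

     auto l = [] (int i) { return i + 1; };
     int (*fp) (int) = l;

   the closure type gets a static member function "_FUN" whose body just
   calls the operator() with a null object argument, plus a const,
   noexcept conversion function returning &_FUN; that conversion is what
   makes the function-pointer initialization above work.  */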
1304 /* True if FN is the static function "_FUN" that gets returned from the lambda
1305 conversion operator. */
1307 bool
1308 lambda_static_thunk_p (tree fn)
1310 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1311 && DECL_ARTIFICIAL (fn)
1312 && DECL_STATIC_FUNCTION_P (fn)
1313 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1316 bool
1317 call_from_lambda_thunk_p (tree call)
1319 return (CALL_FROM_THUNK_P (call)
1320 && lambda_static_thunk_p (current_function_decl));
1323 /* Returns true iff VAL is a lambda-related declaration which should
1324 be ignored by unqualified lookup. */
1326 bool
1327 is_lambda_ignored_entity (tree val)
1329 /* Look past normal, non-VLA capture proxies. */
1330 if (is_normal_capture_proxy (val)
1331 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1332 return true;
1334 /* Always ignore lambda fields, their names are only for debugging. */
1335 if (TREE_CODE (val) == FIELD_DECL
1336 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1337 return true;
1339 /* None of the lookups that use qualify_lookup want the op() from the
1340 lambda; they want the one from the enclosing class. */
1341 if (tree fns = maybe_get_fns (val))
1342 if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
1343 return true;
1345 return false;
1348 /* Lambdas that appear in variable initializer or default argument scope
1349 get that in their mangling, so we need to record it. We might as well
1350 use the count for function and namespace scopes as well. */
1351 static GTY(()) tree lambda_scope;
1352 static GTY(()) int lambda_count;
1353 struct GTY(()) tree_int
1355 tree t;
1356 int i;
1358 static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1360 void
1361 start_lambda_scope (tree decl)
1363 tree_int ti;
1364 gcc_assert (decl);
1365 /* Once we're inside a function, we ignore variable scope and just push
1366 the function again so that popping works properly. */
1367 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1368 decl = current_function_decl;
1369 ti.t = lambda_scope;
1370 ti.i = lambda_count;
1371 vec_safe_push (lambda_scope_stack, ti);
1372 if (lambda_scope != decl)
1374 /* Don't reset the count if we're still in the same function. */
1375 lambda_scope = decl;
1376 lambda_count = 0;
1380 void
1381 record_lambda_scope (tree lambda)
1383 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1384 LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1385 if (lambda_scope)
1387 tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1388 gcc_checking_assert (closure);
1389 maybe_attach_decl (lambda_scope, TYPE_NAME (closure));
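/* For instance, in

     int a = [] { return 1; } () + [] { return 2; } ();

   both lambdas record the variable 'a' as LAMBDA_EXPR_EXTRA_SCOPE and
   get discriminators 0 and 1, so their closure types mangle differently
   even though they appear in the same initializer.  */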
1393 /* This lambda is an instantiation of a lambda in a template default argument
1394 that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either. But we do
1395 need to use and increment the global count to avoid collisions. */
1397 void
1398 record_null_lambda_scope (tree lambda)
1400 if (vec_safe_is_empty (lambda_scope_stack))
1401 record_lambda_scope (lambda);
1402 else
1404 tree_int *p = lambda_scope_stack->begin();
1405 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
1406 LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
1408 gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
1411 void
1412 finish_lambda_scope (void)
1414 tree_int *p = &lambda_scope_stack->last ();
1415 if (lambda_scope != p->t)
1417 lambda_scope = p->t;
1418 lambda_count = p->i;
1420 lambda_scope_stack->pop ();
1423 tree
1424 start_lambda_function (tree fco, tree lambda_expr)
1426 /* Let the front end know that we are going to be defining this
1427 function. */
1428 start_preparsed_function (fco,
1429 NULL_TREE,
1430 SF_PRE_PARSED | SF_INCLASS_INLINE);
1432 tree body = begin_function_body ();
1434 /* Push the proxies for any explicit captures. */
1435 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1436 cap = TREE_CHAIN (cap))
1437 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1439 return body;
1442 /* Subroutine of prune_lambda_captures: CAP is a node in
1443 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1444 might optimize away the capture, or NULL_TREE if there is no such
1445 variable. */
1447 static tree
1448 var_to_maybe_prune (tree cap)
1450 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1451 /* Don't prune explicit captures. */
1452 return NULL_TREE;
1454 tree mem = TREE_PURPOSE (cap);
1455 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1456 /* Packs and init-captures aren't captures of constant vars. */
1457 return NULL_TREE;
1459 tree init = TREE_VALUE (cap);
1460 if (is_normal_capture_proxy (init))
1461 init = DECL_CAPTURED_VARIABLE (init);
1462 if (decl_constant_var_p (init))
1463 return init;
1465 return NULL_TREE;
1468 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1469 for constant variables are actually used in the lambda body.
1471 There will always be a DECL_EXPR for the capture proxy; remember it when we
1472 see it, but replace it with any other use. */
1474 static tree
1475 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1477 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1479 tree var = NULL_TREE;
1480 if (TREE_CODE (*t) == DECL_EXPR)
1482 tree decl = DECL_EXPR_DECL (*t);
1483 if (is_constant_capture_proxy (decl))
1485 var = DECL_CAPTURED_VARIABLE (decl);
1486 *walk_subtrees = 0;
1489 else if (is_constant_capture_proxy (*t))
1490 var = DECL_CAPTURED_VARIABLE (*t);
1492 if (var)
1494 tree *&slot = const_vars.get_or_insert (var);
1495 if (!slot || VAR_P (*t))
1496 slot = t;
1499 return NULL_TREE;
1502 /* We're at the end of processing a lambda; go back and remove any captures of
1503 constant variables for which we've folded away all uses. */
1505 static void
1506 prune_lambda_captures (tree body)
1508 tree lam = current_lambda_expr ();
1509 if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1510 /* No uses were optimized away. */
1511 return;
1512 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1513 /* No default captures, and we don't prune explicit captures. */
1514 return;
1516 hash_map<tree,tree*> const_vars;
1518 cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1520 tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1521 for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1523 tree cap = *capp;
1524 if (tree var = var_to_maybe_prune (cap))
1526 tree **use = const_vars.get (var);
1527 if (use && TREE_CODE (**use) == DECL_EXPR)
1529 /* All uses of this capture were folded away, leaving only the
1530 proxy declaration. */
1532 /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST. */
1533 *capp = TREE_CHAIN (cap);
1535 /* And out of TYPE_FIELDS. */
1536 tree field = TREE_PURPOSE (cap);
1537 while (*fieldp != field)
1538 fieldp = &DECL_CHAIN (*fieldp);
1539 *fieldp = DECL_CHAIN (*fieldp);
1541 /* And remove the capture proxy declaration. */
1542 **use = void_node;
1543 continue;
1547 capp = &TREE_CHAIN (cap);
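/* Example of a capture that can be pruned here:

     const int x = 42;
     auto l = [=] { return x; };

   the use of 'x' folds to the constant 42 and sets
   LAMBDA_EXPR_CAPTURE_OPTIMIZED; the only remaining reference to the
   proxy is its own DECL_EXPR, so the '__x' field and its entry in the
   capture list are spliced out above.  */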
1551 void
1552 finish_lambda_function (tree body)
1554 finish_function_body (body);
1556 prune_lambda_captures (body);
1558 /* Finish the function and generate code for it if necessary. */
1559 tree fn = finish_function (/*inline_p=*/true);
1561 /* Only expand if the call op is not a template. */
1562 if (!DECL_TEMPLATE_INFO (fn))
1563 expand_or_defer_fn (fn);
1566 #include "gt-cp-lambda.h"