1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
6 Copyright (C) 1998-2018 Free Software Foundation, Inc.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"
34 /* Constructor for a lambda expression. */
37 build_lambda_expr (void)
39 tree lambda
= make_node (LAMBDA_EXPR
);
40 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda
) = CPLD_NONE
;
41 LAMBDA_EXPR_CAPTURE_LIST (lambda
) = NULL_TREE
;
42 LAMBDA_EXPR_THIS_CAPTURE (lambda
) = NULL_TREE
;
43 LAMBDA_EXPR_PENDING_PROXIES (lambda
) = NULL
;
44 LAMBDA_EXPR_MUTABLE_P (lambda
) = false;
48 /* Create the closure object for a LAMBDA_EXPR. */
51 build_lambda_object (tree lambda_expr
)
53 /* Build aggregate constructor call.
54 - cp_parser_braced_list
55 - cp_parser_functional_cast */
56 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
57 tree node
, expr
, type
;
60 if (processing_template_decl
|| lambda_expr
== error_mark_node
)
63 /* Make sure any error messages refer to the lambda-introducer. */
64 saved_loc
= input_location
;
65 input_location
= LAMBDA_EXPR_LOCATION (lambda_expr
);
67 for (node
= LAMBDA_EXPR_CAPTURE_LIST (lambda_expr
);
69 node
= TREE_CHAIN (node
))
71 tree field
= TREE_PURPOSE (node
);
72 tree val
= TREE_VALUE (node
);
74 if (field
== error_mark_node
)
76 expr
= error_mark_node
;
80 if (TREE_CODE (val
) == TREE_LIST
)
81 val
= build_x_compound_expr_from_list (val
, ELK_INIT
,
87 /* Mere mortals can't copy arrays with aggregate initialization, so
88 do some magic to make it work here. */
89 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
)
90 val
= build_array_copy (val
);
91 else if (DECL_NORMAL_CAPTURE_P (field
)
92 && !DECL_VLA_CAPTURE_P (field
)
93 && !TYPE_REF_P (TREE_TYPE (field
)))
95 /* "the entities that are captured by copy are used to
96 direct-initialize each corresponding non-static data
97 member of the resulting closure object."
99 There's normally no way to express direct-initialization
100 from an element of a CONSTRUCTOR, so we build up a special
101 TARGET_EXPR to bypass the usual copy-initialization. */
102 val
= force_rvalue (val
, tf_warning_or_error
);
103 if (TREE_CODE (val
) == TARGET_EXPR
)
104 TARGET_EXPR_DIRECT_INIT_P (val
) = true;
107 CONSTRUCTOR_APPEND_ELT (elts
, DECL_NAME (field
), val
);
110 expr
= build_constructor (init_list_type_node
, elts
);
111 CONSTRUCTOR_IS_DIRECT_INIT (expr
) = 1;
113 /* N2927: "[The closure] class type is not an aggregate."
114 But we briefly treat it as an aggregate to make this simpler. */
115 type
= LAMBDA_EXPR_CLOSURE (lambda_expr
);
116 CLASSTYPE_NON_AGGREGATE (type
) = 0;
117 expr
= finish_compound_literal (type
, expr
, tf_warning_or_error
);
118 CLASSTYPE_NON_AGGREGATE (type
) = 1;
121 input_location
= saved_loc
;
125 /* Return an initialized RECORD_TYPE for LAMBDA.
126 LAMBDA must have its explicit captures already. */
129 begin_lambda_type (tree lambda
)
134 /* Unique name. This is just like an unnamed class, but we cannot use
135 make_anon_name because of certain checks against TYPE_UNNAMED_P. */
137 name
= make_lambda_name ();
139 /* Create the new RECORD_TYPE for this lambda. */
140 type
= xref_tag (/*tag_code=*/record_type
,
143 /*template_header_p=*/false);
144 if (type
== error_mark_node
)
145 return error_mark_node
;
148 /* Designate it as a struct so that we can use aggregate initialization. */
149 CLASSTYPE_DECLARED_CLASS (type
) = false;
151 /* Cross-reference the expression and the type. */
152 LAMBDA_EXPR_CLOSURE (lambda
) = type
;
153 CLASSTYPE_LAMBDA_EXPR (type
) = lambda
;
155 /* In C++17, assume the closure is literal; we'll clear the flag later if
157 if (cxx_dialect
>= cxx17
)
158 CLASSTYPE_LITERAL_P (type
) = true;
160 /* Clear base types. */
161 xref_basetypes (type
, /*bases=*/NULL_TREE
);
163 /* Start the class. */
164 type
= begin_class_definition (type
);
169 /* Returns the type to use for the return type of the operator() of a
173 lambda_return_type (tree expr
)
175 if (expr
== NULL_TREE
)
176 return void_type_node
;
177 if (type_unknown_p (expr
)
178 || BRACE_ENCLOSED_INITIALIZER_P (expr
))
180 cxx_incomplete_type_error (expr
, TREE_TYPE (expr
));
181 return error_mark_node
;
183 gcc_checking_assert (!type_dependent_expression_p (expr
));
184 return cv_unqualified (type_decays_to (unlowered_expr_type (expr
)));
187 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
191 lambda_function (tree lambda
)
194 if (TREE_CODE (lambda
) == LAMBDA_EXPR
)
195 type
= LAMBDA_EXPR_CLOSURE (lambda
);
198 gcc_assert (LAMBDA_TYPE_P (type
));
199 /* Don't let debug_tree cause instantiation. */
200 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type
)
201 && !COMPLETE_OR_OPEN_TYPE_P (type
))
203 lambda
= lookup_member (type
, call_op_identifier
,
204 /*protect=*/0, /*want_type=*/false,
205 tf_warning_or_error
);
207 lambda
= STRIP_TEMPLATE (get_first_fn (lambda
));
211 /* Returns the type to use for the FIELD_DECL corresponding to the
212 capture of EXPR. EXPLICIT_INIT_P indicates whether this is a
213 C++14 init capture, and BY_REFERENCE_P indicates whether we're
214 capturing by reference. */
217 lambda_capture_field_type (tree expr
, bool explicit_init_p
,
221 bool is_this
= is_this_parameter (tree_strip_nop_conversions (expr
));
223 if (!is_this
&& type_dependent_expression_p (expr
))
225 type
= cxx_make_type (DECLTYPE_TYPE
);
226 DECLTYPE_TYPE_EXPR (type
) = expr
;
227 DECLTYPE_FOR_LAMBDA_CAPTURE (type
) = true;
228 DECLTYPE_FOR_INIT_CAPTURE (type
) = explicit_init_p
;
229 DECLTYPE_FOR_REF_CAPTURE (type
) = by_reference_p
;
230 SET_TYPE_STRUCTURAL_EQUALITY (type
);
232 else if (!is_this
&& explicit_init_p
)
234 tree auto_node
= make_auto ();
238 /* Add the reference now, so deduction doesn't lose
239 outermost CV qualifiers of EXPR. */
240 type
= build_reference_type (type
);
241 type
= do_auto_deduction (type
, expr
, auto_node
);
245 type
= non_reference (unlowered_expr_type (expr
));
248 && (by_reference_p
|| TREE_CODE (type
) == FUNCTION_TYPE
))
249 type
= build_reference_type (type
);
255 /* Returns true iff DECL is a lambda capture proxy variable created by
256 build_capture_proxy. */
259 is_capture_proxy (tree decl
)
262 && DECL_HAS_VALUE_EXPR_P (decl
)
263 && !DECL_ANON_UNION_VAR_P (decl
)
264 && !DECL_DECOMPOSITION_P (decl
)
265 && !DECL_FNAME_P (decl
)
266 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl
)));
269 /* Returns true iff DECL is a capture proxy for a normal capture
270 (i.e. without explicit initializer). */
273 is_normal_capture_proxy (tree decl
)
275 if (!is_capture_proxy (decl
))
276 /* It's not a capture proxy. */
279 if (variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
283 /* It is a capture proxy, is it a normal capture? */
284 tree val
= DECL_VALUE_EXPR (decl
);
285 if (val
== error_mark_node
)
288 if (TREE_CODE (val
) == ADDR_EXPR
)
289 val
= TREE_OPERAND (val
, 0);
290 gcc_assert (TREE_CODE (val
) == COMPONENT_REF
);
291 val
= TREE_OPERAND (val
, 1);
292 return DECL_NORMAL_CAPTURE_P (val
);
295 /* Returns true iff DECL is a capture proxy for a normal capture
296 of a constant variable. */
299 is_constant_capture_proxy (tree decl
)
301 if (is_normal_capture_proxy (decl
))
302 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl
));
306 /* VAR is a capture proxy created by build_capture_proxy; add it to the
307 current function, which is the operator() for the appropriate lambda. */
310 insert_capture_proxy (tree var
)
312 if (is_normal_capture_proxy (var
))
314 tree cap
= DECL_CAPTURED_VARIABLE (var
);
317 gcc_assert (!is_normal_capture_proxy (cap
));
318 tree old
= retrieve_local_specialization (cap
);
320 gcc_assert (DECL_CONTEXT (old
) != DECL_CONTEXT (var
));
322 register_local_specialization (var
, cap
);
325 /* Put the capture proxy in the extra body block so that it won't clash
326 with a later local variable. */
327 pushdecl_outermost_localscope (var
);
329 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
330 var
= build_stmt (DECL_SOURCE_LOCATION (var
), DECL_EXPR
, var
);
331 tree stmt_list
= (*stmt_list_stack
)[1];
332 gcc_assert (stmt_list
);
333 append_to_statement_list_force (var
, &stmt_list
);
336 /* We've just finished processing a lambda; if the containing scope is also
337 a lambda, insert any capture proxies that were created while processing
338 the nested lambda. */
341 insert_pending_capture_proxies (void)
344 vec
<tree
, va_gc
> *proxies
;
347 if (!current_function_decl
|| !LAMBDA_FUNCTION_P (current_function_decl
))
350 lam
= CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl
));
351 proxies
= LAMBDA_EXPR_PENDING_PROXIES (lam
);
352 for (i
= 0; i
< vec_safe_length (proxies
); ++i
)
354 tree var
= (*proxies
)[i
];
355 insert_capture_proxy (var
);
357 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam
));
358 LAMBDA_EXPR_PENDING_PROXIES (lam
) = NULL
;
361 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
362 return the type we want the proxy to have: the type of the field itself,
363 with added const-qualification if the lambda isn't mutable and the
364 capture is by value. */
367 lambda_proxy_type (tree ref
)
370 if (ref
== error_mark_node
)
371 return error_mark_node
;
372 if (REFERENCE_REF_P (ref
))
373 ref
= TREE_OPERAND (ref
, 0);
374 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
375 type
= TREE_TYPE (ref
);
376 if (!type
|| WILDCARD_TYPE_P (non_reference (type
)))
378 type
= cxx_make_type (DECLTYPE_TYPE
);
379 DECLTYPE_TYPE_EXPR (type
) = ref
;
380 DECLTYPE_FOR_LAMBDA_PROXY (type
) = true;
381 SET_TYPE_STRUCTURAL_EQUALITY (type
);
383 if (DECL_PACK_P (TREE_OPERAND (ref
, 1)))
384 type
= make_pack_expansion (type
);
388 /* MEMBER is a capture field in a lambda closure class. Now that we're
389 inside the operator(), build a placeholder var for future lookups and
393 build_capture_proxy (tree member
, tree init
)
395 tree var
, object
, fn
, closure
, name
, lam
, type
;
397 if (PACK_EXPANSION_P (member
))
398 member
= PACK_EXPANSION_PATTERN (member
);
400 closure
= DECL_CONTEXT (member
);
401 fn
= lambda_function (closure
);
402 lam
= CLASSTYPE_LAMBDA_EXPR (closure
);
404 /* The proxy variable forwards to the capture field. */
405 object
= build_fold_indirect_ref (DECL_ARGUMENTS (fn
));
406 object
= finish_non_static_data_member (member
, object
, NULL_TREE
);
407 if (REFERENCE_REF_P (object
))
408 object
= TREE_OPERAND (object
, 0);
410 /* Remove the __ inserted by add_capture. */
411 name
= get_identifier (IDENTIFIER_POINTER (DECL_NAME (member
)) + 2);
413 type
= lambda_proxy_type (object
);
415 if (name
== this_identifier
&& !INDIRECT_TYPE_P (type
))
417 type
= build_pointer_type (type
);
418 type
= cp_build_qualified_type (type
, TYPE_QUAL_CONST
);
419 object
= build_fold_addr_expr_with_type (object
, type
);
422 if (DECL_VLA_CAPTURE_P (member
))
424 /* Rebuild the VLA type from the pointer and maxindex. */
425 tree field
= next_initializable_field (TYPE_FIELDS (type
));
426 tree ptr
= build_simple_component_ref (object
, field
);
427 field
= next_initializable_field (DECL_CHAIN (field
));
428 tree max
= build_simple_component_ref (object
, field
);
429 type
= build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr
)),
430 build_index_type (max
));
431 type
= build_reference_type (type
);
432 REFERENCE_VLA_OK (type
) = true;
433 object
= convert (type
, ptr
);
436 complete_type (type
);
438 var
= build_decl (input_location
, VAR_DECL
, name
, type
);
439 SET_DECL_VALUE_EXPR (var
, object
);
440 DECL_HAS_VALUE_EXPR_P (var
) = 1;
441 DECL_ARTIFICIAL (var
) = 1;
443 DECL_CONTEXT (var
) = fn
;
445 if (DECL_NORMAL_CAPTURE_P (member
))
447 if (DECL_VLA_CAPTURE_P (member
))
449 init
= CONSTRUCTOR_ELT (init
, 0)->value
;
450 init
= TREE_OPERAND (init
, 0); // Strip ADDR_EXPR.
451 init
= TREE_OPERAND (init
, 0); // Strip ARRAY_REF.
455 if (PACK_EXPANSION_P (init
))
456 init
= PACK_EXPANSION_PATTERN (init
);
459 if (INDIRECT_REF_P (init
))
460 init
= TREE_OPERAND (init
, 0);
463 gcc_assert (VAR_P (init
) || TREE_CODE (init
) == PARM_DECL
);
464 while (is_normal_capture_proxy (init
))
465 init
= DECL_CAPTURED_VARIABLE (init
);
466 retrofit_lang_decl (var
);
467 DECL_CAPTURED_VARIABLE (var
) = init
;
470 if (name
== this_identifier
)
472 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam
) == member
);
473 LAMBDA_EXPR_THIS_CAPTURE (lam
) = var
;
476 if (fn
== current_function_decl
)
477 insert_capture_proxy (var
);
479 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam
), var
);
484 static GTY(()) tree ptr_id
;
485 static GTY(()) tree max_id
;
487 /* Return a struct containing a pointer and a length for lambda capture of
488 an array of runtime length. */
491 vla_capture_type (tree array_type
)
493 tree type
= xref_tag (record_type
, make_anon_name (), ts_current
, false);
494 xref_basetypes (type
, NULL_TREE
);
495 type
= begin_class_definition (type
);
498 ptr_id
= get_identifier ("ptr");
499 max_id
= get_identifier ("max");
501 tree ptrtype
= build_pointer_type (TREE_TYPE (array_type
));
502 tree field
= build_decl (input_location
, FIELD_DECL
, ptr_id
, ptrtype
);
503 finish_member_declaration (field
);
504 field
= build_decl (input_location
, FIELD_DECL
, max_id
, sizetype
);
505 finish_member_declaration (field
);
506 return finish_struct (type
, NULL_TREE
);
509 /* From an ID and INITIALIZER, create a capture (by reference if
510 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
511 and return it. If ID is `this', BY_REFERENCE_P says whether
512 `*this' is captured by reference. */
515 add_capture (tree lambda
, tree id
, tree orig_init
, bool by_reference_p
,
516 bool explicit_init_p
)
519 tree type
, member
, name
;
521 bool variadic
= false;
522 tree initializer
= orig_init
;
524 if (PACK_EXPANSION_P (initializer
))
526 initializer
= PACK_EXPANSION_PATTERN (initializer
);
530 if (TREE_CODE (initializer
) == TREE_LIST
531 /* A pack expansion might end up with multiple elements. */
532 && !PACK_EXPANSION_P (TREE_VALUE (initializer
)))
533 initializer
= build_x_compound_expr_from_list (initializer
, ELK_INIT
,
534 tf_warning_or_error
);
535 type
= TREE_TYPE (initializer
);
536 if (type
== error_mark_node
)
537 return error_mark_node
;
539 if (array_of_runtime_bound_p (type
))
543 error ("array of runtime bound cannot be captured by copy, "
544 "only by reference");
546 /* For a VLA, we capture the address of the first element and the
547 maximum index, and then reconstruct the VLA for the proxy. */
548 tree elt
= cp_build_array_ref (input_location
, initializer
,
549 integer_zero_node
, tf_warning_or_error
);
550 initializer
= build_constructor_va (init_list_type_node
, 2,
551 NULL_TREE
, build_address (elt
),
552 NULL_TREE
, array_type_nelts (type
));
553 type
= vla_capture_type (type
);
555 else if (!dependent_type_p (type
)
556 && variably_modified_type_p (type
, NULL_TREE
))
558 sorry ("capture of variably-modified type %qT that is not an N3639 array "
559 "of runtime bound", type
);
560 if (TREE_CODE (type
) == ARRAY_TYPE
561 && variably_modified_type_p (TREE_TYPE (type
), NULL_TREE
))
562 inform (input_location
, "because the array element type %qT has "
563 "variable size", TREE_TYPE (type
));
564 return error_mark_node
;
568 type
= lambda_capture_field_type (initializer
, explicit_init_p
,
570 if (type
== error_mark_node
)
571 return error_mark_node
;
573 if (id
== this_identifier
&& !by_reference_p
)
575 gcc_assert (INDIRECT_TYPE_P (type
));
576 type
= TREE_TYPE (type
);
577 initializer
= cp_build_fold_indirect_ref (initializer
);
580 if (dependent_type_p (type
))
582 else if (id
!= this_identifier
&& by_reference_p
)
584 if (!lvalue_p (initializer
))
586 error ("cannot capture %qE by reference", initializer
);
587 return error_mark_node
;
592 /* Capture by copy requires a complete type. */
593 type
= complete_type (type
);
594 if (!COMPLETE_TYPE_P (type
))
596 error ("capture by copy of incomplete type %qT", type
);
597 cxx_incomplete_type_inform (type
);
598 return error_mark_node
;
603 /* Add __ to the beginning of the field name so that user code
604 won't find the field with name lookup. We can't just leave the name
605 unset because template instantiation uses the name to find
606 instantiated fields. */
607 buf
= (char *) alloca (IDENTIFIER_LENGTH (id
) + 3);
608 buf
[1] = buf
[0] = '_';
609 memcpy (buf
+ 2, IDENTIFIER_POINTER (id
),
610 IDENTIFIER_LENGTH (id
) + 1);
611 name
= get_identifier (buf
);
613 /* If TREE_TYPE isn't set, we're still in the introducer, so check
615 if (!LAMBDA_EXPR_CLOSURE (lambda
))
617 if (IDENTIFIER_MARKED (name
))
619 pedwarn (input_location
, 0,
620 "already captured %qD in lambda expression", id
);
623 IDENTIFIER_MARKED (name
) = true;
627 type
= make_pack_expansion (type
);
629 /* Make member variable. */
630 member
= build_decl (input_location
, FIELD_DECL
, name
, type
);
631 DECL_VLA_CAPTURE_P (member
) = vla
;
633 if (!explicit_init_p
)
634 /* Normal captures are invisible to name lookup but uses are replaced
635 with references to the capture field; we implement this by only
636 really making them invisible in unevaluated context; see
637 qualify_lookup. For now, let's make explicitly initialized captures
639 DECL_NORMAL_CAPTURE_P (member
) = true;
641 if (id
== this_identifier
)
642 LAMBDA_EXPR_THIS_CAPTURE (lambda
) = member
;
644 /* Add it to the appropriate closure class if we've started it. */
645 if (current_class_type
646 && current_class_type
== LAMBDA_EXPR_CLOSURE (lambda
))
648 if (COMPLETE_TYPE_P (current_class_type
))
649 internal_error ("trying to capture %qD in instantiation of "
650 "generic lambda", id
);
651 finish_member_declaration (member
);
654 tree listmem
= member
;
657 listmem
= make_pack_expansion (member
);
658 initializer
= orig_init
;
660 LAMBDA_EXPR_CAPTURE_LIST (lambda
)
661 = tree_cons (listmem
, initializer
, LAMBDA_EXPR_CAPTURE_LIST (lambda
));
663 if (LAMBDA_EXPR_CLOSURE (lambda
))
664 return build_capture_proxy (member
, initializer
);
665 /* For explicit captures we haven't started the function yet, so we wait
666 and build the proxy from cp_parser_lambda_body. */
667 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda
)) = true;
671 /* Register all the capture members on the list CAPTURES, which is the
672 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
675 register_capture_members (tree captures
)
677 if (captures
== NULL_TREE
)
680 register_capture_members (TREE_CHAIN (captures
));
682 tree field
= TREE_PURPOSE (captures
);
683 if (PACK_EXPANSION_P (field
))
684 field
= PACK_EXPANSION_PATTERN (field
);
686 /* We set this in add_capture to avoid duplicates. */
687 IDENTIFIER_MARKED (DECL_NAME (field
)) = false;
688 finish_member_declaration (field
);
691 /* Similar to add_capture, except this works on a stack of nested lambdas.
692 BY_REFERENCE_P in this case is derived from the default capture mode.
693 Returns the capture for the lambda at the bottom of the stack. */
696 add_default_capture (tree lambda_stack
, tree id
, tree initializer
)
698 bool this_capture_p
= (id
== this_identifier
);
699 tree var
= NULL_TREE
;
700 tree saved_class_type
= current_class_type
;
702 for (tree node
= lambda_stack
;
704 node
= TREE_CHAIN (node
))
706 tree lambda
= TREE_VALUE (node
);
708 current_class_type
= LAMBDA_EXPR_CLOSURE (lambda
);
709 if (DECL_PACK_P (initializer
))
710 initializer
= make_pack_expansion (initializer
);
711 var
= add_capture (lambda
,
716 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda
)
718 /*explicit_init_p=*/false);
719 initializer
= convert_from_reference (var
);
721 /* Warn about deprecated implicit capture of this via [=]. */
722 if (cxx_dialect
>= cxx2a
724 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda
) == CPLD_COPY
725 && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda
)))
727 if (warning_at (LAMBDA_EXPR_LOCATION (lambda
), OPT_Wdeprecated
,
728 "implicit capture of %qE via %<[=]%> is deprecated "
729 "in C++20", this_identifier
))
730 inform (LAMBDA_EXPR_LOCATION (lambda
), "add explicit %<this%> or "
731 "%<*this%> capture");
735 current_class_type
= saved_class_type
;
740 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
741 form of an INDIRECT_REF, possibly adding it through default
742 capturing, if ADD_CAPTURE_P is true. */
745 lambda_expr_this_capture (tree lambda
, bool add_capture_p
)
749 tree this_capture
= LAMBDA_EXPR_THIS_CAPTURE (lambda
);
751 /* In unevaluated context this isn't an odr-use, so don't capture. */
752 if (cp_unevaluated_operand
)
753 add_capture_p
= false;
755 /* Try to default capture 'this' if we can. */
758 tree lambda_stack
= NULL_TREE
;
759 tree init
= NULL_TREE
;
761 /* If we are in a lambda function, we can move out until we hit:
762 1. a non-lambda function or NSDMI,
763 2. a lambda function capturing 'this', or
764 3. a non-default capturing lambda function. */
765 for (tree tlambda
= lambda
; ;)
768 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda
) == CPLD_NONE
)
769 /* tlambda won't let us capture 'this'. */
773 lambda_stack
= tree_cons (NULL_TREE
,
777 tree closure
= LAMBDA_EXPR_CLOSURE (tlambda
);
778 tree containing_function
779 = decl_function_context (TYPE_NAME (closure
));
781 tree ex
= LAMBDA_EXPR_EXTRA_SCOPE (tlambda
);
782 if (ex
&& TREE_CODE (ex
) == FIELD_DECL
)
784 /* Lambda in an NSDMI. We don't have a function to look up
785 'this' in, but we can find (or rebuild) the fake one from
786 inject_this_parameter. */
787 if (!containing_function
&& !COMPLETE_TYPE_P (closure
))
788 /* If we're parsing a lambda in a non-local class,
789 we can find the fake 'this' in scope_chain. */
790 init
= scope_chain
->x_current_class_ptr
;
792 /* Otherwise it's either gone or buried in
793 function_context_stack, so make another. */
794 init
= build_this_parm (NULL_TREE
, DECL_CONTEXT (ex
),
797 (init
&& (TREE_TYPE (TREE_TYPE (init
))
798 == current_nonlambda_class_type ()));
802 if (containing_function
== NULL_TREE
)
803 /* We ran out of scopes; there's no 'this' to capture. */
806 if (!LAMBDA_FUNCTION_P (containing_function
))
808 /* We found a non-lambda function. */
809 if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function
))
810 /* First parameter is 'this'. */
811 init
= DECL_ARGUMENTS (containing_function
);
816 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function
));
818 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda
))
820 /* An outer lambda has already captured 'this'. */
821 init
= LAMBDA_EXPR_THIS_CAPTURE (tlambda
);
829 this_capture
= add_default_capture (lambda_stack
,
830 /*id=*/this_identifier
,
837 if (cp_unevaluated_operand
)
838 result
= this_capture
;
839 else if (!this_capture
)
843 error ("%<this%> was not captured for this lambda function");
844 result
= error_mark_node
;
851 /* To make sure that current_class_ref is for the lambda. */
852 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref
))
853 == LAMBDA_EXPR_CLOSURE (lambda
));
855 result
= this_capture
;
857 /* If 'this' is captured, each use of 'this' is transformed into an
858 access to the corresponding unnamed data member of the closure
859 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
860 ensures that the transformed expression is an rvalue. ] */
861 result
= rvalue (result
);
867 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
870 current_lambda_expr (void)
872 tree type
= current_class_type
;
873 while (type
&& !LAMBDA_TYPE_P (type
))
874 type
= decl_type_context (TYPE_NAME (type
));
876 return CLASSTYPE_LAMBDA_EXPR (type
);
881 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
882 object. NULL otherwise.. */
885 resolvable_dummy_lambda (tree object
)
887 if (!is_dummy_object (object
))
890 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (object
));
891 gcc_assert (!TYPE_PTR_P (type
));
893 if (type
!= current_class_type
894 && current_class_type
895 && LAMBDA_TYPE_P (current_class_type
)
896 && lambda_function (current_class_type
)
897 && DERIVED_FROM_P (type
, nonlambda_method_basetype()))
898 return CLASSTYPE_LAMBDA_EXPR (current_class_type
);
903 /* We don't want to capture 'this' until we know we need it, i.e. after
904 overload resolution has chosen a non-static member function. At that
905 point we call this function to turn a dummy object into a use of the
909 maybe_resolve_dummy (tree object
, bool add_capture_p
)
911 if (tree lam
= resolvable_dummy_lambda (object
))
912 if (tree cap
= lambda_expr_this_capture (lam
, add_capture_p
))
913 if (cap
!= error_mark_node
)
914 object
= build_fold_indirect_ref (cap
);
919 /* When parsing a generic lambda containing an argument-dependent
920 member function call we defer overload resolution to instantiation
921 time. But we have to know now whether to capture this or not.
922 Do that if FNS contains any non-static fns.
923 The std doesn't anticipate this case, but I expect this to be the
924 outcome of discussion. */
927 maybe_generic_this_capture (tree object
, tree fns
)
929 if (tree lam
= resolvable_dummy_lambda (object
))
930 if (!LAMBDA_EXPR_THIS_CAPTURE (lam
))
932 /* We've not yet captured, so look at the function set of
934 if (BASELINK_P (fns
))
935 fns
= BASELINK_FUNCTIONS (fns
);
936 bool id_expr
= TREE_CODE (fns
) == TEMPLATE_ID_EXPR
;
938 fns
= TREE_OPERAND (fns
, 0);
940 for (lkp_iterator
iter (fns
); iter
; ++iter
)
941 if ((!id_expr
|| TREE_CODE (*iter
) == TEMPLATE_DECL
)
942 && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter
))
944 /* Found a non-static member. Capture this. */
945 lambda_expr_this_capture (lam
, true);
951 /* Returns the innermost non-lambda function. */
954 current_nonlambda_function (void)
956 tree fn
= current_function_decl
;
957 while (fn
&& LAMBDA_FUNCTION_P (fn
))
958 fn
= decl_function_context (fn
);
962 /* Returns the method basetype of the innermost non-lambda function, including
963 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
966 nonlambda_method_basetype (void)
968 if (!current_class_ref
)
971 tree type
= current_class_type
;
972 if (!type
|| !LAMBDA_TYPE_P (type
))
977 tree lam
= CLASSTYPE_LAMBDA_EXPR (type
);
978 tree ex
= LAMBDA_EXPR_EXTRA_SCOPE (lam
);
979 if (ex
&& TREE_CODE (ex
) == FIELD_DECL
)
980 /* Lambda in an NSDMI. */
981 return DECL_CONTEXT (ex
);
983 tree fn
= TYPE_CONTEXT (type
);
984 if (!fn
|| TREE_CODE (fn
) != FUNCTION_DECL
985 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn
))
986 /* No enclosing non-lambda method. */
988 if (!LAMBDA_FUNCTION_P (fn
))
989 /* Found an enclosing non-lambda method. */
990 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn
));
991 type
= DECL_CONTEXT (fn
);
995 /* Like current_scope, but looking through lambdas. */
998 current_nonlambda_scope (void)
1000 tree scope
= current_scope ();
1003 if (TREE_CODE (scope
) == FUNCTION_DECL
1004 && LAMBDA_FUNCTION_P (scope
))
1006 scope
= CP_TYPE_CONTEXT (DECL_CONTEXT (scope
));
1009 else if (LAMBDA_TYPE_P (scope
))
1011 scope
= CP_TYPE_CONTEXT (scope
);
1019 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1020 indicated FN and NARGS, but do not initialize the return type or any of the
1024 prepare_op_call (tree fn
, int nargs
)
1028 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
1029 CALL_EXPR_FN (t
) = fn
;
1030 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
1035 /* Return true iff CALLOP is the op() for a generic lambda. */
1038 generic_lambda_fn_p (tree callop
)
1040 return (LAMBDA_FUNCTION_P (callop
)
1041 && DECL_TEMPLATE_INFO (callop
)
1042 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop
)));
1045 /* If the closure TYPE has a static op(), also add a conversion to function
1049 maybe_add_lambda_conv_op (tree type
)
1051 bool nested
= (cfun
!= NULL
);
1052 bool nested_def
= decl_function_context (TYPE_MAIN_DECL (type
));
1053 tree callop
= lambda_function (type
);
1054 tree lam
= CLASSTYPE_LAMBDA_EXPR (type
);
1056 if (LAMBDA_EXPR_CAPTURE_LIST (lam
) != NULL_TREE
1057 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam
) != CPLD_NONE
)
1060 if (processing_template_decl
)
1063 bool const generic_lambda_p
= generic_lambda_fn_p (callop
);
1065 if (!generic_lambda_p
&& DECL_INITIAL (callop
) == NULL_TREE
)
1067 /* If the op() wasn't instantiated due to errors, give up. */
1068 gcc_assert (errorcount
|| sorrycount
);
1072 /* Non-template conversion operators are defined directly with build_call_a
1073 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1074 deferred and the CALL is built in-place. In the case of a deduced return
1075 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1076 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1077 the return expression may differ in flags from those in the body CALL. In
1078 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1079 the body CALL, but not in DECLTYPE_CALL. */
1081 vec
<tree
, va_gc
> *direct_argvec
= 0;
1082 tree decltype_call
= 0, call
= 0;
1083 tree optype
= TREE_TYPE (callop
);
1084 tree fn_result
= TREE_TYPE (optype
);
1086 tree thisarg
= build_nop (TREE_TYPE (DECL_ARGUMENTS (callop
)),
1088 if (generic_lambda_p
)
1090 ++processing_template_decl
;
1092 /* Prepare the dependent member call for the static member function
1093 '_FUN' and, potentially, prepare another call to be used in a decltype
1094 return expression for a deduced return call op to allow for simple
1095 implementation of the conversion operator. */
1097 tree instance
= cp_build_fold_indirect_ref (thisarg
);
1098 tree objfn
= build_min (COMPONENT_REF
, NULL_TREE
,
1099 instance
, DECL_NAME (callop
), NULL_TREE
);
1100 int nargs
= list_length (DECL_ARGUMENTS (callop
)) - 1;
1102 call
= prepare_op_call (objfn
, nargs
);
1103 if (type_uses_auto (fn_result
))
1104 decltype_call
= prepare_op_call (objfn
, nargs
);
1108 direct_argvec
= make_tree_vector ();
1109 direct_argvec
->quick_push (thisarg
);
1112 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1113 declare the static member function "_FUN" below. For each arg append to
1114 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1115 call args (for the template case). If a parameter pack is found, expand
1116 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1118 tree fn_args
= NULL_TREE
;
1121 tree src
= DECL_CHAIN (DECL_ARGUMENTS (callop
));
1126 tree new_node
= copy_node (src
);
1129 fn_args
= tgt
= new_node
;
1132 TREE_CHAIN (tgt
) = new_node
;
1136 mark_exp_read (tgt
);
1138 if (generic_lambda_p
)
1140 /* Avoid capturing variables in this context. */
1141 ++cp_unevaluated_operand
;
1142 tree a
= forward_parm (tgt
);
1143 --cp_unevaluated_operand
;
1145 CALL_EXPR_ARG (call
, ix
) = a
;
1147 CALL_EXPR_ARG (decltype_call
, ix
) = unshare_expr (a
);
1149 if (PACK_EXPANSION_P (a
))
1150 /* Set this after unsharing so it's not in decltype_call. */
1151 PACK_EXPANSION_LOCAL_P (a
) = true;
1156 vec_safe_push (direct_argvec
, tgt
);
1158 src
= TREE_CHAIN (src
);
1162 if (generic_lambda_p
)
1166 fn_result
= finish_decltype_type
1167 (decltype_call
, /*id_expression_or_member_access_p=*/false,
1168 tf_warning_or_error
);
1172 call
= build_call_a (callop
,
1173 direct_argvec
->length (),
1174 direct_argvec
->address ());
1176 CALL_FROM_THUNK_P (call
) = 1;
1177 SET_EXPR_LOCATION (call
, UNKNOWN_LOCATION
);
1179 tree stattype
= build_function_type (fn_result
, FUNCTION_ARG_CHAIN (callop
));
1180 stattype
= (cp_build_type_attribute_variant
1181 (stattype
, TYPE_ATTRIBUTES (optype
)));
1182 if (flag_noexcept_type
1183 && TYPE_NOTHROW_P (TREE_TYPE (callop
)))
1184 stattype
= build_exception_variant (stattype
, noexcept_true_spec
);
1186 if (generic_lambda_p
)
1187 --processing_template_decl
;
1189 /* First build up the conversion op. */
1191 tree rettype
= build_pointer_type (stattype
);
1192 tree name
= make_conv_op_name (rettype
);
1193 tree thistype
= cp_build_qualified_type (type
, TYPE_QUAL_CONST
);
1194 tree fntype
= build_method_type_directly (thistype
, rettype
, void_list_node
);
1195 tree convfn
= build_lang_decl (FUNCTION_DECL
, name
, fntype
);
1196 SET_DECL_LANGUAGE (convfn
, lang_cplusplus
);
1198 DECL_SOURCE_LOCATION (fn
) = DECL_SOURCE_LOCATION (callop
);
1199 SET_DECL_ALIGN (fn
, MINIMUM_METHOD_BOUNDARY
);
1200 grokclassfn (type
, fn
, NO_SPECIAL
);
1201 set_linkage_according_to_type (type
, fn
);
1202 rest_of_decl_compilation (fn
, namespace_bindings_p (), at_eof
);
1203 DECL_IN_AGGR_P (fn
) = 1;
1204 DECL_ARTIFICIAL (fn
) = 1;
1205 DECL_NOT_REALLY_EXTERN (fn
) = 1;
1206 DECL_DECLARED_INLINE_P (fn
) = 1;
1207 DECL_ARGUMENTS (fn
) = build_this_parm (fn
, fntype
, TYPE_QUAL_CONST
);
1210 DECL_INTERFACE_KNOWN (fn
) = 1;
1212 if (generic_lambda_p
)
1213 fn
= add_inherited_template_parms (fn
, DECL_TI_TEMPLATE (callop
));
1215 add_method (type
, fn
, false);
1217 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1218 the conversion op is used. */
1219 if (varargs_function_p (callop
))
1221 DECL_DELETED_FN (fn
) = 1;
1225 /* Now build up the thunk to be returned. */
1227 tree statfn
= build_lang_decl (FUNCTION_DECL
, fun_identifier
, stattype
);
1228 SET_DECL_LANGUAGE (statfn
, lang_cplusplus
);
1230 DECL_SOURCE_LOCATION (fn
) = DECL_SOURCE_LOCATION (callop
);
1231 grokclassfn (type
, fn
, NO_SPECIAL
);
1232 set_linkage_according_to_type (type
, fn
);
1233 rest_of_decl_compilation (fn
, namespace_bindings_p (), at_eof
);
1234 DECL_IN_AGGR_P (fn
) = 1;
1235 DECL_ARTIFICIAL (fn
) = 1;
1236 DECL_NOT_REALLY_EXTERN (fn
) = 1;
1237 DECL_DECLARED_INLINE_P (fn
) = 1;
1238 DECL_STATIC_FUNCTION_P (fn
) = 1;
1239 DECL_ARGUMENTS (fn
) = fn_args
;
1240 for (tree arg
= fn_args
; arg
; arg
= DECL_CHAIN (arg
))
1242 /* Avoid duplicate -Wshadow warnings. */
1243 DECL_NAME (arg
) = NULL_TREE
;
1244 DECL_CONTEXT (arg
) = fn
;
1247 DECL_INTERFACE_KNOWN (fn
) = 1;
1249 if (generic_lambda_p
)
1250 fn
= add_inherited_template_parms (fn
, DECL_TI_TEMPLATE (callop
));
1252 if (flag_sanitize
& SANITIZE_NULL
)
1253 /* Don't UBsan this function; we're deliberately calling op() with a null
1255 add_no_sanitize_value (fn
, SANITIZE_UNDEFINED
);
1257 add_method (type
, fn
, false);
1260 push_function_context ();
1262 /* Still increment function_depth so that we don't GC in the
1263 middle of an expression. */
1266 /* Generate the body of the thunk. */
1268 start_preparsed_function (statfn
, NULL_TREE
,
1269 SF_PRE_PARSED
| SF_INCLASS_INLINE
);
1270 if (DECL_ONE_ONLY (statfn
))
1272 /* Put the thunk in the same comdat group as the call op. */
1273 cgraph_node::get_create (statfn
)->add_to_same_comdat_group
1274 (cgraph_node::get_create (callop
));
1276 tree body
= begin_function_body ();
1277 tree compound_stmt
= begin_compound_stmt (0);
1278 if (!generic_lambda_p
)
1280 set_flags_from_callee (call
);
1281 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call
)))
1282 call
= build_cplus_new (TREE_TYPE (call
), call
, tf_warning_or_error
);
1284 call
= convert_from_reference (call
);
1285 finish_return_stmt (call
);
1287 finish_compound_stmt (compound_stmt
);
1288 finish_function_body (body
);
1290 fn
= finish_function (/*inline_p=*/true);
1291 if (!generic_lambda_p
)
1292 expand_or_defer_fn (fn
);
1294 /* Generate the body of the conversion op. */
1296 start_preparsed_function (convfn
, NULL_TREE
,
1297 SF_PRE_PARSED
| SF_INCLASS_INLINE
);
1298 body
= begin_function_body ();
1299 compound_stmt
= begin_compound_stmt (0);
1301 /* decl_needed_p needs to see that it's used. */
1302 TREE_USED (statfn
) = 1;
1303 finish_return_stmt (decay_conversion (statfn
, tf_warning_or_error
));
1305 finish_compound_stmt (compound_stmt
);
1306 finish_function_body (body
);
1308 fn
= finish_function (/*inline_p=*/true);
1309 if (!generic_lambda_p
)
1310 expand_or_defer_fn (fn
);
1313 pop_function_context ();
1318 /* True if FN is the static function "_FUN" that gets returned from the lambda
1319 conversion operator. */
1322 lambda_static_thunk_p (tree fn
)
1324 return (fn
&& TREE_CODE (fn
) == FUNCTION_DECL
1325 && DECL_ARTIFICIAL (fn
)
1326 && DECL_STATIC_FUNCTION_P (fn
)
1327 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn
)));
1330 /* Returns true iff VAL is a lambda-related declaration which should
1331 be ignored by unqualified lookup. */
1334 is_lambda_ignored_entity (tree val
)
1336 /* Look past normal capture proxies. */
1337 if (is_normal_capture_proxy (val
))
1340 /* Always ignore lambda fields, their names are only for debugging. */
1341 if (TREE_CODE (val
) == FIELD_DECL
1342 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val
)))
1345 /* None of the lookups that use qualify_lookup want the op() from the
1346 lambda; they want the one from the enclosing class. */
1347 if (TREE_CODE (val
) == FUNCTION_DECL
&& LAMBDA_FUNCTION_P (val
))
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */

/* The declaration (function, variable, ...) whose scope the next lambda
   will be mangled in; NULL_TREE at namespace scope.  GTY-marked so the
   GC sees this root.  */
static GTY(()) tree lambda_scope;
/* Per-scope discriminator counter, reset when LAMBDA_SCOPE changes to a
   new function.  */
static GTY(()) int lambda_count;
/* A saved (scope, count) pair for the scope stack below.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
/* Stack of saved (lambda_scope, lambda_count) pairs, pushed by
   start_lambda_scope and popped by finish_lambda_scope.  */
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
/* Enter a new lambda-extra-scope DECL: save the current scope and
   discriminator count on LAMBDA_SCOPE_STACK and make DECL current.
   Paired with finish_lambda_scope.  */

void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}
1386 record_lambda_scope (tree lambda
)
1388 LAMBDA_EXPR_EXTRA_SCOPE (lambda
) = lambda_scope
;
1389 LAMBDA_EXPR_DISCRIMINATOR (lambda
) = lambda_count
++;
/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      /* Use the outermost stack entry: its saved scope is the one in
	 effect before any lambda scope was pushed (presumably NULL_TREE
	 here — the assert below checks that), and its count is the
	 global discriminator counter we must still bump.  */
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}
/* Leave the innermost lambda extra scope: restore the saved scope and
   discriminator count from LAMBDA_SCOPE_STACK and pop the entry.
   Paired with start_lambda_scope.  */

void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      /* Only restore the count when the scope actually changes; within
	 the same function the counter keeps running.  */
      lambda_scope = p->t;
      lambda_count = p->i;
    }

  lambda_scope_stack->pop ();
}
1423 start_lambda_function (tree fco
, tree lambda_expr
)
1425 /* Let the front end know that we are going to be defining this
1427 start_preparsed_function (fco
,
1429 SF_PRE_PARSED
| SF_INCLASS_INLINE
);
1431 tree body
= begin_function_body ();
1433 /* Push the proxies for any explicit captures. */
1434 for (tree cap
= LAMBDA_EXPR_CAPTURE_LIST (lambda_expr
); cap
;
1435 cap
= TREE_CHAIN (cap
))
1436 build_capture_proxy (TREE_PURPOSE (cap
), TREE_VALUE (cap
));
1441 /* Subroutine of prune_lambda_captures: CAP is a node in
1442 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1443 might optimize away the capture, or NULL_TREE if there is no such
1447 var_to_maybe_prune (tree cap
)
1449 if (LAMBDA_CAPTURE_EXPLICIT_P (cap
))
1450 /* Don't prune explicit captures. */
1453 tree mem
= TREE_PURPOSE (cap
);
1454 if (!DECL_P (mem
) || !DECL_NORMAL_CAPTURE_P (mem
))
1455 /* Packs and init-captures aren't captures of constant vars. */
1458 tree init
= TREE_VALUE (cap
);
1459 if (is_normal_capture_proxy (init
))
1460 init
= DECL_CAPTURED_VARIABLE (init
);
1461 if (decl_constant_var_p (init
))
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  DATA is the
   hash_map<tree,tree*> from captured variable to the remembered use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	{
	  var = DECL_CAPTURED_VARIABLE (decl);
	  /* No need to walk into the proxy's declaration.  */
	  *walk_subtrees = 0;
	}
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* Record this use; a real use (the proxy VAR_DECL itself) overrides
	 a previously recorded DECL_EXPR, so after the walk the slot holds
	 a DECL_EXPR only if the declaration was the sole occurrence.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  BODY is the
   lambda body just finished.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Map from captured constant variable to its remembered use in BODY
     (a DECL_EXPR if only the proxy declaration remains).  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  The capture list and TYPE_FIELDS
		 are in the same order, so FIELDP never has to back up.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration by overwriting
		 its DECL_EXPR in the body.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1549 finish_lambda_function (tree body
)
1551 finish_function_body (body
);
1553 prune_lambda_captures (body
);
1555 /* Finish the function and generate code for it if necessary. */
1556 tree fn
= finish_function (/*inline_p=*/true);
1558 /* Only expand if the call op is not a template. */
1559 if (!DECL_TEMPLATE_INFO (fn
))
1560 expand_or_defer_fn (fn
);
1563 #include "gt-cp-lambda.h"