/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"
#include "cp-cilkplus.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */
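
/* Illustrative example (not from the GCC sources): for a lambda such as

     int x = 1, y = 2;
     auto f = [x, &y] { return x + y; };

   this function builds the closure object roughly as if by aggregate
   initialization of its capture fields, i.e. { x, y }, with the
   by-reference field bound directly to y; the field names involved are
   internal (see add_capture).  */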

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
		     name,
		     /*scope=*/ts_lambda,
		     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */
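
/* Illustrative example (not from the GCC sources): for

     auto f = [] (int (&a)[4]) { return a; };

   the expression in the return statement has array type, so the deduced
   return type computed here is the decayed, cv-unqualified type int*.  */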

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, call_op_identifier,
			  /*protect=*/0, /*want_type=*/false,
			  tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */
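
/* Illustrative examples (not from the GCC sources) of the field types
   chosen here, assuming an `int x;' in the enclosing scope:

     [x]  () {}          // field type int
     [&x] () {}          // field type int&
     [y = x + 1] () {}   // init-capture: deduced as if by `auto y = x + 1'

   When the captured expression is type-dependent, a DECLTYPE_TYPE
   placeholder is built instead and resolved at instantiation time.  */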

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this && by_reference_p)
	type = build_reference_type (type);
    }

  return type;
}

/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
	  && DECL_HAS_VALUE_EXPR_P (decl)
	  && !DECL_ANON_UNION_VAR_P (decl)
	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  if (TREE_CODE (val) == ADDR_EXPR)
    val = TREE_OPERAND (val, 0);
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */
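
/* Illustrative example (not from the GCC sources): for

     int i = 0;
     auto f = [i] { return i; };

   the use of `i' inside the body is parsed against a proxy VAR_DECL named
   `i' whose DECL_VALUE_EXPR forwards to the closure field `__i', accessed
   through the operator()'s `this' parameter.  */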

tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	  if (TREE_CODE (init) == INDIRECT_REF)
	    init = TREE_OPERAND (init, 0);
	  STRIP_NOPS (init);
	}
      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */
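
/* Illustrative example (not from the GCC sources): capturing a GNU
   variable-length array by reference,

     void f (int n)
     {
       int vla[n];
       auto g = [&vla] { return vla[0]; };
     }

   stores the address of the first element together with the maximum index
   in a struct of this shape, from which build_capture_proxy rebuilds the
   array type for the proxy.  */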

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */
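
/* Illustrative example (not from the GCC sources): for

     int a = 0, b = 1;
     auto f = [a, &b] { return a + b; };

   this function creates the closure fields `__a' (type int) and `__b'
   (type int&); the `__' prefix keeps the fields out of ordinary name
   lookup while still giving template instantiation a name to match.  */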

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  gcc_assert (POINTER_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}

/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */
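
/* Illustrative example (not from the GCC sources): in

     int x = 0;
     auto f = [&] { auto g = [&] { return x; }; return g (); };

   the use of `x' in the innermost lambda walks the lambda stack and adds a
   by-reference capture of `x' to both closures, outermost first, so that
   each inner capture can be initialized from the enclosing one.  */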

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  */
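
/* Illustrative example (not from the GCC sources): in

     struct S {
       int m;
       void f () { auto g = [=] { return m; }; }
     };

   the implicit use of `this' needed to access `m' is resolved here: the
   walk below reaches the enclosing member function `f', whose `this'
   parameter supplies the initializer for a default capture of `this', and
   the use is rewritten as an rvalue access to that capture.  */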

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}

/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
	object = build_fold_indirect_ref (cap);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture 'this' or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, true);
	      break;
	    }
      }
}

/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
	  && LAMBDA_FUNCTION_P (scope))
	{
	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
	  continue;
	}
      else if (LAMBDA_TYPE_P (scope))
	{
	  scope = CP_TYPE_CONTEXT (scope);
	  continue;
	}
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */
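
/* Illustrative example (not from the GCC sources): the call operator of

     auto f = [] (auto x) { return x; };

   is a member function template, so this predicate is true for it, while
   it is false for the operator() of a non-generic lambda.  */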

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
	  && DECL_TEMPLATE_INFO (callop)
	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}

/* Returns true iff we need to consider default capture for an enclosing
   generic lambda.  */

bool
need_generic_capture (void)
{
  if (!processing_template_decl)
    return false;

  tree outer_closure = NULL_TREE;
  for (tree t = current_class_type; t;
       t = decl_type_context (TYPE_MAIN_DECL (t)))
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (t);
      if (!lam || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
	/* No default capture.  */
	break;
      outer_closure = t;
    }

  if (!outer_closure)
    /* No lambda.  */
    return false;
  else if (dependent_type_p (outer_closure))
    /* The enclosing context isn't instantiated.  */
    return false;
  else
    return true;
}

/* A lambda-expression...is said to implicitly capture the entity...if the
   compound-statement...names the entity in a potentially-evaluated
   expression where the enclosing full-expression depends on a generic lambda
   parameter declared within the reaching scope of the lambda-expression.  */

static tree
dependent_capture_r (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *pset = (hash_set<tree> *)data;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  if (outer_automatic_var_p (*tp))
    {
      tree t = process_outer_var_ref (*tp, tf_warning_or_error, /*force*/true);
      if (t != *tp
	  && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (*tp)) != REFERENCE_TYPE)
	t = convert_from_reference (t);
      *tp = t;
    }

  if (pset->add (*tp))
    *walk_subtrees = 0;

  switch (TREE_CODE (*tp))
    {
      /* Don't walk into unevaluated context or another lambda.  */
    case SIZEOF_EXPR:
    case ALIGNOF_EXPR:
    case TYPEID_EXPR:
    case NOEXCEPT_EXPR:
    case LAMBDA_EXPR:
      *walk_subtrees = 0;
      break;

      /* Don't walk into statements whose subexpressions we already
	 handled.  */
    case TRY_BLOCK:
    case EH_SPEC_BLOCK:
    case HANDLER:
    case IF_STMT:
    case FOR_STMT:
    case RANGE_FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case STATEMENT_LIST:
    case RETURN_EXPR:
      *walk_subtrees = 0;
      break;

    case DECL_EXPR:
      {
	tree decl = DECL_EXPR_DECL (*tp);
	if (VAR_P (decl))
	  {
	    /* walk_tree_1 won't step in here; pass our pset along as the
	       callback data.  */
	    cp_walk_tree (&DECL_INITIAL (decl),
			  dependent_capture_r, pset, NULL);
	    *walk_subtrees = 0;
	  }
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

tree
do_dependent_capture (tree expr, bool force)
{
  if (!need_generic_capture ()
      || (!force && !instantiation_dependent_expression_p (expr)))
    return expr;

  hash_set<tree> pset;
  cp_walk_tree (&expr, dependent_capture_r, &pset, NULL);
  return expr;
}

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */
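
/* Illustrative example (not from the GCC sources): a capture-less lambda is
   convertible to a plain function pointer,

     int (*fp) (int) = [] (int i) { return i + 1; };

   which is implemented here by adding a conversion operator that returns
   the address of a static "_FUN" thunk forwarding to the operator().  For a
   capture-less generic lambda the conversion operator is itself a template:

     auto id = [] (auto x) { return x; };
     int (*ip) (int) = id;   // uses the _FUN thunk instantiated for int
   */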

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
			    null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    /* Avoid capturing variables in this context.  */
	    ++cp_unevaluated_operand;
	    tree a = forward_parm (tgt);
	    --cp_unevaluated_operand;

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
	  && DECL_ARTIFICIAL (fn)
	  && DECL_STATIC_FUNCTION_P (fn)
	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* Look past normal capture proxies.  */
  if (is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}

/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
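
/* Illustrative example (not from the GCC sources): in

     struct A {
       int f (int i = [] { return 1; } () + [] { return 2; } ());
     };

   the two closure types in the default argument need distinct mangled
   names; the enclosing scope recorded here together with the per-scope
   discriminator (0 and 1) provides that.  */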

static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;

void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}

void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      lambda_scope = p->t;
      lambda_count = p->i;
    }
  lambda_scope_stack->pop ();
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
			    NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"