/* gcc/cp/lambda.c  */
/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"
#include "cp-cilkplus.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

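/* For illustration (an added usage sketch, not part of the compiler logic):
   given user code such as

     int x = 1, y = 2;
     auto f = [x, &y] { return x + y; };

   the closure object built here amounts to direct-initializing each
   capture field (the by-copy capture of `x' and the by-reference capture
   of `y') from the captured entities, as the TARGET_EXPR handling above
   arranges.  */
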
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
		     name,
		     /*scope=*/ts_lambda,
		     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

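/* For illustration: a lambda such as

     auto f = [](int i) { return i * 2; };

   gets a unique closure RECORD_TYPE here, conceptually similar to

     struct unnamed_closure { int operator() (int i) const { return i * 2; } };

   though the real type is unnamed and, per N2927, not an aggregate.  */
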
/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

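/* For illustration: for `[]{ return 42; }' the type computed here is `int'
   (the decayed, cv-unqualified type of the return operand); a lambda whose
   body has no return value gets void_type_node.  */
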
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, cp_operator_id (CALL_EXPR),
			  /*protect=*/0, /*want_type=*/false,
			  tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this && by_reference_p)
	type = build_reference_type (type);
    }

  return type;
}

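/* For illustration (non-dependent, non-`this' captures):

     int n = 0;
     auto a = [n] { };          // capture field type: int
     auto b = [&n] { };         // capture field type: int&
     auto c = [m = n + 1] { };  // init capture: deduced as if by auto

   Dependent captures instead get the DECLTYPE_TYPE placeholder built
   above and are resolved at instantiation time.  */
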
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
	  && DECL_HAS_VALUE_EXPR_P (decl)
	  && !DECL_ANON_UNION_VAR_P (decl)
	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

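/* For illustration: for `[x] { return x; }' the use of `x' in the body
   resolves to the proxy VAR_DECL built here, whose DECL_VALUE_EXPR is a
   COMPONENT_REF of the capture field `__x' off the operator()'s object
   argument; the leading "__" added by add_capture is stripped so the proxy
   keeps the source name.  */
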
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

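/* For illustration (variable-length arrays in C++ are a GNU extension;
   the N3639 proposal referred to below was not adopted):

     void f (int n) { int vla[n]; [&vla] { vla[0] = 1; } (); }

   the capture field for `vla' uses this {ptr, max} record, and
   build_capture_proxy rebuilds a reference to the array type from those
   two fields.  */
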
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  gcc_assert (POINTER_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_indirect_ref (initializer, RO_NULL,
					       tf_warning_or_error);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD after closure is complete", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}

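/* For illustration: `[x, x] { }' is diagnosed via IDENTIFIER_MARKED above
   ("already captured ... in lambda expression"), and a capture of `x'
   produces a field named `__x', which is why name lookup in the lambda
   body finds the capture proxy rather than the field itself.  */
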
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

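/* For illustration: in

     int i = 0;
     auto f = [=] { auto g = [=] { return i; }; return g (); };

   the use of `i' in the inner lambda walks the lambda_stack here and adds
   an implicit copy capture to each enclosing closure in turn, rewriting
   the initializer through convert_from_reference at every level.  */
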
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}

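/* For illustration: inside

     struct S { int m; void f () { [this] { [=] { return m; } (); } (); } };

   the inner lambda's use of `m' needs `this'; the walk above stops at the
   enclosing lambda that has already captured `this' (case 2 in the comment)
   and uses that capture as the initializer for the inner lambda's implicit
   capture of `this'.  */
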
/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, true);
	      break;
	    }
      }
}

/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
	  && LAMBDA_FUNCTION_P (scope))
	{
	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
	  continue;
	}
      else if (LAMBDA_TYPE_P (scope))
	{
	  scope = CP_TYPE_CONTEXT (scope);
	  continue;
	}
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
	  && DECL_TEMPLATE_INFO (callop)
	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}

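/* For illustration: `[] (auto x) { return x; }' is a generic lambda; its
   op() is a primary member template, which is what this predicate detects
   and which is why maybe_add_lambda_conv_op below builds the conversion
   operator inside a template context.  */
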
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
			    null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_indirect_ref (thisarg, RO_NULL,
					     tf_warning_or_error);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    tree a = forward_parm (tgt);

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    {
      /* Don't UBsan this function; we're deliberately calling op() with a null
	 object argument.  */
      add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
    }

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

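/* For illustration: the machinery above is what makes

     int (*fp) (int) = [] (int i) { return i + 1; };

   work for capture-less lambdas: the conversion operator returns the
   static thunk "_FUN", whose body forwards to op() on a null closure
   object (hence the UBsan suppression above).  */
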
/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
	  && DECL_ARTIFICIAL (fn)
	  && DECL_STATIC_FUNCTION_P (fn)
	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies.  */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}

/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;

void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}

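/* For illustration: two lambdas appearing in the same variable initializer,
   e.g.

     auto p = std::make_pair ([] { return 1; }, [] { return 2; });

   share the same lambda_scope but receive discriminators 0 and 1, which
   keeps their mangled names distinct.  */
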
void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      lambda_scope = p->t;
      lambda_count = p->i;
    }
  lambda_scope_stack->pop ();
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
			    NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap));

  return body;
}

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline*/2);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"