/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "stringpool.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "toplev.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

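/* For illustration: given

     int i = 42;
     auto f = [i] { return i; };

   the capture list built during parsing pairs the '__i' field with the
   expression 'i', and the closure object for 'f' is created above as if by
   aggregate direct-initialization of that single data member.  */
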
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (type_dependent_expression_p (expr)
      && !is_this_parameter (tree_strip_nop_conversions (expr)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}

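/* For example, an init-capture such as [n = 1 + 2] deduces the field type as
   if by 'auto n = 1 + 2;' (int here), while a plain copy capture [d] of a
   'double d' simply uses the non-reference type double.  A type-dependent
   initializer instead gets a DECLTYPE_TYPE placeholder that is resolved at
   instantiation time.  */
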
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
        break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

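/* For illustration: for a capture field '__x' the proxy built above is an
   artificial VAR_DECL named 'x' whose DECL_VALUE_EXPR is, conceptually,
   '(*this).__x' inside the operator(), so uses of 'x' in the lambda body
   forward to the capture member rather than to a real local variable.  */
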
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

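/* For illustration (GNU variable-length array extension): capturing
   'int a[n]' by reference records roughly { &a[0], n - 1 } in a struct of
   the shape built above; build_capture_proxy later reassembles a reference
   to 'int [n]' from the 'ptr' and 'max' fields.  */
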
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not a C++14 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
        {
          type = build_reference_type (type);
          if (!real_lvalue_p (initializer))
            error ("cannot capture %qE by reference", initializer);
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}

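/* For illustration: for '[x, &y] { ... }' this function runs once per
   capture: 'x' becomes a FIELD_DECL named '__x' of the captured
   (non-reference) type and 'y' a FIELD_DECL named '__y' of reference type,
   each prepended to LAMBDA_EXPR_CAPTURE_LIST together with its
   initializer.  */
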
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (!this_capture_p
                          && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

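/* For illustration: in '[&] { return [&] { return x; } (); }' the first use
   of the outer local 'x' reaches this function with a two-deep LAMBDA_STACK;
   'x' is captured by reference in the outer lambda first, and the resulting
   proxy then serves as the initializer for the inner lambda's capture.  */
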
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so just return the
     nearest 'this'.  */
  if (cp_unevaluated_operand)
    {
      /* In an NSDMI the fake 'this' pointer that we're using for
         parsing is in scope_chain.  */
      if (LAMBDA_EXPR_EXTRA_SCOPE (lambda)
          && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (lambda)) == FIELD_DECL)
        return scope_chain->x_current_class_ptr;
      return lookup_name (this_identifier);
    }

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain.  */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (!this_capture)
    {
      if (add_capture_p)
        error ("%<this%> was not captured for this lambda function");
      result = error_mark_node;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}

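/* For illustration: in

     struct S {
       void f () { [=] { [=] { g (); } (); } (); }
       void g ();
     };

   resolving the implicit 'this' for the call to g() walks out through both
   enclosing lambdas, default-capturing 'this' in each, until it reaches f,
   whose first parameter supplies the initializer.  */
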
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap != error_mark_node)
        object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                       RO_NULL, tf_warning_or_error);
    }

  return object;
}

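/* For illustration: in

     struct S {
       int n;
       void f () { [this] { return n; } (); }
     };

   the member access 'n' inside the lambda initially refers to a dummy object
   of type S; the function above rewrites it into a dereference of the 'this'
   capture so the access goes through the closure.  */
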
/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (current_function_decl != NULL_TREE);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
                                         TREE_TYPE (DECL_ARGUMENTS (callop)),
                                         null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */
  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            if (DECL_PACK_P (tgt))
              {
                tree a = make_pack_expansion (tgt);
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
                CALL_EXPR_ARG (call, ix) = a;
              }
            else
              {
                tree a = convert_from_reference (tgt);
                CALL_EXPR_ARG (call, ix) = a;
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
              }
            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }
  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;
  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);
  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

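/* For illustration: for a capture-less lambda such as

     auto f = [] (int i) { return i + 1; };
     int (*pf) (int) = f;

   the conversion operator built above returns the address of the static
   thunk '_FUN', whose body forwards to the lambda's operator() with a null
   object pointer, which is harmless because there are no captures to read.  */
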
/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies.  */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}