/* gcc/cp/lambda.c */

/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "stringpool.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "toplev.h"
/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}
/* Create the closure object for a LAMBDA_EXPR.  */
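/* For instance, for a lambda such as

     int a;  int b;
     auto f = [a, &b] { return a + b; };

   the closure object is built, roughly, as if by the aggregate
   initialization f = { a, b }, with each element direct-initializing the
   corresponding capture field.  */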
tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.
   The caller should add REFERENCE_TYPE for capture by reference.  */
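/* For example, given

     int i;  int &r = i;
     [i, r, x = 1.5] { ... };

   the fields for 'i' and 'r' both get type int (the reference is dropped;
   the caller re-adds REFERENCE_TYPE when the capture itself is by
   reference), and the field for the init-capture 'x' gets its type
   deduced as for 'auto x = 1.5', i.e. double.  */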
tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (type_dependent_expression_p (expr)
      && !is_this_parameter (tree_strip_nop_conversions (expr)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
        break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */
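/* E.g. for the capture of 'i' in [i] { return i; }, the closure holds a
   field named __i; the proxy is an artificial VAR_DECL named 'i' whose
   DECL_VALUE_EXPR is (roughly) a COMPONENT_REF of the __i field off the
   operator()'s object parameter, so uses of 'i' in the body resolve to
   the field.  */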
tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */
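/* That is, capturing a VLA by reference stores something like

     struct { T *ptr; size_t max; };

   where 'ptr' points at the first element and 'max' is the maximum index;
   build_capture_proxy reconstructs a reference to the array from them.  */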
static tree
vla_capture_type (tree array_type)
{
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  */
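/* The forms handled here include, e.g.,

     [i]        capture by copy
     [&i]       capture by reference
     [x = f()]  explicitly initialized (init-)capture
     [vla]      rejected: an array of runtime bound can only be
                captured by reference
     [args...]  a pack expansion, one capture per pack element  */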
tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
        {
          type = build_reference_type (type);
          if (!real_lvalue_p (initializer))
            error ("cannot capture %qE by reference", initializer);
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */
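/* For example, in

     int i;
     [&] { [=] { return i; } (); } ();

   the use of 'i' inside the inner lambda captures it in the outer lambda
   by reference (its default) and in the inner lambda by copy, with the
   inner capture initialized from the outer one.  */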
tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (!this_capture_p
                          && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  */
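/* For example, in

     struct A {
       int x;
       void f () {
         [=] { [=] { return x; } (); } ();
       }
     };

   the use of 'x' requires 'this'; it is default-captured first by the
   outer lambda and then by the inner one, and each use of 'this' becomes
   an access to the corresponding closure field.  */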
tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so just return the
     nearest 'this'.  */
  if (cp_unevaluated_operand)
    {
      /* In an NSDMI the fake 'this' pointer that we're using for
         parsing is in scope_chain.  */
      if (LAMBDA_EXPR_EXTRA_SCOPE (lambda)
          && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (lambda)) == FIELD_DECL)
        return scope_chain->x_current_class_ptr;
      return lookup_name (this_identifier);
    }

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain.  */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (!this_capture)
    {
      if (add_capture_p)
        error ("%<this%> was not captured for this lambda function");
      result = error_mark_node;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */
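/* E.g. in

     struct A {
       int f ();
       static int f (int);
       void g () { [=] { return f (); }; }
     };

   the call to f() is parsed against a dummy object for A; only once
   overload resolution picks the non-static A::f() do we need 'this', and
   the dummy is replaced by a dereference of the 'this' capture.  */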
tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap != error_mark_node)
        object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                       RO_NULL, tf_warning_or_error);
    }

  return object;
}
/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */
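/* A lambda with no captures is convertible to a pointer to its function
   type, e.g.

     int (*fp) (int) = [] (int i) { return i + 1; };

   The conversion operator built here returns the address of a static
   member thunk "_FUN" that simply forwards to the call operator.  */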
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (current_function_decl != NULL_TREE);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */
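  /* For illustration: a deduced return call op comes from a generic lambda
     such as

       [] (auto x) { return x; }

     whose conversion op must compute its return type from a decltype of
     the forwarded call; DECLTYPE_CALL is that placeholder expression.  */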
  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
                                         TREE_TYPE (DECL_ARGUMENTS (callop)),
                                         null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            if (DECL_PACK_P (tgt))
              {
                tree a = make_pack_expansion (tgt);
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
                CALL_EXPR_ARG (call, ix) = a;
              }
            else
              {
                tree a = convert_from_reference (tgt);
                CALL_EXPR_ARG (call, ix) = a;
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
              }
            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies.  */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}