/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"
#include "cp-cilkplus.h"
/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

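/* An illustrative sketch (editor's example, not part of the original
   source): the closure object built above corresponds to the temporary
   created for a lambda expression such as

     int arr[2] = {1, 2};
     int n = 3;
     auto f = [arr, n] { return arr[0] + n; };

   Here 'arr' is captured by copy into an array member (handled via
   build_array_copy above) and 'n' is direct-initialized into its member
   via the TARGET_EXPR trick.  The variable names are invented for the
   example.  */
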
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

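/* For illustration only (editor's example, not in the original source):
   the decay and cv-stripping above mean that a body such as

     const int i = 42;
     auto f = [i] { return i; };   // deduced return type is int, not const int

   yields a plain 'int' return type for operator().  */
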
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, cp_operator_id (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this && by_reference_p)
        type = build_reference_type (type);
    }

  return type;
}

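/* For illustration (editor's example, not part of the original source),
   assuming a non-dependent context:

     int i = 0;
     auto a = [x = i + 1] { return x; };  // init capture: field type deduced as int
     auto b = [&i] { return i; };         // by-reference capture: field type int&
     auto c = [i] { return i; };          // by-copy capture: field type int
*/
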
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  if (TREE_CODE (val) == ADDR_EXPR)
    val = TREE_OPERAND (val, 0);
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    register_local_specialization (var, DECL_CAPTURED_VARIABLE (var));

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
          if (TREE_CODE (init) == INDIRECT_REF)
            init = TREE_OPERAND (init, 0);
          STRIP_NOPS (init);
        }
      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

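/* A sketch of what the proxy provides (editor's example, not part of the
   original source): for

     int i = 0;
     auto f = [i] { return i; };

   the closure has a field named "__i" (see add_capture), and inside
   operator() the name 'i' resolves to an artificial VAR_DECL whose
   DECL_VALUE_EXPR is the COMPONENT_REF '(*this).__i'.  */
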
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

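/* For illustration (editor's example, not part of the original source):
   the {ptr, max} record built here supports capturing an N3639/GNU array
   of runtime bound by reference, e.g.

     void g (int n)
     {
       int vla[n];
       auto f = [&vla] { return vla[0]; };  // captured as pointer + max index
     }
*/
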
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          gcc_assert (POINTER_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_indirect_ref (initializer, RO_NULL,
                                               tf_warning_or_error);
        }

      if (dependent_type_p (type))
        ;
      else if (id != this_identifier && by_reference_p)
        {
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}

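/* Illustrative example (editor's note, not part of the original source) of
   the duplicate-capture check above:

     int i = 0;
     auto f = [i, &i] { return i; };  // pedwarn: already captured 'i'

   Both spellings map to the same "__i" field name, so the second capture
   trips IDENTIFIER_MARKED while we are still in the introducer.  */
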
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

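/* For illustration (editor's example, not part of the original source):
   with nested lambdas such as

     int i = 0;
     auto outer = [&] {
       auto inner = [&] { return i; };  // use of 'i' here
       return inner ();
     };

   the use of 'i' in the inner body walks the lambda_stack and adds the
   capture to each enclosing closure, outermost first, so the inner lambda
   ends up capturing the outer lambda's capture member.  */
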
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain.  */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}

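/* Illustrative example (editor's note, not part of the original source) of
   the outward walk above:

     struct S {
       int m;
       void f () {
         auto outer = [=] {                 // default capture allows 'this'
           auto inner = [=] { return m; };  // 'm' needs 'this'
           return inner ();
         };
       }
     };

   The use of 'm' in the inner lambda walks out through 'outer' until it
   reaches the non-lambda member function 'f', whose first parameter
   supplies the 'this' to capture.  */
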
/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
        object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                       RO_NULL, tf_warning_or_error);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);

        for (lkp_iterator iter (fns); iter; ++iter)
          if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
              && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
            {
              /* Found a non-static member.  Capture this.  */
              lambda_expr_this_capture (lam, true);
              break;
            }
      }
}

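/* For illustration (editor's example, not part of the original source):
   in a generic lambda such as

     struct S {
       void g (int);
       void f () {
         auto l = [=] (auto x) { g (x); };  // overload resolution deferred
       }
     };

   the call to 'g' depends on 'x', so it cannot be resolved until
   instantiation; but because the visible overload set contains a
   non-static member function, 'this' must be captured now.  */
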
/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
          && LAMBDA_FUNCTION_P (scope))
        {
          scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
          continue;
        }
      else if (LAMBDA_TYPE_P (scope))
        {
          scope = CP_TYPE_CONTEXT (scope);
          continue;
        }
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
          && DECL_TEMPLATE_INFO (callop)
          && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}

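/* For illustration (editor's note, not part of the original source): a
   generic lambda is one with an 'auto' parameter, e.g.

     auto id = [] (auto x) { return x; };

   Its operator() is a member function template, so its DECL_TI_TEMPLATE
   is a primary template.  */
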
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
                            null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = cp_build_indirect_ref (thisarg, RO_NULL,
                                             tf_warning_or_error);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            tree a = forward_parm (tgt);

            CALL_EXPR_ARG (call, ix) = a;
            if (decltype_call)
              CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

            if (PACK_EXPANSION_P (a))
              /* Set this after unsharing so it's not in decltype_call.  */
              PACK_EXPANSION_LOCAL_P (a) = true;

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    {
      /* Don't UBsan this function; we're deliberately calling op() with a null
         object argument.  */
      add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
    }

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

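/* Illustrative example (editor's note, not part of the original source) of
   what the two functions built above provide for a capture-less lambda:

     auto add = [] (int a, int b) { return a + b; };
     int (*fp) (int, int) = add;   // uses the conversion operator
     int r = fp (1, 2);            // calls the static thunk "_FUN"

   The conversion operator returns the address of _FUN, and _FUN forwards
   to operator() with a null object argument (hence the no-sanitize
   annotation above).  */
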
/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
          && DECL_ARTIFICIAL (fn)
          && DECL_STATIC_FUNCTION_P (fn)
          && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* Look past normal capture proxies.  */
  if (is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}

/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;

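/* For illustration (editor's example, not part of the original source):
   the scope and discriminator recorded here distinguish otherwise
   identical closure types in mangled names, e.g. for lambdas in variable
   initializers

     inline int a = [] { return 1; } ();
     inline int b = [] { return 2; } ();

   each lambda is numbered within its enclosing scope.  */
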
void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}

void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      lambda_scope = p->t;
      lambda_count = p->i;
    }
  lambda_scope_stack->pop ();
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
                            NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"