gcc/cp/lambda.c
/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2013 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "toplev.h"
#include "vec.h"
/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}
/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
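/* Illustration: for a lambda introduced as [x, &y], the capture list walked
   above holds one (field, value) pair per capture, and the closure object is
   built as if by a braced direct-initializer whose elements initialize the
   __x and __y capture fields from x and y, before the class reverts to being
   a non-aggregate.  */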
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);
  if (type == error_mark_node)
    return error_mark_node;

  return type;
}
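/* Note that every lambda-expression gets its own uniquely named closure
   class here, so even two textually identical lambdas have distinct
   closure types.  */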
/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
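/* For example, for [] { return 42; } the deduced operator() return type
   computed here is int: the cv-unqualified, decayed type of the return
   operand.  */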
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (!type || WILDCARD_TYPE_P (type) || type_uses_auto (type)
      || DECL_PACK_P (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}
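/* For example, capturing an int variable by copy yields field type int; an
   init-capture such as [d = 1.5] goes through the auto deduction path above
   and yields double; dependent operands (and packs) get the DECLTYPE_TYPE
   placeholder so the field type is recomputed at instantiation time.  */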
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
        break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  if (DECL_NORMAL_CAPTURE_P (member))
    name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
  else
    name = DECL_NAME (member);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                               build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
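/* Illustration: for a normal capture of x, the proxy built above is an
   artificial VAR_DECL named x whose DECL_VALUE_EXPR is the COMPONENT_REF
   selecting the __x field from *this, so name lookup in the lambda body
   finds the proxy and later phases substitute the field access.  */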
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
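/* The record built above is morally

     struct { T *ptr; size_t max; };

   for element type T; add_capture stores the address of the first element
   and array_type_nelts in it, and build_capture_proxy reconstructs a
   reference to the variable-length array from those two fields.  */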
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = lambda_capture_field_type (initializer, explicit_init_p);
  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not a C++1y array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else if (by_reference_p)
    {
      type = build_reference_type (type);
      if (!real_lvalue_p (initializer))
        error ("cannot capture %qE by reference", initializer);
    }
  else
    /* Capture by copy requires a complete type.  */
    type = complete_type (type);

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  if (!explicit_init_p)
    {
      buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
      buf[1] = buf[0] = '_';
      memcpy (buf + 2, IDENTIFIER_POINTER (id),
              IDENTIFIER_LENGTH (id) + 1);
      name = get_identifier (buf);
    }
  else
    /* But captures with explicit initializers are named.  */
    name = id;

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
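/* For example, [v] () { ... } creates a closure FIELD_DECL named __v, marked
   DECL_NORMAL_CAPTURE_P and hidden from ordinary name lookup as described
   above, whereas an init-capture [v = f ()] keeps the plain name v and
   stays visible.  */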
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (!this_capture_p
                          && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
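/* For nested default captures, e.g. an outer [=] lambda containing an inner
   [=] lambda that uses an enclosing local, the stack is walked from the
   outermost lambda inward: each level captures the previous level's capture
   (via convert_from_reference above), and the innermost lambda's capture is
   what gets returned.  */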
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the form of an
   INDIRECT_REF, possibly adding it through default capturing.  */

tree
lambda_expr_this_capture (tree lambda)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so just return the
     nearest 'this'.  */
  if (cp_unevaluated_operand)
    return lookup_name (this_identifier);

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain.  */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        this_capture = add_default_capture (lambda_stack,
                                            /*id=*/this_identifier,
                                            init);
    }

  if (!this_capture)
    {
      error ("%<this%> was not captured for this lambda function");
      result = error_mark_node;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
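/* For example, in

     struct A { int i; void f () { [=] { return i; }; } };

   the implicit use of 'this' inside the lambda body reaches this function;
   the walk above finds f's 'this' parameter (the DECL_ARGUMENTS of the
   enclosing member function) and default-captures it into the closure.  */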
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam);
      object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                     RO_NULL, tf_warning_or_error);
    }

  return object;
}
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (current_function_decl != NULL_TREE);
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
                                         TREE_TYPE (DECL_ARGUMENTS (callop)),
                                         null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            if (DECL_PACK_P (tgt))
              {
                tree a = make_pack_expansion (tgt);
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
                CALL_EXPR_ARG (call, ix) = a;
              }
            else
              {
                tree a = convert_from_reference (tgt);
                CALL_EXPR_ARG (call, ix) = a;
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
              }
            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      symtab_add_to_same_comdat_group
        ((symtab_node) cgraph_get_create_node (statfn),
         (symtab_node) cgraph_get_create_node (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
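/* The net effect is that a capture-less lambda converts to a plain function
   pointer, e.g.

     int (*fp) (int) = [] (int i) { return i + 1; };

   the conversion operator built above returns the address of the static
   _FUN thunk, which simply forwards to the operator().  */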
/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies.  */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}