1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
6 Copyright (C) 1998-2015 Free Software Foundation, Inc.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "toplev.h"
40 /* Constructor for a lambda expression. */
43 build_lambda_expr (void)
45 tree lambda
= make_node (LAMBDA_EXPR
);
46 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda
) = CPLD_NONE
;
47 LAMBDA_EXPR_CAPTURE_LIST (lambda
) = NULL_TREE
;
48 LAMBDA_EXPR_THIS_CAPTURE (lambda
) = NULL_TREE
;
49 LAMBDA_EXPR_PENDING_PROXIES (lambda
) = NULL
;
50 LAMBDA_EXPR_RETURN_TYPE (lambda
) = NULL_TREE
;
51 LAMBDA_EXPR_MUTABLE_P (lambda
) = false;
55 /* Create the closure object for a LAMBDA_EXPR. */
58 build_lambda_object (tree lambda_expr
)
60 /* Build aggregate constructor call.
61 - cp_parser_braced_list
62 - cp_parser_functional_cast */
63 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
64 tree node
, expr
, type
;
67 if (processing_template_decl
)
70 /* Make sure any error messages refer to the lambda-introducer. */
71 saved_loc
= input_location
;
72 input_location
= LAMBDA_EXPR_LOCATION (lambda_expr
);
74 for (node
= LAMBDA_EXPR_CAPTURE_LIST (lambda_expr
);
76 node
= TREE_CHAIN (node
))
78 tree field
= TREE_PURPOSE (node
);
79 tree val
= TREE_VALUE (node
);
81 if (field
== error_mark_node
)
83 expr
= error_mark_node
;
90 /* Mere mortals can't copy arrays with aggregate initialization, so
91 do some magic to make it work here. */
92 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
)
93 val
= build_array_copy (val
);
94 else if (DECL_NORMAL_CAPTURE_P (field
)
95 && !DECL_VLA_CAPTURE_P (field
)
96 && TREE_CODE (TREE_TYPE (field
)) != REFERENCE_TYPE
)
98 /* "the entities that are captured by copy are used to
99 direct-initialize each corresponding non-static data
100 member of the resulting closure object."
102 There's normally no way to express direct-initialization
103 from an element of a CONSTRUCTOR, so we build up a special
104 TARGET_EXPR to bypass the usual copy-initialization. */
105 val
= force_rvalue (val
, tf_warning_or_error
);
106 if (TREE_CODE (val
) == TARGET_EXPR
)
107 TARGET_EXPR_DIRECT_INIT_P (val
) = true;
110 CONSTRUCTOR_APPEND_ELT (elts
, DECL_NAME (field
), val
);
113 expr
= build_constructor (init_list_type_node
, elts
);
114 CONSTRUCTOR_IS_DIRECT_INIT (expr
) = 1;
116 /* N2927: "[The closure] class type is not an aggregate."
117 But we briefly treat it as an aggregate to make this simpler. */
118 type
= LAMBDA_EXPR_CLOSURE (lambda_expr
);
119 CLASSTYPE_NON_AGGREGATE (type
) = 0;
120 expr
= finish_compound_literal (type
, expr
, tf_warning_or_error
);
121 CLASSTYPE_NON_AGGREGATE (type
) = 1;
124 input_location
= saved_loc
;
128 /* Return an initialized RECORD_TYPE for LAMBDA.
129 LAMBDA must have its explicit captures already. */
132 begin_lambda_type (tree lambda
)
137 /* Unique name. This is just like an unnamed class, but we cannot use
138 make_anon_name because of certain checks against TYPE_ANONYMOUS_P. */
140 name
= make_lambda_name ();
142 /* Create the new RECORD_TYPE for this lambda. */
143 type
= xref_tag (/*tag_code=*/record_type
,
146 /*template_header_p=*/false);
147 if (type
== error_mark_node
)
148 return error_mark_node
;
151 /* Designate it as a struct so that we can use aggregate initialization. */
152 CLASSTYPE_DECLARED_CLASS (type
) = false;
154 /* Cross-reference the expression and the type. */
155 LAMBDA_EXPR_CLOSURE (lambda
) = type
;
156 CLASSTYPE_LAMBDA_EXPR (type
) = lambda
;
158 /* Clear base types. */
159 xref_basetypes (type
, /*bases=*/NULL_TREE
);
161 /* Start the class. */
162 type
= begin_class_definition (type
);
167 /* Returns the type to use for the return type of the operator() of a
171 lambda_return_type (tree expr
)
173 if (expr
== NULL_TREE
)
174 return void_type_node
;
175 if (type_unknown_p (expr
)
176 || BRACE_ENCLOSED_INITIALIZER_P (expr
))
178 cxx_incomplete_type_error (expr
, TREE_TYPE (expr
));
179 return void_type_node
;
181 gcc_checking_assert (!type_dependent_expression_p (expr
));
182 return cv_unqualified (type_decays_to (unlowered_expr_type (expr
)));
185 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
189 lambda_function (tree lambda
)
192 if (TREE_CODE (lambda
) == LAMBDA_EXPR
)
193 type
= LAMBDA_EXPR_CLOSURE (lambda
);
196 gcc_assert (LAMBDA_TYPE_P (type
));
197 /* Don't let debug_tree cause instantiation. */
198 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type
)
199 && !COMPLETE_OR_OPEN_TYPE_P (type
))
201 lambda
= lookup_member (type
, ansi_opname (CALL_EXPR
),
202 /*protect=*/0, /*want_type=*/false,
203 tf_warning_or_error
);
205 lambda
= STRIP_TEMPLATE (get_first_fn (lambda
));
209 /* Returns the type to use for the FIELD_DECL corresponding to the
211 The caller should add REFERENCE_TYPE for capture by reference. */
214 lambda_capture_field_type (tree expr
, bool explicit_init_p
)
220 type
= do_auto_deduction (type
, expr
, type
);
223 type
= non_reference (unlowered_expr_type (expr
));
224 if (type_dependent_expression_p (expr
)
225 && !is_this_parameter (tree_strip_nop_conversions (expr
)))
227 type
= cxx_make_type (DECLTYPE_TYPE
);
228 DECLTYPE_TYPE_EXPR (type
) = expr
;
229 DECLTYPE_FOR_LAMBDA_CAPTURE (type
) = true;
230 DECLTYPE_FOR_INIT_CAPTURE (type
) = explicit_init_p
;
231 SET_TYPE_STRUCTURAL_EQUALITY (type
);
236 /* Returns true iff DECL is a lambda capture proxy variable created by
237 build_capture_proxy. */
240 is_capture_proxy (tree decl
)
243 && DECL_HAS_VALUE_EXPR_P (decl
)
244 && !DECL_ANON_UNION_VAR_P (decl
)
245 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl
)));
248 /* Returns true iff DECL is a capture proxy for a normal capture
249 (i.e. without explicit initializer). */
252 is_normal_capture_proxy (tree decl
)
254 if (!is_capture_proxy (decl
))
255 /* It's not a capture proxy. */
258 if (variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
262 /* It is a capture proxy, is it a normal capture? */
263 tree val
= DECL_VALUE_EXPR (decl
);
264 if (val
== error_mark_node
)
267 gcc_assert (TREE_CODE (val
) == COMPONENT_REF
);
268 val
= TREE_OPERAND (val
, 1);
269 return DECL_NORMAL_CAPTURE_P (val
);
272 /* VAR is a capture proxy created by build_capture_proxy; add it to the
273 current function, which is the operator() for the appropriate lambda. */
276 insert_capture_proxy (tree var
)
281 /* Put the capture proxy in the extra body block so that it won't clash
282 with a later local variable. */
283 b
= current_binding_level
;
286 cp_binding_level
*n
= b
->level_chain
;
287 if (n
->kind
== sk_function_parms
)
291 pushdecl_with_scope (var
, b
, false);
293 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
294 var
= build_stmt (DECL_SOURCE_LOCATION (var
), DECL_EXPR
, var
);
295 stmt_list
= (*stmt_list_stack
)[1];
296 gcc_assert (stmt_list
);
297 append_to_statement_list_force (var
, &stmt_list
);
300 /* We've just finished processing a lambda; if the containing scope is also
301 a lambda, insert any capture proxies that were created while processing
302 the nested lambda. */
305 insert_pending_capture_proxies (void)
308 vec
<tree
, va_gc
> *proxies
;
311 if (!current_function_decl
|| !LAMBDA_FUNCTION_P (current_function_decl
))
314 lam
= CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl
));
315 proxies
= LAMBDA_EXPR_PENDING_PROXIES (lam
);
316 for (i
= 0; i
< vec_safe_length (proxies
); ++i
)
318 tree var
= (*proxies
)[i
];
319 insert_capture_proxy (var
);
321 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam
));
322 LAMBDA_EXPR_PENDING_PROXIES (lam
) = NULL
;
325 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
326 return the type we want the proxy to have: the type of the field itself,
327 with added const-qualification if the lambda isn't mutable and the
328 capture is by value. */
331 lambda_proxy_type (tree ref
)
334 if (ref
== error_mark_node
)
335 return error_mark_node
;
336 if (REFERENCE_REF_P (ref
))
337 ref
= TREE_OPERAND (ref
, 0);
338 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
339 type
= TREE_TYPE (ref
);
340 if (!type
|| WILDCARD_TYPE_P (non_reference (type
)))
342 type
= cxx_make_type (DECLTYPE_TYPE
);
343 DECLTYPE_TYPE_EXPR (type
) = ref
;
344 DECLTYPE_FOR_LAMBDA_PROXY (type
) = true;
345 SET_TYPE_STRUCTURAL_EQUALITY (type
);
347 if (DECL_PACK_P (TREE_OPERAND (ref
, 1)))
348 type
= make_pack_expansion (type
);
352 /* MEMBER is a capture field in a lambda closure class. Now that we're
353 inside the operator(), build a placeholder var for future lookups and
357 build_capture_proxy (tree member
)
359 tree var
, object
, fn
, closure
, name
, lam
, type
;
361 if (PACK_EXPANSION_P (member
))
362 member
= PACK_EXPANSION_PATTERN (member
);
364 closure
= DECL_CONTEXT (member
);
365 fn
= lambda_function (closure
);
366 lam
= CLASSTYPE_LAMBDA_EXPR (closure
);
368 /* The proxy variable forwards to the capture field. */
369 object
= build_fold_indirect_ref (DECL_ARGUMENTS (fn
));
370 object
= finish_non_static_data_member (member
, object
, NULL_TREE
);
371 if (REFERENCE_REF_P (object
))
372 object
= TREE_OPERAND (object
, 0);
374 /* Remove the __ inserted by add_capture. */
375 name
= get_identifier (IDENTIFIER_POINTER (DECL_NAME (member
)) + 2);
377 type
= lambda_proxy_type (object
);
379 if (DECL_VLA_CAPTURE_P (member
))
381 /* Rebuild the VLA type from the pointer and maxindex. */
382 tree field
= next_initializable_field (TYPE_FIELDS (type
));
383 tree ptr
= build_simple_component_ref (object
, field
);
384 field
= next_initializable_field (DECL_CHAIN (field
));
385 tree max
= build_simple_component_ref (object
, field
);
386 type
= build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr
)),
387 build_index_type (max
));
388 type
= build_reference_type (type
);
389 REFERENCE_VLA_OK (type
) = true;
390 object
= convert (type
, ptr
);
393 var
= build_decl (input_location
, VAR_DECL
, name
, type
);
394 SET_DECL_VALUE_EXPR (var
, object
);
395 DECL_HAS_VALUE_EXPR_P (var
) = 1;
396 DECL_ARTIFICIAL (var
) = 1;
398 DECL_CONTEXT (var
) = fn
;
400 if (name
== this_identifier
)
402 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam
) == member
);
403 LAMBDA_EXPR_THIS_CAPTURE (lam
) = var
;
406 if (fn
== current_function_decl
)
407 insert_capture_proxy (var
);
409 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam
), var
);
414 /* Return a struct containing a pointer and a length for lambda capture of
415 an array of runtime length. */
418 vla_capture_type (tree array_type
)
420 static tree ptr_id
, max_id
;
421 tree type
= xref_tag (record_type
, make_anon_name (), ts_current
, false);
422 xref_basetypes (type
, NULL_TREE
);
423 type
= begin_class_definition (type
);
426 ptr_id
= get_identifier ("ptr");
427 max_id
= get_identifier ("max");
429 tree ptrtype
= build_pointer_type (TREE_TYPE (array_type
));
430 tree field
= build_decl (input_location
, FIELD_DECL
, ptr_id
, ptrtype
);
431 finish_member_declaration (field
);
432 field
= build_decl (input_location
, FIELD_DECL
, max_id
, sizetype
);
433 finish_member_declaration (field
);
434 return finish_struct (type
, NULL_TREE
);
437 /* From an ID and INITIALIZER, create a capture (by reference if
438 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
442 add_capture (tree lambda
, tree id
, tree orig_init
, bool by_reference_p
,
443 bool explicit_init_p
)
446 tree type
, member
, name
;
448 bool variadic
= false;
449 tree initializer
= orig_init
;
451 if (PACK_EXPANSION_P (initializer
))
453 initializer
= PACK_EXPANSION_PATTERN (initializer
);
457 if (TREE_CODE (initializer
) == TREE_LIST
)
458 initializer
= build_x_compound_expr_from_list (initializer
, ELK_INIT
,
459 tf_warning_or_error
);
460 type
= TREE_TYPE (initializer
);
461 if (type
== error_mark_node
)
462 return error_mark_node
;
464 if (array_of_runtime_bound_p (type
))
468 error ("array of runtime bound cannot be captured by copy, "
469 "only by reference");
471 /* For a VLA, we capture the address of the first element and the
472 maximum index, and then reconstruct the VLA for the proxy. */
473 tree elt
= cp_build_array_ref (input_location
, initializer
,
474 integer_zero_node
, tf_warning_or_error
);
475 initializer
= build_constructor_va (init_list_type_node
, 2,
476 NULL_TREE
, build_address (elt
),
477 NULL_TREE
, array_type_nelts (type
));
478 type
= vla_capture_type (type
);
480 else if (!dependent_type_p (type
)
481 && variably_modified_type_p (type
, NULL_TREE
))
483 error ("capture of variable-size type %qT that is not an N3639 array "
484 "of runtime bound", type
);
485 if (TREE_CODE (type
) == ARRAY_TYPE
486 && variably_modified_type_p (TREE_TYPE (type
), NULL_TREE
))
487 inform (input_location
, "because the array element type %qT has "
488 "variable size", TREE_TYPE (type
));
489 type
= error_mark_node
;
493 type
= lambda_capture_field_type (initializer
, explicit_init_p
);
496 type
= build_reference_type (type
);
497 if (!dependent_type_p (type
) && !real_lvalue_p (initializer
))
498 error ("cannot capture %qE by reference", initializer
);
502 /* Capture by copy requires a complete type. */
503 type
= complete_type (type
);
504 if (!dependent_type_p (type
) && !COMPLETE_TYPE_P (type
))
506 error ("capture by copy of incomplete type %qT", type
);
507 cxx_incomplete_type_inform (type
);
508 return error_mark_node
;
513 /* Add __ to the beginning of the field name so that user code
514 won't find the field with name lookup. We can't just leave the name
515 unset because template instantiation uses the name to find
516 instantiated fields. */
517 buf
= (char *) alloca (IDENTIFIER_LENGTH (id
) + 3);
518 buf
[1] = buf
[0] = '_';
519 memcpy (buf
+ 2, IDENTIFIER_POINTER (id
),
520 IDENTIFIER_LENGTH (id
) + 1);
521 name
= get_identifier (buf
);
523 /* If TREE_TYPE isn't set, we're still in the introducer, so check
525 if (!LAMBDA_EXPR_CLOSURE (lambda
))
527 if (IDENTIFIER_MARKED (name
))
529 pedwarn (input_location
, 0,
530 "already captured %qD in lambda expression", id
);
533 IDENTIFIER_MARKED (name
) = true;
537 type
= make_pack_expansion (type
);
539 /* Make member variable. */
540 member
= build_decl (input_location
, FIELD_DECL
, name
, type
);
541 DECL_VLA_CAPTURE_P (member
) = vla
;
543 if (!explicit_init_p
)
544 /* Normal captures are invisible to name lookup but uses are replaced
545 with references to the capture field; we implement this by only
546 really making them invisible in unevaluated context; see
547 qualify_lookup. For now, let's make explicitly initialized captures
549 DECL_NORMAL_CAPTURE_P (member
) = true;
551 if (id
== this_identifier
)
552 LAMBDA_EXPR_THIS_CAPTURE (lambda
) = member
;
554 /* Add it to the appropriate closure class if we've started it. */
555 if (current_class_type
556 && current_class_type
== LAMBDA_EXPR_CLOSURE (lambda
))
557 finish_member_declaration (member
);
559 tree listmem
= member
;
562 listmem
= make_pack_expansion (member
);
563 initializer
= orig_init
;
565 LAMBDA_EXPR_CAPTURE_LIST (lambda
)
566 = tree_cons (listmem
, initializer
, LAMBDA_EXPR_CAPTURE_LIST (lambda
));
568 if (LAMBDA_EXPR_CLOSURE (lambda
))
569 return build_capture_proxy (member
);
570 /* For explicit captures we haven't started the function yet, so we wait
571 and build the proxy from cp_parser_lambda_body. */
575 /* Register all the capture members on the list CAPTURES, which is the
576 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
579 register_capture_members (tree captures
)
581 if (captures
== NULL_TREE
)
584 register_capture_members (TREE_CHAIN (captures
));
586 tree field
= TREE_PURPOSE (captures
);
587 if (PACK_EXPANSION_P (field
))
588 field
= PACK_EXPANSION_PATTERN (field
);
590 /* We set this in add_capture to avoid duplicates. */
591 IDENTIFIER_MARKED (DECL_NAME (field
)) = false;
592 finish_member_declaration (field
);
595 /* Similar to add_capture, except this works on a stack of nested lambdas.
596 BY_REFERENCE_P in this case is derived from the default capture mode.
597 Returns the capture for the lambda at the bottom of the stack. */
600 add_default_capture (tree lambda_stack
, tree id
, tree initializer
)
602 bool this_capture_p
= (id
== this_identifier
);
604 tree var
= NULL_TREE
;
606 tree saved_class_type
= current_class_type
;
610 for (node
= lambda_stack
;
612 node
= TREE_CHAIN (node
))
614 tree lambda
= TREE_VALUE (node
);
616 current_class_type
= LAMBDA_EXPR_CLOSURE (lambda
);
617 if (DECL_PACK_P (initializer
))
618 initializer
= make_pack_expansion (initializer
);
619 var
= add_capture (lambda
,
624 && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda
)
626 /*explicit_init_p=*/false);
627 initializer
= convert_from_reference (var
);
630 current_class_type
= saved_class_type
;
635 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
636 form of an INDIRECT_REF, possibly adding it through default
637 capturing, if ADD_CAPTURE_P is true. */
640 lambda_expr_this_capture (tree lambda
, bool add_capture_p
)
644 tree this_capture
= LAMBDA_EXPR_THIS_CAPTURE (lambda
);
646 /* In unevaluated context this isn't an odr-use, so don't capture. */
647 if (cp_unevaluated_operand
)
648 add_capture_p
= false;
650 /* Try to default capture 'this' if we can. */
653 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda
) != CPLD_NONE
))
655 tree lambda_stack
= NULL_TREE
;
656 tree init
= NULL_TREE
;
658 /* If we are in a lambda function, we can move out until we hit:
659 1. a non-lambda function or NSDMI,
660 2. a lambda function capturing 'this', or
661 3. a non-default capturing lambda function. */
662 for (tree tlambda
= lambda
; ;)
664 lambda_stack
= tree_cons (NULL_TREE
,
668 if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda
)
669 && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda
)) == FIELD_DECL
)
671 /* In an NSDMI, we don't have a function to look up the decl in,
672 but the fake 'this' pointer that we're using for parsing is
674 init
= scope_chain
->x_current_class_ptr
;
676 (init
&& (TREE_TYPE (TREE_TYPE (init
))
677 == current_nonlambda_class_type ()));
681 tree closure_decl
= TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda
));
682 tree containing_function
= decl_function_context (closure_decl
);
684 if (containing_function
== NULL_TREE
)
685 /* We ran out of scopes; there's no 'this' to capture. */
688 if (!LAMBDA_FUNCTION_P (containing_function
))
690 /* We found a non-lambda function. */
691 if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function
))
692 /* First parameter is 'this'. */
693 init
= DECL_ARGUMENTS (containing_function
);
698 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function
));
700 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda
))
702 /* An outer lambda has already captured 'this'. */
703 init
= LAMBDA_EXPR_THIS_CAPTURE (tlambda
);
707 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda
) == CPLD_NONE
)
708 /* An outer lambda won't let us capture 'this'. */
715 this_capture
= add_default_capture (lambda_stack
,
716 /*id=*/this_identifier
,
723 if (cp_unevaluated_operand
)
724 result
= this_capture
;
725 else if (!this_capture
)
729 error ("%<this%> was not captured for this lambda function");
730 result
= error_mark_node
;
737 /* To make sure that current_class_ref is for the lambda. */
738 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref
))
739 == LAMBDA_EXPR_CLOSURE (lambda
));
741 result
= this_capture
;
743 /* If 'this' is captured, each use of 'this' is transformed into an
744 access to the corresponding unnamed data member of the closure
745 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
746 ensures that the transformed expression is an rvalue. ] */
747 result
= rvalue (result
);
753 /* We don't want to capture 'this' until we know we need it, i.e. after
754 overload resolution has chosen a non-static member function. At that
755 point we call this function to turn a dummy object into a use of the
759 maybe_resolve_dummy (tree object
, bool add_capture_p
)
761 if (!is_dummy_object (object
))
764 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (object
));
765 gcc_assert (!TYPE_PTR_P (type
));
767 if (type
!= current_class_type
768 && current_class_type
769 && LAMBDA_TYPE_P (current_class_type
)
770 && lambda_function (current_class_type
)
771 && DERIVED_FROM_P (type
, current_nonlambda_class_type ()))
773 /* In a lambda, need to go through 'this' capture. */
774 tree lam
= CLASSTYPE_LAMBDA_EXPR (current_class_type
);
775 tree cap
= lambda_expr_this_capture (lam
, add_capture_p
);
776 if (cap
&& cap
!= error_mark_node
)
777 object
= build_x_indirect_ref (EXPR_LOCATION (object
), cap
,
778 RO_NULL
, tf_warning_or_error
);
784 /* Returns the innermost non-lambda function. */
787 current_nonlambda_function (void)
789 tree fn
= current_function_decl
;
790 while (fn
&& LAMBDA_FUNCTION_P (fn
))
791 fn
= decl_function_context (fn
);
795 /* Returns the method basetype of the innermost non-lambda function, or
796 NULL_TREE if none. */
799 nonlambda_method_basetype (void)
802 if (!current_class_ref
)
805 type
= current_class_type
;
806 if (!LAMBDA_TYPE_P (type
))
809 /* Find the nearest enclosing non-lambda function. */
810 fn
= TYPE_NAME (type
);
812 fn
= decl_function_context (fn
);
813 while (fn
&& LAMBDA_FUNCTION_P (fn
));
815 if (!fn
|| !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn
))
818 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn
));
821 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
822 indicated FN and NARGS, but do not initialize the return type or any of the
826 prepare_op_call (tree fn
, int nargs
)
830 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
831 CALL_EXPR_FN (t
) = fn
;
832 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
837 /* If the closure TYPE has a static op(), also add a conversion to function
841 maybe_add_lambda_conv_op (tree type
)
843 bool nested
= (cfun
!= NULL
);
844 bool nested_def
= decl_function_context (TYPE_MAIN_DECL (type
));
845 tree callop
= lambda_function (type
);
847 if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type
)) != NULL_TREE
)
850 if (processing_template_decl
)
853 bool const generic_lambda_p
854 = (DECL_TEMPLATE_INFO (callop
)
855 && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop
)) == callop
);
857 if (!generic_lambda_p
&& DECL_INITIAL (callop
) == NULL_TREE
)
859 /* If the op() wasn't instantiated due to errors, give up. */
860 gcc_assert (errorcount
|| sorrycount
);
864 /* Non-template conversion operators are defined directly with build_call_a
865 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
866 deferred and the CALL is built in-place. In the case of a deduced return
867 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
868 the return type is also built in-place. The arguments of DECLTYPE_CALL in
869 the return expression may differ in flags from those in the body CALL. In
870 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
871 the body CALL, but not in DECLTYPE_CALL. */
873 vec
<tree
, va_gc
> *direct_argvec
= 0;
874 tree decltype_call
= 0, call
= 0;
875 tree fn_result
= TREE_TYPE (TREE_TYPE (callop
));
877 if (generic_lambda_p
)
879 /* Prepare the dependent member call for the static member function
880 '_FUN' and, potentially, prepare another call to be used in a decltype
881 return expression for a deduced return call op to allow for simple
882 implementation of the conversion operator. */
884 tree instance
= build_nop (type
, null_pointer_node
);
885 tree objfn
= build_min (COMPONENT_REF
, NULL_TREE
,
886 instance
, DECL_NAME (callop
), NULL_TREE
);
887 int nargs
= list_length (DECL_ARGUMENTS (callop
)) - 1;
889 call
= prepare_op_call (objfn
, nargs
);
890 if (type_uses_auto (fn_result
))
891 decltype_call
= prepare_op_call (objfn
, nargs
);
895 direct_argvec
= make_tree_vector ();
896 direct_argvec
->quick_push (build1 (NOP_EXPR
,
897 TREE_TYPE (DECL_ARGUMENTS (callop
)),
901 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
902 declare the static member function "_FUN" below. For each arg append to
903 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
904 call args (for the template case). If a parameter pack is found, expand
905 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
907 tree fn_args
= NULL_TREE
;
910 tree src
= DECL_CHAIN (DECL_ARGUMENTS (callop
));
915 tree new_node
= copy_node (src
);
918 fn_args
= tgt
= new_node
;
921 TREE_CHAIN (tgt
) = new_node
;
927 if (generic_lambda_p
)
929 if (DECL_PACK_P (tgt
))
931 tree a
= make_pack_expansion (tgt
);
933 CALL_EXPR_ARG (decltype_call
, ix
) = copy_node (a
);
934 PACK_EXPANSION_LOCAL_P (a
) = true;
935 CALL_EXPR_ARG (call
, ix
) = a
;
939 tree a
= convert_from_reference (tgt
);
940 CALL_EXPR_ARG (call
, ix
) = a
;
942 CALL_EXPR_ARG (decltype_call
, ix
) = copy_node (a
);
947 vec_safe_push (direct_argvec
, tgt
);
949 src
= TREE_CHAIN (src
);
954 if (generic_lambda_p
)
958 ++processing_template_decl
;
959 fn_result
= finish_decltype_type
960 (decltype_call
, /*id_expression_or_member_access_p=*/false,
961 tf_warning_or_error
);
962 --processing_template_decl
;
966 call
= build_call_a (callop
,
967 direct_argvec
->length (),
968 direct_argvec
->address ());
970 CALL_FROM_THUNK_P (call
) = 1;
972 tree stattype
= build_function_type (fn_result
, FUNCTION_ARG_CHAIN (callop
));
974 /* First build up the conversion op. */
976 tree rettype
= build_pointer_type (stattype
);
977 tree name
= mangle_conv_op_name_for_type (rettype
);
978 tree thistype
= cp_build_qualified_type (type
, TYPE_QUAL_CONST
);
979 tree fntype
= build_method_type_directly (thistype
, rettype
, void_list_node
);
980 tree convfn
= build_lang_decl (FUNCTION_DECL
, name
, fntype
);
982 DECL_SOURCE_LOCATION (fn
) = DECL_SOURCE_LOCATION (callop
);
984 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
985 && DECL_ALIGN (fn
) < 2 * BITS_PER_UNIT
)
986 DECL_ALIGN (fn
) = 2 * BITS_PER_UNIT
;
988 SET_OVERLOADED_OPERATOR_CODE (fn
, TYPE_EXPR
);
989 grokclassfn (type
, fn
, NO_SPECIAL
);
990 set_linkage_according_to_type (type
, fn
);
991 rest_of_decl_compilation (fn
, toplevel_bindings_p (), at_eof
);
992 DECL_IN_AGGR_P (fn
) = 1;
993 DECL_ARTIFICIAL (fn
) = 1;
994 DECL_NOT_REALLY_EXTERN (fn
) = 1;
995 DECL_DECLARED_INLINE_P (fn
) = 1;
996 DECL_ARGUMENTS (fn
) = build_this_parm (fntype
, TYPE_QUAL_CONST
);
998 DECL_INTERFACE_KNOWN (fn
) = 1;
1000 if (generic_lambda_p
)
1001 fn
= add_inherited_template_parms (fn
, DECL_TI_TEMPLATE (callop
));
1003 add_method (type
, fn
, NULL_TREE
);
1005 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1006 the conversion op is used. */
1007 if (varargs_function_p (callop
))
1009 DECL_DELETED_FN (fn
) = 1;
1013 /* Now build up the thunk to be returned. */
1015 name
= get_identifier ("_FUN");
1016 tree statfn
= build_lang_decl (FUNCTION_DECL
, name
, stattype
);
1018 DECL_SOURCE_LOCATION (fn
) = DECL_SOURCE_LOCATION (callop
);
1019 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
1020 && DECL_ALIGN (fn
) < 2 * BITS_PER_UNIT
)
1021 DECL_ALIGN (fn
) = 2 * BITS_PER_UNIT
;
1022 grokclassfn (type
, fn
, NO_SPECIAL
);
1023 set_linkage_according_to_type (type
, fn
);
1024 rest_of_decl_compilation (fn
, toplevel_bindings_p (), at_eof
);
1025 DECL_IN_AGGR_P (fn
) = 1;
1026 DECL_ARTIFICIAL (fn
) = 1;
1027 DECL_NOT_REALLY_EXTERN (fn
) = 1;
1028 DECL_DECLARED_INLINE_P (fn
) = 1;
1029 DECL_STATIC_FUNCTION_P (fn
) = 1;
1030 DECL_ARGUMENTS (fn
) = fn_args
;
1031 for (tree arg
= fn_args
; arg
; arg
= DECL_CHAIN (arg
))
1033 /* Avoid duplicate -Wshadow warnings. */
1034 DECL_NAME (arg
) = NULL_TREE
;
1035 DECL_CONTEXT (arg
) = fn
;
1038 DECL_INTERFACE_KNOWN (fn
) = 1;
1040 if (generic_lambda_p
)
1041 fn
= add_inherited_template_parms (fn
, DECL_TI_TEMPLATE (callop
));
1043 add_method (type
, fn
, NULL_TREE
);
1046 push_function_context ();
1048 /* Still increment function_depth so that we don't GC in the
1049 middle of an expression. */
1052 /* Generate the body of the thunk. */
1054 start_preparsed_function (statfn
, NULL_TREE
,
1055 SF_PRE_PARSED
| SF_INCLASS_INLINE
);
1056 if (DECL_ONE_ONLY (statfn
))
1058 /* Put the thunk in the same comdat group as the call op. */
1059 cgraph_node::get_create (statfn
)->add_to_same_comdat_group
1060 (cgraph_node::get_create (callop
));
1062 tree body
= begin_function_body ();
1063 tree compound_stmt
= begin_compound_stmt (0);
1064 if (!generic_lambda_p
)
1066 set_flags_from_callee (call
);
1067 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call
)))
1068 call
= build_cplus_new (TREE_TYPE (call
), call
, tf_warning_or_error
);
1070 call
= convert_from_reference (call
);
1071 finish_return_stmt (call
);
1073 finish_compound_stmt (compound_stmt
);
1074 finish_function_body (body
);
1076 fn
= finish_function (/*inline*/2);
1077 if (!generic_lambda_p
)
1078 expand_or_defer_fn (fn
);
1080 /* Generate the body of the conversion op. */
1082 start_preparsed_function (convfn
, NULL_TREE
,
1083 SF_PRE_PARSED
| SF_INCLASS_INLINE
);
1084 body
= begin_function_body ();
1085 compound_stmt
= begin_compound_stmt (0);
1087 /* decl_needed_p needs to see that it's used. */
1088 TREE_USED (statfn
) = 1;
1089 finish_return_stmt (decay_conversion (statfn
, tf_warning_or_error
));
1091 finish_compound_stmt (compound_stmt
);
1092 finish_function_body (body
);
1094 fn
= finish_function (/*inline*/2);
1095 if (!generic_lambda_p
)
1096 expand_or_defer_fn (fn
);
1099 pop_function_context ();
1104 /* Returns true iff VAL is a lambda-related declaration which should
1105 be ignored by unqualified lookup. */
1108 is_lambda_ignored_entity (tree val
)
1110 /* In unevaluated context, look past normal capture proxies. */
1111 if (cp_unevaluated_operand
&& is_normal_capture_proxy (val
))
1114 /* Always ignore lambda fields, their names are only for debugging. */
1115 if (TREE_CODE (val
) == FIELD_DECL
1116 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val
)))
1119 /* None of the lookups that use qualify_lookup want the op() from the
1120 lambda; they want the one from the enclosing class. */
1121 if (TREE_CODE (val
) == FUNCTION_DECL
&& LAMBDA_FUNCTION_P (val
))