1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2    building tree structure, checking semantic consistency, and
3    building RTL.  These routines are used both during actual parsing
4    and during the instantiation of template functions.
5 
6    Copyright (C) 1998-2020 Free Software Foundation, Inc.
7 
8    This file is part of GCC.
9 
10    GCC is free software; you can redistribute it and/or modify it
11    under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3, or (at your option)
13    any later version.
14 
15    GCC is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34 
35 /* Constructor for a lambda expression.  */
36 
37 tree
38 build_lambda_expr (void)
39 {
40   tree lambda = make_node (LAMBDA_EXPR);
41   LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42   LAMBDA_EXPR_CAPTURE_LIST         (lambda) = NULL_TREE;
43   LAMBDA_EXPR_THIS_CAPTURE         (lambda) = NULL_TREE;
44   LAMBDA_EXPR_PENDING_PROXIES      (lambda) = NULL;
45   LAMBDA_EXPR_MUTABLE_P            (lambda) = false;
46   return lambda;
47 }
48 
49 /* Create the closure object for a LAMBDA_EXPR.  */
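/* Illustrative sketch (not part of the original commentary): given

     int x = 1, y = 2;
     auto f = [x, &y] { return x + y; };

   this function conceptually builds the braced initializer { x, y } and
   uses it, with the closure type briefly treated as an aggregate, to
   direct-initialize the capture members (named __x and __y by
   add_capture below).  */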
50 
51 tree
52 build_lambda_object (tree lambda_expr)
53 {
54   /* Build aggregate constructor call.
55      - cp_parser_braced_list
56      - cp_parser_functional_cast  */
57   vec<constructor_elt, va_gc> *elts = NULL;
58   tree node, expr, type;
59   location_t saved_loc;
60 
61   if (processing_template_decl || lambda_expr == error_mark_node)
62     return lambda_expr;
63 
64   /* Make sure any error messages refer to the lambda-introducer.  */
65   saved_loc = input_location;
66   input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
67 
68   for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
69        node;
70        node = TREE_CHAIN (node))
71     {
72       tree field = TREE_PURPOSE (node);
73       tree val = TREE_VALUE (node);
74 
75       if (field == error_mark_node)
76 	{
77 	  expr = error_mark_node;
78 	  goto out;
79 	}
80 
81       if (TREE_CODE (val) == TREE_LIST)
82 	val = build_x_compound_expr_from_list (val, ELK_INIT,
83 					       tf_warning_or_error);
84 
85       if (DECL_P (val))
86 	mark_used (val);
87 
88       /* Mere mortals can't copy arrays with aggregate initialization, so
89 	 do some magic to make it work here.  */
90       if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
91 	val = build_array_copy (val);
92       else if (DECL_NORMAL_CAPTURE_P (field)
93 	       && !DECL_VLA_CAPTURE_P (field)
94 	       && !TYPE_REF_P (TREE_TYPE (field)))
95 	{
96 	  /* "the entities that are captured by copy are used to
97 	     direct-initialize each corresponding non-static data
98 	     member of the resulting closure object."
99 
100 	     There's normally no way to express direct-initialization
101 	     from an element of a CONSTRUCTOR, so we build up a special
102 	     TARGET_EXPR to bypass the usual copy-initialization.  */
103 	  val = force_rvalue (val, tf_warning_or_error);
104 	  if (TREE_CODE (val) == TARGET_EXPR)
105 	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
106 	}
107 
108       CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
109     }
110 
111   expr = build_constructor (init_list_type_node, elts);
112   CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
113 
114   /* N2927: "[The closure] class type is not an aggregate."
115      But we briefly treat it as an aggregate to make this simpler.  */
116   type = LAMBDA_EXPR_CLOSURE (lambda_expr);
117   CLASSTYPE_NON_AGGREGATE (type) = 0;
118   expr = finish_compound_literal (type, expr, tf_warning_or_error);
119   CLASSTYPE_NON_AGGREGATE (type) = 1;
120 
121  out:
122   input_location = saved_loc;
123   return expr;
124 }
125 
126 /* Return an initialized RECORD_TYPE for LAMBDA.
127    LAMBDA must have its explicit captures already.  */
128 
129 tree
130 begin_lambda_type (tree lambda)
131 {
132   /* Lambda names are nearly but not quite anonymous.  */
133   tree name = make_anon_name ();
134   IDENTIFIER_LAMBDA_P (name) = true;
135 
136   /* Create the new RECORD_TYPE for this lambda.  */
137   tree type = xref_tag (/*tag_code=*/record_type, name,
138 			/*scope=*/ts_lambda, /*template_header_p=*/false);
139   if (type == error_mark_node)
140     return error_mark_node;
141 
142   /* Designate it as a struct so that we can use aggregate initialization.  */
143   CLASSTYPE_DECLARED_CLASS (type) = false;
144 
145   /* Cross-reference the expression and the type.  */
146   LAMBDA_EXPR_CLOSURE (lambda) = type;
147   CLASSTYPE_LAMBDA_EXPR (type) = lambda;
148 
149   /* In C++17, assume the closure is literal; we'll clear the flag later if
150      necessary.  */
151   if (cxx_dialect >= cxx17)
152     CLASSTYPE_LITERAL_P (type) = true;
153 
154   /* Clear base types.  */
155   xref_basetypes (type, /*bases=*/NULL_TREE);
156 
157   /* Start the class.  */
158   type = begin_class_definition (type);
159 
160   return type;
161 }
162 
163 /* Returns the type to use for the return type of the operator() of a
164    closure class.  */
165 
166 tree
167 lambda_return_type (tree expr)
168 {
169   if (expr == NULL_TREE)
170     return void_type_node;
171   if (type_unknown_p (expr)
172       || BRACE_ENCLOSED_INITIALIZER_P (expr))
173     {
174       cxx_incomplete_type_error (expr, TREE_TYPE (expr));
175       return error_mark_node;
176     }
177   gcc_checking_assert (!type_dependent_expression_p (expr));
178   return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
179 }
180 
181 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
182    closure type.  */
183 
184 tree
185 lambda_function (tree lambda)
186 {
187   tree type;
188   if (TREE_CODE (lambda) == LAMBDA_EXPR)
189     type = LAMBDA_EXPR_CLOSURE (lambda);
190   else
191     type = lambda;
192   gcc_assert (LAMBDA_TYPE_P (type));
193   /* Don't let debug_tree cause instantiation.  */
194   if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
195       && !COMPLETE_OR_OPEN_TYPE_P (type))
196     return NULL_TREE;
197   lambda = lookup_member (type, call_op_identifier,
198 			  /*protect=*/0, /*want_type=*/false,
199 			  tf_warning_or_error);
200   if (lambda)
201     lambda = STRIP_TEMPLATE (get_first_fn (lambda));
202   return lambda;
203 }
204 
205 /* True if EXPR is an expression whose type can be used directly in lambda
206    capture.  Not to be used for 'auto'.  */
207 
208 static bool
209 type_deducible_expression_p (tree expr)
210 {
211   if (!type_dependent_expression_p (expr))
212     return true;
213   if (BRACE_ENCLOSED_INITIALIZER_P (expr)
214       || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
215     return false;
216   tree t = non_reference (TREE_TYPE (expr));
217   if (!t) return false;
218   while (TREE_CODE (t) == POINTER_TYPE)
219     t = TREE_TYPE (t);
220   return currently_open_class (t);
221 }
222 
223 /* Returns the type to use for the FIELD_DECL corresponding to the
224    capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
225    C++14 init capture, and BY_REFERENCE_P indicates whether we're
226    capturing by reference.  */
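/* A rough illustration of the three cases (assumed, for exposition only):

     [v = expr]  explicit init:  field type is deduced as if by 'auto'
     [&x]        by reference:   field type is a reference to x's type
     [x]         plain copy:     field type is the non-reference type of x
                                 (or a DECLTYPE_TYPE placeholder while the
                                 expression is still type-dependent)  */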
227 
228 tree
229 lambda_capture_field_type (tree expr, bool explicit_init_p,
230 			   bool by_reference_p)
231 {
232   tree type;
233   bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
234 
235   if (!is_this && explicit_init_p)
236     {
237       tree auto_node = make_auto ();
238 
239       type = auto_node;
240       if (by_reference_p)
241 	/* Add the reference now, so deduction doesn't lose
242 	   outermost CV qualifiers of EXPR.  */
243 	type = build_reference_type (type);
244       if (uses_parameter_packs (expr))
245 	/* Stick with 'auto' even if the type could be deduced.  */;
246       else
247 	type = do_auto_deduction (type, expr, auto_node);
248     }
249   else if (!is_this && !type_deducible_expression_p (expr))
250     {
251       type = cxx_make_type (DECLTYPE_TYPE);
252       DECLTYPE_TYPE_EXPR (type) = expr;
253       DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
254       DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
255       SET_TYPE_STRUCTURAL_EQUALITY (type);
256     }
257   else
258     {
259       type = non_reference (unlowered_expr_type (expr));
260 
261       if (!is_this
262 	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
263 	type = build_reference_type (type);
264     }
265 
266   return type;
267 }
268 
269 /* Returns true iff DECL is a lambda capture proxy variable created by
270    build_capture_proxy.  */
271 
272 bool
273 is_capture_proxy (tree decl)
274 {
275   return (VAR_P (decl)
276 	  && DECL_HAS_VALUE_EXPR_P (decl)
277 	  && !DECL_ANON_UNION_VAR_P (decl)
278 	  && !DECL_DECOMPOSITION_P (decl)
279 	  && !DECL_FNAME_P (decl)
280 	  && !(DECL_ARTIFICIAL (decl)
281 	       && DECL_LANG_SPECIFIC (decl)
282 	       && DECL_OMP_PRIVATIZED_MEMBER (decl))
283 	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
284 }
285 
286 /* Returns true iff DECL is a capture proxy for a normal capture
287    (i.e. without explicit initializer).  */
288 
289 bool
290 is_normal_capture_proxy (tree decl)
291 {
292   if (!is_capture_proxy (decl))
293     /* It's not a capture proxy.  */
294     return false;
295 
296   return (DECL_LANG_SPECIFIC (decl)
297 	  && DECL_CAPTURED_VARIABLE (decl));
298 }
299 
300 /* Returns true iff DECL is a capture proxy for a normal capture
301    of a constant variable.  */
302 
303 bool
304 is_constant_capture_proxy (tree decl)
305 {
306   if (is_normal_capture_proxy (decl))
307     return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
308   return false;
309 }
310 
311 /* VAR is a capture proxy created by build_capture_proxy; add it to the
312    current function, which is the operator() for the appropriate lambda.  */
313 
314 void
315 insert_capture_proxy (tree var)
316 {
317   if (is_normal_capture_proxy (var))
318     {
319       tree cap = DECL_CAPTURED_VARIABLE (var);
320       if (CHECKING_P)
321 	{
322 	  gcc_assert (!is_normal_capture_proxy (cap));
323 	  tree old = retrieve_local_specialization (cap);
324 	  if (old)
325 	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
326 	}
327       register_local_specialization (var, cap);
328     }
329 
330   /* Put the capture proxy in the extra body block so that it won't clash
331      with a later local variable.  */
332   pushdecl_outermost_localscope (var);
333 
334   /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
335   var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
336   tree stmt_list = (*stmt_list_stack)[1];
337   gcc_assert (stmt_list);
338   append_to_statement_list_force (var, &stmt_list);
339 }
340 
341 /* We've just finished processing a lambda; if the containing scope is also
342    a lambda, insert any capture proxies that were created while processing
343    the nested lambda.  */
344 
345 void
346 insert_pending_capture_proxies (void)
347 {
348   tree lam;
349   vec<tree, va_gc> *proxies;
350   unsigned i;
351 
352   if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
353     return;
354 
355   lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
356   proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
357   for (i = 0; i < vec_safe_length (proxies); ++i)
358     {
359       tree var = (*proxies)[i];
360       insert_capture_proxy (var);
361     }
362   release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
363   LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
364 }
365 
366 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
367    return the type we want the proxy to have: the type of the field itself,
368    with added const-qualification if the lambda isn't mutable and the
369    capture is by value.  */
370 
371 tree
372 lambda_proxy_type (tree ref)
373 {
374   tree type;
375   if (ref == error_mark_node)
376     return error_mark_node;
377   if (REFERENCE_REF_P (ref))
378     ref = TREE_OPERAND (ref, 0);
379   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
380   type = TREE_TYPE (ref);
381   if (!type || WILDCARD_TYPE_P (non_reference (type)))
382     {
383       type = cxx_make_type (DECLTYPE_TYPE);
384       DECLTYPE_TYPE_EXPR (type) = ref;
385       DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
386       SET_TYPE_STRUCTURAL_EQUALITY (type);
387     }
388   if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
389     type = make_pack_expansion (type);
390   return type;
391 }
392 
393 /* MEMBER is a capture field in a lambda closure class.  Now that we're
394    inside the operator(), build a placeholder var for future lookups and
395    debugging.  */
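/* Sketch of the effect (illustrative): for a capture field __x of the
   closure, this builds a VAR_DECL named 'x' whose DECL_VALUE_EXPR is
   roughly (*__closure).__x, so that uses of 'x' in the operator() body
   and in the debugger resolve to the closure member.  */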
396 
397 static tree
398 build_capture_proxy (tree member, tree init)
399 {
400   tree var, object, fn, closure, name, lam, type;
401 
402   if (PACK_EXPANSION_P (member))
403     member = PACK_EXPANSION_PATTERN (member);
404 
405   closure = DECL_CONTEXT (member);
406   fn = lambda_function (closure);
407   lam = CLASSTYPE_LAMBDA_EXPR (closure);
408 
409   /* The proxy variable forwards to the capture field.  */
410   object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
411   object = finish_non_static_data_member (member, object, NULL_TREE);
412   if (REFERENCE_REF_P (object))
413     object = TREE_OPERAND (object, 0);
414 
415   /* Remove the __ inserted by add_capture.  */
416   name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
417 
418   type = lambda_proxy_type (object);
419 
420   if (name == this_identifier && !INDIRECT_TYPE_P (type))
421     {
422       type = build_pointer_type (type);
423       type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
424       object = build_fold_addr_expr_with_type (object, type);
425     }
426 
427   if (DECL_VLA_CAPTURE_P (member))
428     {
429       /* Rebuild the VLA type from the pointer and maxindex.  */
430       tree field = next_initializable_field (TYPE_FIELDS (type));
431       tree ptr = build_simple_component_ref (object, field);
432       field = next_initializable_field (DECL_CHAIN (field));
433       tree max = build_simple_component_ref (object, field);
434       type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
435 				     build_index_type (max));
436       type = build_reference_type (type);
437       object = convert (type, ptr);
438     }
439 
440   complete_type (type);
441 
442   var = build_decl (input_location, VAR_DECL, name, type);
443   SET_DECL_VALUE_EXPR (var, object);
444   DECL_HAS_VALUE_EXPR_P (var) = 1;
445   DECL_ARTIFICIAL (var) = 1;
446   TREE_USED (var) = 1;
447   DECL_CONTEXT (var) = fn;
448 
449   if (DECL_NORMAL_CAPTURE_P (member))
450     {
451       if (DECL_VLA_CAPTURE_P (member))
452 	{
453 	  init = CONSTRUCTOR_ELT (init, 0)->value;
454 	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
455 	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
456 	}
457       else
458 	{
459 	  if (PACK_EXPANSION_P (init))
460 	    init = PACK_EXPANSION_PATTERN (init);
461 	}
462 
463       if (INDIRECT_REF_P (init))
464 	init = TREE_OPERAND (init, 0);
465       STRIP_NOPS (init);
466 
467       gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
468       while (is_normal_capture_proxy (init))
469 	init = DECL_CAPTURED_VARIABLE (init);
470       retrofit_lang_decl (var);
471       DECL_CAPTURED_VARIABLE (var) = init;
472     }
473 
474   if (name == this_identifier)
475     {
476       gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
477       LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
478     }
479 
480   if (fn == current_function_decl)
481     insert_capture_proxy (var);
482   else
483     vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
484 
485   return var;
486 }
487 
488 static GTY(()) tree ptr_id;
489 static GTY(()) tree max_id;
490 
491 /* Return a struct containing a pointer and a length for lambda capture of
492    an array of runtime length.  */
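/* Conceptually the generated record is (illustrative only):

     struct {
       T *ptr;        // address of the first array element
       sizetype max;  // maximum index, from array_type_nelts
     };  */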
493 
494 static tree
495 vla_capture_type (tree array_type)
496 {
497   tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
498   xref_basetypes (type, NULL_TREE);
499   type = begin_class_definition (type);
500   if (!ptr_id)
501     {
502       ptr_id = get_identifier ("ptr");
503       max_id = get_identifier ("max");
504     }
505   tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
506   tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
507   finish_member_declaration (field);
508   field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
509   finish_member_declaration (field);
510   return finish_struct (type, NULL_TREE);
511 }
512 
513 /* From an ID and INITIALIZER, create a capture (by reference if
514    BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
515    and return it.  If ID is `this', BY_REFERENCE_P says whether
516    `*this' is captured by reference.  */
517 
518 tree
519 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
520 	     bool explicit_init_p)
521 {
522   char *buf;
523   tree type, member, name;
524   bool vla = false;
525   bool variadic = false;
526   tree initializer = orig_init;
527 
528   if (PACK_EXPANSION_P (initializer))
529     {
530       initializer = PACK_EXPANSION_PATTERN (initializer);
531       variadic = true;
532     }
533 
534   if (TREE_CODE (initializer) == TREE_LIST
535       /* A pack expansion might end up with multiple elements.  */
536       && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
537     initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
538 						   tf_warning_or_error);
539   type = TREE_TYPE (initializer);
540   if (type == error_mark_node)
541     return error_mark_node;
542 
543   if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
544     {
545       vla = true;
546       if (!by_reference_p)
547 	error ("array of runtime bound cannot be captured by copy, "
548 	       "only by reference");
549 
550       /* For a VLA, we capture the address of the first element and the
551 	 maximum index, and then reconstruct the VLA for the proxy.  */
552       tree elt = cp_build_array_ref (input_location, initializer,
553 				     integer_zero_node, tf_warning_or_error);
554       initializer = build_constructor_va (init_list_type_node, 2,
555 					  NULL_TREE, build_address (elt),
556 					  NULL_TREE, array_type_nelts (type));
557       type = vla_capture_type (type);
558     }
559   else if (!dependent_type_p (type)
560 	   && variably_modified_type_p (type, NULL_TREE))
561     {
562       sorry ("capture of variably-modified type %qT that is not an N3639 array "
563 	     "of runtime bound", type);
564       if (TREE_CODE (type) == ARRAY_TYPE
565 	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
566 	inform (input_location, "because the array element type %qT has "
567 		"variable size", TREE_TYPE (type));
568       return error_mark_node;
569     }
570   else
571     {
572       type = lambda_capture_field_type (initializer, explicit_init_p,
573 					by_reference_p);
574       if (type == error_mark_node)
575 	return error_mark_node;
576 
577       if (id == this_identifier && !by_reference_p)
578 	{
579 	  gcc_assert (INDIRECT_TYPE_P (type));
580 	  type = TREE_TYPE (type);
581 	  initializer = cp_build_fold_indirect_ref (initializer);
582 	}
583 
584       if (dependent_type_p (type))
585 	;
586       else if (id != this_identifier && by_reference_p)
587 	{
588 	  if (!lvalue_p (initializer))
589 	    {
590 	      error ("cannot capture %qE by reference", initializer);
591 	      return error_mark_node;
592 	    }
593 	}
594       else
595 	{
596 	  /* Capture by copy requires a complete type.  */
597 	  type = complete_type (type);
598 	  if (!COMPLETE_TYPE_P (type))
599 	    {
600 	      error ("capture by copy of incomplete type %qT", type);
601 	      cxx_incomplete_type_inform (type);
602 	      return error_mark_node;
603 	    }
604 	  else if (!verify_type_context (input_location,
605 					 TCTX_CAPTURE_BY_COPY, type))
606 	    return error_mark_node;
607 	}
608     }
609 
610   /* Add __ to the beginning of the field name so that user code
611      won't find the field with name lookup.  We can't just leave the name
612      unset because template instantiation uses the name to find
613      instantiated fields.  */
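  /* For example (illustrative): capturing 'foo' creates a field named
     '__foo'; build_capture_proxy strips the two underscores again when it
     creates the proxy variable that is visible in the lambda body.  */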
614   buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
615   buf[1] = buf[0] = '_';
616   memcpy (buf + 2, IDENTIFIER_POINTER (id),
617 	  IDENTIFIER_LENGTH (id) + 1);
618   name = get_identifier (buf);
619 
620   if (variadic)
621     {
622       type = make_pack_expansion (type);
623       if (explicit_init_p)
624 	/* With an explicit initializer 'type' is auto, which isn't really a
625 	   parameter pack in this context.  We will want as many fields as we
626 	   have elements in the expansion of the initializer, so use its packs
627 	   instead.  */
628 	{
629 	  PACK_EXPANSION_PARAMETER_PACKS (type)
630 	    = uses_parameter_packs (initializer);
631 	  PACK_EXPANSION_AUTO_P (type) = true;
632 	}
633     }
634 
635   /* Make member variable.  */
636   member = build_decl (input_location, FIELD_DECL, name, type);
637   DECL_VLA_CAPTURE_P (member) = vla;
638 
639   if (!explicit_init_p)
640     /* Normal captures are invisible to name lookup but uses are replaced
641        with references to the capture field; we implement this by only
642        really making them invisible in unevaluated context; see
643        qualify_lookup.  For now, let's make explicitly initialized captures
644        always visible.  */
645     DECL_NORMAL_CAPTURE_P (member) = true;
646 
647   if (id == this_identifier)
648     LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
649 
650   /* Add it to the appropriate closure class if we've started it.  */
651   if (current_class_type
652       && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
653     {
654       if (COMPLETE_TYPE_P (current_class_type))
655 	internal_error ("trying to capture %qD in instantiation of "
656 			"generic lambda", id);
657       finish_member_declaration (member);
658     }
659 
660   tree listmem = member;
661   if (variadic)
662     {
663       listmem = make_pack_expansion (member);
664       initializer = orig_init;
665     }
666   LAMBDA_EXPR_CAPTURE_LIST (lambda)
667     = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
668 
669   if (LAMBDA_EXPR_CLOSURE (lambda))
670     return build_capture_proxy (member, initializer);
671   /* For explicit captures we haven't started the function yet, so we wait
672      and build the proxy from cp_parser_lambda_body.  */
673   LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
674   return NULL_TREE;
675 }
676 
677 /* Register all the capture members on the list CAPTURES, which is the
678    LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */
679 
680 void
681 register_capture_members (tree captures)
682 {
683   if (captures == NULL_TREE)
684     return;
685 
686   register_capture_members (TREE_CHAIN (captures));
687 
688   tree field = TREE_PURPOSE (captures);
689   if (PACK_EXPANSION_P (field))
690     field = PACK_EXPANSION_PATTERN (field);
691 
692   finish_member_declaration (field);
693 }
694 
695 /* Similar to add_capture, except this works on a stack of nested lambdas.
696    BY_REFERENCE_P in this case is derived from the default capture mode.
697    Returns the capture for the lambda at the bottom of the stack.  */
698 
699 tree
700 add_default_capture (tree lambda_stack, tree id, tree initializer)
701 {
702   bool this_capture_p = (id == this_identifier);
703   tree var = NULL_TREE;
704   tree saved_class_type = current_class_type;
705 
706   for (tree node = lambda_stack;
707        node;
708        node = TREE_CHAIN (node))
709     {
710       tree lambda = TREE_VALUE (node);
711 
712       current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
713       if (DECL_PACK_P (initializer))
714 	initializer = make_pack_expansion (initializer);
715       var = add_capture (lambda,
716                             id,
717                             initializer,
718                             /*by_reference_p=*/
719 			    (this_capture_p
720 			     || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
721 				 == CPLD_REFERENCE)),
722 			    /*explicit_init_p=*/false);
723       initializer = convert_from_reference (var);
724 
725       /* Warn about deprecated implicit capture of this via [=].  */
726       if (cxx_dialect >= cxx2a
727 	  && this_capture_p
728 	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
729 	{
730 	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
731 			  "implicit capture of %qE via %<[=]%> is deprecated "
732 			  "in C++20", this_identifier))
733 	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
734 		    "%<*this%> capture");
735 	}
736     }
737 
738   current_class_type = saved_class_type;
739 
740   return var;
741 }
742 
743 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
744    form of an INDIRECT_REF, possibly adding it through default
745    capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
746    try to capture but don't complain if we can't.  */
747 
748 tree
749 lambda_expr_this_capture (tree lambda, int add_capture_p)
750 {
751   tree result;
752 
753   tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
754 
755   /* In unevaluated context this isn't an odr-use, so don't capture.  */
756   if (cp_unevaluated_operand)
757     add_capture_p = false;
758 
759   /* Try to default capture 'this' if we can.  */
760   if (!this_capture)
761     {
762       tree lambda_stack = NULL_TREE;
763       tree init = NULL_TREE;
764       bool saw_complete = false;
765 
766       /* If we are in a lambda function, we can move out until we hit:
767            1. a non-lambda function or NSDMI,
768            2. a lambda function capturing 'this', or
769            3. a non-default capturing lambda function.  */
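      /* Illustrative example (hypothetical code, not from the sources):

	   struct S {
	     int m;
	     void f () { [=] { [=] { return m; }; }; }  // 'm' needs 'this'
	   };

	 Capturing 'this' for the inner lambda walks outward through the
	 enclosing [=] lambda, adding a default capture at each level,
	 until it reaches S::f, whose 'this' parameter supplies INIT.  */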
770       for (tree tlambda = lambda; ;)
771 	{
772 	  if (add_capture_p
773 	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
774 	    /* tlambda won't let us capture 'this'.  */
775 	    break;
776 
777 	  if (add_capture_p)
778 	    lambda_stack = tree_cons (NULL_TREE,
779 				      tlambda,
780 				      lambda_stack);
781 
782 	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
783 	  if (COMPLETE_TYPE_P (closure))
784 	    /* We're instantiating a generic lambda op(), the containing
785 	       scope may be gone.  */
786 	    saw_complete = true;
787 
788 	  tree containing_function
789 	    = decl_function_context (TYPE_NAME (closure));
790 
791 	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
792 	  if (ex && TREE_CODE (ex) == FIELD_DECL)
793 	    {
794 	      /* Lambda in an NSDMI.  We don't have a function to look up
795 		 'this' in, but we can find (or rebuild) the fake one from
796 		 inject_this_parameter.  */
797 	      if (!containing_function && !saw_complete)
798 		/* If we're parsing a lambda in a non-local class,
799 		   we can find the fake 'this' in scope_chain.  */
800 		init = scope_chain->x_current_class_ptr;
801 	      else
802 		/* Otherwise it's either gone or buried in
803 		   function_context_stack, so make another.  */
804 		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
805 					TYPE_UNQUALIFIED);
806 	      gcc_checking_assert
807 		(init && (TREE_TYPE (TREE_TYPE (init))
808 			  == current_nonlambda_class_type ()));
809 	      break;
810 	    }
811 
812 	  if (containing_function == NULL_TREE)
813 	    /* We ran out of scopes; there's no 'this' to capture.  */
814 	    break;
815 
816 	  if (!LAMBDA_FUNCTION_P (containing_function))
817 	    {
818 	      /* We found a non-lambda function.  */
819 	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
820 		/* First parameter is 'this'.  */
821 		init = DECL_ARGUMENTS (containing_function);
822 	      break;
823 	    }
824 
825 	  tlambda
826             = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
827 
828           if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
829 	    {
830 	      /* An outer lambda has already captured 'this'.  */
831 	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
832 	      break;
833 	    }
834 	}
835 
836       if (init)
837         {
838           if (add_capture_p)
839 	    this_capture = add_default_capture (lambda_stack,
840 					        /*id=*/this_identifier,
841 					        init);
842           else
843 	    this_capture = init;
844         }
845     }
846 
847   if (cp_unevaluated_operand)
848     result = this_capture;
849   else if (!this_capture)
850     {
851       if (add_capture_p == 1)
852 	{
853 	  error ("%<this%> was not captured for this lambda function");
854 	  result = error_mark_node;
855 	}
856       else
857 	result = NULL_TREE;
858     }
859   else
860     {
861       /* To make sure that current_class_ref is for the lambda.  */
862       gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
863 		  == LAMBDA_EXPR_CLOSURE (lambda));
864 
865       result = this_capture;
866 
867       /* If 'this' is captured, each use of 'this' is transformed into an
868 	 access to the corresponding unnamed data member of the closure
869 	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
870 	 ensures that the transformed expression is an rvalue. ] */
871       result = rvalue (result);
872     }
873 
874   return result;
875 }
876 
877 /* Return the innermost LAMBDA_EXPR we're currently in, if any.  */
878 
879 tree
880 current_lambda_expr (void)
881 {
882   tree type = current_class_type;
883   while (type && !LAMBDA_TYPE_P (type))
884     type = decl_type_context (TYPE_NAME (type));
885   if (type)
886     return CLASSTYPE_LAMBDA_EXPR (type);
887   else
888     return NULL_TREE;
889 }
890 
891 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
892    object.  NULL otherwise.  */
893 
894 static tree
895 resolvable_dummy_lambda (tree object)
896 {
897   if (!is_dummy_object (object))
898     return NULL_TREE;
899 
900   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
901   gcc_assert (!TYPE_PTR_P (type));
902 
903   if (type != current_class_type
904       && current_class_type
905       && LAMBDA_TYPE_P (current_class_type)
906       && lambda_function (current_class_type)
907       && DERIVED_FROM_P (type, nonlambda_method_basetype()))
908     return CLASSTYPE_LAMBDA_EXPR (current_class_type);
909 
910   return NULL_TREE;
911 }
912 
913 /* We don't want to capture 'this' until we know we need it, i.e. after
914    overload resolution has chosen a non-static member function.  At that
915    point we call this function to turn a dummy object into a use of the
916    'this' capture.  */
917 
918 tree
919 maybe_resolve_dummy (tree object, bool add_capture_p)
920 {
921   if (tree lam = resolvable_dummy_lambda (object))
922     if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
923       if (cap != error_mark_node)
924 	object = build_fold_indirect_ref (cap);
925 
926   return object;
927 }
928 
929 /* When parsing a generic lambda containing an argument-dependent
930    member function call we defer overload resolution to instantiation
931    time.  But we have to know now whether to capture this or not.
932    Do that if FNS contains any non-static fns.
933    The std doesn't anticipate this case, but I expect this to be the
934    outcome of discussion.  */
935 
936 void
937 maybe_generic_this_capture (tree object, tree fns)
938 {
939   if (tree lam = resolvable_dummy_lambda (object))
940     if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
941       {
942 	/* We've not yet captured, so look at the function set of
943 	   interest.  */
944 	if (BASELINK_P (fns))
945 	  fns = BASELINK_FUNCTIONS (fns);
946 	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
947 	if (id_expr)
948 	  fns = TREE_OPERAND (fns, 0);
949 
950 	for (lkp_iterator iter (fns); iter; ++iter)
951 	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
952 	       || TREE_CODE (*iter) == TEMPLATE_DECL)
953 	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
954 	    {
955 	      /* Found a non-static member.  Capture this.  */
956 	      lambda_expr_this_capture (lam, /*maybe*/-1);
957 	      break;
958 	    }
959       }
960 }
961 
962 /* Returns the innermost non-lambda function.  */
963 
964 tree
965 current_nonlambda_function (void)
966 {
967   tree fn = current_function_decl;
968   while (fn && LAMBDA_FUNCTION_P (fn))
969     fn = decl_function_context (fn);
970   return fn;
971 }
972 
973 /* Returns the method basetype of the innermost non-lambda function, including
974    a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */
975 
976 tree
977 nonlambda_method_basetype (void)
978 {
979   if (!current_class_ref)
980     return NULL_TREE;
981 
982   tree type = current_class_type;
983   if (!type || !LAMBDA_TYPE_P (type))
984     return type;
985 
986   while (true)
987     {
988       tree lam = CLASSTYPE_LAMBDA_EXPR (type);
989       tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
990       if (ex && TREE_CODE (ex) == FIELD_DECL)
991 	/* Lambda in an NSDMI.  */
992 	return DECL_CONTEXT (ex);
993 
994       tree fn = TYPE_CONTEXT (type);
995       if (!fn || TREE_CODE (fn) != FUNCTION_DECL
996 	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
997 	/* No enclosing non-lambda method.  */
998 	return NULL_TREE;
999       if (!LAMBDA_FUNCTION_P (fn))
1000 	/* Found an enclosing non-lambda method.  */
1001 	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1002       type = DECL_CONTEXT (fn);
1003     }
1004 }
1005 
1006 /* Like current_scope, but looking through lambdas.  */
1007 
1008 tree
1009 current_nonlambda_scope (void)
1010 {
1011   tree scope = current_scope ();
1012   for (;;)
1013     {
1014       if (TREE_CODE (scope) == FUNCTION_DECL
1015 	  && LAMBDA_FUNCTION_P (scope))
1016 	{
1017 	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1018 	  continue;
1019 	}
1020       else if (LAMBDA_TYPE_P (scope))
1021 	{
1022 	  scope = CP_TYPE_CONTEXT (scope);
1023 	  continue;
1024 	}
1025       break;
1026     }
1027   return scope;
1028 }
1029 
1030 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1031    indicated FN and NARGS, but do not initialize the return type or any of the
1032    argument slots.  */
1033 
1034 static tree
1035 prepare_op_call (tree fn, int nargs)
1036 {
1037   tree t;
1038 
1039   t = build_vl_exp (CALL_EXPR, nargs + 3);
1040   CALL_EXPR_FN (t) = fn;
1041   CALL_EXPR_STATIC_CHAIN (t) = NULL;
1042 
1043   return t;
1044 }
1045 
1046 /* Return true iff CALLOP is the op() for a generic lambda.  */
1047 
1048 bool
1049 generic_lambda_fn_p (tree callop)
1050 {
1051   return (LAMBDA_FUNCTION_P (callop)
1052 	  && DECL_TEMPLATE_INFO (callop)
1053 	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1054 }
1055 
1056 /* If the closure TYPE has a static op(), also add a conversion to function
1057    pointer.  */
1058 
1059 void
1060 maybe_add_lambda_conv_op (tree type)
1061 {
1062   bool nested = (cfun != NULL);
1063   bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1064   tree callop = lambda_function (type);
1065   tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1066 
1067   if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1068       || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1069     return;
1070 
1071   if (processing_template_decl)
1072     return;
1073 
1074   bool const generic_lambda_p = generic_lambda_fn_p (callop);
1075 
1076   if (!generic_lambda_p && undeduced_auto_decl (callop))
1077     {
1078       /* If the op() wasn't deduced due to errors, give up.  */
1079       gcc_assert (errorcount || sorrycount);
1080       return;
1081     }
1082 
1083   /* Non-generic non-capturing lambdas only have a conversion function to
1084      pointer to function when the trailing requires-clause's constraints are
1085      satisfied.  */
1086   if (!generic_lambda_p && !constraints_satisfied_p (callop))
1087     return;
1088 
1089   /* Non-template conversion operators are defined directly with build_call_a
1090      and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
1091      deferred and the CALL is built in-place.  In the case of a deduced return
1092      call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1093      the return type is also built in-place.  The arguments of DECLTYPE_CALL in
1094      the return expression may differ in flags from those in the body CALL.  In
1095      particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1096      the body CALL, but not in DECLTYPE_CALL.  */
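  /* Rough sketch of the synthesized members (illustrative, syntax and
     details simplified):

       auto l = [] (int i) { return i * 2; };

     gets a static thunk and a conversion function roughly equivalent to

       static int _FUN (int i) { return ((closure *) 0)->operator () (i); }
       operator int (*) (int) () const { return _FUN; }

     The thunk deliberately invokes op() on a null object (the lambda has
     no captures, so the object is never used), which is why the
     SANITIZE_NULL exclusion is added further down.  */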
1097 
1098   vec<tree, va_gc> *direct_argvec = 0;
1099   tree decltype_call = 0, call = 0;
1100   tree optype = TREE_TYPE (callop);
1101   tree fn_result = TREE_TYPE (optype);
1102 
1103   tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1104   if (generic_lambda_p)
1105     {
1106       ++processing_template_decl;
1107 
1108       /* Prepare the dependent member call for the static member function
1109 	 '_FUN' and, potentially, prepare another call to be used in a decltype
1110 	 return expression for a deduced return call op to allow for simple
1111 	 implementation of the conversion operator.  */
1112 
1113       tree instance = cp_build_fold_indirect_ref (thisarg);
1114       tree objfn = lookup_template_function (DECL_NAME (callop),
1115 					     DECL_TI_ARGS (callop));
1116       objfn = build_min (COMPONENT_REF, NULL_TREE,
1117 			 instance, objfn, NULL_TREE);
1118       int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1119 
1120       call = prepare_op_call (objfn, nargs);
1121       if (type_uses_auto (fn_result))
1122 	decltype_call = prepare_op_call (objfn, nargs);
1123     }
1124   else
1125     {
1126       direct_argvec = make_tree_vector ();
1127       direct_argvec->quick_push (thisarg);
1128     }
1129 
1130   /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1131      declare the static member function "_FUN" below.  For each arg append to
1132      DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1133      call args (for the template case).  If a parameter pack is found, expand
1134      it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */
1135 
1136   tree fn_args = NULL_TREE;
1137   {
1138     int ix = 0;
1139     tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1140     tree tgt = NULL;
1141 
1142     while (src)
1143       {
1144 	tree new_node = copy_node (src);
1145 
1146 	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
1147 	TREE_ADDRESSABLE (new_node) = 0;
1148 
1149 	if (!fn_args)
1150 	  fn_args = tgt = new_node;
1151 	else
1152 	  {
1153 	    TREE_CHAIN (tgt) = new_node;
1154 	    tgt = new_node;
1155 	  }
1156 
1157 	mark_exp_read (tgt);
1158 
1159 	if (generic_lambda_p)
1160 	  {
1161 	    tree a = tgt;
1162 	    if (DECL_PACK_P (tgt))
1163 	      {
1164 		a = make_pack_expansion (a);
1165 		PACK_EXPANSION_LOCAL_P (a) = true;
1166 	      }
1167 	    CALL_EXPR_ARG (call, ix) = a;
1168 
1169 	    if (decltype_call)
1170 	      {
1171 		/* Avoid capturing variables in this context.  */
1172 		++cp_unevaluated_operand;
1173 		CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1174 		--cp_unevaluated_operand;
1175 	      }
1176 
1177 	    ++ix;
1178 	  }
1179 	else
1180 	  vec_safe_push (direct_argvec, tgt);
1181 
1182 	src = TREE_CHAIN (src);
1183       }
1184   }
1185 
1186   if (generic_lambda_p)
1187     {
1188       if (decltype_call)
1189 	{
1190 	  fn_result = finish_decltype_type
1191 	    (decltype_call, /*id_expression_or_member_access_p=*/false,
1192 	     tf_warning_or_error);
1193 	}
1194     }
1195   else
1196     call = build_call_a (callop,
1197 			 direct_argvec->length (),
1198 			 direct_argvec->address ());
1199 
1200   CALL_FROM_THUNK_P (call) = 1;
1201   SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1202 
1203   tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1204   stattype = (cp_build_type_attribute_variant
1205 	      (stattype, TYPE_ATTRIBUTES (optype)));
1206   if (flag_noexcept_type
1207       && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1208     stattype = build_exception_variant (stattype, noexcept_true_spec);
1209 
1210   if (generic_lambda_p)
1211     --processing_template_decl;
1212 
1213   /* First build up the conversion op.  */
1214 
1215   tree rettype = build_pointer_type (stattype);
1216   tree name = make_conv_op_name (rettype);
1217   tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1218   tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1219   tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1220   SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1221   tree fn = convfn;
1222   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1223   SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1224   grokclassfn (type, fn, NO_SPECIAL);
1225   set_linkage_according_to_type (type, fn);
1226   rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1227   DECL_IN_AGGR_P (fn) = 1;
1228   DECL_ARTIFICIAL (fn) = 1;
1229   DECL_NOT_REALLY_EXTERN (fn) = 1;
1230   DECL_DECLARED_INLINE_P (fn) = 1;
1231   DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1232   if (DECL_IMMEDIATE_FUNCTION_P (callop))
1233     SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1234   DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1235 
1236   if (nested_def)
1237     DECL_INTERFACE_KNOWN (fn) = 1;
1238 
1239   if (generic_lambda_p)
1240     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1241 
1242   add_method (type, fn, false);
1243 
1244   /* Generic thunk code fails for varargs; we'll complain in mark_used if
1245      the conversion op is used.  */
1246   if (varargs_function_p (callop))
1247     {
1248       DECL_DELETED_FN (fn) = 1;
1249       return;
1250     }
1251 
1252   /* Now build up the thunk to be returned.  */
1253 
1254   tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1255   SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1256   fn = statfn;
1257   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1258   grokclassfn (type, fn, NO_SPECIAL);
1259   set_linkage_according_to_type (type, fn);
1260   rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1261   DECL_IN_AGGR_P (fn) = 1;
1262   DECL_ARTIFICIAL (fn) = 1;
1263   DECL_NOT_REALLY_EXTERN (fn) = 1;
1264   DECL_DECLARED_INLINE_P (fn) = 1;
1265   DECL_STATIC_FUNCTION_P (fn) = 1;
1266   DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1267   if (DECL_IMMEDIATE_FUNCTION_P (callop))
1268     SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1269   DECL_ARGUMENTS (fn) = fn_args;
1270   for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1271     {
1272       /* Avoid duplicate -Wshadow warnings.  */
1273       DECL_NAME (arg) = NULL_TREE;
1274       DECL_CONTEXT (arg) = fn;
1275     }
1276   if (nested_def)
1277     DECL_INTERFACE_KNOWN (fn) = 1;
1278 
1279   if (generic_lambda_p)
1280     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1281 
1282   if (flag_sanitize & SANITIZE_NULL)
1283     /* Don't UBsan this function; we're deliberately calling op() with a null
1284        object argument.  */
1285     add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1286 
1287   add_method (type, fn, false);
1288 
1289   if (nested)
1290     push_function_context ();
1291   else
1292     /* Still increment function_depth so that we don't GC in the
1293        middle of an expression.  */
1294     ++function_depth;
1295 
1296   /* Generate the body of the thunk.  */
1297 
1298   start_preparsed_function (statfn, NULL_TREE,
1299 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1300   tree body = begin_function_body ();
1301   tree compound_stmt = begin_compound_stmt (0);
1302   if (!generic_lambda_p)
1303     {
1304       set_flags_from_callee (call);
1305       if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1306 	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1307     }
1308   call = convert_from_reference (call);
1309   finish_return_stmt (call);
1310 
1311   finish_compound_stmt (compound_stmt);
1312   finish_function_body (body);
1313 
1314   fn = finish_function (/*inline_p=*/true);
1315   if (!generic_lambda_p)
1316     expand_or_defer_fn (fn);
1317 
1318   /* Generate the body of the conversion op.  */
1319 
1320   start_preparsed_function (convfn, NULL_TREE,
1321 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1322   body = begin_function_body ();
1323   compound_stmt = begin_compound_stmt (0);
1324 
1325   /* decl_needed_p needs to see that it's used.  */
1326   TREE_USED (statfn) = 1;
1327   finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1328 
1329   finish_compound_stmt (compound_stmt);
1330   finish_function_body (body);
1331 
1332   fn = finish_function (/*inline_p=*/true);
1333   if (!generic_lambda_p)
1334     expand_or_defer_fn (fn);
1335 
1336   if (nested)
1337     pop_function_context ();
1338   else
1339     --function_depth;
1340 }
1341 
1342 /* True if FN is the static function "_FUN" that gets returned from the lambda
1343    conversion operator.  */
1344 
1345 bool
1346 lambda_static_thunk_p (tree fn)
1347 {
1348   return (fn && TREE_CODE (fn) == FUNCTION_DECL
1349 	  && DECL_ARTIFICIAL (fn)
1350 	  && DECL_STATIC_FUNCTION_P (fn)
1351 	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1352 }
1353 
1354 /* Returns true iff VAL is a lambda-related declaration which should
1355    be ignored by unqualified lookup.  */
1356 
1357 bool
1358 is_lambda_ignored_entity (tree val)
1359 {
1360   /* Look past normal, non-VLA capture proxies.  */
1361   if (is_normal_capture_proxy (val)
1362       && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1363     return true;
1364 
1365   /* Always ignore lambda fields, their names are only for debugging.  */
1366   if (TREE_CODE (val) == FIELD_DECL
1367       && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1368     return true;
1369 
1370   /* None of the lookups that use qualify_lookup want the op() from the
1371      lambda; they want the one from the enclosing class.  */
1372   val = OVL_FIRST (val);
1373   if (LAMBDA_FUNCTION_P (val))
1374     return true;
1375 
1376   return false;
1377 }
1378 
1379 /* Lambdas that appear in variable initializer or default argument scope
1380    get that in their mangling, so we need to record it.  We might as well
1381    use the count for function and namespace scopes as well.  */
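/* For instance (illustrative), in

     int a = [] { return 1; } () + [] { return 2; } ();

   both lambdas record the variable 'a' as their extra scope and receive
   discriminators 0 and 1, which distinguishes their mangled closure
   type names.  */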
1382 static GTY(()) tree lambda_scope;
1383 static GTY(()) int lambda_count;
1384 struct GTY(()) tree_int
1385 {
1386   tree t;
1387   int i;
1388 };
1389 static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1390 
1391 void
1392 start_lambda_scope (tree decl)
1393 {
1394   tree_int ti;
1395   gcc_assert (decl);
1396   /* Once we're inside a function, we ignore variable scope and just push
1397      the function again so that popping works properly.  */
1398   if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1399     decl = current_function_decl;
1400   ti.t = lambda_scope;
1401   ti.i = lambda_count;
1402   vec_safe_push (lambda_scope_stack, ti);
1403   if (lambda_scope != decl)
1404     {
1405       /* Don't reset the count if we're still in the same function.  */
1406       lambda_scope = decl;
1407       lambda_count = 0;
1408     }
1409 }
1410 
1411 void
1412 record_lambda_scope (tree lambda)
1413 {
1414   LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1415   LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1416 }
1417 
1418 /* This lambda is an instantiation of a lambda in a template default argument
1419    that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
1420    need to use and increment the global count to avoid collisions.  */
1421 
1422 void
1423 record_null_lambda_scope (tree lambda)
1424 {
1425   if (vec_safe_is_empty (lambda_scope_stack))
1426     record_lambda_scope (lambda);
1427   else
1428     {
1429       tree_int *p = lambda_scope_stack->begin();
1430       LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
1431       LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
1432     }
1433   gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
1434 }
1435 
1436 void
1437 finish_lambda_scope (void)
1438 {
1439   tree_int *p = &lambda_scope_stack->last ();
1440   if (lambda_scope != p->t)
1441     {
1442       lambda_scope = p->t;
1443       lambda_count = p->i;
1444     }
1445   lambda_scope_stack->pop ();
1446 }
1447 
1448 tree
1449 start_lambda_function (tree fco, tree lambda_expr)
1450 {
1451   /* Let the front end know that we are going to be defining this
1452      function.  */
1453   start_preparsed_function (fco,
1454 			    NULL_TREE,
1455 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1456 
1457   tree body = begin_function_body ();
1458 
1459   /* Push the proxies for any explicit captures.  */
1460   for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1461        cap = TREE_CHAIN (cap))
1462     build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1463 
1464   return body;
1465 }
1466 
1467 /* Subroutine of prune_lambda_captures: CAP is a node in
1468    LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
1469    might optimize away the capture, or NULL_TREE if there is no such
1470    variable.  */
1471 
1472 static tree
1473 var_to_maybe_prune (tree cap)
1474 {
1475   if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1476     /* Don't prune explicit captures.  */
1477     return NULL_TREE;
1478 
1479   tree mem = TREE_PURPOSE (cap);
1480   if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1481     /* Packs and init-captures aren't captures of constant vars.  */
1482     return NULL_TREE;
1483 
1484   tree init = TREE_VALUE (cap);
1485   if (is_normal_capture_proxy (init))
1486     init = DECL_CAPTURED_VARIABLE (init);
1487   if (decl_constant_var_p (init))
1488     return init;
1489 
1490   return NULL_TREE;
1491 }
1492 
1493 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1494    for constant variables are actually used in the lambda body.
1495 
1496    There will always be a DECL_EXPR for the capture proxy; remember it when we
1497    see it, but replace it with any other use.  */
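/* For example (illustrative):

     const int i = 3;
     auto f = [=] { return i; };   // the use of 'i' folds to the constant

   After folding, the only remaining reference to the capture proxy is its
   DECL_EXPR, so prune_lambda_captures below can drop the capture.  */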
1498 
1499 static tree
1500 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1501 {
1502   hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1503 
1504   tree var = NULL_TREE;
1505   if (TREE_CODE (*t) == DECL_EXPR)
1506     {
1507       tree decl = DECL_EXPR_DECL (*t);
1508       if (is_constant_capture_proxy (decl))
1509 	{
1510 	  var = DECL_CAPTURED_VARIABLE (decl);
1511 	  *walk_subtrees = 0;
1512 	}
1513     }
1514   else if (is_constant_capture_proxy (*t))
1515     var = DECL_CAPTURED_VARIABLE (*t);
1516 
1517   if (var)
1518     {
1519       tree *&slot = const_vars.get_or_insert (var);
1520       if (!slot || VAR_P (*t))
1521 	slot = t;
1522     }
1523 
1524   return NULL_TREE;
1525 }
1526 
1527 /* We're at the end of processing a lambda; go back and remove any captures of
1528    constant variables for which we've folded away all uses.  */
1529 
1530 static void
1531 prune_lambda_captures (tree body)
1532 {
1533   tree lam = current_lambda_expr ();
1534   if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1535     /* No uses were optimized away.  */
1536     return;
1537   if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1538     /* No default captures, and we don't prune explicit captures.  */
1539     return;
1540   /* Don't bother pruning in a template, we'll prune at instantiation time.  */
1541   if (dependent_type_p (TREE_TYPE (lam)))
1542     return;
1543 
1544   hash_map<tree,tree*> const_vars;
1545 
1546   cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1547 
1548   tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1549   for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1550     {
1551       tree cap = *capp;
1552       if (tree var = var_to_maybe_prune (cap))
1553 	{
1554 	  tree **use = const_vars.get (var);
1555 	  if (use && TREE_CODE (**use) == DECL_EXPR)
1556 	    {
1557 	      /* All uses of this capture were folded away, leaving only the
1558 		 proxy declaration.  */
1559 
1560 	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
1561 	      *capp = TREE_CHAIN (cap);
1562 
1563 	      /* And out of TYPE_FIELDS.  */
1564 	      tree field = TREE_PURPOSE (cap);
1565 	      while (*fieldp != field)
1566 		fieldp = &DECL_CHAIN (*fieldp);
1567 	      *fieldp = DECL_CHAIN (*fieldp);
1568 
1569 	      /* And remove the capture proxy declaration.  */
1570 	      **use = void_node;
1571 	      continue;
1572 	    }
1573 	}
1574 
1575       capp = &TREE_CHAIN (cap);
1576     }
1577 }
1578 
1579 void
1580 finish_lambda_function (tree body)
1581 {
1582   finish_function_body (body);
1583 
1584   prune_lambda_captures (body);
1585 
1586   /* Finish the function and generate code for it if necessary.  */
1587   tree fn = finish_function (/*inline_p=*/true);
1588 
1589   /* Only expand if the call op is not a template.  */
1590   if (!DECL_TEMPLATE_INFO (fn))
1591     expand_or_defer_fn (fn);
1592 }
1593 
1594 #include "gt-cp-lambda.h"
1595