1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2    building tree structure, checking semantic consistency, and
3    building RTL.  These routines are used both during actual parsing
4    and during the instantiation of template functions.
5 
6    Copyright (C) 1998-2022 Free Software Foundation, Inc.
7 
8    This file is part of GCC.
9 
10    GCC is free software; you can redistribute it and/or modify it
11    under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3, or (at your option)
13    any later version.
14 
15    GCC is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34 #include "decl.h"
35 
36 /* Constructor for a lambda expression.  */
37 
38 tree
39 build_lambda_expr (void)
40 {
41   tree lambda = make_node (LAMBDA_EXPR);
42   LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
43   LAMBDA_EXPR_CAPTURE_LIST         (lambda) = NULL_TREE;
44   LAMBDA_EXPR_THIS_CAPTURE         (lambda) = NULL_TREE;
45   LAMBDA_EXPR_REGEN_INFO           (lambda) = NULL_TREE;
46   LAMBDA_EXPR_PENDING_PROXIES      (lambda) = NULL;
47   LAMBDA_EXPR_MUTABLE_P            (lambda) = false;
48   return lambda;
49 }
50 
51 /* Create the closure object for a LAMBDA_EXPR.  */
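/* Illustrative example (editorial addition, not part of the upstream
   source): for

     int i = 0;
     auto f = [i] { return i; };

   the closure object for 'f' is built as if by aggregate initialization
   from the capture list, roughly '{ i }', with each by-copy capture
   direct-initializing its closure member.  */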
52 
53 tree
54 build_lambda_object (tree lambda_expr)
55 {
56   /* Build aggregate constructor call.
57      - cp_parser_braced_list
58      - cp_parser_functional_cast  */
59   vec<constructor_elt, va_gc> *elts = NULL;
60   tree node, expr, type;
61 
62   if (processing_template_decl || lambda_expr == error_mark_node)
63     return lambda_expr;
64 
65   /* Make sure any error messages refer to the lambda-introducer.  */
66   location_t loc = LAMBDA_EXPR_LOCATION (lambda_expr);
67   iloc_sentinel il (loc);
68 
69   for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
70        node;
71        node = TREE_CHAIN (node))
72     {
73       tree field = TREE_PURPOSE (node);
74       tree val = TREE_VALUE (node);
75 
76       if (field == error_mark_node)
77 	{
78 	  expr = error_mark_node;
79 	  goto out;
80 	}
81 
82       if (TREE_CODE (val) == TREE_LIST)
83 	val = build_x_compound_expr_from_list (val, ELK_INIT,
84 					       tf_warning_or_error);
85 
86       if (DECL_P (val))
87 	mark_used (val);
88 
89       /* Mere mortals can't copy arrays with aggregate initialization, so
90 	 do some magic to make it work here.  */
91       if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
92 	val = build_array_copy (val);
93       else if (DECL_NORMAL_CAPTURE_P (field)
94 	       && !DECL_VLA_CAPTURE_P (field)
95 	       && !TYPE_REF_P (TREE_TYPE (field)))
96 	{
97 	  /* "the entities that are captured by copy are used to
98 	     direct-initialize each corresponding non-static data
99 	     member of the resulting closure object."
100 
101 	     There's normally no way to express direct-initialization
102 	     from an element of a CONSTRUCTOR, so we build up a special
103 	     TARGET_EXPR to bypass the usual copy-initialization.  */
104 	  val = force_rvalue (val, tf_warning_or_error);
105 	  if (TREE_CODE (val) == TARGET_EXPR)
106 	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
107 	}
108 
109       CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
110     }
111 
112   expr = build_constructor (init_list_type_node, elts);
113   CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
114 
115   /* N2927: "[The closure] class type is not an aggregate."
116      But we briefly treat it as an aggregate to make this simpler.  */
117   type = LAMBDA_EXPR_CLOSURE (lambda_expr);
118   CLASSTYPE_NON_AGGREGATE (type) = 0;
119   expr = finish_compound_literal (type, expr, tf_warning_or_error);
120   protected_set_expr_location (expr, loc);
121   CLASSTYPE_NON_AGGREGATE (type) = 1;
122 
123  out:
124   return expr;
125 }
126 
127 /* Return an initialized RECORD_TYPE for LAMBDA.
128    LAMBDA must have its explicit captures already.  */
129 
130 tree
131 begin_lambda_type (tree lambda)
132 {
133   /* Lambda names are nearly but not quite anonymous.  */
134   tree name = make_anon_name ();
135   IDENTIFIER_LAMBDA_P (name) = true;
136 
137   /* Create the new RECORD_TYPE for this lambda.  */
138   tree type = xref_tag (/*tag_code=*/record_type, name);
139   if (type == error_mark_node)
140     return error_mark_node;
141 
142   /* Designate it as a struct so that we can use aggregate initialization.  */
143   CLASSTYPE_DECLARED_CLASS (type) = false;
144 
145   /* Cross-reference the expression and the type.  */
146   LAMBDA_EXPR_CLOSURE (lambda) = type;
147   CLASSTYPE_LAMBDA_EXPR (type) = lambda;
148 
149   /* In C++17, assume the closure is literal; we'll clear the flag later if
150      necessary.  */
151   if (cxx_dialect >= cxx17)
152     CLASSTYPE_LITERAL_P (type) = true;
153 
154   /* Clear base types.  */
155   xref_basetypes (type, /*bases=*/NULL_TREE);
156 
157   /* Start the class.  */
158   type = begin_class_definition (type);
159 
160   return type;
161 }
162 
163 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
164    closure type.  */
165 
166 tree
167 lambda_function (tree lambda)
168 {
169   tree type;
170   if (TREE_CODE (lambda) == LAMBDA_EXPR)
171     type = LAMBDA_EXPR_CLOSURE (lambda);
172   else
173     type = lambda;
174   gcc_assert (LAMBDA_TYPE_P (type));
175   /* Don't let debug_tree cause instantiation.  */
176   if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
177       && !COMPLETE_OR_OPEN_TYPE_P (type))
178     return NULL_TREE;
179   lambda = lookup_member (type, call_op_identifier,
180 			  /*protect=*/0, /*want_type=*/false,
181 			  tf_warning_or_error);
182   if (lambda)
183     lambda = STRIP_TEMPLATE (get_first_fn (lambda));
184   return lambda;
185 }
186 
187 /* True if EXPR is an expression whose type can be used directly in lambda
188    capture.  Not to be used for 'auto'.  */
189 
190 static bool
191 type_deducible_expression_p (tree expr)
192 {
193   if (!type_dependent_expression_p (expr))
194     return true;
195   if (BRACE_ENCLOSED_INITIALIZER_P (expr)
196       || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
197     return false;
198   tree t = non_reference (TREE_TYPE (expr));
199   if (!t) return false;
200   while (TREE_CODE (t) == POINTER_TYPE)
201     t = TREE_TYPE (t);
202   return currently_open_class (t);
203 }
204 
205 /* Returns the type to use for the FIELD_DECL corresponding to the
206    capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
207    C++14 init capture, and BY_REFERENCE_P indicates whether we're
208    capturing by reference.  */
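/* Illustrative example (editorial addition, not part of the upstream
   source): given 'int i;', the capture '[i]' yields field type 'int',
   '[&i]' yields 'int &', and an init-capture such as '[j = i + 1]' goes
   through 'auto' deduction (staying a bare 'auto' if the initializer
   uses a parameter pack).  */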
209 
210 tree
211 lambda_capture_field_type (tree expr, bool explicit_init_p,
212 			   bool by_reference_p)
213 {
214   tree type;
215   bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
216 
217   if (is_this)
218     type = TREE_TYPE (expr);
219   else if (explicit_init_p)
220     {
221       tree auto_node = make_auto ();
222 
223       type = auto_node;
224       if (by_reference_p)
225 	/* Add the reference now, so deduction doesn't lose
226 	   outermost CV qualifiers of EXPR.  */
227 	type = build_reference_type (type);
228       if (uses_parameter_packs (expr))
229 	/* Stick with 'auto' even if the type could be deduced.  */;
230       else
231 	type = do_auto_deduction (type, expr, auto_node);
232     }
233   else if (!type_deducible_expression_p (expr))
234     {
235       type = cxx_make_type (DECLTYPE_TYPE);
236       DECLTYPE_TYPE_EXPR (type) = expr;
237       DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
238       DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
239       SET_TYPE_STRUCTURAL_EQUALITY (type);
240     }
241   else
242     {
243       STRIP_ANY_LOCATION_WRAPPER (expr);
244 
245       if (!by_reference_p && is_capture_proxy (expr))
246 	{
247 	  /* When capturing by-value another capture proxy from an enclosing
248 	     lambda, consider the type of the corresponding field instead,
249 	     as the proxy may be additionally const-qualified if the enclosing
250 	     lambda is non-mutable (PR94376).  */
251 	  gcc_assert (TREE_CODE (DECL_VALUE_EXPR (expr)) == COMPONENT_REF);
252 	  expr = TREE_OPERAND (DECL_VALUE_EXPR (expr), 1);
253 	}
254 
255       type = non_reference (unlowered_expr_type (expr));
256 
257       if (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)
258 	type = build_reference_type (type);
259     }
260 
261   return type;
262 }
263 
264 /* Returns true iff DECL is a lambda capture proxy variable created by
265    build_capture_proxy.  */
266 
267 bool
268 is_capture_proxy (tree decl)
269 {
270   /* Location wrappers should be stripped or otherwise handled by the
271      caller before using this predicate.  */
272   gcc_checking_assert (!location_wrapper_p (decl));
273 
274   return (VAR_P (decl)
275 	  && DECL_HAS_VALUE_EXPR_P (decl)
276 	  && !DECL_ANON_UNION_VAR_P (decl)
277 	  && !DECL_DECOMPOSITION_P (decl)
278 	  && !DECL_FNAME_P (decl)
279 	  && !(DECL_ARTIFICIAL (decl)
280 	       && DECL_LANG_SPECIFIC (decl)
281 	       && DECL_OMP_PRIVATIZED_MEMBER (decl))
282 	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
283 }
284 
285 /* Returns true iff DECL is a capture proxy for a normal capture
286    (i.e. without explicit initializer).  */
287 
288 bool
289 is_normal_capture_proxy (tree decl)
290 {
291   if (!is_capture_proxy (decl))
292     /* It's not a capture proxy.  */
293     return false;
294 
295   return (DECL_LANG_SPECIFIC (decl)
296 	  && DECL_CAPTURED_VARIABLE (decl));
297 }
298 
299 /* Returns true iff DECL is a capture proxy for a normal capture
300    of a constant variable.  */
301 
302 bool
303 is_constant_capture_proxy (tree decl)
304 {
305   if (is_normal_capture_proxy (decl))
306     return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
307   return false;
308 }
309 
310 /* VAR is a capture proxy created by build_capture_proxy; add it to the
311    current function, which is the operator() for the appropriate lambda.  */
312 
313 void
314 insert_capture_proxy (tree var)
315 {
316   if (is_normal_capture_proxy (var))
317     {
318       tree cap = DECL_CAPTURED_VARIABLE (var);
319       if (CHECKING_P)
320 	{
321 	  gcc_assert (!is_normal_capture_proxy (cap));
322 	  tree old = retrieve_local_specialization (cap);
323 	  if (old)
324 	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
325 	}
326       register_local_specialization (var, cap);
327     }
328 
329   /* Put the capture proxy in the extra body block so that it won't clash
330      with a later local variable.  */
331   pushdecl_outermost_localscope (var);
332 
333   /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
334   var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
335   tree stmt_list = (*stmt_list_stack)[1];
336   gcc_assert (stmt_list);
337   append_to_statement_list_force (var, &stmt_list);
338 }
339 
340 /* We've just finished processing a lambda; if the containing scope is also
341    a lambda, insert any capture proxies that were created while processing
342    the nested lambda.  */
343 
344 void
345 insert_pending_capture_proxies (void)
346 {
347   tree lam;
348   vec<tree, va_gc> *proxies;
349   unsigned i;
350 
351   if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
352     return;
353 
354   lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
355   proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
356   for (i = 0; i < vec_safe_length (proxies); ++i)
357     {
358       tree var = (*proxies)[i];
359       insert_capture_proxy (var);
360     }
361   release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
362   LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
363 }
364 
365 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
366    return the type we want the proxy to have: the type of the field itself,
367    with added const-qualification if the lambda isn't mutable and the
368    capture is by value.  */
369 
370 tree
371 lambda_proxy_type (tree ref)
372 {
373   tree type;
374   if (ref == error_mark_node)
375     return error_mark_node;
376   if (REFERENCE_REF_P (ref))
377     ref = TREE_OPERAND (ref, 0);
378   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
379   type = TREE_TYPE (ref);
380   if (!type || WILDCARD_TYPE_P (non_reference (type)))
381     {
382       type = cxx_make_type (DECLTYPE_TYPE);
383       DECLTYPE_TYPE_EXPR (type) = ref;
384       DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
385       SET_TYPE_STRUCTURAL_EQUALITY (type);
386     }
387   if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
388     type = make_pack_expansion (type);
389   return type;
390 }
391 
392 /* MEMBER is a capture field in a lambda closure class.  Now that we're
393    inside the operator(), build a placeholder var for future lookups and
394    debugging.  */
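/* Illustrative sketch (editorial addition, not part of the upstream
   source): for a capture field '__x', the proxy is a VAR_DECL named 'x'
   whose DECL_VALUE_EXPR forwards to the corresponding member of the
   closure object, roughly '(*this).__x' inside the operator().  */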
395 
396 static tree
397 build_capture_proxy (tree member, tree init)
398 {
399   tree var, object, fn, closure, name, lam, type;
400 
401   if (PACK_EXPANSION_P (member))
402     member = PACK_EXPANSION_PATTERN (member);
403 
404   closure = DECL_CONTEXT (member);
405   fn = lambda_function (closure);
406   lam = CLASSTYPE_LAMBDA_EXPR (closure);
407 
408   /* The proxy variable forwards to the capture field.  */
409   object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
410   object = finish_non_static_data_member (member, object, NULL_TREE);
411   if (REFERENCE_REF_P (object))
412     object = TREE_OPERAND (object, 0);
413 
414   /* Remove the __ inserted by add_capture.  */
415   name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
416 
417   type = lambda_proxy_type (object);
418 
419   if (name == this_identifier && !INDIRECT_TYPE_P (type))
420     {
421       type = build_pointer_type (type);
422       type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
423       object = build_fold_addr_expr_with_type (object, type);
424     }
425 
426   if (DECL_VLA_CAPTURE_P (member))
427     {
428       /* Rebuild the VLA type from the pointer and maxindex.  */
429       tree field = next_initializable_field (TYPE_FIELDS (type));
430       tree ptr = build_simple_component_ref (object, field);
431       field = next_initializable_field (DECL_CHAIN (field));
432       tree max = build_simple_component_ref (object, field);
433       type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
434 				     build_index_type (max));
435       type = build_reference_type (type);
436       object = convert (type, ptr);
437     }
438 
439   complete_type (type);
440 
441   var = build_decl (input_location, VAR_DECL, name, type);
442   SET_DECL_VALUE_EXPR (var, object);
443   DECL_HAS_VALUE_EXPR_P (var) = 1;
444   DECL_ARTIFICIAL (var) = 1;
445   TREE_USED (var) = 1;
446   DECL_CONTEXT (var) = fn;
447 
448   if (DECL_NORMAL_CAPTURE_P (member))
449     {
450       if (DECL_VLA_CAPTURE_P (member))
451 	{
452 	  init = CONSTRUCTOR_ELT (init, 0)->value;
453 	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
454 	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
455 	}
456       else
457 	{
458 	  if (PACK_EXPANSION_P (init))
459 	    init = PACK_EXPANSION_PATTERN (init);
460 	}
461 
462       if (INDIRECT_REF_P (init))
463 	init = TREE_OPERAND (init, 0);
464       STRIP_NOPS (init);
465 
466       gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
467       while (is_normal_capture_proxy (init))
468 	init = DECL_CAPTURED_VARIABLE (init);
469       retrofit_lang_decl (var);
470       DECL_CAPTURED_VARIABLE (var) = init;
471     }
472 
473   if (name == this_identifier)
474     {
475       gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
476       LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
477     }
478 
479   if (fn == current_function_decl)
480     insert_capture_proxy (var);
481   else
482     vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
483 
484   return var;
485 }
486 
487 static GTY(()) tree ptr_id;
488 static GTY(()) tree max_id;
489 
490 /* Return a struct containing a pointer and a length for lambda capture of
491    an array of runtime length.  */
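/* Illustrative example (editorial addition, not part of the upstream
   source): for 'void f (int n) { int a[n]; [&a] { ... }; }' the capture
   field gets an anonymous struct holding 'ptr' (the address of the
   first element) and 'max' (the maximum index), from which
   build_capture_proxy reconstructs the array type.  */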
492 
493 static tree
494 vla_capture_type (tree array_type)
495 {
496   tree type = xref_tag (record_type, make_anon_name ());
497   xref_basetypes (type, NULL_TREE);
498   type = begin_class_definition (type);
499   if (!ptr_id)
500     {
501       ptr_id = get_identifier ("ptr");
502       max_id = get_identifier ("max");
503     }
504   tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
505   tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
506   finish_member_declaration (field);
507   field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
508   finish_member_declaration (field);
509   return finish_struct (type, NULL_TREE);
510 }
511 
512 /* From an ID and INITIALIZER, create a capture (by reference if
513    BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
514    and return it.  If ID is `this', BY_REFERENCE_P says whether
515    `*this' is captured by reference.  */
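/* Illustrative example (editorial addition, not part of the upstream
   source): a capture list such as '[x, &y, z = f ()]' produces closure
   fields named '__x', '__y' and '__z' (the '__' prefix keeps them out
   of user name lookup), with '__y' of reference type and '__z' typed
   via 'auto' deduction.  */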
516 
517 tree
518 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
519 	     bool explicit_init_p)
520 {
521   char *buf;
522   tree type, member, name;
523   bool vla = false;
524   bool variadic = false;
525   tree initializer = orig_init;
526 
527   if (PACK_EXPANSION_P (initializer))
528     {
529       initializer = PACK_EXPANSION_PATTERN (initializer);
530       variadic = true;
531     }
532 
533   if (TREE_CODE (initializer) == TREE_LIST
534       /* A pack expansion might end up with multiple elements.  */
535       && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
536     initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
537 						   tf_warning_or_error);
538   type = TREE_TYPE (initializer);
539   if (type == error_mark_node)
540     return error_mark_node;
541 
542   if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
543     {
544       vla = true;
545       if (!by_reference_p)
546 	error ("array of runtime bound cannot be captured by copy, "
547 	       "only by reference");
548 
549       /* For a VLA, we capture the address of the first element and the
550 	 maximum index, and then reconstruct the VLA for the proxy.  */
551       tree elt = cp_build_array_ref (input_location, initializer,
552 				     integer_zero_node, tf_warning_or_error);
553       initializer = build_constructor_va (init_list_type_node, 2,
554 					  NULL_TREE, build_address (elt),
555 					  NULL_TREE, array_type_nelts (type));
556       type = vla_capture_type (type);
557     }
558   else if (!dependent_type_p (type)
559 	   && variably_modified_type_p (type, NULL_TREE))
560     {
561       sorry ("capture of variably-modified type %qT that is not an N3639 array "
562 	     "of runtime bound", type);
563       if (TREE_CODE (type) == ARRAY_TYPE
564 	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
565 	inform (input_location, "because the array element type %qT has "
566 		"variable size", TREE_TYPE (type));
567       return error_mark_node;
568     }
569   else
570     {
571       type = lambda_capture_field_type (initializer, explicit_init_p,
572 					by_reference_p);
573       if (type == error_mark_node)
574 	return error_mark_node;
575 
576       if (id == this_identifier && !by_reference_p)
577 	{
578 	  gcc_assert (INDIRECT_TYPE_P (type));
579 	  type = TREE_TYPE (type);
580 	  initializer = cp_build_fold_indirect_ref (initializer);
581 	}
582 
583       if (dependent_type_p (type))
584 	;
585       else if (id != this_identifier && by_reference_p)
586 	{
587 	  if (!lvalue_p (initializer))
588 	    {
589 	      error ("cannot capture %qE by reference", initializer);
590 	      return error_mark_node;
591 	    }
592 	}
593       else
594 	{
595 	  /* Capture by copy requires a complete type.  */
596 	  type = complete_type (type);
597 	  if (!COMPLETE_TYPE_P (type))
598 	    {
599 	      error ("capture by copy of incomplete type %qT", type);
600 	      cxx_incomplete_type_inform (type);
601 	      return error_mark_node;
602 	    }
603 	  else if (!verify_type_context (input_location,
604 					 TCTX_CAPTURE_BY_COPY, type))
605 	    return error_mark_node;
606 	}
607     }
608 
609   /* Add __ to the beginning of the field name so that user code
610      won't find the field with name lookup.  We can't just leave the name
611      unset because template instantiation uses the name to find
612      instantiated fields.  */
613   buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
614   buf[1] = buf[0] = '_';
615   memcpy (buf + 2, IDENTIFIER_POINTER (id),
616 	  IDENTIFIER_LENGTH (id) + 1);
617   name = get_identifier (buf);
618 
619   if (variadic)
620     {
621       type = make_pack_expansion (type);
622       if (explicit_init_p)
623 	/* With an explicit initializer 'type' is auto, which isn't really a
624 	   parameter pack in this context.  We will want as many fields as we
625 	   have elements in the expansion of the initializer, so use its packs
626 	   instead.  */
627 	{
628 	  PACK_EXPANSION_PARAMETER_PACKS (type)
629 	    = uses_parameter_packs (initializer);
630 	  PACK_EXPANSION_AUTO_P (type) = true;
631 	}
632     }
633 
634   /* Make member variable.  */
635   member = build_decl (input_location, FIELD_DECL, name, type);
636   DECL_VLA_CAPTURE_P (member) = vla;
637 
638   if (!explicit_init_p)
639     /* Normal captures are invisible to name lookup but uses are replaced
640        with references to the capture field; we implement this by only
641        really making them invisible in unevaluated context; see
642        qualify_lookup.  For now, let's make explicitly initialized captures
643        always visible.  */
644     DECL_NORMAL_CAPTURE_P (member) = true;
645 
646   if (id == this_identifier)
647     LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
648 
649   /* Add it to the appropriate closure class if we've started it.  */
650   if (current_class_type
651       && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
652     {
653       if (COMPLETE_TYPE_P (current_class_type))
654 	internal_error ("trying to capture %qD in instantiation of "
655 			"generic lambda", id);
656       finish_member_declaration (member);
657     }
658 
659   tree listmem = member;
660   if (variadic)
661     {
662       listmem = make_pack_expansion (member);
663       initializer = orig_init;
664     }
665   LAMBDA_EXPR_CAPTURE_LIST (lambda)
666     = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
667 
668   if (LAMBDA_EXPR_CLOSURE (lambda))
669     return build_capture_proxy (member, initializer);
670   /* For explicit captures we haven't started the function yet, so we wait
671      and build the proxy from cp_parser_lambda_body.  */
672   LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
673   return NULL_TREE;
674 }
675 
676 /* Register all the capture members on the list CAPTURES, which is the
677    LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */
678 
679 void
680 register_capture_members (tree captures)
681 {
682   if (captures == NULL_TREE)
683     return;
684 
685   register_capture_members (TREE_CHAIN (captures));
686 
687   tree field = TREE_PURPOSE (captures);
688   if (PACK_EXPANSION_P (field))
689     field = PACK_EXPANSION_PATTERN (field);
690 
691   finish_member_declaration (field);
692 }
693 
694 /* Similar to add_capture, except this works on a stack of nested lambdas.
695    BY_REFERENCE_P in this case is derived from the default capture mode.
696    Returns the capture for the lambda at the bottom of the stack.  */
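/* Illustrative example (editorial addition, not part of the upstream
   source): in 'int x; [&] { return [&] { return x; } (); }' the use of
   'x' in the inner lambda adds a capture to each enclosing lambda on
   LAMBDA_STACK, outermost first, so that each inner capture is
   initialized from the proxy of the enclosing lambda's capture.  */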
697 
698 tree
699 add_default_capture (tree lambda_stack, tree id, tree initializer)
700 {
701   bool this_capture_p = (id == this_identifier);
702   tree var = NULL_TREE;
703   tree saved_class_type = current_class_type;
704 
705   for (tree node = lambda_stack;
706        node;
707        node = TREE_CHAIN (node))
708     {
709       tree lambda = TREE_VALUE (node);
710 
711       current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
712       if (DECL_PACK_P (initializer))
713 	initializer = make_pack_expansion (initializer);
714       var = add_capture (lambda,
715                             id,
716                             initializer,
717                             /*by_reference_p=*/
718 			    (this_capture_p
719 			     || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
720 				 == CPLD_REFERENCE)),
721 			    /*explicit_init_p=*/false);
722       initializer = convert_from_reference (var);
723 
724       /* Warn about deprecated implicit capture of this via [=].  */
725       if (cxx_dialect >= cxx20
726 	  && this_capture_p
727 	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
728 	{
729 	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
730 			  "implicit capture of %qE via %<[=]%> is deprecated "
731 			  "in C++20", this_identifier))
732 	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
733 		    "%<*this%> capture");
734 	}
735     }
736 
737   current_class_type = saved_class_type;
738 
739   return var;
740 }
741 
742 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
743    form of an INDIRECT_REF, possibly adding it through default
744    capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
745    try to capture but don't complain if we can't.  */
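/* Illustrative example (editorial addition, not part of the upstream
   source): in 'struct S { int m; void f () { [=] { return m; } (); } };'
   the use of 'm' needs 'this'; the lambda's 'this' capture is looked up
   here and, under a default capture mode, added on demand.  */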
746 
747 tree
748 lambda_expr_this_capture (tree lambda, int add_capture_p)
749 {
750   tree result;
751 
752   tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
753 
754   /* In unevaluated context this isn't an odr-use, so don't capture.  */
755   if (cp_unevaluated_operand)
756     add_capture_p = false;
757 
758   /* Try to default capture 'this' if we can.  */
759   if (!this_capture)
760     {
761       tree lambda_stack = NULL_TREE;
762       tree init = NULL_TREE;
763       bool saw_complete = false;
764 
765       /* If we are in a lambda function, we can move out until we hit:
766            1. a non-lambda function or NSDMI,
767            2. a lambda function capturing 'this', or
768            3. a non-default capturing lambda function.  */
769       for (tree tlambda = lambda; ;)
770 	{
771 	  if (add_capture_p
772 	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
773 	    /* tlambda won't let us capture 'this'.  */
774 	    break;
775 
776 	  if (add_capture_p)
777 	    lambda_stack = tree_cons (NULL_TREE,
778 				      tlambda,
779 				      lambda_stack);
780 
781 	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
782 	  if (COMPLETE_TYPE_P (closure))
783 	    /* We're instantiating a generic lambda op(), the containing
784 	       scope may be gone.  */
785 	    saw_complete = true;
786 
787 	  tree containing_function
788 	    = decl_function_context (TYPE_NAME (closure));
789 
790 	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
791 	  if (ex && TREE_CODE (ex) == FIELD_DECL)
792 	    {
793 	      /* Lambda in an NSDMI.  We don't have a function to look up
794 		 'this' in, but we can find (or rebuild) the fake one from
795 		 inject_this_parameter.  */
796 	      if (!containing_function && !saw_complete)
797 		/* If we're parsing a lambda in a non-local class,
798 		   we can find the fake 'this' in scope_chain.  */
799 		init = scope_chain->x_current_class_ptr;
800 	      else
801 		/* Otherwise it's either gone or buried in
802 		   function_context_stack, so make another.  */
803 		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
804 					TYPE_UNQUALIFIED);
805 	      gcc_checking_assert
806 		(init && (TREE_TYPE (TREE_TYPE (init))
807 			  == current_nonlambda_class_type ()));
808 	      break;
809 	    }
810 
811 	  if (containing_function == NULL_TREE)
812 	    /* We ran out of scopes; there's no 'this' to capture.  */
813 	    break;
814 
815 	  if (!LAMBDA_FUNCTION_P (containing_function))
816 	    {
817 	      /* We found a non-lambda function.  */
818 	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
819 		/* First parameter is 'this'.  */
820 		init = DECL_ARGUMENTS (containing_function);
821 	      break;
822 	    }
823 
824 	  tlambda
825             = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
826 
827           if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
828 	    {
829 	      /* An outer lambda has already captured 'this'.  */
830 	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
831 	      break;
832 	    }
833 	}
834 
835       if (init)
836         {
837           if (add_capture_p)
838 	    this_capture = add_default_capture (lambda_stack,
839 					        /*id=*/this_identifier,
840 					        init);
841           else
842 	    this_capture = init;
843         }
844     }
845 
846   if (cp_unevaluated_operand)
847     result = this_capture;
848   else if (!this_capture)
849     {
850       if (add_capture_p == 1)
851 	{
852 	  error ("%<this%> was not captured for this lambda function");
853 	  result = error_mark_node;
854 	}
855       else
856 	result = NULL_TREE;
857     }
858   else
859     {
860       /* To make sure that current_class_ref is for the lambda.  */
861       gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
862 		  == LAMBDA_EXPR_CLOSURE (lambda));
863 
864       result = this_capture;
865 
866       /* If 'this' is captured, each use of 'this' is transformed into an
867 	 access to the corresponding unnamed data member of the closure
868 	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
869 	 ensures that the transformed expression is an rvalue. ] */
870       result = rvalue (result);
871     }
872 
873   return result;
874 }
875 
876 /* Return the innermost LAMBDA_EXPR we're currently in, if any.  */
877 
878 tree
879 current_lambda_expr (void)
880 {
881   tree type = current_class_type;
882   while (type && !LAMBDA_TYPE_P (type))
883     type = decl_type_context (TYPE_NAME (type));
884   if (type)
885     return CLASSTYPE_LAMBDA_EXPR (type);
886   else
887     return NULL_TREE;
888 }
889 
890 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
891    object, NULL_TREE otherwise.  */
892 
893 static tree
894 resolvable_dummy_lambda (tree object)
895 {
896   if (!is_dummy_object (object))
897     return NULL_TREE;
898 
899   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
900   gcc_assert (!TYPE_PTR_P (type));
901 
902   if (type != current_class_type
903       && current_class_type
904       && LAMBDA_TYPE_P (current_class_type)
905       && lambda_function (current_class_type)
906       && DERIVED_FROM_P (type, nonlambda_method_basetype()))
907     return CLASSTYPE_LAMBDA_EXPR (current_class_type);
908 
909   return NULL_TREE;
910 }
911 
912 /* We don't want to capture 'this' until we know we need it, i.e. after
913    overload resolution has chosen a non-static member function.  At that
914    point we call this function to turn a dummy object into a use of the
915    'this' capture.  */
916 
917 tree
918 maybe_resolve_dummy (tree object, bool add_capture_p)
919 {
920   if (tree lam = resolvable_dummy_lambda (object))
921     if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
922       if (cap != error_mark_node)
923 	object = build_fold_indirect_ref (cap);
924 
925   return object;
926 }
927 
928 /* When parsing a generic lambda containing an argument-dependent
929    member function call we defer overload resolution to instantiation
930    time.  But we have to know now whether to capture this or not.
931    Do that if FNS contains any non-static fns.
932    The std doesn't anticipate this case, but I expect this to be the
933    outcome of discussion.  */
934 
935 void
936 maybe_generic_this_capture (tree object, tree fns)
937 {
938   if (tree lam = resolvable_dummy_lambda (object))
939     if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
940       {
941 	/* We've not yet captured, so look at the function set of
942 	   interest.  */
943 	if (BASELINK_P (fns))
944 	  fns = BASELINK_FUNCTIONS (fns);
945 	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
946 	if (id_expr)
947 	  fns = TREE_OPERAND (fns, 0);
948 
949 	for (lkp_iterator iter (fns); iter; ++iter)
950 	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
951 	       || TREE_CODE (*iter) == TEMPLATE_DECL)
952 	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
953 	    {
954 	      /* Found a non-static member.  Capture this.  */
955 	      lambda_expr_this_capture (lam, /*maybe*/-1);
956 	      break;
957 	    }
958       }
959 }
960 
961 /* Returns the innermost non-lambda function.  */
962 
963 tree
964 current_nonlambda_function (void)
965 {
966   tree fn = current_function_decl;
967   while (fn && LAMBDA_FUNCTION_P (fn))
968     fn = decl_function_context (fn);
969   return fn;
970 }
971 
972 /* Returns the method basetype of the innermost non-lambda function, including
973    a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */
974 
975 tree
976 nonlambda_method_basetype (void)
977 {
978   if (!current_class_ref)
979     return NULL_TREE;
980 
981   tree type = current_class_type;
982   if (!type || !LAMBDA_TYPE_P (type))
983     return type;
984 
985   while (true)
986     {
987       tree lam = CLASSTYPE_LAMBDA_EXPR (type);
988       tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
989       if (ex && TREE_CODE (ex) == FIELD_DECL)
990 	/* Lambda in an NSDMI.  */
991 	return DECL_CONTEXT (ex);
992 
993       tree fn = TYPE_CONTEXT (type);
994       if (!fn || TREE_CODE (fn) != FUNCTION_DECL
995 	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
996 	/* No enclosing non-lambda method.  */
997 	return NULL_TREE;
998       if (!LAMBDA_FUNCTION_P (fn))
999 	/* Found an enclosing non-lambda method.  */
1000 	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1001       type = DECL_CONTEXT (fn);
1002     }
1003 }
1004 
1005 /* Like current_scope, but looking through lambdas.  */
1006 
1007 tree
1008 current_nonlambda_scope (void)
1009 {
1010   tree scope = current_scope ();
1011   for (;;)
1012     {
1013       if (TREE_CODE (scope) == FUNCTION_DECL
1014 	  && LAMBDA_FUNCTION_P (scope))
1015 	{
1016 	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1017 	  continue;
1018 	}
1019       else if (LAMBDA_TYPE_P (scope))
1020 	{
1021 	  scope = CP_TYPE_CONTEXT (scope);
1022 	  continue;
1023 	}
1024       break;
1025     }
1026   return scope;
1027 }
1028 
1029 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1030    indicated FN and NARGS, but do not initialize the return type or any of the
1031    argument slots.  */
1032 
1033 static tree
1034 prepare_op_call (tree fn, int nargs)
1035 {
1036   tree t;
1037 
1038   t = build_vl_exp (CALL_EXPR, nargs + 3);
1039   CALL_EXPR_FN (t) = fn;
1040   CALL_EXPR_STATIC_CHAIN (t) = NULL;
1041 
1042   return t;
1043 }
1044 
1045 /* Return true iff CALLOP is the op() for a generic lambda.  */
1046 
1047 bool
1048 generic_lambda_fn_p (tree callop)
1049 {
1050   return (LAMBDA_FUNCTION_P (callop)
1051 	  && DECL_TEMPLATE_INFO (callop)
1052 	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1053 }
1054 
1055 /* If the closure TYPE has a static op(), also add a conversion to function
1056    pointer.  */
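/* Illustrative example (editorial addition, not part of the upstream
   source): a capture-less lambda such as '[] (int i) { return i; }'
   gets an 'operator int (*) (int)' conversion that returns the address
   of the static thunk '_FUN', so the lambda can be used where a plain
   function pointer is expected.  */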
1057 
1058 void
1059 maybe_add_lambda_conv_op (tree type)
1060 {
1061   bool nested = (cfun != NULL);
1062   bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1063   tree callop = lambda_function (type);
1064   tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1065 
1066   if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1067       || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1068     return;
1069 
1070   if (processing_template_decl)
1071     return;
1072 
1073   bool const generic_lambda_p = generic_lambda_fn_p (callop);
1074 
1075   if (!generic_lambda_p && undeduced_auto_decl (callop))
1076     {
1077       /* If the op() wasn't deduced due to errors, give up.  */
1078       gcc_assert (errorcount || sorrycount);
1079       return;
1080     }
1081 
1082   /* Non-generic non-capturing lambdas only have a conversion function to
1083      pointer to function when the trailing requires-clause's constraints are
1084      satisfied.  */
1085   if (!generic_lambda_p && !constraints_satisfied_p (callop))
1086     return;
1087 
1088   /* Non-template conversion operators are defined directly with build_call_a
1089      and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
1090      deferred and the CALL is built in-place.  In the case of a deduced return
1091      call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1092      the return type is also built in-place.  The arguments of DECLTYPE_CALL in
1093      the return expression may differ in flags from those in the body CALL.  In
1094      particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1095      the body CALL, but not in DECLTYPE_CALL.  */
1096 
1097   vec<tree, va_gc> *direct_argvec = 0;
1098   tree decltype_call = 0, call = 0;
1099   tree optype = TREE_TYPE (callop);
1100   tree fn_result = TREE_TYPE (optype);
1101 
1102   tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1103   if (generic_lambda_p)
1104     {
1105       ++processing_template_decl;
1106 
1107       /* Prepare the dependent member call for the static member function
1108 	 '_FUN' and, potentially, prepare another call to be used in a decltype
1109 	 return expression for a deduced return call op to allow for simple
1110 	 implementation of the conversion operator.  */
1111 
1112       tree instance = cp_build_fold_indirect_ref (thisarg);
1113       tree objfn = lookup_template_function (DECL_NAME (callop),
1114 					     DECL_TI_ARGS (callop));
1115       objfn = build_min (COMPONENT_REF, NULL_TREE,
1116 			 instance, objfn, NULL_TREE);
1117       int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1118 
1119       call = prepare_op_call (objfn, nargs);
1120       if (type_uses_auto (fn_result))
1121 	decltype_call = prepare_op_call (objfn, nargs);
1122     }
1123   else
1124     {
1125       direct_argvec = make_tree_vector ();
1126       direct_argvec->quick_push (thisarg);
1127     }
1128 
1129   /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1130      declare the static member function "_FUN" below.  For each arg append to
1131      DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1132      call args (for the template case).  If a parameter pack is found, expand
1133      it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */
1134 
1135   tree fn_args = NULL_TREE;
1136   {
1137     int ix = 0;
1138     tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1139     tree tgt = NULL;
1140 
1141     while (src)
1142       {
1143 	tree new_node = copy_node (src);
1144 	/* We set DECL_CONTEXT of NEW_NODE to the statfn below.
1145 	   Notice this is creating a recursive type!  */
1146 
1147 	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
1148 	TREE_ADDRESSABLE (new_node) = 0;
1149 
1150 	if (!fn_args)
1151 	  fn_args = tgt = new_node;
1152 	else
1153 	  {
1154 	    TREE_CHAIN (tgt) = new_node;
1155 	    tgt = new_node;
1156 	  }
1157 
1158 	mark_exp_read (tgt);
1159 
1160 	if (generic_lambda_p)
1161 	  {
1162 	    tree a = tgt;
1163 	    if (DECL_PACK_P (tgt))
1164 	      {
1165 		a = make_pack_expansion (a);
1166 		PACK_EXPANSION_LOCAL_P (a) = true;
1167 	      }
1168 	    CALL_EXPR_ARG (call, ix) = a;
1169 
1170 	    if (decltype_call)
1171 	      {
1172 		/* Avoid capturing variables in this context.  */
1173 		++cp_unevaluated_operand;
1174 		CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1175 		--cp_unevaluated_operand;
1176 	      }
1177 
1178 	    ++ix;
1179 	  }
1180 	else
1181 	  vec_safe_push (direct_argvec, tgt);
1182 
1183 	src = TREE_CHAIN (src);
1184       }
1185   }
1186 
1187   if (generic_lambda_p)
1188     {
1189       if (decltype_call)
1190 	{
1191 	  fn_result = finish_decltype_type
1192 	    (decltype_call, /*id_expression_or_member_access_p=*/false,
1193 	     tf_warning_or_error);
1194 	}
1195     }
1196   else
1197     {
1198       /* Don't warn on deprecated or unavailable lambda declarations, unless
1199 	 the lambda is actually called.  */
1200       auto du = make_temp_override (deprecated_state,
1201 				    UNAVAILABLE_DEPRECATED_SUPPRESS);
1202       call = build_call_a (callop, direct_argvec->length (),
1203 			   direct_argvec->address ());
1204     }
1205 
1206   CALL_FROM_THUNK_P (call) = 1;
1207   SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1208 
1209   tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1210   stattype = (cp_build_type_attribute_variant
1211 	      (stattype, TYPE_ATTRIBUTES (optype)));
1212   if (flag_noexcept_type
1213       && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1214     stattype = build_exception_variant (stattype, noexcept_true_spec);
1215 
1216   if (generic_lambda_p)
1217     --processing_template_decl;
1218 
1219   /* First build up the conversion op.  */
1220 
1221   tree rettype = build_pointer_type (stattype);
1222   tree name = make_conv_op_name (rettype);
1223   tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1224   tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1225   /* DR 1722: The conversion function should be noexcept.  */
1226   fntype = build_exception_variant (fntype, noexcept_true_spec);
1227   tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1228   SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1229   tree fn = convfn;
1230   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1231   SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1232   grokclassfn (type, fn, NO_SPECIAL);
1233   set_linkage_according_to_type (type, fn);
1234   rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1235   DECL_IN_AGGR_P (fn) = 1;
1236   DECL_ARTIFICIAL (fn) = 1;
1237   DECL_NOT_REALLY_EXTERN (fn) = 1;
1238   DECL_DECLARED_INLINE_P (fn) = 1;
1239   DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1240   if (DECL_IMMEDIATE_FUNCTION_P (callop))
1241     SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1242   DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1243 
1244   if (nested_def)
1245     DECL_INTERFACE_KNOWN (fn) = 1;
1246 
1247   if (generic_lambda_p)
1248     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1249 
1250   add_method (type, fn, false);
1251 
1252   /* Generic thunk code fails for varargs; we'll complain in mark_used if
1253      the conversion op is used.  */
1254   if (varargs_function_p (callop))
1255     {
1256       DECL_DELETED_FN (fn) = 1;
1257       return;
1258     }
1259 
1260   /* Now build up the thunk to be returned.  */
1261 
1262   tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1263   SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1264   fn = statfn;
1265   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1266   grokclassfn (type, fn, NO_SPECIAL);
1267   set_linkage_according_to_type (type, fn);
1268   rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1269   DECL_IN_AGGR_P (fn) = 1;
1270   DECL_ARTIFICIAL (fn) = 1;
1271   DECL_NOT_REALLY_EXTERN (fn) = 1;
1272   DECL_DECLARED_INLINE_P (fn) = 1;
1273   DECL_STATIC_FUNCTION_P (fn) = 1;
1274   DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1275   if (DECL_IMMEDIATE_FUNCTION_P (callop))
1276     SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1277   DECL_ARGUMENTS (fn) = fn_args;
1278   for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1279     {
1280       /* Avoid duplicate -Wshadow warnings.  */
1281       DECL_NAME (arg) = NULL_TREE;
1282       DECL_CONTEXT (arg) = fn;
1283     }
1284   if (nested_def)
1285     DECL_INTERFACE_KNOWN (fn) = 1;
1286 
1287   if (generic_lambda_p)
1288     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1289 
1290   if (flag_sanitize & SANITIZE_NULL)
1291     /* Don't UBsan this function; we're deliberately calling op() with a null
1292        object argument.  */
1293     add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1294 
1295   add_method (type, fn, false);
1296 
1297   if (nested)
1298     push_function_context ();
1299   else
1300     /* Still increment function_depth so that we don't GC in the
1301        middle of an expression.  */
1302     ++function_depth;
1303 
1304   /* Generate the body of the thunk.  */
1305 
1306   start_preparsed_function (statfn, NULL_TREE,
1307 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1308   tree body = begin_function_body ();
1309   tree compound_stmt = begin_compound_stmt (0);
1310   if (!generic_lambda_p)
1311     {
1312       set_flags_from_callee (call);
1313       if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1314 	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1315     }
1316   call = convert_from_reference (call);
1317   finish_return_stmt (call);
1318 
1319   finish_compound_stmt (compound_stmt);
1320   finish_function_body (body);
1321 
1322   fn = finish_function (/*inline_p=*/true);
1323   if (!generic_lambda_p)
1324     expand_or_defer_fn (fn);
1325 
1326   /* Generate the body of the conversion op.  */
1327 
1328   start_preparsed_function (convfn, NULL_TREE,
1329 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1330   body = begin_function_body ();
1331   compound_stmt = begin_compound_stmt (0);
1332 
1333   /* decl_needed_p needs to see that it's used.  */
1334   TREE_USED (statfn) = 1;
1335   finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1336 
1337   finish_compound_stmt (compound_stmt);
1338   finish_function_body (body);
1339 
1340   fn = finish_function (/*inline_p=*/true);
1341   if (!generic_lambda_p)
1342     expand_or_defer_fn (fn);
1343 
1344   if (nested)
1345     pop_function_context ();
1346   else
1347     --function_depth;
1348 }
1349 
1350 /* True if FN is the static function "_FUN" that gets returned from the lambda
1351    conversion operator.  */
1352 
1353 bool
1354 lambda_static_thunk_p (tree fn)
1355 {
1356   return (fn && TREE_CODE (fn) == FUNCTION_DECL
1357 	  && DECL_ARTIFICIAL (fn)
1358 	  && DECL_STATIC_FUNCTION_P (fn)
1359 	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1360 }
1361 
1362 bool
1363 call_from_lambda_thunk_p (tree call)
1364 {
1365   return (CALL_FROM_THUNK_P (call)
1366 	  && lambda_static_thunk_p (current_function_decl));
1367 }
1368 
1369 /* Returns true iff VAL is a lambda-related declaration which should
1370    be ignored by unqualified lookup.  */
1371 
1372 bool
1373 is_lambda_ignored_entity (tree val)
1374 {
1375   /* Look past normal, non-VLA capture proxies.  */
1376   if (is_normal_capture_proxy (val)
1377       && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1378     return true;
1379 
1380   /* Always ignore lambda fields, their names are only for debugging.  */
1381   if (TREE_CODE (val) == FIELD_DECL
1382       && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1383     return true;
1384 
1385   /* None of the lookups that use qualify_lookup want the op() from the
1386      lambda; they want the one from the enclosing class.  */
1387   if (tree fns = maybe_get_fns (val))
1388     if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
1389       return true;
1390 
1391   return false;
1392 }
1393 
1394 /* Lambdas that appear in variable initializer or default argument scope
1395    get that in their mangling, so we need to record it.  We might as well
1396    use the count for function and namespace scopes as well.  */
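/* Illustrative example (editorial addition, not part of the upstream
   source): in 'int x = [] { return 1; } () + [] { return 2; } ();' both
   closures record 'x' as their extra scope and get discriminators 0 and
   1, keeping their mangled names distinct.  */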
1397 static GTY(()) tree lambda_scope;
1398 static GTY(()) int lambda_count;
1399 struct GTY(()) tree_int
1400 {
1401   tree t;
1402   int i;
1403 };
1404 static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1405 
1406 void
1407 start_lambda_scope (tree decl)
1408 {
1409   tree_int ti;
1410   gcc_assert (decl);
1411   /* Once we're inside a function, we ignore variable scope and just push
1412      the function again so that popping works properly.  */
1413   if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1414     decl = current_function_decl;
1415   ti.t = lambda_scope;
1416   ti.i = lambda_count;
1417   vec_safe_push (lambda_scope_stack, ti);
1418   if (lambda_scope != decl)
1419     {
1420       /* Don't reset the count if we're still in the same function.  */
1421       lambda_scope = decl;
1422       lambda_count = 0;
1423     }
1424 }
1425 
1426 void
1427 record_lambda_scope (tree lambda)
1428 {
1429   LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1430   LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1431   if (lambda_scope)
1432     {
1433       tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1434       gcc_checking_assert (closure);
1435       maybe_attach_decl (lambda_scope, TYPE_NAME (closure));
1436     }
1437 }
1438 
1439 /* This lambda is an instantiation of a lambda in a template default argument
1440    that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
1441    need to use and increment the global count to avoid collisions.  */
1442 
1443 void
1444 record_null_lambda_scope (tree lambda)
1445 {
1446   if (vec_safe_is_empty (lambda_scope_stack))
1447     record_lambda_scope (lambda);
1448   else
1449     {
1450       tree_int *p = lambda_scope_stack->begin();
1451       LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
1452       LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
1453     }
1454   gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
1455 }
1456 
1457 void
1458 finish_lambda_scope (void)
1459 {
1460   tree_int *p = &lambda_scope_stack->last ();
1461   if (lambda_scope != p->t)
1462     {
1463       lambda_scope = p->t;
1464       lambda_count = p->i;
1465     }
1466   lambda_scope_stack->pop ();
1467 }
1468 
1469 tree
1470 start_lambda_function (tree fco, tree lambda_expr)
1471 {
1472   /* Let the front end know that we are going to be defining this
1473      function.  */
1474   start_preparsed_function (fco,
1475 			    NULL_TREE,
1476 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1477 
1478   tree body = begin_function_body ();
1479 
1480   /* Push the proxies for any explicit captures.  */
1481   for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1482        cap = TREE_CHAIN (cap))
1483     build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1484 
1485   return body;
1486 }
1487 
1488 /* Subroutine of prune_lambda_captures: CAP is a node in
1489    LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
1490    might optimize away the capture, or NULL_TREE if there is no such
1491    variable.  */
1492 
1493 static tree
1494 var_to_maybe_prune (tree cap)
1495 {
1496   if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1497     /* Don't prune explicit captures.  */
1498     return NULL_TREE;
1499 
1500   tree mem = TREE_PURPOSE (cap);
1501   if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1502     /* Packs and init-captures aren't captures of constant vars.  */
1503     return NULL_TREE;
1504 
1505   tree init = TREE_VALUE (cap);
1506   if (is_normal_capture_proxy (init))
1507     init = DECL_CAPTURED_VARIABLE (init);
1508   if (decl_constant_var_p (init))
1509     return init;
1510 
1511   return NULL_TREE;
1512 }
1513 
1514 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1515    for constant variables are actually used in the lambda body.
1516 
1517    There will always be a DECL_EXPR for the capture proxy; remember it when we
1518    see it, but replace it with any other use.  */
1519 
1520 static tree
1521 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1522 {
1523   hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1524 
1525   tree var = NULL_TREE;
1526   if (TREE_CODE (*t) == DECL_EXPR)
1527     {
1528       tree decl = DECL_EXPR_DECL (*t);
1529       if (is_constant_capture_proxy (decl))
1530 	{
1531 	  var = DECL_CAPTURED_VARIABLE (decl);
1532 	  *walk_subtrees = 0;
1533 	}
1534     }
1535   else if (!location_wrapper_p (*t) /* is_capture_proxy dislikes them.  */
1536 	   && is_constant_capture_proxy (*t))
1537     var = DECL_CAPTURED_VARIABLE (*t);
1538 
1539   if (var)
1540     {
1541       tree *&slot = const_vars.get_or_insert (var);
1542       if (!slot || VAR_P (*t))
1543 	slot = t;
1544     }
1545 
1546   return NULL_TREE;
1547 }
1548 
1549 /* We're at the end of processing a lambda; go back and remove any captures of
1550    constant variables for which we've folded away all uses.  */
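/* Illustrative example (editorial addition, not part of the upstream
   source): in 'const int c = 42; auto f = [=] { return c; };' every use
   of the captured 'c' folds to the constant, so the capture field and
   its proxy declaration can be spliced out of the closure.  */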
1551 
1552 static void
1553 prune_lambda_captures (tree body)
1554 {
1555   tree lam = current_lambda_expr ();
1556   if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1557     /* No uses were optimized away.  */
1558     return;
1559   if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1560     /* No default captures, and we don't prune explicit captures.  */
1561     return;
1562   /* Don't bother pruning in a template, we'll prune at instantiation time.  */
1563   if (dependent_type_p (TREE_TYPE (lam)))
1564     return;
1565 
1566   hash_map<tree,tree*> const_vars;
1567 
1568   cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1569 
1570   tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1571   for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1572     {
1573       tree cap = *capp;
1574       if (tree var = var_to_maybe_prune (cap))
1575 	{
1576 	  tree **use = const_vars.get (var);
1577 	  if (use && TREE_CODE (**use) == DECL_EXPR)
1578 	    {
1579 	      /* All uses of this capture were folded away, leaving only the
1580 		 proxy declaration.  */
1581 
1582 	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
1583 	      *capp = TREE_CHAIN (cap);
1584 
1585 	      /* And out of TYPE_FIELDS.  */
1586 	      tree field = TREE_PURPOSE (cap);
1587 	      while (*fieldp != field)
1588 		fieldp = &DECL_CHAIN (*fieldp);
1589 	      *fieldp = DECL_CHAIN (*fieldp);
1590 
1591 	      /* And remove the capture proxy declaration.  */
1592 	      **use = void_node;
1593 	      continue;
1594 	    }
1595 	}
1596 
1597       capp = &TREE_CHAIN (cap);
1598     }
1599 }
1600 
1601 void
1602 finish_lambda_function (tree body)
1603 {
1604   finish_function_body (body);
1605 
1606   prune_lambda_captures (body);
1607 
1608   /* Finish the function and generate code for it if necessary.  */
1609   tree fn = finish_function (/*inline_p=*/true);
1610 
1611   /* Only expand if the call op is not a template.  */
1612   if (!DECL_TEMPLATE_INFO (fn))
1613     expand_or_defer_fn (fn);
1614 }
1615 
1616 #include "gt-cp-lambda.h"
1617