1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2    building tree structure, checking semantic consistency, and
3    building RTL.  These routines are used both during actual parsing
4    and during the instantiation of template functions.
5 
6    Copyright (C) 1998-2017 Free Software Foundation, Inc.
7 
8    This file is part of GCC.
9 
10    GCC is free software; you can redistribute it and/or modify it
11    under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3, or (at your option)
13    any later version.
14 
15    GCC is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "cp-cilkplus.h"
34 
35 /* Constructor for a lambda expression.  */
36 
37 tree
38 build_lambda_expr (void)
39 {
40   tree lambda = make_node (LAMBDA_EXPR);
41   LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42   LAMBDA_EXPR_CAPTURE_LIST         (lambda) = NULL_TREE;
43   LAMBDA_EXPR_THIS_CAPTURE         (lambda) = NULL_TREE;
44   LAMBDA_EXPR_PENDING_PROXIES      (lambda) = NULL;
45   LAMBDA_EXPR_RETURN_TYPE          (lambda) = NULL_TREE;
46   LAMBDA_EXPR_MUTABLE_P            (lambda) = false;
47   return lambda;
48 }
49 
50 /* Create the closure object for a LAMBDA_EXPR.  */
51 
52 tree
53 build_lambda_object (tree lambda_expr)
54 {
55   /* Build aggregate constructor call.
56      - cp_parser_braced_list
57      - cp_parser_functional_cast  */
58   vec<constructor_elt, va_gc> *elts = NULL;
59   tree node, expr, type;
60   location_t saved_loc;
61 
62   if (processing_template_decl)
63     return lambda_expr;
64 
65   /* Make sure any error messages refer to the lambda-introducer.  */
66   saved_loc = input_location;
67   input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
68 
69   for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
70        node;
71        node = TREE_CHAIN (node))
72     {
73       tree field = TREE_PURPOSE (node);
74       tree val = TREE_VALUE (node);
75 
76       if (field == error_mark_node)
77 	{
78 	  expr = error_mark_node;
79 	  goto out;
80 	}
81 
82       if (TREE_CODE (val) == TREE_LIST)
83 	val = build_x_compound_expr_from_list (val, ELK_INIT,
84 					       tf_warning_or_error);
85 
86       if (DECL_P (val))
87 	mark_used (val);
88 
89       /* Mere mortals can't copy arrays with aggregate initialization, so
90 	 do some magic to make it work here.  */
91       if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
92 	val = build_array_copy (val);
93       else if (DECL_NORMAL_CAPTURE_P (field)
94 	       && !DECL_VLA_CAPTURE_P (field)
95 	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
96 	{
97 	  /* "the entities that are captured by copy are used to
98 	     direct-initialize each corresponding non-static data
99 	     member of the resulting closure object."
100 
101 	     There's normally no way to express direct-initialization
102 	     from an element of a CONSTRUCTOR, so we build up a special
103 	     TARGET_EXPR to bypass the usual copy-initialization.  */
104 	  val = force_rvalue (val, tf_warning_or_error);
105 	  if (TREE_CODE (val) == TARGET_EXPR)
106 	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
107 	}
108 
109       CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
110     }
111 
112   expr = build_constructor (init_list_type_node, elts);
113   CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
114 
115   /* N2927: "[The closure] class type is not an aggregate."
116      But we briefly treat it as an aggregate to make this simpler.  */
117   type = LAMBDA_EXPR_CLOSURE (lambda_expr);
118   CLASSTYPE_NON_AGGREGATE (type) = 0;
119   expr = finish_compound_literal (type, expr, tf_warning_or_error);
120   CLASSTYPE_NON_AGGREGATE (type) = 1;
121 
122  out:
123   input_location = saved_loc;
124   return expr;
125 }
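
/* Editorial illustration (not part of the original sources): a sketch of what
   build_lambda_object produces for a simple by-copy capture.

       int i = 42;
       auto f = [i] { return i; };   // closure object built as if from { __i }

   The closure class is briefly treated as an aggregate, and each by-copy
   capture member such as __i is direct-initialized from the captured entity
   via a TARGET_EXPR marked TARGET_EXPR_DIRECT_INIT_P, as described above.  */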
126 
127 /* Return an initialized RECORD_TYPE for LAMBDA.
128    LAMBDA must have its explicit captures already.  */
129 
130 tree
131 begin_lambda_type (tree lambda)
132 {
133   tree type;
134 
135   {
136     /* Unique name.  This is just like an unnamed class, but we cannot use
137        make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
138     tree name;
139     name = make_lambda_name ();
140 
141     /* Create the new RECORD_TYPE for this lambda.  */
142     type = xref_tag (/*tag_code=*/record_type,
143                      name,
144                      /*scope=*/ts_lambda,
145                      /*template_header_p=*/false);
146     if (type == error_mark_node)
147       return error_mark_node;
148   }
149 
150   /* Designate it as a struct so that we can use aggregate initialization.  */
151   CLASSTYPE_DECLARED_CLASS (type) = false;
152 
153   /* Cross-reference the expression and the type.  */
154   LAMBDA_EXPR_CLOSURE (lambda) = type;
155   CLASSTYPE_LAMBDA_EXPR (type) = lambda;
156 
157   /* In C++17, assume the closure is literal; we'll clear the flag later if
158      necessary.  */
159   if (cxx_dialect >= cxx1z)
160     CLASSTYPE_LITERAL_P (type) = true;
161 
162   /* Clear base types.  */
163   xref_basetypes (type, /*bases=*/NULL_TREE);
164 
165   /* Start the class.  */
166   type = begin_class_definition (type);
167 
168   return type;
169 }
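
/* Editorial illustration (not part of the original sources): for a lambda like

       auto f = [] (int x) { return x + 1; };

   begin_lambda_type creates the unnamed closure class, conceptually similar to

       struct __closure { int operator() (int x) const { return x + 1; } };

   except that the real class has an internal lambda name and is not an
   aggregate once parsing is finished; "__closure" here is only a stand-in.  */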
170 
171 /* Returns the type to use for the return type of the operator() of a
172    closure class.  */
173 
174 tree
175 lambda_return_type (tree expr)
176 {
177   if (expr == NULL_TREE)
178     return void_type_node;
179   if (type_unknown_p (expr)
180       || BRACE_ENCLOSED_INITIALIZER_P (expr))
181     {
182       cxx_incomplete_type_error (expr, TREE_TYPE (expr));
183       return error_mark_node;
184     }
185   gcc_checking_assert (!type_dependent_expression_p (expr));
186   return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
187 }
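
/* Editorial example (a sketch of the rule above, not from the sources): the
   deduced type is the decayed, cv-unqualified type of the expression, so for

       auto f = [] { return "abc"; };   // operator() returns const char *

   the array type of the string literal decays to a pointer.  */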
188 
189 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
190    closure type.  */
191 
192 tree
193 lambda_function (tree lambda)
194 {
195   tree type;
196   if (TREE_CODE (lambda) == LAMBDA_EXPR)
197     type = LAMBDA_EXPR_CLOSURE (lambda);
198   else
199     type = lambda;
200   gcc_assert (LAMBDA_TYPE_P (type));
201   /* Don't let debug_tree cause instantiation.  */
202   if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
203       && !COMPLETE_OR_OPEN_TYPE_P (type))
204     return NULL_TREE;
205   lambda = lookup_member (type, cp_operator_id (CALL_EXPR),
206 			  /*protect=*/0, /*want_type=*/false,
207 			  tf_warning_or_error);
208   if (lambda)
209     lambda = STRIP_TEMPLATE (get_first_fn (lambda));
210   return lambda;
211 }
212 
213 /* Returns the type to use for the FIELD_DECL corresponding to the
214    capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
215    C++14 init capture, and BY_REFERENCE_P indicates whether we're
216    capturing by reference.  */
217 
218 tree
219 lambda_capture_field_type (tree expr, bool explicit_init_p,
220 			   bool by_reference_p)
221 {
222   tree type;
223   bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
224 
225   if (!is_this && type_dependent_expression_p (expr))
226     {
227       type = cxx_make_type (DECLTYPE_TYPE);
228       DECLTYPE_TYPE_EXPR (type) = expr;
229       DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
230       DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
231       DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
232       SET_TYPE_STRUCTURAL_EQUALITY (type);
233     }
234   else if (!is_this && explicit_init_p)
235     {
236       tree auto_node = make_auto ();
237 
238       type = auto_node;
239       if (by_reference_p)
240 	/* Add the reference now, so deduction doesn't lose
241 	   outermost CV qualifiers of EXPR.  */
242 	type = build_reference_type (type);
243       type = do_auto_deduction (type, expr, auto_node);
244     }
245   else
246     {
247       type = non_reference (unlowered_expr_type (expr));
248 
249       if (!is_this && by_reference_p)
250 	type = build_reference_type (type);
251     }
252 
253   return type;
254 }
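
/* Editorial illustration of the non-dependent cases above (C++14; the
   variable names are only examples):

       int i = 0;  const int ci = 0;
       auto a = [i] { };           // capture field type: int
       auto b = [&i] { };          // capture field type: int &
       auto c = [x = i + 1] { };   // init-capture: deduced as if by auto x = i + 1;
       auto d = [&r = ci] { };     // init-capture by reference: const int &

   For a type-dependent EXPR inside a template, a DECLTYPE_TYPE placeholder
   is built instead, as in the first branch above.  */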
255 
256 /* Returns true iff DECL is a lambda capture proxy variable created by
257    build_capture_proxy.  */
258 
259 bool
260 is_capture_proxy (tree decl)
261 {
262   return (VAR_P (decl)
263 	  && DECL_HAS_VALUE_EXPR_P (decl)
264 	  && !DECL_ANON_UNION_VAR_P (decl)
265 	  && !DECL_DECOMPOSITION_P (decl)
266 	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
267 }
268 
269 /* Returns true iff DECL is a capture proxy for a normal capture
270    (i.e. without explicit initializer).  */
271 
272 bool
273 is_normal_capture_proxy (tree decl)
274 {
275   if (!is_capture_proxy (decl))
276     /* It's not a capture proxy.  */
277     return false;
278 
279   if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
280     /* VLA capture.  */
281     return true;
282 
283   /* It is a capture proxy; is it a normal capture?  */
284   tree val = DECL_VALUE_EXPR (decl);
285   if (val == error_mark_node)
286     return true;
287 
288   gcc_assert (TREE_CODE (val) == COMPONENT_REF);
289   val = TREE_OPERAND (val, 1);
290   return DECL_NORMAL_CAPTURE_P (val);
291 }
292 
293 /* VAR is a capture proxy created by build_capture_proxy; add it to the
294    current function, which is the operator() for the appropriate lambda.  */
295 
296 void
297 insert_capture_proxy (tree var)
298 {
299   cp_binding_level *b;
300   tree stmt_list;
301 
302   /* Put the capture proxy in the extra body block so that it won't clash
303      with a later local variable.  */
304   b = current_binding_level;
305   for (;;)
306     {
307       cp_binding_level *n = b->level_chain;
308       if (n->kind == sk_function_parms)
309 	break;
310       b = n;
311     }
312   pushdecl_with_scope (var, b, false);
313 
314   /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
315   var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
316   stmt_list = (*stmt_list_stack)[1];
317   gcc_assert (stmt_list);
318   append_to_statement_list_force (var, &stmt_list);
319 }
320 
321 /* We've just finished processing a lambda; if the containing scope is also
322    a lambda, insert any capture proxies that were created while processing
323    the nested lambda.  */
324 
325 void
326 insert_pending_capture_proxies (void)
327 {
328   tree lam;
329   vec<tree, va_gc> *proxies;
330   unsigned i;
331 
332   if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
333     return;
334 
335   lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
336   proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
337   for (i = 0; i < vec_safe_length (proxies); ++i)
338     {
339       tree var = (*proxies)[i];
340       insert_capture_proxy (var);
341     }
342   release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
343   LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
344 }
345 
346 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
347    return the type we want the proxy to have: the type of the field itself,
348    with added const-qualification if the lambda isn't mutable and the
349    capture is by value.  */
350 
351 tree
352 lambda_proxy_type (tree ref)
353 {
354   tree type;
355   if (ref == error_mark_node)
356     return error_mark_node;
357   if (REFERENCE_REF_P (ref))
358     ref = TREE_OPERAND (ref, 0);
359   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
360   type = TREE_TYPE (ref);
361   if (!type || WILDCARD_TYPE_P (non_reference (type)))
362     {
363       type = cxx_make_type (DECLTYPE_TYPE);
364       DECLTYPE_TYPE_EXPR (type) = ref;
365       DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
366       SET_TYPE_STRUCTURAL_EQUALITY (type);
367     }
368   if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
369     type = make_pack_expansion (type);
370   return type;
371 }
372 
373 /* MEMBER is a capture field in a lambda closure class.  Now that we're
374    inside the operator(), build a placeholder var for future lookups and
375    debugging.  */
376 
377 tree
378 build_capture_proxy (tree member)
379 {
380   tree var, object, fn, closure, name, lam, type;
381 
382   if (PACK_EXPANSION_P (member))
383     member = PACK_EXPANSION_PATTERN (member);
384 
385   closure = DECL_CONTEXT (member);
386   fn = lambda_function (closure);
387   lam = CLASSTYPE_LAMBDA_EXPR (closure);
388 
389   /* The proxy variable forwards to the capture field.  */
390   object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
391   object = finish_non_static_data_member (member, object, NULL_TREE);
392   if (REFERENCE_REF_P (object))
393     object = TREE_OPERAND (object, 0);
394 
395   /* Remove the __ inserted by add_capture.  */
396   name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
397 
398   type = lambda_proxy_type (object);
399 
400   if (name == this_identifier && !POINTER_TYPE_P (type))
401     {
402       type = build_pointer_type (type);
403       type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
404       object = build_fold_addr_expr_with_type (object, type);
405     }
406 
407   if (DECL_VLA_CAPTURE_P (member))
408     {
409       /* Rebuild the VLA type from the pointer and maxindex.  */
410       tree field = next_initializable_field (TYPE_FIELDS (type));
411       tree ptr = build_simple_component_ref (object, field);
412       field = next_initializable_field (DECL_CHAIN (field));
413       tree max = build_simple_component_ref (object, field);
414       type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
415 				     build_index_type (max));
416       type = build_reference_type (type);
417       REFERENCE_VLA_OK (type) = true;
418       object = convert (type, ptr);
419     }
420 
421   var = build_decl (input_location, VAR_DECL, name, type);
422   SET_DECL_VALUE_EXPR (var, object);
423   DECL_HAS_VALUE_EXPR_P (var) = 1;
424   DECL_ARTIFICIAL (var) = 1;
425   TREE_USED (var) = 1;
426   DECL_CONTEXT (var) = fn;
427 
428   if (name == this_identifier)
429     {
430       gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
431       LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
432     }
433 
434   if (fn == current_function_decl)
435     insert_capture_proxy (var);
436   else
437     vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
438 
439   return var;
440 }
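
/* Editorial sketch (names are illustrative, not from the sources): for

       int i = 0;
       auto f = [i] { return i; };

   the use of "i" inside the body resolves to the proxy VAR_DECL built here,
   whose DECL_VALUE_EXPR is the COMPONENT_REF for the __i field of the
   closure, so lookup and debugging see "i" rather than the mangled field.  */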
441 
442 /* Return a struct containing a pointer and a length for lambda capture of
443    an array of runtime length.  */
444 
445 static tree
446 vla_capture_type (tree array_type)
447 {
448   static tree ptr_id, max_id;
449   tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
450   xref_basetypes (type, NULL_TREE);
451   type = begin_class_definition (type);
452   if (!ptr_id)
453     {
454       ptr_id = get_identifier ("ptr");
455       max_id = get_identifier ("max");
456     }
457   tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
458   tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
459   finish_member_declaration (field);
460   field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
461   finish_member_declaration (field);
462   return finish_struct (type, NULL_TREE);
463 }
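
/* Editorial illustration (GNU/N3639-style extension; names are examples
   only): capturing an array of runtime bound stores a { ptr, max } struct of
   the kind built above, e.g.

       void g (int n)
       {
         int a[n];
         [&a] { a[0] = 1; } ();   // capture by reference of a VLA
       }

   build_capture_proxy later reconstructs a reference-to-VLA from the pair.  */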
464 
465 /* From an ID and INITIALIZER, create a capture (by reference if
466    BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
467    and return it.  If ID is `this', BY_REFERENCE_P says whether
468    `*this' is captured by reference.  */
469 
470 tree
471 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
472 	     bool explicit_init_p)
473 {
474   char *buf;
475   tree type, member, name;
476   bool vla = false;
477   bool variadic = false;
478   tree initializer = orig_init;
479 
480   if (PACK_EXPANSION_P (initializer))
481     {
482       initializer = PACK_EXPANSION_PATTERN (initializer);
483       variadic = true;
484     }
485 
486   if (TREE_CODE (initializer) == TREE_LIST
487       /* A pack expansion might end up with multiple elements.  */
488       && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
489     initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
490 						   tf_warning_or_error);
491   type = TREE_TYPE (initializer);
492   if (type == error_mark_node)
493     return error_mark_node;
494 
495   if (array_of_runtime_bound_p (type))
496     {
497       vla = true;
498       if (!by_reference_p)
499 	error ("array of runtime bound cannot be captured by copy, "
500 	       "only by reference");
501 
502       /* For a VLA, we capture the address of the first element and the
503 	 maximum index, and then reconstruct the VLA for the proxy.  */
504       tree elt = cp_build_array_ref (input_location, initializer,
505 				     integer_zero_node, tf_warning_or_error);
506       initializer = build_constructor_va (init_list_type_node, 2,
507 					  NULL_TREE, build_address (elt),
508 					  NULL_TREE, array_type_nelts (type));
509       type = vla_capture_type (type);
510     }
511   else if (!dependent_type_p (type)
512 	   && variably_modified_type_p (type, NULL_TREE))
513     {
514       error ("capture of variable-size type %qT that is not an N3639 array "
515 	     "of runtime bound", type);
516       if (TREE_CODE (type) == ARRAY_TYPE
517 	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
518 	inform (input_location, "because the array element type %qT has "
519 		"variable size", TREE_TYPE (type));
520       type = error_mark_node;
521     }
522   else
523     {
524       type = lambda_capture_field_type (initializer, explicit_init_p,
525 					by_reference_p);
526       if (type == error_mark_node)
527 	return error_mark_node;
528 
529       if (id == this_identifier && !by_reference_p)
530 	{
531 	  gcc_assert (POINTER_TYPE_P (type));
532 	  type = TREE_TYPE (type);
533 	  initializer = cp_build_indirect_ref (initializer, RO_NULL,
534 					       tf_warning_or_error);
535 	}
536 
537       if (dependent_type_p (type))
538 	;
539       else if (id != this_identifier && by_reference_p)
540 	{
541 	  if (!lvalue_p (initializer))
542 	    {
543 	      error ("cannot capture %qE by reference", initializer);
544 	      return error_mark_node;
545 	    }
546 	}
547       else
548 	{
549 	  /* Capture by copy requires a complete type.  */
550 	  type = complete_type (type);
551 	  if (!COMPLETE_TYPE_P (type))
552 	    {
553 	      error ("capture by copy of incomplete type %qT", type);
554 	      cxx_incomplete_type_inform (type);
555 	      return error_mark_node;
556 	    }
557 	}
558     }
559 
560   /* Add __ to the beginning of the field name so that user code
561      won't find the field with name lookup.  We can't just leave the name
562      unset because template instantiation uses the name to find
563      instantiated fields.  */
564   buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
565   buf[1] = buf[0] = '_';
566   memcpy (buf + 2, IDENTIFIER_POINTER (id),
567 	  IDENTIFIER_LENGTH (id) + 1);
568   name = get_identifier (buf);
569 
570   /* If the closure type (stored in TREE_TYPE) isn't set yet, we're still
571      in the introducer, so check for duplicates.  */
572   if (!LAMBDA_EXPR_CLOSURE (lambda))
573     {
574       if (IDENTIFIER_MARKED (name))
575 	{
576 	  pedwarn (input_location, 0,
577 		   "already captured %qD in lambda expression", id);
578 	  return NULL_TREE;
579 	}
580       IDENTIFIER_MARKED (name) = true;
581     }
582 
583   if (variadic)
584     type = make_pack_expansion (type);
585 
586   /* Make member variable.  */
587   member = build_decl (input_location, FIELD_DECL, name, type);
588   DECL_VLA_CAPTURE_P (member) = vla;
589 
590   if (!explicit_init_p)
591     /* Normal captures are invisible to name lookup but uses are replaced
592        with references to the capture field; we implement this by only
593        really making them invisible in unevaluated context; see
594        qualify_lookup.  For now, let's make explicitly initialized captures
595        always visible.  */
596     DECL_NORMAL_CAPTURE_P (member) = true;
597 
598   if (id == this_identifier)
599     LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
600 
601   /* Add it to the appropriate closure class if we've started it.  */
602   if (current_class_type
603       && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
604     finish_member_declaration (member);
605 
606   tree listmem = member;
607   if (variadic)
608     {
609       listmem = make_pack_expansion (member);
610       initializer = orig_init;
611     }
612   LAMBDA_EXPR_CAPTURE_LIST (lambda)
613     = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
614 
615   if (LAMBDA_EXPR_CLOSURE (lambda))
616     return build_capture_proxy (member);
617   /* For explicit captures we haven't started the function yet, so we wait
618      and build the proxy from cp_parser_lambda_body.  */
619   return NULL_TREE;
620 }
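
/* Editorial example of the behaviour described above (a hypothetical snippet,
   shown only to illustrate the naming and duplicate check): the field for a
   capture of "x" is named "__x", and naming the same entity twice in one
   introducer is diagnosed via IDENTIFIER_MARKED, e.g.

       int x = 0;
       auto f = [x, &x] { };   // pedwarn: already captured 'x' in lambda expression
   */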
621 
622 /* Register all the capture members on the list CAPTURES, which is the
623    LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */
624 
625 void
626 register_capture_members (tree captures)
627 {
628   if (captures == NULL_TREE)
629     return;
630 
631   register_capture_members (TREE_CHAIN (captures));
632 
633   tree field = TREE_PURPOSE (captures);
634   if (PACK_EXPANSION_P (field))
635     field = PACK_EXPANSION_PATTERN (field);
636 
637   /* We set this in add_capture to avoid duplicates.  */
638   IDENTIFIER_MARKED (DECL_NAME (field)) = false;
639   finish_member_declaration (field);
640 }
641 
642 /* Similar to add_capture, except this works on a stack of nested lambdas.
643    BY_REFERENCE_P in this case is derived from the default capture mode.
644    Returns the capture for the lambda at the bottom of the stack.  */
645 
646 tree
647 add_default_capture (tree lambda_stack, tree id, tree initializer)
648 {
649   bool this_capture_p = (id == this_identifier);
650 
651   tree var = NULL_TREE;
652 
653   tree saved_class_type = current_class_type;
654 
655   tree node;
656 
657   for (node = lambda_stack;
658        node;
659        node = TREE_CHAIN (node))
660     {
661       tree lambda = TREE_VALUE (node);
662 
663       current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
664       if (DECL_PACK_P (initializer))
665 	initializer = make_pack_expansion (initializer);
666       var = add_capture (lambda,
667                             id,
668                             initializer,
669                             /*by_reference_p=*/
670 			    (this_capture_p
671 			     || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
672 				 == CPLD_REFERENCE)),
673 			    /*explicit_init_p=*/false);
674       initializer = convert_from_reference (var);
675     }
676 
677   current_class_type = saved_class_type;
678 
679   return var;
680 }
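
/* Editorial illustration: for nested default captures such as

       int i = 0;
       auto f = [&] { return [&] { return i; } (); };

   the use of "i" in the inner lambda walks the lambda stack outermost-first,
   so the outer closure captures i and the inner closure captures the outer
   proxy, as the loop above does one lambda at a time.  */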
681 
682 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
683    form of an INDIRECT_REF, possibly adding it through default
684    capturing if ADD_CAPTURE_P is true.  */
685 
686 tree
687 lambda_expr_this_capture (tree lambda, bool add_capture_p)
688 {
689   tree result;
690 
691   tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
692 
693   /* In unevaluated context 'this' isn't odr-used, so don't capture.  */
694   if (cp_unevaluated_operand)
695     add_capture_p = false;
696 
697   /* Try to default capture 'this' if we can.  */
698   if (!this_capture
699       && (!add_capture_p
700           || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
701     {
702       tree lambda_stack = NULL_TREE;
703       tree init = NULL_TREE;
704 
705       /* If we are in a lambda function, we can move out until we hit:
706            1. a non-lambda function or NSDMI,
707            2. a lambda function capturing 'this', or
708            3. a non-default capturing lambda function.  */
709       for (tree tlambda = lambda; ;)
710 	{
711           lambda_stack = tree_cons (NULL_TREE,
712                                     tlambda,
713                                     lambda_stack);
714 
715 	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
716 	      && !COMPLETE_TYPE_P (LAMBDA_EXPR_CLOSURE (tlambda))
717 	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
718 	    {
719 	      /* In an NSDMI, we don't have a function to look up the decl in,
720 		 but the fake 'this' pointer that we're using for parsing is
721 		 in scope_chain.  But if the closure is already complete, we're
722 	         in an instantiation of a generic lambda, and the fake 'this'
723 	         is gone.  */
724 	      init = scope_chain->x_current_class_ptr;
725 	      gcc_checking_assert
726 		(init && (TREE_TYPE (TREE_TYPE (init))
727 			  == current_nonlambda_class_type ()));
728 	      break;
729 	    }
730 
731 	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
732 	  tree containing_function = decl_function_context (closure_decl);
733 
734 	  if (containing_function == NULL_TREE)
735 	    /* We ran out of scopes; there's no 'this' to capture.  */
736 	    break;
737 
738 	  if (!LAMBDA_FUNCTION_P (containing_function))
739 	    {
740 	      /* We found a non-lambda function.  */
741 	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
742 		/* First parameter is 'this'.  */
743 		init = DECL_ARGUMENTS (containing_function);
744 	      break;
745 	    }
746 
747 	  tlambda
748             = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
749 
750           if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
751 	    {
752 	      /* An outer lambda has already captured 'this'.  */
753 	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
754 	      break;
755 	    }
756 
757 	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
758 	    /* An outer lambda won't let us capture 'this'.  */
759 	    break;
760 	}
761 
762       if (init)
763         {
764           if (add_capture_p)
765 	    this_capture = add_default_capture (lambda_stack,
766 					        /*id=*/this_identifier,
767 					        init);
768           else
769 	    this_capture = init;
770         }
771     }
772 
773   if (cp_unevaluated_operand)
774     result = this_capture;
775   else if (!this_capture)
776     {
777       if (add_capture_p)
778 	{
779 	  error ("%<this%> was not captured for this lambda function");
780 	  result = error_mark_node;
781 	}
782       else
783 	result = NULL_TREE;
784     }
785   else
786     {
787       /* To make sure that current_class_ref is for the lambda.  */
788       gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
789 		  == LAMBDA_EXPR_CLOSURE (lambda));
790 
791       result = this_capture;
792 
793       /* If 'this' is captured, each use of 'this' is transformed into an
794 	 access to the corresponding unnamed data member of the closure
795 	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
796 	 ensures that the transformed expression is an rvalue. ] */
797       result = rvalue (result);
798     }
799 
800   return result;
801 }
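
/* Editorial illustration of the walk above (names are examples only):

       struct S {
         int m;
         void f () { [=] { return [=] { return m; } (); } (); }
       };

   The use of "m" needs 'this'; the search climbs out through each enclosing
   default-capturing lambda until it reaches f's 'this' parameter, adding a
   'this' capture to every lambda on the way via add_default_capture.  */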
802 
803 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
804    object.  NULL_TREE otherwise.  */
805 
806 static tree
807 resolvable_dummy_lambda (tree object)
808 {
809   if (!is_dummy_object (object))
810     return NULL_TREE;
811 
812   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
813   gcc_assert (!TYPE_PTR_P (type));
814 
815   if (type != current_class_type
816       && current_class_type
817       && LAMBDA_TYPE_P (current_class_type)
818       && lambda_function (current_class_type)
819       && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
820     return CLASSTYPE_LAMBDA_EXPR (current_class_type);
821 
822   return NULL_TREE;
823 }
824 
825 /* We don't want to capture 'this' until we know we need it, i.e. after
826    overload resolution has chosen a non-static member function.  At that
827    point we call this function to turn a dummy object into a use of the
828    'this' capture.  */
829 
830 tree
831 maybe_resolve_dummy (tree object, bool add_capture_p)
832 {
833   if (tree lam = resolvable_dummy_lambda (object))
834     if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
835       if (cap != error_mark_node)
836 	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
837 				       RO_NULL, tf_warning_or_error);
838 
839   return object;
840 }
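
/* Editorial example (hypothetical snippet): an unqualified call to a member
   function inside a lambda, e.g.

       struct S {
         void g ();
         void f () { [=] { g (); } (); }   // g() needs 'this'
       };

   is parsed with a dummy object; once overload resolution picks the
   non-static g, the dummy is replaced by a use of the 'this' capture here.  */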
841 
842 /* When parsing a generic lambda containing an argument-dependent
843    member function call, we defer overload resolution to instantiation
844    time.  But we have to know now whether to capture 'this' or not.
845    Do that if FNS contains any non-static fns.
846    The std doesn't anticipate this case, but I expect this to be the
847    outcome of discussion.  */
848 
849 void
850 maybe_generic_this_capture (tree object, tree fns)
851 {
852   if (tree lam = resolvable_dummy_lambda (object))
853     if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
854       {
855 	/* We've not yet captured, so look at the function set of
856 	   interest.  */
857 	if (BASELINK_P (fns))
858 	  fns = BASELINK_FUNCTIONS (fns);
859 	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
860 	if (id_expr)
861 	  fns = TREE_OPERAND (fns, 0);
862 	for (; fns; fns = OVL_NEXT (fns))
863 	  {
864 	    tree fn = OVL_CURRENT (fns);
865 
866 	    if ((!id_expr || TREE_CODE (fn) == TEMPLATE_DECL)
867 		&& DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
868 	      {
869 		/* Found a non-static member function.  Capture 'this'.  */
870 		lambda_expr_this_capture (lam, true);
871 		break;
872 	      }
873 	  }
874       }
875 }
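
/* Editorial example of the deferred case described above (names are
   examples only):

       struct S {
         void g (int);
         void f () { [=] (auto x) { g (x); } (0); }
       };

   In the generic lambda, overload resolution for g (x) is deferred to
   instantiation, but the candidate set contains a non-static member
   function, so 'this' is captured eagerly here.  */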
876 
877 /* Returns the innermost non-lambda function.  */
878 
879 tree
880 current_nonlambda_function (void)
881 {
882   tree fn = current_function_decl;
883   while (fn && LAMBDA_FUNCTION_P (fn))
884     fn = decl_function_context (fn);
885   return fn;
886 }
887 
888 /* Returns the method basetype of the innermost non-lambda function, or
889    NULL_TREE if none.  */
890 
891 tree
892 nonlambda_method_basetype (void)
893 {
894   tree fn, type;
895   if (!current_class_ref)
896     return NULL_TREE;
897 
898   type = current_class_type;
899   if (!LAMBDA_TYPE_P (type))
900     return type;
901 
902   /* Find the nearest enclosing non-lambda function.  */
903   fn = TYPE_NAME (type);
904   do
905     fn = decl_function_context (fn);
906   while (fn && LAMBDA_FUNCTION_P (fn));
907 
908   if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
909     return NULL_TREE;
910 
911   return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
912 }
913 
914 /* Like current_scope, but looking through lambdas.  */
915 
916 tree
917 current_nonlambda_scope (void)
918 {
919   tree scope = current_scope ();
920   for (;;)
921     {
922       if (TREE_CODE (scope) == FUNCTION_DECL
923 	  && LAMBDA_FUNCTION_P (scope))
924 	{
925 	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
926 	  continue;
927 	}
928       else if (LAMBDA_TYPE_P (scope))
929 	{
930 	  scope = CP_TYPE_CONTEXT (scope);
931 	  continue;
932 	}
933       break;
934     }
935   return scope;
936 }
937 
938 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
939    indicated FN and NARGS, but do not initialize the return type or any of the
940    argument slots.  */
941 
942 static tree
943 prepare_op_call (tree fn, int nargs)
944 {
945   tree t;
946 
947   t = build_vl_exp (CALL_EXPR, nargs + 3);
948   CALL_EXPR_FN (t) = fn;
949   CALL_EXPR_STATIC_CHAIN (t) = NULL;
950 
951   return t;
952 }
953 
954 /* Return true iff CALLOP is the op() for a generic lambda.  */
955 
956 bool
957 generic_lambda_fn_p (tree callop)
958 {
959   return (LAMBDA_FUNCTION_P (callop)
960 	  && DECL_TEMPLATE_INFO (callop)
961 	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
962 }
963 
964 /* If the closure TYPE is captureless (so its op() could be static), also add
965    a conversion to function pointer.  */
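
/* Editorial illustration: this is what lets a captureless lambda convert to a
   plain function pointer, e.g.

       int (*fp) (int, int) = [] (int a, int b) { return a + b; };

   The conversion operator returns the static "_FUN" thunk built below, which
   simply forwards to operator() (with a null object argument).  */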
966 
967 void
968 maybe_add_lambda_conv_op (tree type)
969 {
970   bool nested = (cfun != NULL);
971   bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
972   tree callop = lambda_function (type);
973   tree lam = CLASSTYPE_LAMBDA_EXPR (type);
974 
975   if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
976       || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
977     return;
978 
979   if (processing_template_decl)
980     return;
981 
982   bool const generic_lambda_p = generic_lambda_fn_p (callop);
983 
984   if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
985     {
986       /* If the op() wasn't instantiated due to errors, give up.  */
987       gcc_assert (errorcount || sorrycount);
988       return;
989     }
990 
991   /* Non-template conversion operators are defined directly with build_call_a
992      and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
993      deferred and the CALL is built in-place.  In the case of a deduced return
994      call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
995      the return type is also built in-place.  The arguments of DECLTYPE_CALL in
996      the return expression may differ in flags from those in the body CALL.  In
997      particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
998      the body CALL, but not in DECLTYPE_CALL.  */
999 
1000   vec<tree, va_gc> *direct_argvec = 0;
1001   tree decltype_call = 0, call = 0;
1002   tree optype = TREE_TYPE (callop);
1003   tree fn_result = TREE_TYPE (optype);
1004 
1005   tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
1006 			    null_pointer_node);
1007   if (generic_lambda_p)
1008     {
1009       ++processing_template_decl;
1010 
1011       /* Prepare the dependent member call for the static member function
1012 	 '_FUN' and, potentially, prepare another call to be used in a decltype
1013 	 return expression for a deduced return call op to allow for simple
1014 	 implementation of the conversion operator.  */
1015 
1016       tree instance = cp_build_indirect_ref (thisarg, RO_NULL,
1017 					     tf_warning_or_error);
1018       tree objfn = build_min (COMPONENT_REF, NULL_TREE,
1019 			      instance, DECL_NAME (callop), NULL_TREE);
1020       int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1021 
1022       call = prepare_op_call (objfn, nargs);
1023       if (type_uses_auto (fn_result))
1024 	decltype_call = prepare_op_call (objfn, nargs);
1025     }
1026   else
1027     {
1028       direct_argvec = make_tree_vector ();
1029       direct_argvec->quick_push (thisarg);
1030     }
1031 
1032   /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1033      declare the static member function "_FUN" below.  For each arg append to
1034      DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1035      call args (for the template case).  If a parameter pack is found, expand
1036      it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */
1037 
1038   tree fn_args = NULL_TREE;
1039   {
1040     int ix = 0;
1041     tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1042     tree tgt = NULL;
1043 
1044     while (src)
1045       {
1046 	tree new_node = copy_node (src);
1047 
1048 	if (!fn_args)
1049 	  fn_args = tgt = new_node;
1050 	else
1051 	  {
1052 	    TREE_CHAIN (tgt) = new_node;
1053 	    tgt = new_node;
1054 	  }
1055 
1056 	mark_exp_read (tgt);
1057 
1058 	if (generic_lambda_p)
1059 	  {
1060 	    tree a = forward_parm (tgt);
1061 
1062 	    CALL_EXPR_ARG (call, ix) = a;
1063 	    if (decltype_call)
1064 	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);
1065 
1066 	    if (PACK_EXPANSION_P (a))
1067 	      /* Set this after unsharing so it's not in decltype_call.  */
1068 	      PACK_EXPANSION_LOCAL_P (a) = true;
1069 
1070 	    ++ix;
1071 	  }
1072 	else
1073 	  vec_safe_push (direct_argvec, tgt);
1074 
1075 	src = TREE_CHAIN (src);
1076       }
1077   }
1078 
1079   if (generic_lambda_p)
1080     {
1081       if (decltype_call)
1082 	{
1083 	  fn_result = finish_decltype_type
1084 	    (decltype_call, /*id_expression_or_member_access_p=*/false,
1085 	     tf_warning_or_error);
1086 	}
1087     }
1088   else
1089     call = build_call_a (callop,
1090 			 direct_argvec->length (),
1091 			 direct_argvec->address ());
1092 
1093   CALL_FROM_THUNK_P (call) = 1;
1094   SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1095 
1096   tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1097   stattype = (cp_build_type_attribute_variant
1098 	      (stattype, TYPE_ATTRIBUTES (optype)));
1099   if (flag_noexcept_type
1100       && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1101     stattype = build_exception_variant (stattype, noexcept_true_spec);
1102 
1103   if (generic_lambda_p)
1104     --processing_template_decl;
1105 
1106   /* First build up the conversion op.  */
1107 
1108   tree rettype = build_pointer_type (stattype);
1109   tree name = mangle_conv_op_name_for_type (rettype);
1110   tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1111   tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1112   tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1113   tree fn = convfn;
1114   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1115   SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1116   SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
1117   grokclassfn (type, fn, NO_SPECIAL);
1118   set_linkage_according_to_type (type, fn);
1119   rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1120   DECL_IN_AGGR_P (fn) = 1;
1121   DECL_ARTIFICIAL (fn) = 1;
1122   DECL_NOT_REALLY_EXTERN (fn) = 1;
1123   DECL_DECLARED_INLINE_P (fn) = 1;
1124   DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
1125   if (nested_def)
1126     DECL_INTERFACE_KNOWN (fn) = 1;
1127 
1128   if (generic_lambda_p)
1129     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1130 
1131   add_method (type, fn, NULL_TREE);
1132 
1133   /* Generic thunk code fails for varargs; we'll complain in mark_used if
1134      the conversion op is used.  */
1135   if (varargs_function_p (callop))
1136     {
1137       DECL_DELETED_FN (fn) = 1;
1138       return;
1139     }
1140 
1141   /* Now build up the thunk to be returned.  */
1142 
1143   name = get_identifier ("_FUN");
1144   tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
1145   fn = statfn;
1146   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1147   grokclassfn (type, fn, NO_SPECIAL);
1148   set_linkage_according_to_type (type, fn);
1149   rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1150   DECL_IN_AGGR_P (fn) = 1;
1151   DECL_ARTIFICIAL (fn) = 1;
1152   DECL_NOT_REALLY_EXTERN (fn) = 1;
1153   DECL_DECLARED_INLINE_P (fn) = 1;
1154   DECL_STATIC_FUNCTION_P (fn) = 1;
1155   DECL_ARGUMENTS (fn) = fn_args;
1156   for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1157     {
1158       /* Avoid duplicate -Wshadow warnings.  */
1159       DECL_NAME (arg) = NULL_TREE;
1160       DECL_CONTEXT (arg) = fn;
1161     }
1162   if (nested_def)
1163     DECL_INTERFACE_KNOWN (fn) = 1;
1164 
1165   if (generic_lambda_p)
1166     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1167 
1168   if (flag_sanitize & SANITIZE_NULL)
1169     {
1170       /* Don't UBsan this function; we're deliberately calling op() with a null
1171 	 object argument.  */
1172       tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
1173 				    NULL_TREE);
1174       cplus_decl_attributes (&fn, attrs, 0);
1175     }
1176 
1177   add_method (type, fn, NULL_TREE);
1178 
1179   if (nested)
1180     push_function_context ();
1181   else
1182     /* Still increment function_depth so that we don't GC in the
1183        middle of an expression.  */
1184     ++function_depth;
1185 
1186   /* Generate the body of the thunk.  */
1187 
1188   start_preparsed_function (statfn, NULL_TREE,
1189 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1190   if (DECL_ONE_ONLY (statfn))
1191     {
1192       /* Put the thunk in the same comdat group as the call op.  */
1193       cgraph_node::get_create (statfn)->add_to_same_comdat_group
1194 	(cgraph_node::get_create (callop));
1195     }
1196   tree body = begin_function_body ();
1197   tree compound_stmt = begin_compound_stmt (0);
1198   if (!generic_lambda_p)
1199     {
1200       set_flags_from_callee (call);
1201       if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1202 	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1203     }
1204   call = convert_from_reference (call);
1205   finish_return_stmt (call);
1206 
1207   finish_compound_stmt (compound_stmt);
1208   finish_function_body (body);
1209 
1210   fn = finish_function (/*inline*/2);
1211   if (!generic_lambda_p)
1212     expand_or_defer_fn (fn);
1213 
1214   /* Generate the body of the conversion op.  */
1215 
1216   start_preparsed_function (convfn, NULL_TREE,
1217 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1218   body = begin_function_body ();
1219   compound_stmt = begin_compound_stmt (0);
1220 
1221   /* decl_needed_p needs to see that it's used.  */
1222   TREE_USED (statfn) = 1;
1223   finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1224 
1225   finish_compound_stmt (compound_stmt);
1226   finish_function_body (body);
1227 
1228   fn = finish_function (/*inline*/2);
1229   if (!generic_lambda_p)
1230     expand_or_defer_fn (fn);
1231 
1232   if (nested)
1233     pop_function_context ();
1234   else
1235     --function_depth;
1236 }
1237 
1238 /* True if FN is the static function "_FUN" that gets returned from the lambda
1239    conversion operator.  */
1240 
1241 bool
1242 lambda_static_thunk_p (tree fn)
1243 {
1244   return (fn && TREE_CODE (fn) == FUNCTION_DECL
1245 	  && DECL_ARTIFICIAL (fn)
1246 	  && DECL_STATIC_FUNCTION_P (fn)
1247 	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1248 }
1249 
1250 /* Returns true iff VAL is a lambda-related declaration which should
1251    be ignored by unqualified lookup.  */
1252 
1253 bool
1254 is_lambda_ignored_entity (tree val)
1255 {
1256   /* In unevaluated context, look past normal capture proxies.  */
1257   if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1258     return true;
1259 
1260   /* Always ignore lambda fields; their names are only for debugging.  */
1261   if (TREE_CODE (val) == FIELD_DECL
1262       && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1263     return true;
1264 
1265   /* None of the lookups that use qualify_lookup want the op() from the
1266      lambda; they want the one from the enclosing class.  */
1267   if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1268     return true;
1269 
1270   return false;
1271 }
1272