xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cp/lambda.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2    building tree structure, checking semantic consistency, and
3    building RTL.  These routines are used both during actual parsing
4    and during the instantiation of template functions.
5 
6    Copyright (C) 1998-2015 Free Software Foundation, Inc.
7 
8    This file is part of GCC.
9 
10    GCC is free software; you can redistribute it and/or modify it
11    under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3, or (at your option)
13    any later version.
14 
15    GCC is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "options.h"
35 #include "wide-int.h"
36 #include "inchash.h"
37 #include "tree.h"
38 #include "stringpool.h"
39 #include "hash-map.h"
40 #include "is-a.h"
41 #include "plugin-api.h"
42 #include "tm.h"
43 #include "hard-reg-set.h"
44 #include "input.h"
45 #include "function.h"
46 #include "ipa-ref.h"
47 #include "cgraph.h"
48 #include "tree-iterator.h"
49 #include "cp-tree.h"
50 #include "toplev.h"
51 
52 /* Constructor for a lambda expression.  */
53 
54 tree
55 build_lambda_expr (void)
56 {
57   tree lambda = make_node (LAMBDA_EXPR);
58   LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
59   LAMBDA_EXPR_CAPTURE_LIST         (lambda) = NULL_TREE;
60   LAMBDA_EXPR_THIS_CAPTURE         (lambda) = NULL_TREE;
61   LAMBDA_EXPR_PENDING_PROXIES      (lambda) = NULL;
62   LAMBDA_EXPR_RETURN_TYPE          (lambda) = NULL_TREE;
63   LAMBDA_EXPR_MUTABLE_P            (lambda) = false;
64   return lambda;
65 }
66 
67 /* Create the closure object for a LAMBDA_EXPR.  */
68 
69 tree
70 build_lambda_object (tree lambda_expr)
71 {
72   /* Build an aggregate constructor call, along the same lines as what
73      cp_parser_braced_list and cp_parser_functional_cast do for a
74      braced-init-list.  */
75   vec<constructor_elt, va_gc> *elts = NULL;
76   tree node, expr, type;
77   location_t saved_loc;
78 
79   if (processing_template_decl)
80     return lambda_expr;
81 
82   /* Make sure any error messages refer to the lambda-introducer.  */
83   saved_loc = input_location;
84   input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
85 
86   for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
87        node;
88        node = TREE_CHAIN (node))
89     {
90       tree field = TREE_PURPOSE (node);
91       tree val = TREE_VALUE (node);
92 
93       if (field == error_mark_node)
94 	{
95 	  expr = error_mark_node;
96 	  goto out;
97 	}
98 
99       if (DECL_P (val))
100 	mark_used (val);
101 
102       /* Mere mortals can't copy arrays with aggregate initialization, so
103 	 do some magic to make it work here.  */
104       if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
105 	val = build_array_copy (val);
106       else if (DECL_NORMAL_CAPTURE_P (field)
107 	       && !DECL_VLA_CAPTURE_P (field)
108 	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
109 	{
110 	  /* "the entities that are captured by copy are used to
111 	     direct-initialize each corresponding non-static data
112 	     member of the resulting closure object."
113 
114 	     There's normally no way to express direct-initialization
115 	     from an element of a CONSTRUCTOR, so we build up a special
116 	     TARGET_EXPR to bypass the usual copy-initialization.  */
117 	  val = force_rvalue (val, tf_warning_or_error);
118 	  if (TREE_CODE (val) == TARGET_EXPR)
119 	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
120 	}
121 
122       CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
123     }
124 
125   expr = build_constructor (init_list_type_node, elts);
126   CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
127 
128   /* N2927: "[The closure] class type is not an aggregate."
129      But we briefly treat it as an aggregate to make this simpler.  */
130   type = LAMBDA_EXPR_CLOSURE (lambda_expr);
131   CLASSTYPE_NON_AGGREGATE (type) = 0;
132   expr = finish_compound_literal (type, expr, tf_warning_or_error);
133   CLASSTYPE_NON_AGGREGATE (type) = 1;
134 
135  out:
136   input_location = saved_loc;
137   return expr;
138 }
139 
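/* Editorial illustration (not part of the original sources): the
   TARGET_EXPR_DIRECT_INIT_P trick above is what makes a by-copy capture
   direct-initialize its closure member, e.g.

     struct widget { widget (); explicit widget (const widget &); };
     void use (const widget &w)
     {
       [w] { } ();   // OK only because the member for w is
		     // direct-initialized, which may use the
		     // explicit copy constructor
     }

   ("widget" and "use" are illustrative names.)  */
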
140 /* Return an initialized RECORD_TYPE for LAMBDA.
141    LAMBDA must have its explicit captures already.  */
142 
143 tree
144 begin_lambda_type (tree lambda)
145 {
146   tree type;
147 
148   {
149     /* Unique name.  This is just like an unnamed class, but we cannot use
150        make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
151     tree name;
152     name = make_lambda_name ();
153 
154     /* Create the new RECORD_TYPE for this lambda.  */
155     type = xref_tag (/*tag_code=*/record_type,
156                      name,
157                      /*scope=*/ts_lambda,
158                      /*template_header_p=*/false);
159     if (type == error_mark_node)
160       return error_mark_node;
161   }
162 
163   /* Designate it as a struct so that we can use aggregate initialization.  */
164   CLASSTYPE_DECLARED_CLASS (type) = false;
165 
166   /* Cross-reference the expression and the type.  */
167   LAMBDA_EXPR_CLOSURE (lambda) = type;
168   CLASSTYPE_LAMBDA_EXPR (type) = lambda;
169 
170   /* Clear base types.  */
171   xref_basetypes (type, /*bases=*/NULL_TREE);
172 
173   /* Start the class.  */
174   type = begin_class_definition (type);
175 
176   return type;
177 }
178 
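/* Exposition (added): the RECORD_TYPE started here behaves much like a
   hand-written function object.  For

     int x = 1, y = 2;
     auto f = [x, &y] (int i) { return x + y + i; };

   the closure is roughly

     struct __lambda
     {
       int __x;
       int &__y;
       int operator () (int i) const { return __x + __y + i; }
     };

   where "__lambda", "__x" and "__y" are illustrative names; the real
   names come from make_lambda_name and add_capture below.  */
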
179 /* Returns the type to use for the return type of the operator() of a
180    closure class.  */
181 
182 tree
183 lambda_return_type (tree expr)
184 {
185   if (expr == NULL_TREE)
186     return void_type_node;
187   if (type_unknown_p (expr)
188       || BRACE_ENCLOSED_INITIALIZER_P (expr))
189     {
190       cxx_incomplete_type_error (expr, TREE_TYPE (expr));
191       return void_type_node;
192     }
193   gcc_checking_assert (!type_dependent_expression_p (expr));
194   return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
195 }
196 
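/* Exposition (added): the decay and cv-stripping above mean that, for
   example,

     const char msg[] = "hi";
     auto f = [&] { return msg; };   // deduced return type: const char *

   an array or function operand decays to a pointer and top-level
   cv-qualifiers are dropped, as in ordinary template argument
   deduction.  */
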
197 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
198    closure type.  */
199 
200 tree
201 lambda_function (tree lambda)
202 {
203   tree type;
204   if (TREE_CODE (lambda) == LAMBDA_EXPR)
205     type = LAMBDA_EXPR_CLOSURE (lambda);
206   else
207     type = lambda;
208   gcc_assert (LAMBDA_TYPE_P (type));
209   /* Don't let debug_tree cause instantiation.  */
210   if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
211       && !COMPLETE_OR_OPEN_TYPE_P (type))
212     return NULL_TREE;
213   lambda = lookup_member (type, ansi_opname (CALL_EXPR),
214 			  /*protect=*/0, /*want_type=*/false,
215 			  tf_warning_or_error);
216   if (lambda)
217     lambda = STRIP_TEMPLATE (get_first_fn (lambda));
218   return lambda;
219 }
220 
221 /* Returns the type to use for the FIELD_DECL corresponding to the
222    capture of EXPR.
223    The caller should add REFERENCE_TYPE for capture by reference.  */
224 
225 tree
226 lambda_capture_field_type (tree expr, bool explicit_init_p)
227 {
228   tree type;
229   if (explicit_init_p)
230     {
231       type = make_auto ();
232       type = do_auto_deduction (type, expr, type);
233     }
234   else
235     type = non_reference (unlowered_expr_type (expr));
236   if (type_dependent_expression_p (expr)
237       && !is_this_parameter (tree_strip_nop_conversions (expr)))
238     {
239       type = cxx_make_type (DECLTYPE_TYPE);
240       DECLTYPE_TYPE_EXPR (type) = expr;
241       DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
242       DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
243       SET_TYPE_STRUCTURAL_EQUALITY (type);
244     }
245   return type;
246 }
247 
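/* Exposition (added): the two branches above correspond to

     int n = 0;
     auto f = [n]      { };   // plain copy capture: field type is int
     auto g = [m = &n] { };   // init-capture: deduced as if by
			      //   auto m = &n;   i.e. int *

   For init-captures, make_auto/do_auto_deduction run the same deduction
   an 'auto' variable declaration would.  */
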
248 /* Returns true iff DECL is a lambda capture proxy variable created by
249    build_capture_proxy.  */
250 
251 bool
252 is_capture_proxy (tree decl)
253 {
254   return (VAR_P (decl)
255 	  && DECL_HAS_VALUE_EXPR_P (decl)
256 	  && !DECL_ANON_UNION_VAR_P (decl)
257 	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
258 }
259 
260 /* Returns true iff DECL is a capture proxy for a normal capture
261    (i.e. without explicit initializer).  */
262 
263 bool
264 is_normal_capture_proxy (tree decl)
265 {
266   if (!is_capture_proxy (decl))
267     /* It's not a capture proxy.  */
268     return false;
269 
270   if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
271     /* VLA capture.  */
272     return true;
273 
274   /* It is a capture proxy, is it a normal capture?  */
275   tree val = DECL_VALUE_EXPR (decl);
276   if (val == error_mark_node)
277     return true;
278 
279   gcc_assert (TREE_CODE (val) == COMPONENT_REF);
280   val = TREE_OPERAND (val, 1);
281   return DECL_NORMAL_CAPTURE_P (val);
282 }
283 
284 /* VAR is a capture proxy created by build_capture_proxy; add it to the
285    current function, which is the operator() for the appropriate lambda.  */
286 
287 void
288 insert_capture_proxy (tree var)
289 {
290   cp_binding_level *b;
291   tree stmt_list;
292 
293   /* Put the capture proxy in the extra body block so that it won't clash
294      with a later local variable.  */
295   b = current_binding_level;
296   for (;;)
297     {
298       cp_binding_level *n = b->level_chain;
299       if (n->kind == sk_function_parms)
300 	break;
301       b = n;
302     }
303   pushdecl_with_scope (var, b, false);
304 
305   /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
306   var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
307   stmt_list = (*stmt_list_stack)[1];
308   gcc_assert (stmt_list);
309   append_to_statement_list_force (var, &stmt_list);
310 }
311 
312 /* We've just finished processing a lambda; if the containing scope is also
313    a lambda, insert any capture proxies that were created while processing
314    the nested lambda.  */
315 
316 void
317 insert_pending_capture_proxies (void)
318 {
319   tree lam;
320   vec<tree, va_gc> *proxies;
321   unsigned i;
322 
323   if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
324     return;
325 
326   lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
327   proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
328   for (i = 0; i < vec_safe_length (proxies); ++i)
329     {
330       tree var = (*proxies)[i];
331       insert_capture_proxy (var);
332     }
333   release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
334   LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
335 }
336 
337 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
338    return the type we want the proxy to have: the type of the field itself,
339    with added const-qualification if the lambda isn't mutable and the
340    capture is by value.  */
341 
342 tree
343 lambda_proxy_type (tree ref)
344 {
345   tree type;
346   if (ref == error_mark_node)
347     return error_mark_node;
348   if (REFERENCE_REF_P (ref))
349     ref = TREE_OPERAND (ref, 0);
350   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
351   type = TREE_TYPE (ref);
352   if (!type || WILDCARD_TYPE_P (non_reference (type)))
353     {
354       type = cxx_make_type (DECLTYPE_TYPE);
355       DECLTYPE_TYPE_EXPR (type) = ref;
356       DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
357       SET_TYPE_STRUCTURAL_EQUALITY (type);
358     }
359   if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
360     type = make_pack_expansion (type);
361   return type;
362 }
363 
364 /* MEMBER is a capture field in a lambda closure class.  Now that we're
365    inside the operator(), build a placeholder var for future lookups and
366    debugging.  */
367 
368 tree
369 build_capture_proxy (tree member)
370 {
371   tree var, object, fn, closure, name, lam, type;
372 
373   if (PACK_EXPANSION_P (member))
374     member = PACK_EXPANSION_PATTERN (member);
375 
376   closure = DECL_CONTEXT (member);
377   fn = lambda_function (closure);
378   lam = CLASSTYPE_LAMBDA_EXPR (closure);
379 
380   /* The proxy variable forwards to the capture field.  */
381   object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
382   object = finish_non_static_data_member (member, object, NULL_TREE);
383   if (REFERENCE_REF_P (object))
384     object = TREE_OPERAND (object, 0);
385 
386   /* Remove the __ inserted by add_capture.  */
387   name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
388 
389   type = lambda_proxy_type (object);
390 
391   if (DECL_VLA_CAPTURE_P (member))
392     {
393       /* Rebuild the VLA type from the pointer and maxindex.  */
394       tree field = next_initializable_field (TYPE_FIELDS (type));
395       tree ptr = build_simple_component_ref (object, field);
396       field = next_initializable_field (DECL_CHAIN (field));
397       tree max = build_simple_component_ref (object, field);
398       type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
399 				     build_index_type (max));
400       type = build_reference_type (type);
401       REFERENCE_VLA_OK (type) = true;
402       object = convert (type, ptr);
403     }
404 
405   var = build_decl (input_location, VAR_DECL, name, type);
406   SET_DECL_VALUE_EXPR (var, object);
407   DECL_HAS_VALUE_EXPR_P (var) = 1;
408   DECL_ARTIFICIAL (var) = 1;
409   TREE_USED (var) = 1;
410   DECL_CONTEXT (var) = fn;
411 
412   if (name == this_identifier)
413     {
414       gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
415       LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
416     }
417 
418   if (fn == current_function_decl)
419     insert_capture_proxy (var);
420   else
421     vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
422 
423   return var;
424 }
425 
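/* Exposition (added): inside the operator() body every use of a
   captured name goes through one of these proxy VAR_DECLs, e.g.

     int i = 0;
     auto f = [i] { return i; };

   Here 'i' in the body resolves to a proxy whose DECL_VALUE_EXPR is,
   conceptually, the member access __closure->__i (names illustrative),
   so name lookup sees a local variable while code generation sees the
   field reference.  */
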
426 /* Return a struct containing a pointer and the maximum index for lambda
427    capture of an array of runtime length.  */
428 
429 static tree
430 vla_capture_type (tree array_type)
431 {
432   static tree ptr_id, max_id;
433   tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
434   xref_basetypes (type, NULL_TREE);
435   type = begin_class_definition (type);
436   if (!ptr_id)
437     {
438       ptr_id = get_identifier ("ptr");
439       max_id = get_identifier ("max");
440     }
441   tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
442   tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
443   finish_member_declaration (field);
444   field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
445   finish_member_declaration (field);
446   return finish_struct (type, NULL_TREE);
447 }
448 
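/* Exposition (added): the struct built above supports capture of an
   N3639 array of runtime bound by reference, e.g.

     void g (int n)
     {
       int a[n];
       auto f = [&a] { return a[0]; };
     }

   The closure stores roughly { &a[0], n - 1 } and build_capture_proxy
   rebuilds a reference to the VLA type from those two fields.  */
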
449 /* From an ID and INITIALIZER, create a capture (by reference if
450    BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
451    and return the corresponding capture proxy (or NULL_TREE).  */
452 
453 tree
454 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
455 	     bool explicit_init_p)
456 {
457   char *buf;
458   tree type, member, name;
459   bool vla = false;
460   bool variadic = false;
461   tree initializer = orig_init;
462 
463   if (PACK_EXPANSION_P (initializer))
464     {
465       initializer = PACK_EXPANSION_PATTERN (initializer);
466       variadic = true;
467     }
468 
469   if (TREE_CODE (initializer) == TREE_LIST)
470     initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
471 						   tf_warning_or_error);
472   type = TREE_TYPE (initializer);
473   if (type == error_mark_node)
474     return error_mark_node;
475 
476   if (array_of_runtime_bound_p (type))
477     {
478       vla = true;
479       if (!by_reference_p)
480 	error ("array of runtime bound cannot be captured by copy, "
481 	       "only by reference");
482 
483       /* For a VLA, we capture the address of the first element and the
484 	 maximum index, and then reconstruct the VLA for the proxy.  */
485       tree elt = cp_build_array_ref (input_location, initializer,
486 				     integer_zero_node, tf_warning_or_error);
487       initializer = build_constructor_va (init_list_type_node, 2,
488 					  NULL_TREE, build_address (elt),
489 					  NULL_TREE, array_type_nelts (type));
490       type = vla_capture_type (type);
491     }
492   else if (!dependent_type_p (type)
493 	   && variably_modified_type_p (type, NULL_TREE))
494     {
495       error ("capture of variable-size type %qT that is not an N3639 array "
496 	     "of runtime bound", type);
497       if (TREE_CODE (type) == ARRAY_TYPE
498 	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
499 	inform (input_location, "because the array element type %qT has "
500 		"variable size", TREE_TYPE (type));
501       type = error_mark_node;
502     }
503   else
504     {
505       type = lambda_capture_field_type (initializer, explicit_init_p);
506       if (by_reference_p)
507 	{
508 	  type = build_reference_type (type);
509 	  if (!dependent_type_p (type) && !real_lvalue_p (initializer))
510 	    error ("cannot capture %qE by reference", initializer);
511 	}
512       else
513 	{
514 	  /* Capture by copy requires a complete type.  */
515 	  type = complete_type (type);
516 	  if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
517 	    {
518 	      error ("capture by copy of incomplete type %qT", type);
519 	      cxx_incomplete_type_inform (type);
520 	      return error_mark_node;
521 	    }
522 	}
523     }
524 
525   /* Add __ to the beginning of the field name so that user code
526      won't find the field with name lookup.  We can't just leave the name
527      unset because template instantiation uses the name to find
528      instantiated fields.  */
529   buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
530   buf[1] = buf[0] = '_';
531   memcpy (buf + 2, IDENTIFIER_POINTER (id),
532 	  IDENTIFIER_LENGTH (id) + 1);
533   name = get_identifier (buf);
534 
535   /* If TREE_TYPE isn't set, we're still in the introducer, so check
536      for duplicates.  */
537   if (!LAMBDA_EXPR_CLOSURE (lambda))
538     {
539       if (IDENTIFIER_MARKED (name))
540 	{
541 	  pedwarn (input_location, 0,
542 		   "already captured %qD in lambda expression", id);
543 	  return NULL_TREE;
544 	}
545       IDENTIFIER_MARKED (name) = true;
546     }
547 
548   if (variadic)
549     type = make_pack_expansion (type);
550 
551   /* Make member variable.  */
552   member = build_decl (input_location, FIELD_DECL, name, type);
553   DECL_VLA_CAPTURE_P (member) = vla;
554 
555   if (!explicit_init_p)
556     /* Normal captures are invisible to name lookup but uses are replaced
557        with references to the capture field; we implement this by only
558        really making them invisible in unevaluated context; see
559        qualify_lookup.  For now, let's make explicitly initialized captures
560        always visible.  */
561     DECL_NORMAL_CAPTURE_P (member) = true;
562 
563   if (id == this_identifier)
564     LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
565 
566   /* Add it to the appropriate closure class if we've started it.  */
567   if (current_class_type
568       && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
569     finish_member_declaration (member);
570 
571   tree listmem = member;
572   if (variadic)
573     {
574       listmem = make_pack_expansion (member);
575       initializer = orig_init;
576     }
577   LAMBDA_EXPR_CAPTURE_LIST (lambda)
578     = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
579 
580   if (LAMBDA_EXPR_CLOSURE (lambda))
581     return build_capture_proxy (member);
582   /* For explicit captures we haven't started the function yet, so we wait
583      and build the proxy from cp_parser_lambda_body.  */
584   return NULL_TREE;
585 }
586 
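/* Exposition (added): the "__" prefixing above means that for

     int x = 0, y = 1;
     auto f = [x, &y, z = x + y] { return x + y + z; };

   the closure gets fields named __x, __y and __z.  User code cannot
   name those fields, but uses of x, y and z in the body resolve to the
   capture proxies built from them.  */
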
587 /* Register all the capture members on the list CAPTURES, which is the
588    LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */
589 
590 void
591 register_capture_members (tree captures)
592 {
593   if (captures == NULL_TREE)
594     return;
595 
596   register_capture_members (TREE_CHAIN (captures));
597 
598   tree field = TREE_PURPOSE (captures);
599   if (PACK_EXPANSION_P (field))
600     field = PACK_EXPANSION_PATTERN (field);
601 
602   /* We set this in add_capture to avoid duplicates.  */
603   IDENTIFIER_MARKED (DECL_NAME (field)) = false;
604   finish_member_declaration (field);
605 }
606 
607 /* Similar to add_capture, except this works on a stack of nested lambdas.
608    BY_REFERENCE_P in this case is derived from the default capture mode.
609    Returns the capture for the lambda at the bottom of the stack.  */
610 
611 tree
612 add_default_capture (tree lambda_stack, tree id, tree initializer)
613 {
614   bool this_capture_p = (id == this_identifier);
615 
616   tree var = NULL_TREE;
617 
618   tree saved_class_type = current_class_type;
619 
620   tree node;
621 
622   for (node = lambda_stack;
623        node;
624        node = TREE_CHAIN (node))
625     {
626       tree lambda = TREE_VALUE (node);
627 
628       current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
629       if (DECL_PACK_P (initializer))
630 	initializer = make_pack_expansion (initializer);
631       var = add_capture (lambda,
632                             id,
633                             initializer,
634                             /*by_reference_p=*/
635 			    (!this_capture_p
636 			     && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
637 				 == CPLD_REFERENCE)),
638 			    /*explicit_init_p=*/false);
639       initializer = convert_from_reference (var);
640     }
641 
642   current_class_type = saved_class_type;
643 
644   return var;
645 }
646 
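/* Exposition (added): the walk over LAMBDA_STACK matters for nested
   lambdas with capture-defaults, e.g.

     int i = 0;
     auto outer = [=] {
       auto inner = [=] { return i; };   // the use of i here captures i
       return inner ();                  // in inner *and* in outer
     };

   with each level initializing its capture from the proxy of the
   enclosing level.  */
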
647 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
648    form of an INDIRECT_REF, possibly adding it through default
649    capturing, if ADD_CAPTURE_P is true.  */
650 
651 tree
652 lambda_expr_this_capture (tree lambda, bool add_capture_p)
653 {
654   tree result;
655 
656   tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
657 
658   /* In unevaluated context this isn't an odr-use, so just return the
659      nearest 'this'.  */
660   if (cp_unevaluated_operand)
661     {
662       /* In an NSDMI the fake 'this' pointer that we're using for
663 	 parsing is in scope_chain.  */
664       if (LAMBDA_EXPR_EXTRA_SCOPE (lambda)
665 	  && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (lambda)) == FIELD_DECL)
666 	return scope_chain->x_current_class_ptr;
667       return lookup_name (this_identifier);
668     }
669 
670   /* Try to default capture 'this' if we can.  */
671   if (!this_capture
672       && (!add_capture_p
673           || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
674     {
675       tree lambda_stack = NULL_TREE;
676       tree init = NULL_TREE;
677 
678       /* If we are in a lambda function, we can move out until we hit:
679            1. a non-lambda function or NSDMI,
680            2. a lambda function capturing 'this', or
681            3. a non-default capturing lambda function.  */
682       for (tree tlambda = lambda; ;)
683 	{
684           lambda_stack = tree_cons (NULL_TREE,
685                                     tlambda,
686                                     lambda_stack);
687 
688 	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
689 	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
690 	    {
691 	      /* In an NSDMI, we don't have a function to look up the decl in,
692 		 but the fake 'this' pointer that we're using for parsing is
693 		 in scope_chain.  */
694 	      init = scope_chain->x_current_class_ptr;
695 	      gcc_checking_assert
696 		(init && (TREE_TYPE (TREE_TYPE (init))
697 			  == current_nonlambda_class_type ()));
698 	      break;
699 	    }
700 
701 	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
702 	  tree containing_function = decl_function_context (closure_decl);
703 
704 	  if (containing_function == NULL_TREE)
705 	    /* We ran out of scopes; there's no 'this' to capture.  */
706 	    break;
707 
708 	  if (!LAMBDA_FUNCTION_P (containing_function))
709 	    {
710 	      /* We found a non-lambda function.  */
711 	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
712 		/* First parameter is 'this'.  */
713 		init = DECL_ARGUMENTS (containing_function);
714 	      break;
715 	    }
716 
717 	  tlambda
718             = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
719 
720           if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
721 	    {
722 	      /* An outer lambda has already captured 'this'.  */
723 	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
724 	      break;
725 	    }
726 
727 	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
728 	    /* An outer lambda won't let us capture 'this'.  */
729 	    break;
730 	}
731 
732       if (init)
733         {
734           if (add_capture_p)
735 	    this_capture = add_default_capture (lambda_stack,
736 					        /*id=*/this_identifier,
737 					        init);
738           else
739 	    this_capture = init;
740         }
741     }
742 
743   if (!this_capture)
744     {
745       if (add_capture_p)
746 	error ("%<this%> was not captured for this lambda function");
747       result = error_mark_node;
748     }
749   else
750     {
751       /* To make sure that current_class_ref is for the lambda.  */
752       gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
753 		  == LAMBDA_EXPR_CLOSURE (lambda));
754 
755       result = this_capture;
756 
757       /* If 'this' is captured, each use of 'this' is transformed into an
758 	 access to the corresponding unnamed data member of the closure
759 	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
760 	 ensures that the transformed expression is an rvalue. ] */
761       result = rvalue (result);
762     }
763 
764   return result;
765 }
766 
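/* Exposition (added): the scope walk above handles cases such as

     struct S
     {
       int m;
       int f ()
       {
	 return [=] { return [=] { return m; } (); } ();
       }
     };

   where the use of m in the innermost lambda forces 'this' to be
   captured by every enclosing lambda with a capture-default, and the
   resulting access is returned as an rvalue per the quoted wording.  */
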
767 /* We don't want to capture 'this' until we know we need it, i.e. after
768    overload resolution has chosen a non-static member function.  At that
769    point we call this function to turn a dummy object into a use of the
770    'this' capture.  */
771 
772 tree
773 maybe_resolve_dummy (tree object, bool add_capture_p)
774 {
775   if (!is_dummy_object (object))
776     return object;
777 
778   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
779   gcc_assert (!TYPE_PTR_P (type));
780 
781   if (type != current_class_type
782       && current_class_type
783       && LAMBDA_TYPE_P (current_class_type)
784       && lambda_function (current_class_type)
785       && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
786     {
787       /* In a lambda, need to go through 'this' capture.  */
788       tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
789       tree cap = lambda_expr_this_capture (lam, add_capture_p);
790       if (cap && cap != error_mark_node)
791 	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
792 				       RO_NULL, tf_warning_or_error);
793     }
794 
795   return object;
796 }
797 
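/* Exposition (added): the dummy-object case arises for unqualified
   member calls inside a lambda, e.g.

     struct S
     {
       int g () { return 42; }
       int f () { return [this] { return g (); } (); }
     };

   While parsing, the object expression for g () is a dummy; once
   overload resolution picks the non-static S::g, the dummy is replaced
   here by an indirection through the 'this' capture.  */
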
798 /* Returns the innermost non-lambda function.  */
799 
800 tree
801 current_nonlambda_function (void)
802 {
803   tree fn = current_function_decl;
804   while (fn && LAMBDA_FUNCTION_P (fn))
805     fn = decl_function_context (fn);
806   return fn;
807 }
808 
809 /* Returns the method basetype of the innermost non-lambda function, or
810    NULL_TREE if none.  */
811 
812 tree
813 nonlambda_method_basetype (void)
814 {
815   tree fn, type;
816   if (!current_class_ref)
817     return NULL_TREE;
818 
819   type = current_class_type;
820   if (!LAMBDA_TYPE_P (type))
821     return type;
822 
823   /* Find the nearest enclosing non-lambda function.  */
824   fn = TYPE_NAME (type);
825   do
826     fn = decl_function_context (fn);
827   while (fn && LAMBDA_FUNCTION_P (fn));
828 
829   if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
830     return NULL_TREE;
831 
832   return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
833 }
834 
835 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
836    indicated FN and NARGS, but do not initialize the return type or any of the
837    argument slots.  */
838 
839 static tree
840 prepare_op_call (tree fn, int nargs)
841 {
842   tree t;
843 
844   t = build_vl_exp (CALL_EXPR, nargs + 3);
845   CALL_EXPR_FN (t) = fn;
846   CALL_EXPR_STATIC_CHAIN (t) = NULL;
847 
848   return t;
849 }
850 
851 /* If the closure TYPE is captureless, so that its op() could be static, also
852    add a conversion to function pointer.  */
853 
854 void
855 maybe_add_lambda_conv_op (tree type)
856 {
857   bool nested = (cfun != NULL);
858   bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
859   tree callop = lambda_function (type);
860 
861   if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
862     return;
863 
864   if (processing_template_decl)
865     return;
866 
867   bool const generic_lambda_p
868     = (DECL_TEMPLATE_INFO (callop)
869     && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);
870 
871   if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
872     {
873       /* If the op() wasn't instantiated due to errors, give up.  */
874       gcc_assert (errorcount || sorrycount);
875       return;
876     }
877 
878   /* Non-template conversion operators are defined directly with build_call_a
879      and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
880      deferred and the CALL is built in-place.  In the case of a deduced return
881      call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
882      the return type is also built in-place.  The arguments of DECLTYPE_CALL in
883      the return expression may differ in flags from those in the body CALL.  In
884      particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
885      the body CALL, but not in DECLTYPE_CALL.  */
886 
887   vec<tree, va_gc> *direct_argvec = 0;
888   tree decltype_call = 0, call = 0;
889   tree fn_result = TREE_TYPE (TREE_TYPE (callop));
890 
891   if (generic_lambda_p)
892     {
893       /* Prepare the dependent member call for the static member function
894 	 '_FUN' and, potentially, prepare another call to be used in a decltype
895 	 return expression for a deduced return call op to allow for simple
896 	 implementation of the conversion operator.  */
897 
898       tree instance = build_nop (type, null_pointer_node);
899       tree objfn = build_min (COMPONENT_REF, NULL_TREE,
900 			      instance, DECL_NAME (callop), NULL_TREE);
901       int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
902 
903       call = prepare_op_call (objfn, nargs);
904       if (type_uses_auto (fn_result))
905 	decltype_call = prepare_op_call (objfn, nargs);
906     }
907   else
908     {
909       direct_argvec = make_tree_vector ();
910       direct_argvec->quick_push (build1 (NOP_EXPR,
911 					 TREE_TYPE (DECL_ARGUMENTS (callop)),
912 					 null_pointer_node));
913     }
914 
915   /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
916      declare the static member function "_FUN" below.  For each arg append to
917      DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
918      call args (for the template case).  If a parameter pack is found, expand
919      it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */
920 
921   tree fn_args = NULL_TREE;
922   {
923     int ix = 0;
924     tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
925     tree tgt;
926 
927     while (src)
928       {
929 	tree new_node = copy_node (src);
930 
931 	if (!fn_args)
932 	  fn_args = tgt = new_node;
933 	else
934 	  {
935 	    TREE_CHAIN (tgt) = new_node;
936 	    tgt = new_node;
937 	  }
938 
939 	mark_exp_read (tgt);
940 
941 	if (generic_lambda_p)
942 	  {
943 	    if (DECL_PACK_P (tgt))
944 	      {
945 		tree a = make_pack_expansion (tgt);
946 		if (decltype_call)
947 		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
948 		PACK_EXPANSION_LOCAL_P (a) = true;
949 		CALL_EXPR_ARG (call, ix) = a;
950 	      }
951 	    else
952 	      {
953 		tree a = convert_from_reference (tgt);
954 		CALL_EXPR_ARG (call, ix) = a;
955 		if (decltype_call)
956 		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
957 	      }
958 	    ++ix;
959 	  }
960 	else
961 	  vec_safe_push (direct_argvec, tgt);
962 
963 	src = TREE_CHAIN (src);
964       }
965   }
966 
967 
968   if (generic_lambda_p)
969     {
970       if (decltype_call)
971 	{
972 	  ++processing_template_decl;
973 	  fn_result = finish_decltype_type
974 	    (decltype_call, /*id_expression_or_member_access_p=*/false,
975 	     tf_warning_or_error);
976 	  --processing_template_decl;
977 	}
978     }
979   else
980     call = build_call_a (callop,
981 			 direct_argvec->length (),
982 			 direct_argvec->address ());
983 
984   CALL_FROM_THUNK_P (call) = 1;
985 
986   tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
987 
988   /* First build up the conversion op.  */
989 
990   tree rettype = build_pointer_type (stattype);
991   tree name = mangle_conv_op_name_for_type (rettype);
992   tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
993   tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
994   tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
995   tree fn = convfn;
996   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
997 
998   if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
999       && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
1000     DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
1001 
1002   SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
1003   grokclassfn (type, fn, NO_SPECIAL);
1004   set_linkage_according_to_type (type, fn);
1005   rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1006   DECL_IN_AGGR_P (fn) = 1;
1007   DECL_ARTIFICIAL (fn) = 1;
1008   DECL_NOT_REALLY_EXTERN (fn) = 1;
1009   DECL_DECLARED_INLINE_P (fn) = 1;
1010   DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
1011   if (nested_def)
1012     DECL_INTERFACE_KNOWN (fn) = 1;
1013 
1014   if (generic_lambda_p)
1015     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1016 
1017   add_method (type, fn, NULL_TREE);
1018 
1019   /* Generic thunk code fails for varargs; we'll complain in mark_used if
1020      the conversion op is used.  */
1021   if (varargs_function_p (callop))
1022     {
1023       DECL_DELETED_FN (fn) = 1;
1024       return;
1025     }
1026 
1027   /* Now build up the thunk to be returned.  */
1028 
1029   name = get_identifier ("_FUN");
1030   tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
1031   fn = statfn;
1032   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1033   if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
1034       && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
1035     DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
1036   grokclassfn (type, fn, NO_SPECIAL);
1037   set_linkage_according_to_type (type, fn);
1038   rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1039   DECL_IN_AGGR_P (fn) = 1;
1040   DECL_ARTIFICIAL (fn) = 1;
1041   DECL_NOT_REALLY_EXTERN (fn) = 1;
1042   DECL_DECLARED_INLINE_P (fn) = 1;
1043   DECL_STATIC_FUNCTION_P (fn) = 1;
1044   DECL_ARGUMENTS (fn) = fn_args;
1045   for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1046     {
1047       /* Avoid duplicate -Wshadow warnings.  */
1048       DECL_NAME (arg) = NULL_TREE;
1049       DECL_CONTEXT (arg) = fn;
1050     }
1051   if (nested_def)
1052     DECL_INTERFACE_KNOWN (fn) = 1;
1053 
1054   if (generic_lambda_p)
1055     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1056 
1057   /* Don't UBsan this function; we're deliberately calling op() with a null
1058      object argument.  */
1059   tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
1060 				NULL_TREE);
1061   cplus_decl_attributes (&fn, attrs, 0);
1062 
1063   add_method (type, fn, NULL_TREE);
1064 
1065   if (nested)
1066     push_function_context ();
1067   else
1068     /* Still increment function_depth so that we don't GC in the
1069        middle of an expression.  */
1070     ++function_depth;
1071 
1072   /* Generate the body of the thunk.  */
1073 
1074   start_preparsed_function (statfn, NULL_TREE,
1075 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1076   if (DECL_ONE_ONLY (statfn))
1077     {
1078       /* Put the thunk in the same comdat group as the call op.  */
1079       cgraph_node::get_create (statfn)->add_to_same_comdat_group
1080 	(cgraph_node::get_create (callop));
1081     }
1082   tree body = begin_function_body ();
1083   tree compound_stmt = begin_compound_stmt (0);
1084   if (!generic_lambda_p)
1085     {
1086       set_flags_from_callee (call);
1087       if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1088 	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1089     }
1090   call = convert_from_reference (call);
1091   finish_return_stmt (call);
1092 
1093   finish_compound_stmt (compound_stmt);
1094   finish_function_body (body);
1095 
1096   fn = finish_function (/*inline*/2);
1097   if (!generic_lambda_p)
1098     expand_or_defer_fn (fn);
1099 
1100   /* Generate the body of the conversion op.  */
1101 
1102   start_preparsed_function (convfn, NULL_TREE,
1103 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1104   body = begin_function_body ();
1105   compound_stmt = begin_compound_stmt (0);
1106 
1107   /* decl_needed_p needs to see that it's used.  */
1108   TREE_USED (statfn) = 1;
1109   finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1110 
1111   finish_compound_stmt (compound_stmt);
1112   finish_function_body (body);
1113 
1114   fn = finish_function (/*inline*/2);
1115   if (!generic_lambda_p)
1116     expand_or_defer_fn (fn);
1117 
1118   if (nested)
1119     pop_function_context ();
1120   else
1121     --function_depth;
1122 }
1123 
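/* Exposition (added): the machinery above is what makes a captureless
   lambda convertible to an ordinary function pointer:

     int (*fp) (int) = [] (int i) { return i + 1; };
     int three = fp (2);

   The conversion operator returns the address of the synthesized static
   "_FUN", which forwards to operator() invoked on a null closure object
   (hence the no_sanitize_undefined attribute).  */
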
1124 /* Returns true iff VAL is a lambda-related declaration which should
1125    be ignored by unqualified lookup.  */
1126 
1127 bool
1128 is_lambda_ignored_entity (tree val)
1129 {
1130   /* In unevaluated context, look past normal capture proxies.  */
1131   if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1132     return true;
1133 
1134   /* Always ignore lambda fields, their names are only for debugging.  */
1135   if (TREE_CODE (val) == FIELD_DECL
1136       && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1137     return true;
1138 
1139   /* None of the lookups that use qualify_lookup want the op() from the
1140      lambda; they want the one from the enclosing class.  */
1141   if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1142     return true;
1143 
1144   return false;
1145 }
1146