/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2016 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
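
/* Illustrative sketch (not part of the original sources): for

     int i = 0;
     int a[2] = {1, 2};
     auto l = [i, a] { return i + a[0]; };

   the loop above conceptually builds the braced list {i, <copy of a>} and
   hands it to finish_compound_literal, so the closure members __i and __a
   (see add_capture for the naming) are direct-initialized from the captured
   entities; the array capture needs build_array_copy because aggregate
   initialization cannot copy an array member from another array.  */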

/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
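
/* Illustrative note (not part of the original sources): the decay step above
   is what makes a return of an array or function name yield a pointer, e.g.

     int arr[4];
     auto f = [&arr] { return arr; };   // op() returns int *, not int (&)[4]

   and cv_unqualified strips top-level qualifiers from the deduced type.  */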

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
  if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  return type;
}

/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
        break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
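
/* Illustrative sketch (not part of the original sources): a pending proxy
   arises when a default capture in a nested lambda implicitly captures in
   the enclosing lambda as well, e.g.

     int i = 0;
     auto outer = [=] { return [=] { return i; }; };

   The use of i is seen while the inner op() is the current function, so the
   proxy for the enclosing lambda's capture of i cannot be pushed into the
   outer op() yet; it is queued on LAMBDA_EXPR_PENDING_PROXIES and inserted
   here once the inner lambda has been finished.  */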

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
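
/* Illustrative sketch (not part of the original sources): for

     int i = 0;
     auto l = [i] { return i; };

   the closure stores a field named __i, and inside operator() the use of
   `i' finds an artificial VAR_DECL named `i' whose DECL_VALUE_EXPR is the
   COMPONENT_REF of __i built above from op()'s 'this' argument, so the
   proxy behaves like the member while the member itself stays hidden from
   ordinary name lookup.  */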

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
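
/* Illustrative sketch (not part of the original sources): capturing an
   N3639 array of runtime bound by reference, as in

     void f (int n)
     {
       int a[n];
       [&a] { return a[0]; } ();
     }

   stores a record that is roughly

     struct { int *ptr; size_t max; };

   in the closure; build_capture_proxy above then rebuilds a reference to
   the array type from the pointer and the maximum index.  */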

/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (type == error_mark_node)
        return error_mark_node;
      if (by_reference_p)
        {
          type = build_reference_type (type);
          if (!dependent_type_p (type) && !real_lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}

/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
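
/* Illustrative note (not part of the original sources): while parsing the
   introducer, the IDENTIFIER_MARKED scheme above is what rejects

     int i = 0;
     auto l = [i, &i] { return i; };   // pedwarn: already captured 'i'

   and register_capture_members clears the marks again once the closure
   class is open, so a later lambda can reuse the same capture names.  */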

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (!this_capture_p
                          && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && !COMPLETE_TYPE_P (LAMBDA_EXPR_CLOSURE (tlambda))
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain.  But if the closure is already complete, we're
                 in an instantiation of a generic lambda, and the fake 'this'
                 is gone.  */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}
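
/* Illustrative note (not part of the original sources): the walk above is
   what makes

     struct S {
       int x;
       void f () { [=] { [=] { return x; } (); } (); }
     };

   work: the use of x in the innermost lambda needs 'this', and the
   lambda_stack built here captures 'this' in the outer lambda first and
   then in the inner one from the outer capture.  A lambda with no
   capture-default in the chain stops the walk, and the use is then
   rejected with "'this' was not captured".  */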

/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
        object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                       RO_NULL, tf_warning_or_error);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);
        for (; fns; fns = OVL_NEXT (fns))
          {
            tree fn = OVL_CURRENT (fns);

            if (identifier_p (fns)
                || ((!id_expr || TREE_CODE (fn) == TEMPLATE_DECL)
                    && DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)))
              {
                /* Found a non-static member.  Capture this.  */
                lambda_expr_this_capture (lam, true);
                break;
              }
          }
      }
}
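
/* Illustrative note (not part of the original sources): in a generic lambda
   such as

     struct S {
       void g (int);
       void h () { [=] (auto x) { g (x); } (0); }
     };

   the call g(x) is type-dependent, so overload resolution is deferred to
   instantiation; maybe_generic_this_capture looks at the candidate set now
   and captures 'this' because g is a non-static member, since by
   instantiation time it would be too late to add the capture.  */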

/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
          && LAMBDA_FUNCTION_P (scope))
        {
          scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
          continue;
        }
      else if (LAMBDA_TYPE_P (scope))
        {
          scope = CP_TYPE_CONTEXT (scope);
          continue;
        }
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
          && DECL_TEMPLATE_INFO (callop)
          && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}
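
/* Illustrative note (not part of the original sources): for a generic lambda

     auto l = [] (auto x) { return x; };

   operator() is a member template, so generic_lambda_fn_p returns true and
   the conversion to function pointer added below must itself be a template;
   e.g.

     int (*pf) (int) = l;

   picks the specialization for int and instantiates the matching _FUN
   thunk.  */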

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
                            null_pointer_node);
  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = cp_build_indirect_ref (thisarg, RO_NULL,
                                             tf_warning_or_error);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            ++processing_template_decl;
            tree a = forward_parm (tgt);
            --processing_template_decl;

            CALL_EXPR_ARG (call, ix) = a;
            if (decltype_call)
              CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

            if (PACK_EXPANSION_P (a))
              /* Set this after unsharing so it's not in decltype_call.  */
              PACK_EXPANSION_LOCAL_P (a) = true;

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
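
  /* Illustrative sketch (not part of the original sources): for a
     captureless lambda such as

       auto l = [] (int i) { return i + 1; };

     the code below adds roughly

       static int _FUN (int i) { return ((const closure *) 0)->operator() (i); }
       operator int (*) (int) () const { return _FUN; }

     to the closure class.  Calling op() through a null closure pointer is
     fine here because a captureless op() never touches its object, which is
     also why the thunk is excluded from the null sanitizer below.  */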

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  DECL_ALIGN (fn) = MINIMUM_METHOD_BOUNDARY;
  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    {
      /* Don't UBsan this function; we're deliberately calling op() with a null
         object argument.  */
      tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
                                    NULL_TREE);
      cplus_decl_attributes (&fn, attrs, 0);
    }

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies.  */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}