/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2019 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && !TYPE_REF_P (TREE_TYPE (field)))
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.
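
             As a purely illustrative sketch (user-level C++, not part of
             GCC; the class and variable names are invented), this is the
             situation the TARGET_EXPR trick below handles:

               struct A { A () {} explicit A (const A&) {} };

               void f ()
               {
                 A a;
                 auto l = [a] { };   // OK: by-copy captures are
                                     // direct-initialized, so the
                                     // explicit copy constructor is fine
               }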
             */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, call_op_identifier,
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.
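
   As a rough illustration (user-level C++, not GCC code; the names are
   made up), given

     int i = 0;
     auto l1 = [i] { };          // capture field type is int
     auto l2 = [&i] { };         // capture field type is int&
     auto l3 = [j = i + 1] { };  // init capture: deduced via auto

   the init-capture case goes through make_auto/do_auto_deduction below,
   and type-dependent captures in templates get a DECLTYPE_TYPE
   placeholder instead.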
   */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else
    {
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
          && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
        type = build_reference_type (type);
    }

  return type;
}

/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && !DECL_DECOMPOSITION_P (decl)
          && !DECL_FNAME_P (decl)
          && !(DECL_ARTIFICIAL (decl)
               && DECL_LANG_SPECIFIC (decl)
               && DECL_OMP_PRIVATIZED_MEMBER (decl))
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  return (DECL_LANG_SPECIFIC (decl)
          && DECL_CAPTURED_VARIABLE (decl));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   of a constant variable.  */

bool
is_constant_capture_proxy (tree decl)
{
  if (is_normal_capture_proxy (decl))
    return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
  return false;
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
        {
          gcc_assert (!is_normal_capture_proxy (cap));
          tree old = retrieve_local_specialization (cap);
          if (old)
            gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
        }
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.
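
         A hedged user-level example of what this branch supports (GCC's
         array-of-runtime-bound extension in C++; illustrative code only,
         not compiler code, names invented):

           void f (int n)
           {
             int a[n];
             auto l = [&a] { return a[0]; };   // the capture field stores
                                               // a pointer plus the maximum
                                               // index, and the proxy 'a'
                                               // gets the rebuilt VLA type
           }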
         */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0);  // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0);  // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
        }

      if (INDIRECT_REF_P (init))
        init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          gcc_assert (INDIRECT_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_fold_indirect_ref (initializer);
        }

      if (dependent_type_p (type))
        ;
      else if (id != this_identifier && by_reference_p)
        {
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  if (variadic)
    {
      type = make_pack_expansion (type);
      if (explicit_init_p)
        /* With an explicit initializer 'type' is auto, which isn't really a
           parameter pack in this context.  We will want as many fields as we
           have elements in the expansion of the initializer, so use its packs
           instead.  */
        PACK_EXPANSION_PARAMETER_PACKS (type)
          = uses_parameter_packs (initializer);
    }

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.
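
       For illustration only (user-level C++, not GCC code; the names are
       invented):

         int x = 1;
         auto l = [x, y = 2] {
           return x          // use replaced by a reference to the
                             // capture field through its proxy
                  + y;       // init capture: always visible to lookup
         };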
       */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}

/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
          && this_capture_p
          && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY
          && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda)))
        {
          if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
                          "implicit capture of %qE via %<[=]%> is deprecated "
                          "in C++20", this_identifier))
            inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
                    "%<*this%> capture");
        }
    }

  current_class_type = saved_class_type;

  return var;
}

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.
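
   As a hedged illustration (user-level C++, not GCC itself; the names are
   invented), the walk below can reach 'this' through enclosing lambdas:

     struct S {
       int m;
       void f ()
       {
         auto outer = [=] {
           auto inner = [=] { return m; };   // 'm' needs 'this'; both
           return inner ();                  // lambdas end up capturing it
         };
       }
     };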
   */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
         1. a non-lambda function or NSDMI,
         2. a lambda function capturing 'this', or
         3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          if (add_capture_p
              && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* tlambda won't let us capture 'this'.  */
            break;

          if (add_capture_p)
            lambda_stack = tree_cons (NULL_TREE,
                                      tlambda,
                                      lambda_stack);

          tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
          tree containing_function
            = decl_function_context (TYPE_NAME (closure));

          tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
          if (ex && TREE_CODE (ex) == FIELD_DECL)
            {
              /* Lambda in an NSDMI.  We don't have a function to look up
                 'this' in, but we can find (or rebuild) the fake one from
                 inject_this_parameter.  */
              if (!containing_function && !COMPLETE_TYPE_P (closure))
                /* If we're parsing a lambda in a non-local class,
                   we can find the fake 'this' in scope_chain.  */
                init = scope_chain->x_current_class_ptr;
              else
                /* Otherwise it's either gone or buried in
                   function_context_stack, so make another.  */
                init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
                                        TYPE_UNQUALIFIED);
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p == 1)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}

/* Return the innermost LAMBDA_EXPR we're currently in, if any.  */

tree
current_lambda_expr (void)
{
  tree type = current_class_type;
  while (type && !LAMBDA_TYPE_P (type))
    type = decl_type_context (TYPE_NAME (type));
  if (type)
    return CLASSTYPE_LAMBDA_EXPR (type);
  else
    return NULL_TREE;
}

/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object, NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, nonlambda_method_basetype()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
        object = build_fold_indirect_ref (cap);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);

        for (lkp_iterator iter (fns); iter; ++iter)
          if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
               || TREE_CODE (*iter) == TEMPLATE_DECL)
              && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
            {
              /* Found a non-static member.  Capture this.  */
              lambda_expr_this_capture (lam, /*maybe*/-1);
              break;
            }
      }
}

/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
        /* Lambda in an NSDMI.
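
           For instance (illustrative user code, not GCC; names invented):

             struct S {
               int x = 1;
               int y = [this] { return x + 1; } ();  // lambda in an NSDMI;
                                                     // the method basetype
                                                     // here is S
             };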
           */
        return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
          || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
        /* No enclosing non-lambda method.  */
        return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
        /* Found an enclosing non-lambda method.  */
        return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
          && LAMBDA_FUNCTION_P (scope))
        {
          scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
          continue;
        }
      else if (LAMBDA_TYPE_P (scope))
        {
          scope = CP_TYPE_CONTEXT (scope);
          continue;
        }
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
          && DECL_TEMPLATE_INFO (callop)
          && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.
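
     At the source level, this machinery is what makes sketches like the
     following work (illustrative user code, not GCC; names invented):

       auto add = [] (int a, int b) { return a + b; };
       int (*fp) (int, int) = add;           // uses the conversion op

       auto id = [] (auto x) { return x; };  // generic: the conversion op
       long (*gp) (long) = id;               // is itself a template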
     */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = lookup_template_function (DECL_NAME (callop),
                                             DECL_TI_ARGS (callop));
      objfn = build_min (COMPONENT_REF, NULL_TREE,
                         instance, objfn, NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
        tree new_node = copy_node (src);

        /* Clear TREE_ADDRESSABLE on thunk arguments.  */
        TREE_ADDRESSABLE (new_node) = 0;

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            tree a = tgt;
            if (DECL_PACK_P (tgt))
              {
                a = make_pack_expansion (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
              }
            CALL_EXPR_ARG (call, ix) = a;

            if (decltype_call)
              {
                /* Avoid capturing variables in this context.  */
                ++cp_unevaluated_operand;
                CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
                --cp_unevaluated_operand;
              }

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
          && DECL_ARTIFICIAL (fn)
          && DECL_STATIC_FUNCTION_P (fn)
          && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* Look past normal capture proxies.  */
  if (is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}

/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;

void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}

/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}

void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      lambda_scope = p->t;
      lambda_count = p->i;
    }
  lambda_scope_stack->pop ();
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
                            NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}

/* Subroutine of prune_lambda_captures: CAP is a node in
   LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
   might optimize away the capture, or NULL_TREE if there is no such
   variable.  */

static tree
var_to_maybe_prune (tree cap)
{
  if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
    /* Don't prune explicit captures.  */
    return NULL_TREE;

  tree mem = TREE_PURPOSE (cap);
  if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
    /* Packs and init-captures aren't captures of constant vars.  */
    return NULL_TREE;

  tree init = TREE_VALUE (cap);
  if (is_normal_capture_proxy (init))
    init = DECL_CAPTURED_VARIABLE (init);
  if (decl_constant_var_p (init))
    return init;

  return NULL_TREE;
}

/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
        {
          var = DECL_CAPTURED_VARIABLE (decl);
          *walk_subtrees = 0;
        }
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
        slot = t;
    }

  return NULL_TREE;
}

/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.
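
       The kind of case pruning is meant for, as a hedged illustration
       (user-level C++, not GCC; names invented):

         void f ()
         {
           constexpr int n = 4;
           auto l = [=] { return n; };   // the use of 'n' folds to a
                                         // constant, so the implicit
                                         // capture of 'n' may be dropped
                                         // from the closure entirely
         }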
       */
    return;

  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
        {
          tree **use = const_vars.get (var);
          if (use && TREE_CODE (**use) == DECL_EXPR)
            {
              /* All uses of this capture were folded away, leaving only the
                 proxy declaration.  */

              /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
              *capp = TREE_CHAIN (cap);

              /* And out of TYPE_FIELDS.  */
              tree field = TREE_PURPOSE (cap);
              while (*fieldp != field)
                fieldp = &DECL_CHAIN (*fieldp);
              *fieldp = DECL_CHAIN (*fieldp);

              /* And remove the capture proxy declaration.  */
              **use = void_node;
              continue;
            }
        }

      capp = &TREE_CHAIN (cap);
    }
}

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"