/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2018 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
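
/* An illustrative example (not part of the original sources): for

     int i = 42;
     int a[2] = { 1, 2 };
     auto f = [i, a] { return i + a[0]; };

   build_lambda_object assembles a CONSTRUCTOR roughly of the form
   { __i: i, __a: <array copy of a> } and uses it, via a brief detour
   through aggregate initialization, to direct-initialize the closure
   object's data members.  */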

/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, call_op_identifier,
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
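
/* Illustrative examples (not part of the original sources) of the capture
   field types computed by lambda_capture_field_type below:

     double d = 1.0;
     [d]  { };           // field type double (by-copy capture)
     [&d] { };           // field type double& (by-reference capture)
     [x = d + 1] { };    // init-capture: deduced via auto from d + 1
     [&r = d] { };       // init-capture by reference: auto& deduction

   In a dependent context the type is instead wrapped in a DECLTYPE_TYPE
   and resolved at instantiation time.  */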

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
          && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
        type = build_reference_type (type);
    }

  return type;
}

/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && !DECL_DECOMPOSITION_P (decl)
          && !(DECL_ARTIFICIAL (decl)
               && DECL_LANG_SPECIFIC (decl)
               && DECL_OMP_PRIVATIZED_MEMBER (decl))
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  return (DECL_LANG_SPECIFIC (decl)
          && DECL_CAPTURED_VARIABLE (decl));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   of a constant variable.  */

bool
is_constant_capture_proxy (tree decl)
{
  if (is_normal_capture_proxy (decl))
    return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
  return false;
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
        {
          gcc_assert (!is_normal_capture_proxy (cap));
          tree old = retrieve_local_specialization (cap);
          if (old)
            gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
        }
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
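
/* Illustrative sketch (not part of the original sources): for

     int i = 0;
     auto f = [i] { return i; };

   the use of 'i' inside the body refers to an artificial proxy VAR_DECL
   named 'i' whose DECL_VALUE_EXPR is roughly __closure->__i, so name
   lookup and debugging see the captured entity rather than the field.  */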

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
        }

      if (INDIRECT_REF_P (init))
        init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
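
/* Illustrative example (not part of the original sources) of the VLA
   capture handling above and below: for

     void f (int n)
     {
       int a[n];
       [&a] { return a[0]; } ();
     }

   the array of runtime bound is captured as a small {ptr, max} record
   built by vla_capture_type, and build_capture_proxy reconstructs a
   reference-to-VLA type for the proxy from those two fields.  */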

static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          gcc_assert (POINTER_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_fold_indirect_ref (initializer);
        }

      if (dependent_type_p (type))
        ;
      else if (id != this_identifier && by_reference_p)
        {
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
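
/* Illustrative note (not part of the original sources): given

     int x = 1;
     auto f = [x, y = x + 1] { return x + y; };

   add_capture creates FIELD_DECLs named __x and __y in the closure class;
   the leading __ keeps them out of ordinary name lookup, while the proxies
   built later are named plain x and y.  Only __x is a "normal" capture
   (DECL_NORMAL_CAPTURE_P); __y is an init-capture.  */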

/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
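
/* Illustrative example (not part of the original sources): in

     int i = 0;
     auto outer = [&] {
       auto inner = [=] { return i; };   // first use of i is in here
     };

   the use of i in the inner lambda implicitly captures i in both lambdas.
   add_default_capture walks the lambda stack from the outermost lambda
   inward, adding a by-reference capture to 'outer' and then a by-copy
   capture of that capture to 'inner'.  */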

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
         1. a non-lambda function or NSDMI,
         2. a lambda function capturing 'this', or
         3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
          tree containing_function
            = decl_function_context (TYPE_NAME (closure));

          tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
          if (ex && TREE_CODE (ex) == FIELD_DECL)
            {
              /* Lambda in an NSDMI.  We don't have a function to look up
                 'this' in, but we can find (or rebuild) the fake one from
                 inject_this_parameter.  */
              if (!containing_function && !COMPLETE_TYPE_P (closure))
                /* If we're parsing a lambda in a non-local class,
                   we can find the fake 'this' in scope_chain.  */
                init = scope_chain->x_current_class_ptr;
              else
                /* Otherwise it's either gone or buried in
                   function_context_stack, so make another.  */
                init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
                                        TYPE_UNQUALIFIED);
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p == 1)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}

/* Return the innermost LAMBDA_EXPR we're currently in, if any.  */

tree
current_lambda_expr (void)
{
  tree type = current_class_type;
  while (type && !LAMBDA_TYPE_P (type))
    type = decl_type_context (TYPE_NAME (type));
  if (type)
    return CLASSTYPE_LAMBDA_EXPR (type);
  else
    return NULL_TREE;
}
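
/* Illustrative example (not part of the original sources): in

     struct A
     {
       int m;
       void f ()
       {
         auto g = [=] { return m; };   // 'm' means this->m
       }
     };

   the implicit use of 'this' makes lambda_expr_this_capture add a default
   capture of the enclosing 'this' pointer; nested lambdas walk outward
   through enclosing closures until they find one that already captured
   'this', a non-lambda member function, or a lambda that forbids it.  */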

/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, nonlambda_method_basetype ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
        object = build_fold_indirect_ref (cap);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);

        for (lkp_iterator iter (fns); iter; ++iter)
          if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
              && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
            {
              /* Found a non-static member.  Capture this.  */
              lambda_expr_this_capture (lam, /*maybe*/-1);
              break;
            }
      }
}

/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
        /* Lambda in an NSDMI.  */
        return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
          || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
        /* No enclosing non-lambda method.  */
        return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
        /* Found an enclosing non-lambda method.  */
        return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
          && LAMBDA_FUNCTION_P (scope))
        {
          scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
          continue;
        }
      else if (LAMBDA_TYPE_P (scope))
        {
          scope = CP_TYPE_CONTEXT (scope);
          continue;
        }
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
          && DECL_TEMPLATE_INFO (callop)
          && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}
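
/* Illustrative example (not part of the original sources) of what
   maybe_add_lambda_conv_op below produces: a captureless lambda such as

     auto add = [] (int a, int b) { return a + b; };
     int (*fp) (int, int) = add;     // uses the conversion operator

   gets a conversion operator returning a pointer to a static thunk _FUN,
   which simply calls the operator() with a null object argument.  For a
   generic lambda ([] (auto a, auto b) { ... }) both the conversion and the
   thunk become member templates.  */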

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
                            null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
        tree new_node = copy_node (src);

        /* Clear TREE_ADDRESSABLE on thunk arguments.  */
        TREE_ADDRESSABLE (new_node) = 0;

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            /* Avoid capturing variables in this context.  */
            ++cp_unevaluated_operand;
            tree a = forward_parm (tgt);
            --cp_unevaluated_operand;

            CALL_EXPR_ARG (call, ix) = a;
            if (decltype_call)
              CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

            if (PACK_EXPANSION_P (a))
              /* Set this after unsharing so it's not in decltype_call.  */
              PACK_EXPANSION_LOCAL_P (a) = true;

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
          && DECL_ARTIFICIAL (fn)
          && DECL_STATIC_FUNCTION_P (fn)
          && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* Look past normal capture proxies.  */
  if (is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}
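
/* Illustrative example (not part of the original sources) for the mangling
   scope machinery below: in

     int x = [] { return 1; } () + [] { return 2; } ();

   both lambdas appear in the initializer of the variable 'x', so each
   records 'x' as its extra scope and receives a distinct discriminator
   (0 and 1) so that the two closure types mangle differently.  */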

/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;

void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}

/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      tree_int *p = lambda_scope_stack->begin ();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}

void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      lambda_scope = p->t;
      lambda_count = p->i;
    }
  lambda_scope_stack->pop ();
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
                            NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}

/* Subroutine of prune_lambda_captures: CAP is a node in
   LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
   might optimize away the capture, or NULL_TREE if there is no such
   variable.  */

static tree
var_to_maybe_prune (tree cap)
{
  if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
    /* Don't prune explicit captures.  */
    return NULL_TREE;

  tree mem = TREE_PURPOSE (cap);
  if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
    /* Packs and init-captures aren't captures of constant vars.  */
    return NULL_TREE;

  tree init = TREE_VALUE (cap);
  if (is_normal_capture_proxy (init))
    init = DECL_CAPTURED_VARIABLE (init);
  if (decl_constant_var_p (init))
    return init;

  return NULL_TREE;
}
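
/* Illustrative example (not part of the original sources) for the capture
   pruning below: in

     void f ()
     {
       const int n = 4;
       auto g = [=] { return n; };
     }

   every use of 'n' in the body folds to the constant 4, so the implicit
   capture of 'n' (and its closure field) can be removed by
   prune_lambda_captures once the body has been processed.  */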

/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
        {
          var = DECL_CAPTURED_VARIABLE (decl);
          *walk_subtrees = 0;
        }
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
        slot = t;
    }

  return NULL_TREE;
}

/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
        {
          tree **use = const_vars.get (var);
          if (use && TREE_CODE (**use) == DECL_EXPR)
            {
              /* All uses of this capture were folded away, leaving only the
                 proxy declaration.  */

              /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
              *capp = TREE_CHAIN (cap);

              /* And out of TYPE_FIELDS.  */
              tree field = TREE_PURPOSE (cap);
              while (*fieldp != field)
                fieldp = &DECL_CHAIN (*fieldp);
              *fieldp = DECL_CHAIN (*fieldp);

              /* And remove the capture proxy declaration.  */
              **use = void_node;
              continue;
            }
        }

      capp = &TREE_CHAIN (cap);
    }
}

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"