/* Gimple decl, type, and expression support functions.

   Copyright (C) 2007-2020 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "gimple-ssa.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "demangle.h"
#include "hash-set.h"
#include "rtl.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "attribs.h"
#include "target.h"

/* ----- Type related ----- */

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        a < b does not imply a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offsetted, but copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */
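
/* A few concrete consequences of the checks implemented below, given as
   an illustrative summary derived from the code rather than as normative
   rules:

     - int -> const int (and back) is useless: qualifiers on value types
       are stripped before comparison.
     - int -> unsigned int is not useless: the signedness differs.
     - float -> double is not useless: the machine mode differs.
     - T * -> void * within one address space is useless: after the checks
       on address spaces and function pointers, data pointers of the same
       mode are treated as equivalent.  */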

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
          != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
        return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
           || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
          && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
               || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
        return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
         of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
           != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
          && TYPE_PRECISION (outer_type) != 1)
        return false;

      /* We don't need to preserve changes in the types' minimum or
         maximum value in general as these do not generate code
         unless the types' precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
           && FIXED_POINT_TYPE_P (outer_type))
    return TYPE_SATURATING (inner_type) == TYPE_SATURATING (outer_type);

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE)
    return (known_eq (TYPE_VECTOR_SUBPARTS (inner_type),
                      TYPE_VECTOR_SUBPARTS (outer_type))
            && useless_type_conversion_p (TREE_TYPE (outer_type),
                                          TREE_TYPE (inner_type))
            && targetm.compatible_vector_types_p (inner_type, outer_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
           && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
          != TYPE_REVERSE_STORAGE_ORDER (outer_type))
        return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
        return false;

      /* Conversions from array types with unknown extent to
         array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
        return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
          && (!TYPE_SIZE (inner_type)
              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
                                      TYPE_SIZE (inner_type))))
        return false;
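
      /* Illustrative consequence of the extent checks in this block
         (derived from the code, not a separate rule): converting a value
         of type int[4] to int[] is useless, while converting int[] to
         int[4] is not, because the target would claim a known extent
         that the source does not provide.  */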

      /* Check conversions between arrays with partially known extents.
         If the array min/max values are constant they have to match.
         Otherwise allow conversions to unknown and variable extents.
         In particular this declares conversions that may change the
         mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type)
          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
        {
          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

          /* After gimplification a variable min/max value carries no
             additional information compared to a NULL value.  All that
             matters has been lowered to be part of the IL.  */
          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
            inner_min = NULL_TREE;
          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
            outer_min = NULL_TREE;
          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
            inner_max = NULL_TREE;
          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
            outer_max = NULL_TREE;

          /* Conversions NULL / variable <- cst are useless, but not
             the other way around.  */
          if (outer_min
              && (!inner_min
                  || !tree_int_cst_equal (inner_min, outer_min)))
            return false;
          if (outer_max
              && (!inner_max
                  || !tree_int_cst_equal (inner_max, outer_max)))
            return false;
        }

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
            || TREE_CODE (inner_type) == METHOD_TYPE)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type)))
        return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
                                         TYPE_METHOD_BASETYPE (inner_type)))
        return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
        return true;

      /* If the unqualified argument types are compatible the conversion
         is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
        return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
           inner_parm = TYPE_ARG_TYPES (inner_type);
           outer_parm && inner_parm;
           outer_parm = TREE_CHAIN (outer_parm),
           inner_parm = TREE_CHAIN (inner_parm))
        if (!useless_type_conversion_p
              (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
               TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
          return false;

      /* If there is a mismatch in the number of arguments the functions
         are not compatible.  */
      if (outer_parm || inner_parm)
        return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
        return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }
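
  /* Illustrative consequence of the function-type checks above (derived
     from the code, not a separate rule): converting a prototyped function
     type such as "int (int)" to the unprototyped "int ()" is useless,
     while the reverse is not, because the prototyped target would impose
     argument checks the source never guaranteed.  */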

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that need to be structurally
     compared.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
           && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
           && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type))
           && useless_type_conversion_p
                (TYPE_OFFSET_BASETYPE (outer_type),
                 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}


/* ----- Decl related ----- */

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl)
          || (fn && fn->cfg && !(fn->curr_properties & PROP_rtl)));
}

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (HAS_DECL_ASSEMBLER_NAME_P (decl) && DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      const char *mangled_str
        = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
      const char *str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return str ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}


/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
  if (DECL_USER_ALIGN (var))
    {
      SET_DECL_ALIGN (copy, DECL_ALIGN (var));
      DECL_USER_ALIGN (copy) = 1;
    }

  return copy;
}
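
/* Illustrative sketch of how copy_var_decl composes with the helpers
   below (a hypothetical caller, not part of this file's interface):

     tree copy = copy_var_decl (orig_var,
                                create_tmp_var_name ("copy"),
                                TREE_TYPE (orig_var));

   The copy inherits the flags listed above but receives no DECL_INITIAL
   and is not added to any binding automatically.  */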

/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (like ".cpp".)  */

static inline void
remove_suffix (char *name, int len)
{
  int i;

  for (i = 2; i < 7 && len > i; i++)
    {
      if (name[len - i] == '.')
        {
          name[len - i] = '\0';
          break;
        }
    }
}

/* Create a new temporary name with PREFIX.  Return an identifier.  */

static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}

/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
                        VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
                        type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;
  /* And we don't want even the fancy names of those printed in
     -fdump-final-insns= dumps.  */
  DECL_NAMELESS (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE.  DO push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types is a bug.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject all variable-size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     the point at which it really matters, and which is possibly reached via
     paths not going through this function, e.g. after direct calls to
     create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
   temporary as a gimple register.  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}
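
/* Illustrative sketch (a hypothetical use during gimplification, not part
   of this file's interface): obtain a scratch temporary that can become
   an SSA name:

     tree accum = create_tmp_reg (integer_type_node, "accum");

   The result is DECL_ARTIFICIAL, ignored for debug info, and already
   registered with the current function via gimple_add_tmp_var.  */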

/* Create a new temporary variable declaration of type TYPE in function FN
   by calling create_tmp_var_raw and gimple_add_tmp_var_fn; if TYPE is a
   vector or a complex number, mark the new temporary as a gimple
   register.  */

tree
create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var_fn (fn, tmp);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}


/* ----- Expression related ----- */

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p, tree *op3_p)
{
  *subcode_p = TREE_CODE (expr);
  switch (get_gimple_rhs_class (*subcode_p))
    {
    case GIMPLE_TERNARY_RHS:
      {
        *op1_p = TREE_OPERAND (expr, 0);
        *op2_p = TREE_OPERAND (expr, 1);
        *op3_p = TREE_OPERAND (expr, 2);
        break;
      }
    case GIMPLE_BINARY_RHS:
      {
        *op1_p = TREE_OPERAND (expr, 0);
        *op2_p = TREE_OPERAND (expr, 1);
        *op3_p = NULL_TREE;
        break;
      }
    case GIMPLE_UNARY_RHS:
      {
        *op1_p = TREE_OPERAND (expr, 0);
        *op2_p = NULL_TREE;
        *op3_p = NULL_TREE;
        break;
      }
    case GIMPLE_SINGLE_RHS:
      {
        *op1_p = expr;
        *op2_p = NULL_TREE;
        *op3_p = NULL_TREE;
        break;
      }
    default:
      gcc_unreachable ();
    }
}

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (COMPARISON_CLASS_P (cond)
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));
  gcc_checking_assert (!tree_could_throw_p (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Helper for is_gimple_condexpr and is_gimple_condexpr_for_cond.  */

static bool
is_gimple_condexpr_1 (tree t, bool allow_traps, bool allow_cplx)
{
  tree op0;
  return (is_gimple_val (t)
          || (COMPARISON_CLASS_P (t)
              && (allow_traps || !tree_could_throw_p (t))
              && ((op0 = TREE_OPERAND (t, 0)), true)
              && (allow_cplx || TREE_CODE (TREE_TYPE (op0)) != COMPLEX_TYPE)
              && is_gimple_val (op0)
              && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  /* Always split out _Complex type compares since complex lowering
     doesn't handle this case.  */
  return is_gimple_condexpr_1 (t, true, false);
}
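
/* Illustrative examples for the condition predicates (derived from
   is_gimple_condexpr_1 above, not additional rules): "a_1 < b_2" and a
   plain SSA name "x_3" qualify as GIMPLE conditions, "a_1 < b_2 + 1"
   does not because its right operand is not a gimple value, and a
   comparison of _Complex operands is rejected here so that it gets
   split out before complex lowering.  */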

/* Like is_gimple_condexpr, but does not allow T to trap.  */

bool
is_gimple_condexpr_for_cond (tree t)
{
  return is_gimple_condexpr_1 (t, false, true);
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op)
      || TREE_CODE (op) == TARGET_MEM_REF
      || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}
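
/* Illustrative examples for the invariant predicates above (derived from
   their checks, not additional rules): the address of a file-scope
   variable or of a string literal is both a minimal and an
   interprocedural invariant; "&local_var" is a minimal invariant within
   its function but not an IP invariant; "&p->field" is neither, since it
   depends on the runtime value of p.  */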

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that they might imply.  All
     around, it seems safest to not do too much optimization with these
     at the tree level at all.  We'll have to rely on the rtl optimizers
     to clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}


/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}
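
/* Illustrative examples for is_gimple_mem_ref_addr (derived from the
   checks above, not additional rules): an SSA pointer such as "ptr_1",
   an integer constant, and the address of a file-scope variable are
   valid MEM_REF addresses, while the address of a component reference
   like "&s.field" is not and must first be computed into a register.  */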

/* Hold trees marked addressable during expand.  */

static hash_set<tree> *mark_addressable_queue;

/* Mark X as addressable or queue it up if called during expand.  We
   don't want to apply it immediately during expand because decls are
   made addressable at that point due to RTL-only concerns, such as
   uses of memcpy for block moves, and TREE_ADDRESSABLE changes
   is_gimple_reg, which might make it seem like a variable that used
   to be a gimple_reg shouldn't have been an SSA name.  So we queue up
   this flag setting and only apply it when we're done with GIMPLE and
   only RTL issues matter.  */

static void
mark_addressable_1 (tree x)
{
  if (!currently_expanding_to_rtl)
    {
      TREE_ADDRESSABLE (x) = 1;
      return;
    }

  if (!mark_addressable_queue)
    mark_addressable_queue = new hash_set<tree>();
  mark_addressable_queue->add (x);
}

/* Adaptor for mark_addressable_1 for use in hash_set traversal.  */

bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  mark_addressable_1 (x);
  return false;
}

/* Mark all queued trees as addressable, and empty the queue.  To be
   called right after clearing CURRENTLY_EXPANDING_TO_RTL.  */

void
flush_mark_addressable_queue ()
{
  gcc_assert (!currently_expanding_to_rtl);
  if (mark_addressable_queue)
    {
      mark_addressable_queue->traverse<void*, mark_addressable_2> (NULL);
      delete mark_addressable_queue;
      mark_addressable_queue = NULL;
    }
}

/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */

void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (!VAR_P (x)
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  mark_addressable_1 (x);

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
      if (namep)
        mark_addressable_1 (*namep);
    }
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

#include "gt-gimple-expr.h"