/* Implements exception handling.
   Copyright (C) 1989-2017 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

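   (As a concrete illustration, not verbatim compiler output: a C++
   fragment such as "try { foo (); } catch (E &) { bar (); }" becomes
   a GIMPLE_TRY / GIMPLE_CATCH pair after gimplification, acquires an
   ERT_TRY region with an EH_LANDING_PAD during pass_lower_eh, and has
   its EH_DISPATCH expanded into a comparison of __builtin_eh_filter
   against the integer assigned to E during pass_lower_eh_dispatch.)
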
   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "calls.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"

static GTY(()) int call_site_base;

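/* Map a front-end type to the runtime type object produced for it by
   the lang_hooks.eh_runtime_type langhook; maintained by
   add_type_for_runtime and queried by lookup_type_for_runtime below.  */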
static GTY (()) hash_map<tree_hash, tree> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;


struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

     0: null action record, non-null landing pad; implies cleanups
    -1: null action record, null landing pad; implies no action
    -2: no call-site entry; implies must_not_throw
    -3: we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
                             const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
                                           eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
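  /* For reference, the runtime's structure in unwind-sjlj.c looks
     roughly like the following sketch (the jump-buffer member varies
     with the configuration):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  ...jump buffer...
	};  */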
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                                (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
         least 3 pointers - stack pointer, frame pointer and return address.
         Plus for some targets we need room for an extra pointer - in the
         case of MIPS this is the global pointer.  This makes a total of four
         pointers, but to be safe we actually allocate room for 5.

         If pointers are smaller than words then we allocate enough room for
         5 words, just in case the backend needs this much room.  For more
         discussion on this issue see:
         http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
        tmp = size_int (5 - 1);
      else
        tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

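/* Register a catch handler for TYPE_OR_LIST (a single type, a TREE_LIST
   of types, or NULL for a catch-all) with the ERT_TRY region T,
   appending it to T's chain of catches.  */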
eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

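/* Accessors mapping the indices stored in region_array and lp_array
   back to eh_region and eh_landing_pad objects, either for an explicit
   function IFUN or for cfun.  */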
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}

/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc
        = LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl
        = old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   As a special case, a COPY_REGION of NULL means copy all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  auto_sbitmap b_outer (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  return region_a;
}

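/* Record TYPE in type_to_runtime_map, creating its runtime type object
   via the lang_hooks.eh_runtime_type langhook on first use.  */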
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
                                           INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

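/* Note the two encodings: ttype filter values from add_ttypes_entry are
   positive 1-based table indices, while the exception-specification
   filter values produced by add_ehspec_entry below are negative,
   -1-based indices into the ehspec data buffer.  */
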
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
                  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
        len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
        len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

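/* For instance, a function whose only handlers are "catch (A)" followed
   by "catch (B)" ends up with A mapped to filter value 1 and B to 2, in
   the order the types are first seen (an illustrative example, not tied
   to any particular testcase).  */
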
void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or true list), we build a list
                 of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt
                        = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (&ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }
}

/* Emit SEQ into a basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}

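/* Indexed by landing pad number: the call-site value assigned to each
   landing pad by sjlj_assign_call_site_values below.  */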
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (&ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           handles no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        sjlj_lp_call_site_index[i] = call_site;

        disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
         be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
        {
          rtx buf_addr;

          start_sequence ();
          buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
                                    sjlj_fc_jbuf_ofs);
          expand_builtin_update_setjmp_buf (buf_addr);
          p = get_insns ();
          end_sequence ();
          emit_insn_before (p, insn);
        }

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

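/* The net effect is that every potentially-throwing insn is preceded by
   a store of its call-site index into the function context, conceptually
   "fc.call_site = N; call foo;", so the runtime can tell which handler
   applies when the setjmp returns (a sketch, not actual emitted RTL).  */
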
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree fn = builtin_decl_implicit (BUILT_IN_SETJMP);
      tree call_expr = build_call_expr (fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq,
                         single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

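/* Emit the call to unwind_sjlj_unregister_libfunc at the point recorded
   by sjlj_emit_function_exit_after.  */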
static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

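/* Build the dispatch code run when the setjmp in the function context
   returns nonzero: reload the exception pointer and filter value from
   the context and branch on the recorded call-site index to the
   matching post-landing-pad.  */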
static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
                        sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        rtx_insn *seq2;
        rtx_code_label *label;

        start_sequence ();

        lp->landing_pad = dispatch_label;

        if (num_dispatch > 1)
          {
            tree t_label, case_elt, t;

            t_label = create_artificial_label (UNKNOWN_LOCATION);
            t = build_int_cst (integer_type_node, disp_index);
            case_elt = build_case_label (t, NULL, t_label);
            dispatch_labels.quick_push (case_elt);
            label = jump_target_rtx (t_label);
          }
        else
          label = gen_label_rtx ();

        if (disp_index == 0)
          first_reachable_label = label;
        emit_label (label);

        r = lp->region;
        if (r->exc_ptr_reg)
          emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
        if (r->filter_reg)
          emit_move_insn (r->filter_reg, filter_reg);

        seq2 = get_insns ();
        end_sequence ();

        rtx_insn *before = label_rtx (lp->post_landing_pad);
        bb = emit_to_new_bb_before (seq2, before);
        e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
        e->count = bb->count;
        e->probability = REG_BR_PROB_BASE;
        if (current_loops)
          {
            struct loop *loop = bb->next_bb->loop_father;
            /* If we created a pre-header block, add the new block to the
               outer loop, otherwise to the loop itself.  */
            if (bb->next_bb == loop->header)
              add_bb_to_loop (bb, loop_outer (loop));
            else
              add_bb_to_loop (bb, loop);
            /* ??? For multiple dispatches we will end up with edges
               from the loop tree root into this loop, making it a
               multiple-entry loop.  Discard all affected loops.  */
            if (num_dispatch > 1)
              {
                for (loop = bb->loop_father;
                     loop_outer (loop); loop = loop_outer (loop))
                  mark_loop_for_removal (loop);
              }
          }

        disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
                                 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
         outermost (non-)loop.  */
      if (current_loops)
        add_bb_to_loop (bb, current_loops->tree_root);
    }
}

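/* Entry point for SJLJ landing pad generation: number the call sites,
   allocate the function context on the stack, and emit the function
   enter, dispatch, and exit code.  */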
static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_EH)
          break;

      /* We should not have generated any new throwing insns during this
         pass, and we should not have lost any EH edges, so we only need
         to handle two cases here:
         (1) reachable handler and an existing edge to post-landing-pad,
         (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
        {
          gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

          redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
          e->flags |= (CALL_P (BB_END (bb))
                       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
                       : EDGE_ABNORMAL);
        }
    }
}

/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
        EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
        {
          p->outer = outer;
          pp = &p->next_peer;
          p = *pp;
        }
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
        remove_eh_handler_splicer (pp);
      else
        pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splicing out regions
   that are not marked.  By removing regions from the leaves, we avoid
   costly searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
        {
          rtx_code_label *lab = lp->landing_pad;
          if (lab && LABEL_P (lab))
            (*callback) (lab);
        }
    }
}

/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

   ??? This difference probably ought to be avoided.  We could stand
   to record nothrow for arbitrary gimple statements, and so avoid
   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   no-nonlocal-goto property should be recorded elsewhere as a bit
   on the call_insn directly.  Perhaps we should make more use of
   attaching the trees to call_insns (reachable via symbol_ref in
   direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}

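/* So, for example, a call that may reach landing pad 3 carries a
   REG_EH_REGION note with value 3, a call known to sit inside
   MUST_NOT_THROW region 2 carries -2, and a nothrow call gets value 0
   (illustrative values).  */
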
/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  else if (is_a <rtx_insn *> (note_or_insn))
    return;
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
        && insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */
void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  else if (is_a <rtx_insn *> (note_or_insn))
    return;
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}


/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
                               eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}

/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}

/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}

/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
        if (can_throw_external (seq->element (i)))
          return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}

/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (!insn_nothrow_p (seq->element (i)))
	  return false;

      return true;
    }

  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}

/* Return true if INSN can perform a non-local goto.  */
/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */

bool
can_nonlocal_goto (const rtx_insn *insn)
{
  if (nonlocal_goto_handler_labels && CALL_P (insn))
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
	return true;
    }
  return false;
}
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}

namespace {

const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_set_nothrow_function_flags : public rtl_opt_pass
{
public:
  pass_set_nothrow_function_flags (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return set_nothrow_function_flags ();
    }

}; // class pass_set_nothrow_function_flags

} // anon namespace

rtl_opt_pass *
make_pass_set_nothrow_function_flags (gcc::context *ctxt)
{
  return new pass_set_nothrow_function_flags (ctxt);
}


/* Various hooks for unwind library.  */

/* Expand the EH support builtin functions:
   __builtin_eh_pointer and __builtin_eh_filter.  */

static eh_region
expand_builtin_eh_common (tree region_nr_t)
{
  HOST_WIDE_INT region_nr;
  eh_region region;

  gcc_assert (tree_fits_shwi_p (region_nr_t));
  region_nr = tree_to_shwi (region_nr_t);

  region = (*cfun->eh->region_array)[region_nr];

  /* ??? We shouldn't have been able to delete an eh region without
     deleting all the code that depended on it.  */
  gcc_assert (region != NULL);

  return region;
}

/* Expand to the exc_ptr value from the given eh region.  */

rtx
expand_builtin_eh_pointer (tree exp)
{
  eh_region region
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  if (region->exc_ptr_reg == NULL)
    region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
  return region->exc_ptr_reg;
}

/* Expand to the filter value from the given eh region.  */

rtx
expand_builtin_eh_filter (tree exp)
{
  eh_region region
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  if (region->filter_reg == NULL)
    region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return region->filter_reg;
}
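
/* For illustration (a sketch; the SSA names are hypothetical): after
   pass_lower_eh_dispatch a handler may contain gimple such as

     ptr_7 = __builtin_eh_pointer (3);
     filt_8 = __builtin_eh_filter (3);

   The two expanders above then return (creating on first use) the
   exc_ptr and filter pseudo registers of EH region 3, so every
   reference to region 3's exception data shares one pseudo.  */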
/* Copy the exc_ptr and filter values from one landing pad's registers
   to another.  This is used to inline the resx statement.  */

rtx
expand_builtin_eh_copy_values (tree exp)
{
  eh_region dst
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  eh_region src
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
  machine_mode fmode = targetm.eh_return_filter_mode ();

  if (dst->exc_ptr_reg == NULL)
    dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
  if (src->exc_ptr_reg == NULL)
    src->exc_ptr_reg = gen_reg_rtx (ptr_mode);

  if (dst->filter_reg == NULL)
    dst->filter_reg = gen_reg_rtx (fmode);
  if (src->filter_reg == NULL)
    src->filter_reg = gen_reg_rtx (fmode);

  emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
  emit_move_insn (dst->filter_reg, src->filter_reg);

  return const0_rtx;
}

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  SETUP_FRAME_ADDRESSES ();
}

/* Map a non-negative number to an eh return data register number; expands
   to -1 if no return data register is associated with the input number.
   At least the inputs 0 and 1 must be mapped; the target may provide more.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_to_uhwi (which);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}

/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
  rtx mask = MASK_RETURN_ADDR;
  if (mask)
    expand_and (Pmode, addr, mask, addr);

  /* Then adjust to find the real return address.  */
  if (RETURN_ADDR_OFFSET)
    addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);

  return addr;
}
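
/* In effect, the extraction above computes
   (raw & MASK_RETURN_ADDR) + RETURN_ADDR_OFFSET, and the frobbing
   below applies the inverse offset, addr - RETURN_ADDR_OFFSET, so on
   targets where the mask is a no-op the two operations round-trip.  */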
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  if (RETURN_ADDR_OFFSET)
    {
      addr = force_reg (Pmode, addr);
      addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
    }

  return addr;
}

/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}

/* Expand __builtin_eh_return.  This exit path from the function loads up
   the eh return data registers, adjusts the stack, and branches to a
   given PC other than the normal return address.  */

void
expand_eh_return (void)
{
  rtx_code_label *around_label;

  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  if (targetm.have_eh_return ())
    emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
  else
    {
      if (rtx handler = EH_RETURN_HANDLER_RTX)
	emit_move_insn (handler, crtl->eh.ehr_handler);
      else
	error ("__builtin_eh_return not supported on this target");
    }

  emit_label (around_label);
}
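
/* A sketch of the code emitted by the two functions above for a
   function that uses __builtin_eh_return (label names illustrative):

       ...body; each __builtin_eh_return stores its operands into
	  ehr_stackadj/ehr_handler and jumps to .Lehr...
       EH_RETURN_STACKADJ_RTX = 0
       goto .Laround
     .Lehr:
       clobber return register
       EH_RETURN_STACKADJ_RTX = ehr_stackadj
       eh_return pattern, or EH_RETURN_HANDLER_RTX = ehr_handler
     .Laround:
       ...normal epilogue...  */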
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this
     also for consistency.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}

static int
add_action_record (action_hash_type *ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = ar_hash->find_slot (&tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = crtl->eh.action_record_data->length () + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1-based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= crtl->eh.action_record_data->length () + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
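
/* Worked example of the encoding above, starting from an empty
   action_record_data vector:

     add_action_record (h, 1, 0) -> offset 1, pushes bytes 0x01 0x00
	(filter 1, next 0 = end of chain);
     add_action_record (h, 2, 1) -> offset 3, pushes bytes 0x02 0x7d
	(filter 2; the table is 3 bytes long plus 1 when the link is
	pushed, so the absolute offset 1 becomes the displacement
	1 - 4 = -3, whose sleb128 encoding is the single byte 0x7d).  */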
static int
collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record
		   for each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we want
		       to encode these states in the call-site record
		       directly.  Add a cleanup action to the chain to
		       catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we want to encode
	 these states in the call-site record directly.  Add a cleanup
	 action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
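
/* Summary of collect_one_action_chain's return values, for the
   benefit of convert_to_eh_region_ranges below:

     -1 : no actions and no landing pad required
     -2 : MUST_NOT_THROW; no call-site entry, but an lsda is required
      0 : cleanup-only chain, compressed to the zero action
     >0 : 1-based offset of the chain within action_record_data

   The magic -3 never escapes; it marks the not-yet-done outer search
   in the ERT_TRY case, and "no action seen yet" in the converter.  */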
static int
add_call_site (rtx landing_pad, int action, int section)
{
  call_site_record record;

  record = ggc_alloc<call_site_record_d> ();
  record->landing_pad = landing_pad;
  record->action = action;

  vec_safe_push (crtl->eh.call_site_record_v[section], record);

  return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
}

static rtx_note *
emit_note_eh_region_end (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);

  /* Make sure we do not split a call and its corresponding
     CALL_ARG_LOCATION note.  */
  if (next && NOTE_P (next)
      && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
    insn = next;

  return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}

/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn;
  rtx_insn *iter;
  rtx_note *note;
  action_hash_type ar_hash (31);
  int last_action = -3;
  rtx_insn *last_action_insn = NULL;
  rtx last_landing_pad = NULL_RTX;
  rtx_insn *first_no_action_insn = NULL;
  int call_site = 0;
  int cur_sec = 0;
  rtx_insn *section_switch_note = NULL;
  rtx_insn *first_no_action_insn_before_switch = NULL;
  rtx_insn *last_no_action_insn_before_switch = NULL;
  int saved_call_site_base = call_site_base;

  vec_alloc (crtl->eh.action_record_data, 64);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx_code_label *this_landing_pad;

	insn = iter;
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (&ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note
		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL;
		last_no_action_insn_before_switch = NULL;
		call_site_base++;
	      }
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL;
		  }

		note = emit_note_eh_region_end (last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	if (crtl->eh.call_site_record_v[cur_sec])
	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_eh_region_end (last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  return 0;
}

namespace {

const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return convert_to_eh_region_ranges ();
    }

}; // class pass_convert_to_eh_region_ranges

bool
pass_convert_to_eh_region_ranges::gate (function *)
{
  /* Nothing to do for SJLJ exceptions or if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return false;
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    return false;
  return true;
}

} // anon namespace

rtl_opt_pass *
make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
{
  return new pass_convert_to_eh_region_ranges (ctxt);
}

static void
push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      vec_safe_push (*data_area, byte);
    }
  while (value);
}
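
/* Worked examples of the LEB128 encodings used here: push_uleb128
   above encodes 624485 as 0xe5 0x8e 0x26 (low-order 7-bit groups
   first, high bit set on all bytes but the last); push_sleb128 below
   encodes -2 as the single byte 0x7e, since after one group the
   remaining value is -1 and the byte's sign bit (0x40) is set.  */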
static void
push_sleb128 (vec<uchar, va_gc> **data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      vec_safe_push (*data_area, byte);
    }
  while (more);
}


static int
dw2_size_of_call_site_table (int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	(*crtl->eh.call_site_record_v[section])[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	(*crtl->eh.call_site_record_v[0])[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	(*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

static void
sjlj_output_call_site_table (void)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
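
/* A sketch of one call-site entry as emitted by
   dw2_output_call_site_table above in the DW_EH_PE_udata4 format
   (label names illustrative), matching the 4 + 4 + 4 bytes plus a
   uleb128 action counted in dw2_size_of_call_site_table:

       .long    .LEHB0-.Lfunc_begin	region 0 start
       .long    .LEHE0-.LEHB0		length
       .long    .L7-.Lfunc_begin	landing pad, or 0 if none
       .uleb128 0x1			action: 0 = none, else 1-based
					offset into the action table

   The SJLJ variant instead emits two uleb128s per entry: the constant
   landing-pad dispatch value and the action.  */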
/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  int tt_format =
	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	  flags = ((! flag_pic
		    || ((tt_format & 0x70) != DW_EH_PE_absptr
			&& (tt_format & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections
	      || (DECL_COMDAT_GROUP (current_function_decl)
		  && HAVE_COMDAT_GROUP))
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      /* The EH table must match the code section, so only mark
		 it linkonce if we have COMDAT groups to tie them
		 together.  */
	      if (DECL_COMDAT_GROUP (current_function_decl)
		  && HAVE_COMDAT_GROUP)
		flags |= SECTION_LINKONCE;
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, current_function_decl);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
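
/* For illustration: with a C++ handler such as "catch (Foo &)", the
   @TType entry emitted by output_ttype below is a reference to Foo's
   type_info object (the _ZTI3Foo symbol, to use an example mangling),
   encoded per TT_FORMAT; a catch-all handler contributes a zero
   entry.  */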
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used
   for the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang_data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (VAR_P (type))
	    is_public = TREE_PUBLIC (type);
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}

static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
  int call_site_len;
  int have_tt_data;
  int tt_format_size = 0;

  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
		  || (targetm.arm_eabi_unwinder
		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
		      : vec_safe_length (cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
      if (HAVE_AS_LEB128)
	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
				     section ? "LLSDATTC" : "LLSDATT",
				     current_function_funcdef_no);

      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

  if (!HAVE_AS_LEB128)
    {
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	call_site_len = sjlj_size_of_call_site_table ();
      else
	call_site_len = dw2_size_of_call_site_table (section);
    }

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
      if (HAVE_AS_LEB128)
	{
	  char ttype_after_disp_label[32];
	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
				       section ? "LLSDATTDC" : "LLSDATTD",
				       current_function_funcdef_no);
	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
					"@TType base offset");
	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
	}
      else
	{
	  /* Ugh.  Alignment complicates things.  */
	  unsigned int before_disp, after_disp, last_disp, disp;

	  before_disp = 1 + 1;
	  after_disp = (1 + size_of_uleb128 (call_site_len)
			+ call_site_len
			+ vec_safe_length (crtl->eh.action_record_data)
			+ (vec_safe_length (cfun->eh->ttype_data)
			   * tt_format_size));

	  disp = after_disp;
	  do
	    {
	      unsigned int disp_size, pad;

	      last_disp = disp;
	      disp_size = size_of_uleb128 (disp);
	      pad = before_disp + disp_size + after_disp;
	      if (pad % tt_format_size)
		pad = tt_format_size - (pad % tt_format_size);
	      else
		pad = 0;
	      disp = after_disp + pad;
	    }
	  while (disp != last_disp);

	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
	}
    }

  /* Indicate the format of the call-site offsets.  */
  if (HAVE_AS_LEB128)
    cs_format = DW_EH_PE_uleb128;
  else
    cs_format = DW_EH_PE_udata4;

  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

  if (HAVE_AS_LEB128)
    {
      ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
				   section ? "LLSDACSBC" : "LLSDACSB",
				   current_function_funcdef_no);
      ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
				   section ? "LLSDACSEC" : "LLSDACSE",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				    "Call-site table length");
      ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	sjlj_output_call_site_table ();
      else
	dw2_output_call_site_table (cs_format, section);
      ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
    }
  else
    {
      dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	sjlj_output_call_site_table ();
      else
	dw2_output_call_site_table (cs_format, section);
    }

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = vec_safe_length (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = (*cfun->eh->ttype_data)[i];
      output_ttype (type, tt_format, tt_format_size);
    }

  if (HAVE_AS_LEB128 && have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      tree type;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
	output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      uchar uc;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
	dw2_asm_output_data (1, uc,
			     i ? NULL : "Exception specification table");
    }
}
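
/* Overall shape of the LSDA emitted above, for reference:

     1 byte	@LPStart format (DW_EH_PE_omit, so @LPStart == @Start)
     1 byte	@TType format (DW_EH_PE_omit when there is no type data)
     uleb128	@TType base offset (only when type data is present)
     1 byte	call-site format (uleb128 or udata4)
     uleb128	call-site table length
     ...	call-site table
     ...	action record table (sleb128 filter/next pairs)
     ...	@TType table, emitted in reverse, then the
		exception-specification data.  */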
void
output_function_exception_table (const char *fnname)
{
  rtx personality = get_personality_function (current_function_decl);

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (personality)
    {
      assemble_external_libcall (personality);

      if (targetm.asm_out.emit_except_personality)
	targetm.asm_out.emit_except_personality (personality);
    }

  switch_to_exception_section (fnname);

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.emit_except_table_label (asm_out_file);

  output_one_function_exception_table (0);
  if (crtl->eh.call_site_record_v[1])
    output_one_function_exception_table (1);

  switch_to_section (current_function_section ());
}

void
set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
  fun->eh->throw_stmt_table = table;
}

hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Determine if the function needs an EH personality function.  */

enum eh_personality_kind
function_needs_eh_personality (struct function *fn)
{
  enum eh_personality_kind kind = eh_personality_none;
  eh_region i;

  FOR_ALL_EH_REGION_FN (i, fn)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	  /* Can do with any personality including the generic C one.  */
	  kind = eh_personality_any;
	  break;

	case ERT_TRY:
	case ERT_ALLOWED_EXCEPTIONS:
	  /* Always needs an EH personality function.  The generic C
	     personality doesn't handle these even for empty type lists.  */
	  return eh_personality_lang;

	case ERT_MUST_NOT_THROW:
	  /* Always needs an EH personality function.  The language may
	     specify which abort routine must be used, e.g. std::terminate.  */
	  return eh_personality_lang;
	}
    }

  return kind;
}
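
/* An illustrative fragment of dump_eh_tree output (indices, labels and
   the exact rendering vary with the IR and the source):

     Eh tree:
	1 try land:{1,<L0>} catch:{struct Foo}
	  2 cleanup land:{2,<L1>}  */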
"(del)" : ""); 3224 } 3225 else 3226 fprintf (out, "(nil)}"); 3227 if (lp->next_lp) 3228 fputc (',', out); 3229 } 3230 } 3231 } 3232 3233 switch (i->type) 3234 { 3235 case ERT_CLEANUP: 3236 case ERT_MUST_NOT_THROW: 3237 break; 3238 3239 case ERT_TRY: 3240 { 3241 eh_catch c; 3242 fprintf (out, " catch:"); 3243 for (c = i->u.eh_try.first_catch; c; c = c->next_catch) 3244 { 3245 fputc ('{', out); 3246 if (c->label) 3247 { 3248 fprintf (out, "lab:"); 3249 print_generic_expr (out, c->label, 0); 3250 fputc (';', out); 3251 } 3252 print_generic_expr (out, c->type_list, 0); 3253 fputc ('}', out); 3254 if (c->next_catch) 3255 fputc (',', out); 3256 } 3257 } 3258 break; 3259 3260 case ERT_ALLOWED_EXCEPTIONS: 3261 fprintf (out, " filter :%i types:", i->u.allowed.filter); 3262 print_generic_expr (out, i->u.allowed.type_list, 0); 3263 break; 3264 } 3265 fputc ('\n', out); 3266 3267 /* If there are sub-regions, process them. */ 3268 if (i->inner) 3269 i = i->inner, depth++; 3270 /* If there are peers, process them. */ 3271 else if (i->next_peer) 3272 i = i->next_peer; 3273 /* Otherwise, step back up the tree to the next peer. */ 3274 else 3275 { 3276 do 3277 { 3278 i = i->outer; 3279 depth--; 3280 if (i == NULL) 3281 return; 3282 } 3283 while (i->next_peer == NULL); 3284 i = i->next_peer; 3285 } 3286 } 3287 } 3288 3289 /* Dump the EH tree for FN on stderr. */ 3290 3291 DEBUG_FUNCTION void 3292 debug_eh_tree (struct function *fn) 3293 { 3294 dump_eh_tree (stderr, fn); 3295 } 3296 3297 /* Verify invariants on EH datastructures. */ 3298 3299 DEBUG_FUNCTION void 3300 verify_eh_tree (struct function *fun) 3301 { 3302 eh_region r, outer; 3303 int nvisited_lp, nvisited_r; 3304 int count_lp, count_r, depth, i; 3305 eh_landing_pad lp; 3306 bool err = false; 3307 3308 if (!fun->eh->region_tree) 3309 return; 3310 3311 count_r = 0; 3312 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i) 3313 if (r) 3314 { 3315 if (r->index == i) 3316 count_r++; 3317 else 3318 { 3319 error ("region_array is corrupted for region %i", r->index); 3320 err = true; 3321 } 3322 } 3323 3324 count_lp = 0; 3325 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i) 3326 if (lp) 3327 { 3328 if (lp->index == i) 3329 count_lp++; 3330 else 3331 { 3332 error ("lp_array is corrupted for lp %i", lp->index); 3333 err = true; 3334 } 3335 } 3336 3337 depth = nvisited_lp = nvisited_r = 0; 3338 outer = NULL; 3339 r = fun->eh->region_tree; 3340 while (1) 3341 { 3342 if ((*fun->eh->region_array)[r->index] != r) 3343 { 3344 error ("region_array is corrupted for region %i", r->index); 3345 err = true; 3346 } 3347 if (r->outer != outer) 3348 { 3349 error ("outer block of region %i is wrong", r->index); 3350 err = true; 3351 } 3352 if (depth < 0) 3353 { 3354 error ("negative nesting depth of region %i", r->index); 3355 err = true; 3356 } 3357 nvisited_r++; 3358 3359 for (lp = r->landing_pads; lp ; lp = lp->next_lp) 3360 { 3361 if ((*fun->eh->lp_array)[lp->index] != lp) 3362 { 3363 error ("lp_array is corrupted for lp %i", lp->index); 3364 err = true; 3365 } 3366 if (lp->region != r) 3367 { 3368 error ("region of lp %i is wrong", lp->index); 3369 err = true; 3370 } 3371 nvisited_lp++; 3372 } 3373 3374 if (r->inner) 3375 outer = r, r = r->inner, depth++; 3376 else if (r->next_peer) 3377 r = r->next_peer; 3378 else 3379 { 3380 do 3381 { 3382 r = r->outer; 3383 if (r == NULL) 3384 goto region_done; 3385 depth--; 3386 outer = r->outer; 3387 } 3388 while (r->next_peer == NULL); 3389 r = r->next_peer; 3390 } 3391 } 3392 region_done: 3393 if 
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}

#include "gt-except.h"