/* Implements exception handling.
   Copyright (C) 1989-2017 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
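
/* An illustrative sketch of the pipeline above (not itself part of the
   implementation): for a C++ fragment such as

	try { foo (); } catch (A &) { bar (); }

   the front end emits a TRY_CATCH_EXPR, gimplification lowers it to a
   GIMPLE_TRY/GIMPLE_CATCH pair, pass_lower_eh records an ERT_TRY region
   with a landing pad for the catch body, pass_lower_eh_dispatch assigns
   a filter value to type A, and finish_eh_generation together with
   output_function_exception_table emits the landing-pad code and the
   call-site tables consumed by the runtime.  */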


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "calls.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"

static GTY(()) int call_site_base;

static GTY (()) hash_map<tree_hash, tree> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;


struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif
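      /* Worked arithmetic for the non-DONT_USE_BUILTIN_SETJMP case above
	 (illustrative): with BITS_PER_WORD == 64 and POINTER_SIZE == 32,
	 the else arm reserves 5 * 64 / 32 == 10 pointer slots, i.e. the
	 promised five words' worth of space.  */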

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure the zeroth entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}

/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  auto_sbitmap b_outer (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  return region_a;
}
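
/* For example (illustrative): if REGION_A is nested somewhere inside
   REGION_B, the first loop above marks REGION_B and everything outer
   to it, so the second loop's walk up from REGION_A stops at REGION_B
   itself and returns it.  */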

void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}
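
/* An aside (hedged): what eh_runtime_type returns is entirely
   language-specific; for the C++ front end it is, loosely speaking, a
   reference to the type's std::type_info object, which the unwinder
   uses for type matching.  The map above merely memoizes that
   conversion so each type is converted once.  */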


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

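  /* Mixing note (an observation, assuming the usual 32-bit hashval_t):
     (h << 5) + (h >> 27) below behaves like a 5-bit left rotate
     combined with addition.  */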
  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					  INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
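
/* A worked example (illustrative): for an empty specification list,
   the encoding loop above pushes nothing, only the terminating 0 byte
   is emitted, and with LEN == 0 the returned filter is -(0 + 1) == -1,
   pointing at the first byte of the buffer.  */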

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}
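
/* An illustrative sketch: if the first region processed is a try with
   catch clauses for types A and then B, add_ttypes_entry hands out the
   1-based filter values 1 and 2 in insertion order, and each catch's
   filter_list ends up holding the corresponding INTEGER_CST.  */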

/* Emit SEQ into basic block just before INSN (that is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}


static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);
	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   here no-action and must-not-throw are handled differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree fn = builtin_decl_implicit (BUILT_IN_SETJMP);
      tree call_expr = build_call_expr (fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ???  For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at the beginning of the function, simply associate the
	 block with the outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}

/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}
/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splicing out regions
   that are not marked.  By removing regions from the leaves, we avoid
   costly searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
	{
	  rtx_code_label *lab = lp->landing_pad;
	  if (lab && LABEL_P (lab))
	    (*callback) (lab);
	}
    }
}

/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

  ??? This difference probably ought to be avoided.  We could stand
  to record nothrow for arbitrary gimple statements, and so avoid
  some moderately complex lookups in stmt_could_throw_p.  Perhaps
  NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
  no-nonlocal-goto property should be recorded elsewhere as a bit
  on the call_insn directly.  Perhaps we should make more use of
  attaching the trees to call_insns (reachable via symbol_ref in
  direct call cases) and just pull the data out of the trees.  */
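
/* Illustrative values (not from any particular testcase): a call whose
   handler is landing pad 2 carries a REG_EH_REGION note of 2; a call
   inside MUST_NOT_THROW region 5 carries -5; a call known nothrow via
   ECF_NOTHROW gets const0_rtx below; and INT_MIN additionally records
   the absence of nonlocal gotos.  */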

void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}

/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

1715 /* Copy a REG_EH_REGION note to each insn that might throw, beginning
1716    at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
1717    to look for a note, or the note itself.  */
1718 
1719 void
1720 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1721 {
1722   rtx_insn *insn;
1723   rtx note = note_or_insn;
1724 
1725   if (INSN_P (note_or_insn))
1726     {
1727       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1728       if (note == NULL)
1729 	return;
1730     }
1731   note = XEXP (note, 0);
1732 
1733   for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1734     if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1735         && insn_could_throw_p (insn))
1736       add_reg_note (insn, REG_EH_REGION, note);
1737 }
1738 
1739 /* Likewise, but iterate backward.  */
1740 
1741 void
1742 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1743 {
1744   rtx_insn *insn;
1745   rtx note = note_or_insn;
1746 
1747   if (INSN_P (note_or_insn))
1748     {
1749       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1750       if (note == NULL)
1751 	return;
1752     }
1753   note = XEXP (note, 0);
1754 
1755   for (insn = last; insn != first; insn = PREV_INSN (insn))
1756     if (insn_could_throw_p (insn))
1757       add_reg_note (insn, REG_EH_REGION, note);
1758 }
1759 
1760 
1761 /* Extract all EH information from INSN.  Return true if the insn
1762    was marked NOTHROW.  */
1763 
1764 static bool
1765 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1766 			       eh_landing_pad *plp)
1767 {
1768   eh_landing_pad lp = NULL;
1769   eh_region r = NULL;
1770   bool ret = false;
1771   rtx note;
1772   int lp_nr;
1773 
1774   if (! INSN_P (insn))
1775     goto egress;
1776 
1777   if (NONJUMP_INSN_P (insn)
1778       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1779     insn = XVECEXP (PATTERN (insn), 0, 0);
1780 
1781   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1782   if (!note)
1783     {
1784       ret = !insn_could_throw_p (insn);
1785       goto egress;
1786     }
1787 
1788   lp_nr = INTVAL (XEXP (note, 0));
1789   if (lp_nr == 0 || lp_nr == INT_MIN)
1790     {
1791       ret = true;
1792       goto egress;
1793     }
1794 
1795   if (lp_nr < 0)
1796     r = (*cfun->eh->region_array)[-lp_nr];
1797   else
1798     {
1799       lp = (*cfun->eh->lp_array)[lp_nr];
1800       r = lp->region;
1801     }
1802 
1803  egress:
1804   *plp = lp;
1805   *pr = r;
1806   return ret;
1807 }
1808 
1809 /* Return the landing pad to which INSN may go, or NULL if it does not
1810    have a reachable landing pad within this function.  */
1811 
1812 eh_landing_pad
1813 get_eh_landing_pad_from_rtx (const_rtx insn)
1814 {
1815   eh_landing_pad lp;
1816   eh_region r;
1817 
1818   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1819   return lp;
1820 }
1821 
1822 /* Return the region to which INSN may go, or NULL if it does not
1823    have a reachable region within this function.  */
1824 
1825 eh_region
1826 get_eh_region_from_rtx (const_rtx insn)
1827 {
1828   eh_landing_pad lp;
1829   eh_region r;
1830 
1831   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1832   return r;
1833 }
1834 
1835 /* Return true if INSN may throw an exception caught within this function.  */
1836 
1837 bool
1838 can_throw_internal (const_rtx insn)
1839 {
1840   return get_eh_landing_pad_from_rtx (insn) != NULL;
1841 }
1842 
1843 /* Return true if INSN may throw an exception that escapes this function.  */
1844 
1845 bool
1846 can_throw_external (const_rtx insn)
1847 {
1848   eh_landing_pad lp;
1849   eh_region r;
1850   bool nothrow;
1851 
1852   if (! INSN_P (insn))
1853     return false;
1854 
1855   if (NONJUMP_INSN_P (insn)
1856       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1857     {
1858       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1859       int i, n = seq->len ();
1860 
1861       for (i = 0; i < n; i++)
1862 	if (can_throw_external (seq->element (i)))
1863 	  return true;
1864 
1865       return false;
1866     }
1867 
1868   nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1869 
1870   /* If we can't throw, we obviously can't throw external.  */
1871   if (nothrow)
1872     return false;
1873 
1874   /* If we have an internal landing pad, then we're not external.  */
1875   if (lp != NULL)
1876     return false;
1877 
1878   /* If we're not within an EH region, then we are external.  */
1879   if (r == NULL)
1880     return true;
1881 
1882   /* The only thing that ought to be left is MUST_NOT_THROW regions,
1883      which don't always have landing pads.  */
1884   gcc_assert (r->type == ERT_MUST_NOT_THROW);
1885   return false;
1886 }
1887 
1888 /* Return true if INSN cannot throw at all.  */
1889 
1890 bool
1891 insn_nothrow_p (const_rtx insn)
1892 {
1893   eh_landing_pad lp;
1894   eh_region r;
1895 
1896   if (! INSN_P (insn))
1897     return true;
1898 
1899   if (NONJUMP_INSN_P (insn)
1900       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1901     {
1902       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1903       int i, n = seq->len ();
1904 
1905       for (i = 0; i < n; i++)
1906 	if (!insn_nothrow_p (seq->element (i)))
1907 	  return false;
1908 
1909       return true;
1910     }
1911 
1912   return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1913 }
1914 
1915 /* Return true if INSN can perform a non-local goto.  */
1916 /* ??? This test lives here because it (ab)uses REG_EH_REGION.  */
1917 
1918 bool
1919 can_nonlocal_goto (const rtx_insn *insn)
1920 {
1921   if (nonlocal_goto_handler_labels && CALL_P (insn))
1922     {
1923       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1924       if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1925 	return true;
1926     }
1927   return false;
1928 }
1929 
1930 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */
1931 
1932 static unsigned int
1933 set_nothrow_function_flags (void)
1934 {
1935   rtx_insn *insn;
1936 
1937   crtl->nothrow = 1;
1938 
1939   /* Assume crtl->all_throwers_are_sibcalls until we encounter
1940      something that can throw an exception.  We specifically exempt
1941      CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1942      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
1943      is optimistic.  */
1944 
1945   crtl->all_throwers_are_sibcalls = 1;
1946 
1947   /* If we don't know that this implementation of the function will
1948      actually be used, then we must not set TREE_NOTHROW, since
1949      callers must not assume that this function does not throw.  */
1950   if (TREE_NOTHROW (current_function_decl))
1951     return 0;
1952 
1953   if (! flag_exceptions)
1954     return 0;
1955 
1956   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1957     if (can_throw_external (insn))
1958       {
1959         crtl->nothrow = 0;
1960 
1961 	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1962 	  {
1963 	    crtl->all_throwers_are_sibcalls = 0;
1964 	    return 0;
1965 	  }
1966       }
1967 
1968   if (crtl->nothrow
1969       && (cgraph_node::get (current_function_decl)->get_availability ()
1970           >= AVAIL_AVAILABLE))
1971     {
1972       struct cgraph_node *node = cgraph_node::get (current_function_decl);
1973       struct cgraph_edge *e;
1974       for (e = node->callers; e; e = e->next_caller)
1975         e->can_throw_external = false;
1976       node->set_nothrow_flag (true);
1977 
1978       if (dump_file)
1979 	fprintf (dump_file, "Marking function nothrow: %s\n\n",
1980 		 current_function_name ());
1981     }
1982   return 0;
1983 }
1984 
1985 namespace {
1986 
1987 const pass_data pass_data_set_nothrow_function_flags =
1988 {
1989   RTL_PASS, /* type */
1990   "nothrow", /* name */
1991   OPTGROUP_NONE, /* optinfo_flags */
1992   TV_NONE, /* tv_id */
1993   0, /* properties_required */
1994   0, /* properties_provided */
1995   0, /* properties_destroyed */
1996   0, /* todo_flags_start */
1997   0, /* todo_flags_finish */
1998 };
1999 
2000 class pass_set_nothrow_function_flags : public rtl_opt_pass
2001 {
2002 public:
2003   pass_set_nothrow_function_flags (gcc::context *ctxt)
2004     : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2005   {}
2006 
2007   /* opt_pass methods: */
2008   virtual unsigned int execute (function *)
2009     {
2010       return set_nothrow_function_flags ();
2011     }
2012 
2013 }; // class pass_set_nothrow_function_flags
2014 
2015 } // anon namespace
2016 
2017 rtl_opt_pass *
2018 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2019 {
2020   return new pass_set_nothrow_function_flags (ctxt);
2021 }
2022 
2023 
2024 /* Various hooks for unwind library.  */
2025 
2026 /* Expand the EH support builtin functions:
2027    __builtin_eh_pointer and __builtin_eh_filter.  */
2028 
2029 static eh_region
2030 expand_builtin_eh_common (tree region_nr_t)
2031 {
2032   HOST_WIDE_INT region_nr;
2033   eh_region region;
2034 
2035   gcc_assert (tree_fits_shwi_p (region_nr_t));
2036   region_nr = tree_to_shwi (region_nr_t);
2037 
2038   region = (*cfun->eh->region_array)[region_nr];
2039 
2040   /* ??? We shouldn't have been able to delete an eh region without
2041      deleting all the code that depended on it.  */
2042   gcc_assert (region != NULL);
2043 
2044   return region;
2045 }
2046 
2047 /* Expand to the exc_ptr value from the given eh region.  */
2048 
2049 rtx
2050 expand_builtin_eh_pointer (tree exp)
2051 {
2052   eh_region region
2053     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2054   if (region->exc_ptr_reg == NULL)
2055     region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2056   return region->exc_ptr_reg;
2057 }
2058 
2059 /* Expand to the filter value from the given eh region.  */
2060 
2061 rtx
2062 expand_builtin_eh_filter (tree exp)
2063 {
2064   eh_region region
2065     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2066   if (region->filter_reg == NULL)
2067     region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2068   return region->filter_reg;
2069 }
2070 
2071 /* Copy the exc_ptr and filter values from one landing pad's registers
2072    to another.  This is used to inline the resx statement.  */
2073 
2074 rtx
2075 expand_builtin_eh_copy_values (tree exp)
2076 {
2077   eh_region dst
2078     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2079   eh_region src
2080     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2081   machine_mode fmode = targetm.eh_return_filter_mode ();
2082 
2083   if (dst->exc_ptr_reg == NULL)
2084     dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2085   if (src->exc_ptr_reg == NULL)
2086     src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2087 
2088   if (dst->filter_reg == NULL)
2089     dst->filter_reg = gen_reg_rtx (fmode);
2090   if (src->filter_reg == NULL)
2091     src->filter_reg = gen_reg_rtx (fmode);
2092 
2093   emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2094   emit_move_insn (dst->filter_reg, src->filter_reg);
2095 
2096   return const0_rtx;
2097 }
2098 
2099 /* Do any necessary initialization to access arbitrary stack frames.
2100    On the SPARC, this means flushing the register windows.  */
2101 
2102 void
2103 expand_builtin_unwind_init (void)
2104 {
2105   /* Set this so all the registers get saved in our frame; we need to be
2106      able to copy the saved values for any registers from frames we unwind.  */
2107   crtl->saves_all_registers = 1;
2108 
2109   SETUP_FRAME_ADDRESSES ();
2110 }
2111 
2112 /* Map a non-negative number to an eh return data register number; expands
2113    to -1 if no return data register is associated with the input number.
2114    At least the inputs 0 and 1 must be mapped; the target may provide more.  */
2115 
2116 rtx
2117 expand_builtin_eh_return_data_regno (tree exp)
2118 {
2119   tree which = CALL_EXPR_ARG (exp, 0);
2120   unsigned HOST_WIDE_INT iwhich;
2121 
2122   if (TREE_CODE (which) != INTEGER_CST)
2123     {
2124       error ("argument of %<__builtin_eh_return_regno%> must be constant");
2125       return constm1_rtx;
2126     }
2127 
2128   iwhich = tree_to_uhwi (which);
2129   iwhich = EH_RETURN_DATA_REGNO (iwhich);
2130   if (iwhich == INVALID_REGNUM)
2131     return constm1_rtx;
2132 
2133 #ifdef DWARF_FRAME_REGNUM
2134   iwhich = DWARF_FRAME_REGNUM (iwhich);
2135 #else
2136   iwhich = DBX_REGISTER_NUMBER (iwhich);
2137 #endif
2138 
2139   return GEN_INT (iwhich);
2140 }
2141 
2142 /* Given a value extracted from the return address register or stack slot,
2143    return the actual address encoded in that value.  */
2144 
2145 rtx
2146 expand_builtin_extract_return_addr (tree addr_tree)
2147 {
2148   rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2149 
2150   if (GET_MODE (addr) != Pmode
2151       && GET_MODE (addr) != VOIDmode)
2152     {
2153 #ifdef POINTERS_EXTEND_UNSIGNED
2154       addr = convert_memory_address (Pmode, addr);
2155 #else
2156       addr = convert_to_mode (Pmode, addr, 0);
2157 #endif
2158     }
2159 
2160   /* First mask out any unwanted bits.  */
2161   rtx mask = MASK_RETURN_ADDR;
2162   if (mask)
2163     expand_and (Pmode, addr, mask, addr);
2164 
2165   /* Then adjust to find the real return address.  */
2166   if (RETURN_ADDR_OFFSET)
2167     addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2168 
2169   return addr;
2170 }
2171 
2172 /* Given an actual address in addr_tree, do any necessary encoding
2173    and return the value to be stored in the return address register or
2174    stack slot so the epilogue will return to that address.  */
2175 
2176 rtx
2177 expand_builtin_frob_return_addr (tree addr_tree)
2178 {
2179   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2180 
2181   addr = convert_memory_address (Pmode, addr);
2182 
2183   if (RETURN_ADDR_OFFSET)
2184     {
2185       addr = force_reg (Pmode, addr);
2186       addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2187     }
2188 
2189   return addr;
2190 }
2191 
2192 /* Set up the epilogue with the magic bits we'll need to return to the
2193    exception handler.  */
2194 
2195 void
2196 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2197 			  tree handler_tree)
2198 {
2199   rtx tmp;
2200 
2201 #ifdef EH_RETURN_STACKADJ_RTX
2202   tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2203 		     VOIDmode, EXPAND_NORMAL);
2204   tmp = convert_memory_address (Pmode, tmp);
2205   if (!crtl->eh.ehr_stackadj)
2206     crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2207   else if (tmp != crtl->eh.ehr_stackadj)
2208     emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2209 #endif
2210 
2211   tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2212 		     VOIDmode, EXPAND_NORMAL);
2213   tmp = convert_memory_address (Pmode, tmp);
2214   if (!crtl->eh.ehr_handler)
2215     crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2216   else if (tmp != crtl->eh.ehr_handler)
2217     emit_move_insn (crtl->eh.ehr_handler, tmp);
2218 
2219   if (!crtl->eh.ehr_label)
2220     crtl->eh.ehr_label = gen_label_rtx ();
2221   emit_jump (crtl->eh.ehr_label);
2222 }
2223 
2224 /* Expand __builtin_eh_return.  This exit path from the function loads up
2225    the eh return data registers, adjusts the stack, and branches to a
2226    given PC other than the normal return address.  */
2227 
2228 void
2229 expand_eh_return (void)
2230 {
2231   rtx_code_label *around_label;
2232 
2233   if (! crtl->eh.ehr_label)
2234     return;
2235 
2236   crtl->calls_eh_return = 1;
2237 
2238 #ifdef EH_RETURN_STACKADJ_RTX
2239   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2240 #endif
2241 
2242   around_label = gen_label_rtx ();
2243   emit_jump (around_label);
2244 
2245   emit_label (crtl->eh.ehr_label);
2246   clobber_return_register ();
2247 
2248 #ifdef EH_RETURN_STACKADJ_RTX
2249   emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2250 #endif
2251 
2252   if (targetm.have_eh_return ())
2253     emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
2254   else
2255     {
2256       if (rtx handler = EH_RETURN_HANDLER_RTX)
2257 	emit_move_insn (handler, crtl->eh.ehr_handler);
2258       else
2259 	error ("__builtin_eh_return not supported on this target");
2260     }
2261 
2262   emit_label (around_label);
2263 }
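
/* The code emitted above has roughly this shape (an illustrative
   sketch; the stack-adjustment moves appear only when the target
   defines EH_RETURN_STACKADJ_RTX):

	(set stackadj 0)
	(jump around)
     ehr_label:
	(clobber return-value registers)
	(set stackadj crtl->eh.ehr_stackadj)
	(eh_return crtl->eh.ehr_handler)  ;; or a move to
					  ;; EH_RETURN_HANDLER_RTX
     around:
	;; normal epilogue continues  */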
2264 
2265 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2266    POINTERS_EXTEND_UNSIGNED and return it.  */
2267 
2268 rtx
2269 expand_builtin_extend_pointer (tree addr_tree)
2270 {
2271   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2272   int extend;
2273 
2274 #ifdef POINTERS_EXTEND_UNSIGNED
2275   extend = POINTERS_EXTEND_UNSIGNED;
2276 #else
2277   /* The previous EH code did an unsigned extend by default, so we do this also
2278      for consistency.  */
2279   extend = 1;
2280 #endif
2281 
2282   return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2283 }
2284 
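/* Enter an action record with filter value FILTER, chained to the
   record at 1-based offset NEXT (zero for none), into AR_HASH and,
   if it is new, into crtl->eh.action_record_data.  Return the
   record's 1-based byte offset within the action record table.  */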
2285 static int
2286 add_action_record (action_hash_type *ar_hash, int filter, int next)
2287 {
2288   struct action_record **slot, *new_ar, tmp;
2289 
2290   tmp.filter = filter;
2291   tmp.next = next;
2292   slot = ar_hash->find_slot (&tmp, INSERT);
2293 
2294   if ((new_ar = *slot) == NULL)
2295     {
2296       new_ar = XNEW (struct action_record);
2297       new_ar->offset = crtl->eh.action_record_data->length () + 1;
2298       new_ar->filter = filter;
2299       new_ar->next = next;
2300       *slot = new_ar;
2301 
2302       /* The filter value goes in untouched.  The link to the next
2303 	 record is a "self-relative" byte offset, or zero to indicate
2304 	 that there is no next record.  So convert the absolute 1-based
2305 	 indices we've been carrying around into a displacement.  */
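      /* Worked example: if the table already holds 3 bytes and we add
	 a record with filter 2 whose chain continues at 1-based offset
	 1, then NEW_AR->OFFSET is 4; after the filter byte is pushed,
	 the link byte lands at 1-based offset 5, so we store
	 1 - 5 = -4, i.e. "four bytes back from here".  */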
2306 
2307       push_sleb128 (&crtl->eh.action_record_data, filter);
2308       if (next)
2309 	next -= crtl->eh.action_record_data->length () + 1;
2310       push_sleb128 (&crtl->eh.action_record_data, next);
2311     }
2312 
2313   return new_ar->offset;
2314 }
2315 
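/* Compute the chain of action records describing a throw from REGION,
   creating records via add_action_record as needed.  Return the
   1-based offset of the chain's first record, 0 if the chain
   compresses to a bare cleanup, -1 if no action is required, or -2
   for must-not-throw (see the comments in the switch below).  */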
2316 static int
2317 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2318 {
2319   int next;
2320 
2321   /* If we've reached the top of the region chain, then we have
2322      no actions, and require no landing pad.  */
2323   if (region == NULL)
2324     return -1;
2325 
2326   switch (region->type)
2327     {
2328     case ERT_CLEANUP:
2329       {
2330 	eh_region r;
2331 	/* A cleanup adds a zero filter to the beginning of the chain, but
2332 	   there are special cases to look out for.  If there are *only*
2333 	   cleanups along a path, then it compresses to a zero action.
2334 	   Further, if there are multiple cleanups along a path, we only
2335 	   need to represent one of them, as that is enough to trigger
2336 	   entry to the landing pad at runtime.  */
2337 	next = collect_one_action_chain (ar_hash, region->outer);
2338 	if (next <= 0)
2339 	  return 0;
2340 	for (r = region->outer; r ; r = r->outer)
2341 	  if (r->type == ERT_CLEANUP)
2342 	    return next;
2343 	return add_action_record (ar_hash, 0, next);
2344       }
2345 
2346     case ERT_TRY:
2347       {
2348 	eh_catch c;
2349 
2350 	/* Process the associated catch regions in reverse order.
2351 	   If there's a catch-all handler, then we don't need to
2352 	   search outer regions.  Use a magic -3 value to record
2353 	   that we haven't done the outer search.  */
2354 	next = -3;
2355 	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2356 	  {
2357 	    if (c->type_list == NULL)
2358 	      {
2359 		/* Retrieve the filter from the head of the filter list
2360 		   where we have stored it (see assign_filter_values).  */
2361 		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2362 		next = add_action_record (ar_hash, filter, 0);
2363 	      }
2364 	    else
2365 	      {
2366 		/* Once the outer search is done, trigger an action record for
2367 		   each filter we have.  */
2368 		tree flt_node;
2369 
2370 		if (next == -3)
2371 		  {
2372 		    next = collect_one_action_chain (ar_hash, region->outer);
2373 
2374 		    /* If there is no next action, terminate the chain.  */
2375 		    if (next == -1)
2376 		      next = 0;
2377 		    /* If all outer actions are cleanups or must_not_throw,
2378 		       we'll have no action record for it, since we had wanted
2379 		       to encode these states in the call-site record directly.
2380 		       Add a cleanup action to the chain to catch these.  */
2381 		    else if (next <= 0)
2382 		      next = add_action_record (ar_hash, 0, 0);
2383 		  }
2384 
2385 		flt_node = c->filter_list;
2386 		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2387 		  {
2388 		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2389 		    next = add_action_record (ar_hash, filter, next);
2390 		  }
2391 	      }
2392 	  }
2393 	return next;
2394       }
2395 
2396     case ERT_ALLOWED_EXCEPTIONS:
2397       /* An exception specification adds its filter to the
2398 	 beginning of the chain.  */
2399       next = collect_one_action_chain (ar_hash, region->outer);
2400 
2401       /* If there is no next action, terminate the chain.  */
2402       if (next == -1)
2403 	next = 0;
2404       /* If all outer actions are cleanups or must_not_throw,
2405 	 we'll have no action record for it, since we had wanted
2406 	 to encode these states in the call-site record directly.
2407 	 Add a cleanup action to the chain to catch these.  */
2408       else if (next <= 0)
2409 	next = add_action_record (ar_hash, 0, 0);
2410 
2411       return add_action_record (ar_hash, region->u.allowed.filter, next);
2412 
2413     case ERT_MUST_NOT_THROW:
2414       /* A must-not-throw region with no inner handlers or cleanups
2415 	 requires no call-site entry.  Note that this differs from
2416 	 the no handler or cleanup case in that we do require an lsda
2417 	 to be generated.  Return a magic -2 value to record this.  */
2418       return -2;
2419     }
2420 
2421   gcc_unreachable ();
2422 }
2423 
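/* Append a call-site record for LANDING_PAD with action-table offset
   ACTION to SECTION's record vector, and return its index biased by
   call_site_base.  */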
2424 static int
2425 add_call_site (rtx landing_pad, int action, int section)
2426 {
2427   call_site_record record;
2428 
2429   record = ggc_alloc<call_site_record_d> ();
2430   record->landing_pad = landing_pad;
2431   record->action = action;
2432 
2433   vec_safe_push (crtl->eh.call_site_record_v[section], record);
2434 
2435   return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2436 }
2437 
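/* Emit a NOTE_INSN_EH_REGION_END note after INSN, stepping over a
   following CALL_ARG_LOCATION note so that a call and its note are
   not separated.  */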
2438 static rtx_note *
2439 emit_note_eh_region_end (rtx_insn *insn)
2440 {
2441   rtx_insn *next = NEXT_INSN (insn);
2442 
2443   /* Make sure we do not split a call and its corresponding
2444      CALL_ARG_LOCATION note.  */
2445   if (next && NOTE_P (next)
2446       && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2447     insn = next;
2448 
2449   return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2450 }
2451 
2452 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2453    The new note numbers will not refer to region numbers, but
2454    instead to call site entries.  */
2455 
2456 static unsigned int
2457 convert_to_eh_region_ranges (void)
2458 {
2459   rtx insn;
2460   rtx_insn *iter;
2461   rtx_note *note;
2462   action_hash_type ar_hash (31);
2463   int last_action = -3;
2464   rtx_insn *last_action_insn = NULL;
2465   rtx last_landing_pad = NULL_RTX;
2466   rtx_insn *first_no_action_insn = NULL;
2467   int call_site = 0;
2468   int cur_sec = 0;
2469   rtx_insn *section_switch_note = NULL;
2470   rtx_insn *first_no_action_insn_before_switch = NULL;
2471   rtx_insn *last_no_action_insn_before_switch = NULL;
2472   int saved_call_site_base = call_site_base;
2473 
2474   vec_alloc (crtl->eh.action_record_data, 64);
2475 
2476   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2477     if (INSN_P (iter))
2478       {
2479 	eh_landing_pad lp;
2480 	eh_region region;
2481 	bool nothrow;
2482 	int this_action;
2483 	rtx_code_label *this_landing_pad;
2484 
2485 	insn = iter;
2486 	if (NONJUMP_INSN_P (insn)
2487 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
2488 	  insn = XVECEXP (PATTERN (insn), 0, 0);
2489 
2490 	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2491 	if (nothrow)
2492 	  continue;
2493 	if (region)
2494 	  this_action = collect_one_action_chain (&ar_hash, region);
2495 	else
2496 	  this_action = -1;
2497 
2498 	/* The existence of catch handlers or must-not-throw regions
2499 	   implies that an lsda is needed (even if empty).  */
2500 	if (this_action != -1)
2501 	  crtl->uses_eh_lsda = 1;
2502 
2503 	/* Delay creation of region notes for no-action regions
2504 	   until we're sure that an lsda will be required.  */
2505 	else if (last_action == -3)
2506 	  {
2507 	    first_no_action_insn = iter;
2508 	    last_action = -1;
2509 	  }
2510 
2511 	if (this_action >= 0)
2512 	  this_landing_pad = lp->landing_pad;
2513 	else
2514 	  this_landing_pad = NULL;
2515 
2516 	/* Differing actions or landing pads implies a change in call-site
2517 	   info, which implies some EH_REGION note should be emitted.  */
2518 	if (last_action != this_action
2519 	    || last_landing_pad != this_landing_pad)
2520 	  {
2521 	    /* If there is a queued no-action region in the other section
2522 	       with hot/cold partitioning, emit it now.  */
2523 	    if (first_no_action_insn_before_switch)
2524 	      {
2525 		gcc_assert (this_action != -1
2526 			    && last_action == (first_no_action_insn
2527 					       ? -1 : -3));
2528 		call_site = add_call_site (NULL_RTX, 0, 0);
2529 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2530 					 first_no_action_insn_before_switch);
2531 		NOTE_EH_HANDLER (note) = call_site;
2532 		note
2533 		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
2534 		NOTE_EH_HANDLER (note) = call_site;
2535 		gcc_assert (last_action != -3
2536 			    || (last_action_insn
2537 				== last_no_action_insn_before_switch));
2538 		first_no_action_insn_before_switch = NULL;
2539 		last_no_action_insn_before_switch = NULL;
2540 		call_site_base++;
2541 	      }
2542 	    /* If we'd not seen a previous action (-3) or the previous
2543 	       action was must-not-throw (-2), then we do not need an
2544 	       end note.  */
2545 	    if (last_action >= -1)
2546 	      {
2547 		/* If we delayed the creation of the begin, do it now.  */
2548 		if (first_no_action_insn)
2549 		  {
2550 		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
2551 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2552 					     first_no_action_insn);
2553 		    NOTE_EH_HANDLER (note) = call_site;
2554 		    first_no_action_insn = NULL;
2555 		  }
2556 
2557 		note = emit_note_eh_region_end (last_action_insn);
2558 		NOTE_EH_HANDLER (note) = call_site;
2559 	      }
2560 
2561 	    /* If the new action is must-not-throw, then no region notes
2562 	       are created.  */
2563 	    if (this_action >= -1)
2564 	      {
2565 		call_site = add_call_site (this_landing_pad,
2566 					   this_action < 0 ? 0 : this_action,
2567 					   cur_sec);
2568 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2569 		NOTE_EH_HANDLER (note) = call_site;
2570 	      }
2571 
2572 	    last_action = this_action;
2573 	    last_landing_pad = this_landing_pad;
2574 	  }
2575 	last_action_insn = iter;
2576       }
2577     else if (NOTE_P (iter)
2578 	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2579       {
2580 	gcc_assert (section_switch_note == NULL_RTX);
2581 	gcc_assert (flag_reorder_blocks_and_partition);
2582 	section_switch_note = iter;
2583 	if (first_no_action_insn)
2584 	  {
2585 	    first_no_action_insn_before_switch = first_no_action_insn;
2586 	    last_no_action_insn_before_switch = last_action_insn;
2587 	    first_no_action_insn = NULL;
2588 	    gcc_assert (last_action == -1);
2589 	    last_action = -3;
2590 	  }
2591 	/* Force closing of current EH region before section switch and
2592 	   opening a new one afterwards.  */
2593 	else if (last_action != -3)
2594 	  last_landing_pad = pc_rtx;
2595 	if (crtl->eh.call_site_record_v[cur_sec])
2596 	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2597 	cur_sec++;
2598 	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2599 	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2600       }
2601 
2602   if (last_action >= -1 && ! first_no_action_insn)
2603     {
2604       note = emit_note_eh_region_end (last_action_insn);
2605       NOTE_EH_HANDLER (note) = call_site;
2606     }
2607 
2608   call_site_base = saved_call_site_base;
2609 
2610   return 0;
2611 }
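
/* Illustrative before/after sketch for the pass above (a hypothetical
   insn stream; the note numbers are call-site indices, not the
   original region numbers):

     before:                            after:
       call A  [REG_EH_REGION 2]          NOTE_INSN_EH_REGION_BEG 0
       insn B                             call A
       call C  [REG_EH_REGION 2]          insn B
       call D  [nothrow, no note]         call C
                                          NOTE_INSN_EH_REGION_END 0
                                          call D

   Consecutive insns sharing one action chain and landing pad collapse
   into a single call-site region.  */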
2612 
2613 namespace {
2614 
2615 const pass_data pass_data_convert_to_eh_region_ranges =
2616 {
2617   RTL_PASS, /* type */
2618   "eh_ranges", /* name */
2619   OPTGROUP_NONE, /* optinfo_flags */
2620   TV_NONE, /* tv_id */
2621   0, /* properties_required */
2622   0, /* properties_provided */
2623   0, /* properties_destroyed */
2624   0, /* todo_flags_start */
2625   0, /* todo_flags_finish */
2626 };
2627 
2628 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2629 {
2630 public:
2631   pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2632     : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2633   {}
2634 
2635   /* opt_pass methods: */
2636   virtual bool gate (function *);
2637   virtual unsigned int execute (function *)
2638     {
2639       return convert_to_eh_region_ranges ();
2640     }
2641 
2642 }; // class pass_convert_to_eh_region_ranges
2643 
2644 bool
2645 pass_convert_to_eh_region_ranges::gate (function *)
2646 {
2647   /* Nothing to do for SJLJ exceptions or if no regions created.  */
2648   if (cfun->eh->region_tree == NULL)
2649     return false;
2650   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2651     return false;
2652   return true;
2653 }
2654 
2655 } // anon namespace
2656 
2657 rtl_opt_pass *
2658 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2659 {
2660   return new pass_convert_to_eh_region_ranges (ctxt);
2661 }
2662 
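/* Append VALUE to *DATA_AREA as an unsigned LEB128: seven data bits
   per byte, least significant first, with the high bit set on every
   byte but the last.  */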
2663 static void
2664 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2665 {
2666   do
2667     {
2668       unsigned char byte = value & 0x7f;
2669       value >>= 7;
2670       if (value)
2671 	byte |= 0x80;
2672       vec_safe_push (*data_area, byte);
2673     }
2674   while (value);
2675 }
2676 
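/* Likewise as a signed LEB128; emission stops once the remaining
   bits are all copies of the sign bit.  */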
2677 static void
2678 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2679 {
2680   unsigned char byte;
2681   int more;
2682 
2683   do
2684     {
2685       byte = value & 0x7f;
2686       value >>= 7;
2687       more = ! ((value == 0 && (byte & 0x40) == 0)
2688 		|| (value == -1 && (byte & 0x40) != 0));
2689       if (more)
2690 	byte |= 0x80;
2691       vec_safe_push (*data_area, byte);
2692     }
2693   while (more);
2694 }
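
/* Self-contained sketch (not part of GCC) exercising the same
   encodings on the classic values from the DWARF specification:
   624485 encodes as e5 8e 26, and -123456 as c0 bb 78.  */

#include <stdio.h>

static void
demo_uleb128 (unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;		/* more bytes follow */
      printf ("%02x ", byte);
    }
  while (value);
}

static void
demo_sleb128 (int value)
{
  int more;
  do
    {
      unsigned char byte = value & 0x7f;
      /* An arithmetic right shift on negative values is assumed,
	 exactly as push_sleb128 above assumes it.  */
      value >>= 7;
      more = !((value == 0 && !(byte & 0x40))
	       || (value == -1 && (byte & 0x40)));
      if (more)
	byte |= 0x80;
      printf ("%02x ", byte);
    }
  while (more);
}

int
main (void)
{
  demo_uleb128 (624485);	/* prints: e5 8e 26 */
  putchar ('\n');
  demo_sleb128 (-123456);	/* prints: c0 bb 78 */
  putchar ('\n');
  return 0;
}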
2695 
2696 
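/* Return the size in bytes of SECTION's call-site table in the dw2
   (udata4) layout emitted below: three 4-byte fields plus a uleb128
   action offset per record.  */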
2697 static int
2698 dw2_size_of_call_site_table (int section)
2699 {
2700   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2701   int size = n * (4 + 4 + 4);
2702   int i;
2703 
2704   for (i = 0; i < n; ++i)
2705     {
2706       struct call_site_record_d *cs =
2707 	(*crtl->eh.call_site_record_v[section])[i];
2708       size += size_of_uleb128 (cs->action);
2709     }
2710 
2711   return size;
2712 }
2713 
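/* Likewise for the SJLJ layout: two uleb128s per record, the landing
   pad dispatch index and the action offset.  */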
2714 static int
2715 sjlj_size_of_call_site_table (void)
2716 {
2717   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2718   int size = 0;
2719   int i;
2720 
2721   for (i = 0; i < n; ++i)
2722     {
2723       struct call_site_record_d *cs =
2724 	(*crtl->eh.call_site_record_v[0])[i];
2725       size += size_of_uleb128 (INTVAL (cs->landing_pad));
2726       size += size_of_uleb128 (cs->action);
2727     }
2728 
2729   return size;
2730 }
2731 
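/* Output SECTION's call-site table in CS_FORMAT: each record gives
   the region start (relative to the section's begin label), the
   region length, the landing pad (also begin-relative, or 0 if none)
   and the uleb128 action offset.  */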
2732 static void
2733 dw2_output_call_site_table (int cs_format, int section)
2734 {
2735   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2736   int i;
2737   const char *begin;
2738 
2739   if (section == 0)
2740     begin = current_function_func_begin_label;
2741   else if (first_function_block_is_cold)
2742     begin = crtl->subsections.hot_section_label;
2743   else
2744     begin = crtl->subsections.cold_section_label;
2745 
2746   for (i = 0; i < n; ++i)
2747     {
2748       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2749       char reg_start_lab[32];
2750       char reg_end_lab[32];
2751       char landing_pad_lab[32];
2752 
2753       ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2754       ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2755 
2756       if (cs->landing_pad)
2757 	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2758 				     CODE_LABEL_NUMBER (cs->landing_pad));
2759 
2760       /* ??? Perhaps use insn length scaling if the assembler supports
2761 	 generic arithmetic.  */
2762       /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2763 	 data4 if the function is small enough.  */
2764       if (cs_format == DW_EH_PE_uleb128)
2765 	{
2766 	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2767 					"region %d start", i);
2768 	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2769 					"length");
2770 	  if (cs->landing_pad)
2771 	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2772 					  "landing pad");
2773 	  else
2774 	    dw2_asm_output_data_uleb128 (0, "landing pad");
2775 	}
2776       else
2777 	{
2778 	  dw2_asm_output_delta (4, reg_start_lab, begin,
2779 				"region %d start", i);
2780 	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2781 	  if (cs->landing_pad)
2782 	    dw2_asm_output_delta (4, landing_pad_lab, begin,
2783 				  "landing pad");
2784 	  else
2785 	    dw2_asm_output_data (4, 0, "landing pad");
2786 	}
2787       dw2_asm_output_data_uleb128 (cs->action, "action");
2788     }
2789 
2790   call_site_base += n;
2791 }
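
/* Hedged sketch of one record as it might appear in the assembly
   output from the udata4 branch above (the label names here are
   illustrative, not what any particular target emits):

	.long	.LEHB0-.LFB0		;; region 0 start
	.long	.LEHE0-.LEHB0		;; length
	.long	.L7-.LFB0		;; landing pad
	.uleb128 0x1			;; action  */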
2792 
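/* Likewise for the SJLJ call-site table, whose entries are plain
   uleb128 data rather than label deltas.  */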
2793 static void
2794 sjlj_output_call_site_table (void)
2795 {
2796   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2797   int i;
2798 
2799   for (i = 0; i < n; ++i)
2800     {
2801       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2802 
2803       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2804 				   "region %d landing pad", i);
2805       dw2_asm_output_data_uleb128 (cs->action, "action");
2806     }
2807 
2808   call_site_base += n;
2809 }
2810 
2811 /* Switch to the section that should be used for exception tables.  */
2812 
2813 static void
2814 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2815 {
2816   section *s;
2817 
2818   if (exception_section)
2819     s = exception_section;
2820   else
2821     {
2822       int flags;
2823 
2824       if (EH_TABLES_CAN_BE_READ_ONLY)
2825 	{
2826 	  int tt_format =
2827 	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2828 	  flags = ((! flag_pic
2829 		    || ((tt_format & 0x70) != DW_EH_PE_absptr
2830 			&& (tt_format & 0x70) != DW_EH_PE_aligned))
2831 		   ? 0 : SECTION_WRITE);
2832 	}
2833       else
2834 	flags = SECTION_WRITE;
2835 
2836       /* Compute the section and cache it into exception_section,
2837 	 unless it depends on the function name.  */
2838       if (targetm_common.have_named_sections)
2839 	{
2840 #ifdef HAVE_LD_EH_GC_SECTIONS
2841 	  if (flag_function_sections
2842 	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2843 	    {
2844 	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2845 	      /* The EH table must match the code section, so only mark
2846 		 it linkonce if we have COMDAT groups to tie them together.  */
2847 	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2848 		flags |= SECTION_LINKONCE;
2849 	      sprintf (section_name, ".gcc_except_table.%s", fnname);
2850 	      s = get_section (section_name, flags, current_function_decl);
2851 	      free (section_name);
2852 	    }
2853 	  else
2854 #endif
2855 	    exception_section
2856 	      = s = get_section (".gcc_except_table", flags, NULL);
2857 	}
2858       else
2859 	exception_section
2860 	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
2861     }
2862 
2863   switch_to_section (s);
2864 }
2865 
2866 
2867 /* Output a reference from an exception table to the type_info object TYPE.
2868    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2869    the value.  */
2870 
2871 static void
2872 output_ttype (tree type, int tt_format, int tt_format_size)
2873 {
2874   rtx value;
2875   bool is_public = true;
2876 
2877   if (type == NULL_TREE)
2878     value = const0_rtx;
2879   else
2880     {
2881       /* FIXME lto.  pass_ipa_free_lang_data changes all types to
2882 	 runtime types so TYPE should already be a runtime type
2883 	 reference.  When pass_ipa_free_lang_data is made a default
2884 	 pass, we can then remove the call to lookup_type_for_runtime
2885 	 below.  */
2886       if (TYPE_P (type))
2887 	type = lookup_type_for_runtime (type);
2888 
2889       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2890 
2891       /* Let cgraph know that the rtti decl is used.  Not all of the
2892 	 paths below go through assemble_integer, which would take
2893 	 care of this for us.  */
2894       STRIP_NOPS (type);
2895       if (TREE_CODE (type) == ADDR_EXPR)
2896 	{
2897 	  type = TREE_OPERAND (type, 0);
2898 	  if (VAR_P (type))
2899 	    is_public = TREE_PUBLIC (type);
2900 	}
2901       else
2902 	gcc_assert (TREE_CODE (type) == INTEGER_CST);
2903     }
2904 
2905   /* Allow the target to override the type table entry format.  */
2906   if (targetm.asm_out.ttype (value))
2907     return;
2908 
2909   if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2910     assemble_integer (value, tt_format_size,
2911 		      tt_format_size * BITS_PER_UNIT, 1);
2912   else
2913     dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2914 }
2915 
2916 static void
2917 output_one_function_exception_table (int section)
2918 {
2919   int tt_format, cs_format, lp_format, i;
2920   char ttype_label[32];
2921   char cs_after_size_label[32];
2922   char cs_end_label[32];
2923   int call_site_len;
2924   int have_tt_data;
2925   int tt_format_size = 0;
2926 
2927   have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2928 		  || (targetm.arm_eabi_unwinder
2929 		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2930 		      : vec_safe_length (cfun->eh->ehspec_data.other)));
2931 
2932   /* Indicate the format of the @TType entries.  */
2933   if (! have_tt_data)
2934     tt_format = DW_EH_PE_omit;
2935   else
2936     {
2937       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2938       if (HAVE_AS_LEB128)
2939 	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2940 				     section ? "LLSDATTC" : "LLSDATT",
2941 				     current_function_funcdef_no);
2942 
2943       tt_format_size = size_of_encoded_value (tt_format);
2944 
2945       assemble_align (tt_format_size * BITS_PER_UNIT);
2946     }
2947 
2948   targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2949 				  current_function_funcdef_no);
2950 
2951   /* The LSDA header.  */
2952 
2953   /* Indicate the format of the landing pad start pointer.  An omitted
2954      field implies @LPStart == @Start.  */
2955   /* Currently we always put @LPStart == @Start.  This field would
2956      be most useful in moving the landing pads completely out of
2957      line to another section, but it could also be used to minimize
2958      the size of uleb128 landing pad offsets.  */
2959   lp_format = DW_EH_PE_omit;
2960   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2961 		       eh_data_format_name (lp_format));
2962 
2963   /* @LPStart pointer would go here.  */
2964 
2965   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2966 		       eh_data_format_name (tt_format));
2967 
2968   if (!HAVE_AS_LEB128)
2969     {
2970       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2971 	call_site_len = sjlj_size_of_call_site_table ();
2972       else
2973 	call_site_len = dw2_size_of_call_site_table (section);
2974     }
2975 
2976   /* A pc-relative 4-byte displacement to the @TType data.  */
2977   if (have_tt_data)
2978     {
2979       if (HAVE_AS_LEB128)
2980 	{
2981 	  char ttype_after_disp_label[32];
2982 	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
2983 				       section ? "LLSDATTDC" : "LLSDATTD",
2984 				       current_function_funcdef_no);
2985 	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
2986 					"@TType base offset");
2987 	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
2988 	}
2989       else
2990 	{
2991 	  /* Ugh.  Alignment complicates the computation.  */
2992 	  unsigned int before_disp, after_disp, last_disp, disp;
2993 
2994 	  before_disp = 1 + 1;
2995 	  after_disp = (1 + size_of_uleb128 (call_site_len)
2996 			+ call_site_len
2997 			+ vec_safe_length (crtl->eh.action_record_data)
2998 			+ (vec_safe_length (cfun->eh->ttype_data)
2999 			   * tt_format_size));
3000 
3001 	  disp = after_disp;
3002 	  do
3003 	    {
3004 	      unsigned int disp_size, pad;
3005 
3006 	      last_disp = disp;
3007 	      disp_size = size_of_uleb128 (disp);
3008 	      pad = before_disp + disp_size + after_disp;
3009 	      if (pad % tt_format_size)
3010 		pad = tt_format_size - (pad % tt_format_size);
3011 	      else
3012 		pad = 0;
3013 	      disp = after_disp + pad;
3014 	    }
3015 	  while (disp != last_disp);
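	  /* Worked example: with before_disp = 2, after_disp = 130 and
	     tt_format_size = 4, the first pass computes disp_size = 2,
	     2 + 2 + 130 = 134, hence pad = 2 and disp = 132; the
	     second pass recomputes the same disp, so the loop stops.
	     The iteration matters because padding can push DISP across
	     a uleb128 size boundary and change DISP_SIZE.  */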
3016 
3017 	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3018 	}
3019     }
3020 
3021   /* Indicate the format of the call-site offsets.  */
3022   if (HAVE_AS_LEB128)
3023     cs_format = DW_EH_PE_uleb128;
3024   else
3025     cs_format = DW_EH_PE_udata4;
3026 
3027   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3028 		       eh_data_format_name (cs_format));
3029 
3030   if (HAVE_AS_LEB128)
3031     {
3032       ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3033 				   section ? "LLSDACSBC" : "LLSDACSB",
3034 				   current_function_funcdef_no);
3035       ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3036 				   section ? "LLSDACSEC" : "LLSDACSE",
3037 				   current_function_funcdef_no);
3038       dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3039 				    "Call-site table length");
3040       ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3041       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3042 	sjlj_output_call_site_table ();
3043       else
3044 	dw2_output_call_site_table (cs_format, section);
3045       ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3046     }
3047   else
3048     {
3049       dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3050       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3051 	sjlj_output_call_site_table ();
3052       else
3053 	dw2_output_call_site_table (cs_format, section);
3054     }
3055 
3056   /* ??? Decode and interpret the data for flag_debug_asm.  */
3057   {
3058     uchar uc;
3059     FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3060       dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3061   }
3062 
3063   if (have_tt_data)
3064     assemble_align (tt_format_size * BITS_PER_UNIT);
3065 
3066   i = vec_safe_length (cfun->eh->ttype_data);
3067   while (i-- > 0)
3068     {
3069       tree type = (*cfun->eh->ttype_data)[i];
3070       output_ttype (type, tt_format, tt_format_size);
3071     }
3072 
3073   if (HAVE_AS_LEB128 && have_tt_data)
3074     ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3075 
3076   /* ??? Decode and interpret the data for flag_debug_asm.  */
3077   if (targetm.arm_eabi_unwinder)
3078     {
3079       tree type;
3080       for (i = 0;
3081 	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3082 	output_ttype (type, tt_format, tt_format_size);
3083     }
3084   else
3085     {
3086       uchar uc;
3087       for (i = 0;
3088 	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3089 	dw2_asm_output_data (1, uc,
3090 			     i ? NULL : "Exception specification table");
3091     }
3092 }
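
/* For reference, the LSDA emitted above has this overall layout (a
   summary of the code; field sizes depend on the encodings chosen):

     @LPStart format		1 byte (DW_EH_PE_omit here)
     @TType format		1 byte
     @TType base offset		uleb128, only if @TType data exists
     call-site format		1 byte
     call-site table length	uleb128
     call-site table
     action record table	sleb128 filter/next pairs
     type table			tt_format entries, indexed backwards
     exception spec table	uleb128s, or @TType entries on ARM EABI  */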
3093 
3094 void
3095 output_function_exception_table (const char *fnname)
3096 {
3097   rtx personality = get_personality_function (current_function_decl);
3098 
3099   /* Not all functions need anything.  */
3100   if (! crtl->uses_eh_lsda)
3101     return;
3102 
3103   if (personality)
3104     {
3105       assemble_external_libcall (personality);
3106 
3107       if (targetm.asm_out.emit_except_personality)
3108 	targetm.asm_out.emit_except_personality (personality);
3109     }
3110 
3111   switch_to_exception_section (fnname);
3112 
3113   /* If the target wants a label to begin the table, emit it here.  */
3114   targetm.asm_out.emit_except_table_label (asm_out_file);
3115 
3116   output_one_function_exception_table (0);
3117   if (crtl->eh.call_site_record_v[1])
3118     output_one_function_exception_table (1);
3119 
3120   switch_to_section (current_function_section ());
3121 }
3122 
3123 void
3124 set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3125 {
3126   fun->eh->throw_stmt_table = table;
3127 }
3128 
3129 hash_map<gimple *, int> *
3130 get_eh_throw_stmt_table (struct function *fun)
3131 {
3132   return fun->eh->throw_stmt_table;
3133 }
3134 
3135 /* Determine if the function needs an EH personality function.  */
3136 
3137 enum eh_personality_kind
3138 function_needs_eh_personality (struct function *fn)
3139 {
3140   enum eh_personality_kind kind = eh_personality_none;
3141   eh_region i;
3142 
3143   FOR_ALL_EH_REGION_FN (i, fn)
3144     {
3145       switch (i->type)
3146 	{
3147 	case ERT_CLEANUP:
3148 	  /* Can do with any personality including the generic C one.  */
3149 	  kind = eh_personality_any;
3150 	  break;
3151 
3152 	case ERT_TRY:
3153 	case ERT_ALLOWED_EXCEPTIONS:
3154 	  /* Always needs an EH personality function.  The generic C
3155 	     personality doesn't handle these even for empty type lists.  */
3156 	  return eh_personality_lang;
3157 
3158 	case ERT_MUST_NOT_THROW:
3159 	  /* Always needs an EH personality function.  The language may
3160 	     specify which abort routine must be used, e.g. std::terminate.  */
3161 	  return eh_personality_lang;
3162 	}
3163     }
3164 
3165   return kind;
3166 }
3167 
3168 /* Dump EH information to OUT.  */
3169 
3170 void
3171 dump_eh_tree (FILE * out, struct function *fun)
3172 {
3173   eh_region i;
3174   int depth = 0;
3175   static const char *const type_name[] = {
3176     "cleanup", "try", "allowed_exceptions", "must_not_throw"
3177   };
3178 
3179   i = fun->eh->region_tree;
3180   if (!i)
3181     return;
3182 
3183   fprintf (out, "Eh tree:\n");
3184   while (1)
3185     {
3186       fprintf (out, "  %*s %i %s", depth * 2, "",
3187 	       i->index, type_name[(int) i->type]);
3188 
3189       if (i->landing_pads)
3190 	{
3191 	  eh_landing_pad lp;
3192 
3193 	  fprintf (out, " land:");
3194 	  if (current_ir_type () == IR_GIMPLE)
3195 	    {
3196 	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3197 		{
3198 		  fprintf (out, "{%i,", lp->index);
3199 		  print_generic_expr (out, lp->post_landing_pad, 0);
3200 		  fputc ('}', out);
3201 		  if (lp->next_lp)
3202 		    fputc (',', out);
3203 		}
3204 	    }
3205 	  else
3206 	    {
3207 	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3208 		{
3209 		  fprintf (out, "{%i,", lp->index);
3210 		  if (lp->landing_pad)
3211 		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3212 			     NOTE_P (lp->landing_pad) ? "(del)" : "");
3213 		  else
3214 		    fprintf (out, "(nil),");
3215 		  if (lp->post_landing_pad)
3216 		    {
3217 		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
3218 		      fprintf (out, "%i%s}", INSN_UID (lab),
3219 			       NOTE_P (lab) ? "(del)" : "");
3220 		    }
3221 		  else
3222 		    fprintf (out, "(nil)}");
3223 		  if (lp->next_lp)
3224 		    fputc (',', out);
3225 		}
3226 	    }
3227 	}
3228 
3229       switch (i->type)
3230 	{
3231 	case ERT_CLEANUP:
3232 	case ERT_MUST_NOT_THROW:
3233 	  break;
3234 
3235 	case ERT_TRY:
3236 	  {
3237 	    eh_catch c;
3238 	    fprintf (out, " catch:");
3239 	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3240 	      {
3241 		fputc ('{', out);
3242 		if (c->label)
3243 		  {
3244 		    fprintf (out, "lab:");
3245 		    print_generic_expr (out, c->label, 0);
3246 		    fputc (';', out);
3247 		  }
3248 		print_generic_expr (out, c->type_list, 0);
3249 		fputc ('}', out);
3250 		if (c->next_catch)
3251 		  fputc (',', out);
3252 	      }
3253 	  }
3254 	  break;
3255 
3256 	case ERT_ALLOWED_EXCEPTIONS:
3257 	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
3258 	  print_generic_expr (out, i->u.allowed.type_list, 0);
3259 	  break;
3260 	}
3261       fputc ('\n', out);
3262 
3263       /* If there are sub-regions, process them.  */
3264       if (i->inner)
3265 	i = i->inner, depth++;
3266       /* If there are peers, process them.  */
3267       else if (i->next_peer)
3268 	i = i->next_peer;
3269       /* Otherwise, step back up the tree to the next peer.  */
3270       else
3271 	{
3272 	  do
3273 	    {
3274 	      i = i->outer;
3275 	      depth--;
3276 	      if (i == NULL)
3277 		return;
3278 	    }
3279 	  while (i->next_peer == NULL);
3280 	  i = i->next_peer;
3281 	}
3282     }
3283 }
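
/* Illustrative sketch (demo_region is a toy type, not GCC's
   eh_region): the stackless pre-order walk used by dump_eh_tree and
   verify_eh_tree, relying on the inner/next_peer/outer links instead
   of recursion or an explicit stack.  */

struct demo_region
{
  struct demo_region *inner, *next_peer, *outer;
  int index;
};

static void
demo_walk (struct demo_region *r, void (*visit) (struct demo_region *))
{
  while (r)
    {
      visit (r);
      if (r->inner)
	r = r->inner;			/* descend to the first child */
      else if (r->next_peer)
	r = r->next_peer;		/* else move to the next sibling */
      else
	{
	  /* Climb until some ancestor has an unvisited sibling.  */
	  do
	    r = r->outer;
	  while (r && r->next_peer == NULL);
	  if (r)
	    r = r->next_peer;
	}
    }
}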
3284 
3285 /* Dump the EH tree for FN on stderr.  */
3286 
3287 DEBUG_FUNCTION void
3288 debug_eh_tree (struct function *fn)
3289 {
3290   dump_eh_tree (stderr, fn);
3291 }
3292 
3293 /* Verify invariants on EH data structures.  */
3294 
3295 DEBUG_FUNCTION void
3296 verify_eh_tree (struct function *fun)
3297 {
3298   eh_region r, outer;
3299   int nvisited_lp, nvisited_r;
3300   int count_lp, count_r, depth, i;
3301   eh_landing_pad lp;
3302   bool err = false;
3303 
3304   if (!fun->eh->region_tree)
3305     return;
3306 
3307   count_r = 0;
3308   for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3309     if (r)
3310       {
3311 	if (r->index == i)
3312 	  count_r++;
3313 	else
3314 	  {
3315 	    error ("region_array is corrupted for region %i", r->index);
3316 	    err = true;
3317 	  }
3318       }
3319 
3320   count_lp = 0;
3321   for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3322     if (lp)
3323       {
3324 	if (lp->index == i)
3325 	  count_lp++;
3326 	else
3327 	  {
3328 	    error ("lp_array is corrupted for lp %i", lp->index);
3329 	    err = true;
3330 	  }
3331       }
3332 
3333   depth = nvisited_lp = nvisited_r = 0;
3334   outer = NULL;
3335   r = fun->eh->region_tree;
3336   while (1)
3337     {
3338       if ((*fun->eh->region_array)[r->index] != r)
3339 	{
3340 	  error ("region_array is corrupted for region %i", r->index);
3341 	  err = true;
3342 	}
3343       if (r->outer != outer)
3344 	{
3345 	  error ("outer block of region %i is wrong", r->index);
3346 	  err = true;
3347 	}
3348       if (depth < 0)
3349 	{
3350 	  error ("negative nesting depth of region %i", r->index);
3351 	  err = true;
3352 	}
3353       nvisited_r++;
3354 
3355       for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3356 	{
3357 	  if ((*fun->eh->lp_array)[lp->index] != lp)
3358 	    {
3359 	      error ("lp_array is corrupted for lp %i", lp->index);
3360 	      err = true;
3361 	    }
3362 	  if (lp->region != r)
3363 	    {
3364 	      error ("region of lp %i is wrong", lp->index);
3365 	      err = true;
3366 	    }
3367 	  nvisited_lp++;
3368 	}
3369 
3370       if (r->inner)
3371 	outer = r, r = r->inner, depth++;
3372       else if (r->next_peer)
3373 	r = r->next_peer;
3374       else
3375 	{
3376 	  do
3377 	    {
3378 	      r = r->outer;
3379 	      if (r == NULL)
3380 		goto region_done;
3381 	      depth--;
3382 	      outer = r->outer;
3383 	    }
3384 	  while (r->next_peer == NULL);
3385 	  r = r->next_peer;
3386 	}
3387     }
3388  region_done:
3389   if (depth != 0)
3390     {
3391       error ("tree list ends on depth %i", depth);
3392       err = true;
3393     }
3394   if (count_r != nvisited_r)
3395     {
3396       error ("region_array does not match region_tree");
3397       err = true;
3398     }
3399   if (count_lp != nvisited_lp)
3400     {
3401       error ("lp_array does not match region_tree");
3402       err = true;
3403     }
3404 
3405   if (err)
3406     {
3407       dump_eh_tree (stderr, fun);
3408       internal_error ("verify_eh_tree failed");
3409     }
3410 }
3411 
3412 #include "gt-except.h"
3413