/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is an error that occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error has occurred without adding
   global state variables and error checks around every object
   construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
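
/* A minimal sketch (not from the original documentation) of how a
   language front end is expected to drive the cleanup entry points
   below.  CLEANUP_EXP is a hypothetical tree built by the front end
   for the cleanup action:

	expand_eh_region_start ();
	... expand the statements protected by the cleanup ...
	expand_eh_region_end_cleanup (cleanup_exp);

   Regions nest, so each _start call must be matched by exactly one
   _end call, in LIFO order; see expand_eh_region_start below.  */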


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
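
/* An illustrative sketch (not from the original sources) of the links
   above.  For a region B created inside A, followed by a sibling C
   also created inside A, the tree is wired as

	A.inner = C;  C.next_peer = B;  B.outer = C.outer = A;

   since expand_eh_region_start pushes each new region onto the front
   of the parent's `inner' list, `inner' names the most recently
   created child.  */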

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};


static int t2r_eq				PARAMS ((const PTR,
							 const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter		PARAMS ((struct function *));

static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void remove_unreachable_regions		PARAMS ((rtx));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						     struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
							 struct eh_region **));
static int ttypes_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t,
							 tree));
static void assign_filter_values		PARAMS ((void));
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static hashval_t ehl_hash			PARAMS ((const PTR));
static int ehl_eq				PARAMS ((const PTR,
							 const PTR));
static void add_ehl_entry			PARAMS ((rtx,
							 struct eh_region *));
static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));
static int for_each_eh_label_1			PARAMS ((PTR *, PTR));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
							 struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

static void push_uleb128			PARAMS ((varray_type *,
							 unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table		PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table	PARAMS ((void));


/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}

void
init_eh ()
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
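
/* For reference, a hedged sketch of the C structure the fields built
   above are meant to mirror (see unwind-sjlj.c; the exact field types
   there may differ from this approximation):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  ... jbuf, whose element type and length are target-dependent ...
	};
*/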

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Make sure we always end up with a type list, to normalize
         further processing; then register each type against the
         runtime types map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
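
/* A minimal sketch (not from the original documentation) of the call
   sequence a front end uses for a try/catch construct; TYPE1 is a
   hypothetical tree for a handled type:

	expand_eh_region_start ();
	... expand the body of the try block ...
	expand_start_all_catch ();
	expand_start_catch (type1);
	... expand the handler body ...
	expand_end_catch ();
	... further catch clauses ...
	expand_end_all_catch ();

   Execution resumes after the whole construct via the try region's
   continue_label, which expand_end_all_catch emits.  */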

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  The reason is that expand_cleanups is currently generating a
   sequence that it will insert somewhere else.  We collect the proper
   notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw ()
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Provide random access to the exception region tree.  It's just as
   simple to collect the regions this way as in expand_eh_region_start,
   but without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (label, region)
     rtx label;
     struct eh_region *region;
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

void
find_exception_handler_labels ()
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}

static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}


static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
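
/* A worked example (illustrative, not from the original sources): for
   a specification list (A, B) where add_ttypes_entry has assigned A
   filter 1 and B filter 2, the bytes {1, 2, 0} are appended to
   ehspec_data, and the entry's filter is minus the 1-based index of
   the first byte, i.e. -1 if the buffer was previously empty.  */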

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
1776 
1777 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1778    _Unwind_Resume otherwise.  */
1779 
1780 static void
1781 connect_post_landing_pads ()
1782 {
1783   int i;
1784 
1785   for (i = cfun->eh->last_region_number; i > 0; --i)
1786     {
1787       struct eh_region *region = cfun->eh->region_array[i];
1788       struct eh_region *outer;
1789       rtx seq;
1790 
1791       /* Mind we don't process a region more than once.  */
1792       if (!region || region->region_number != i)
1793 	continue;
1794 
1795       /* If there is no RESX, or it has been deleted by flow, there's
1796 	 nothing to fix up.  */
1797       if (! region->resume || INSN_DELETED_P (region->resume))
1798 	continue;
1799 
1800       /* Search for another landing pad in this function.  */
1801       for (outer = region->outer; outer ; outer = outer->outer)
1802 	if (outer->post_landing_pad)
1803 	  break;
1804 
1805       start_sequence ();
1806 
1807       if (outer)
1808 	emit_jump (outer->post_landing_pad);
1809       else
1810 	emit_library_call (unwind_resume_libfunc, LCT_THROW,
1811 			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1812 
1813       seq = get_insns ();
1814       end_sequence ();
1815       emit_insn_before (seq, region->resume);
1816       delete_insn (region->resume);
1817     }
1818 }
1819 
1820 
1821 static void
1822 dw2_build_landing_pads ()
1823 {
1824   int i;
1825   unsigned int j;
1826 
1827   for (i = cfun->eh->last_region_number; i > 0; --i)
1828     {
1829       struct eh_region *region = cfun->eh->region_array[i];
1830       rtx seq;
1831       bool clobbers_hard_regs = false;
1832 
1833       /* Mind we don't process a region more than once.  */
1834       if (!region || region->region_number != i)
1835 	continue;
1836 
1837       if (region->type != ERT_CLEANUP
1838 	  && region->type != ERT_TRY
1839 	  && region->type != ERT_ALLOWED_EXCEPTIONS)
1840 	continue;
1841 
1842       start_sequence ();
1843 
1844       region->landing_pad = gen_label_rtx ();
1845       emit_label (region->landing_pad);
1846 
1847 #ifdef HAVE_exception_receiver
1848       if (HAVE_exception_receiver)
1849 	emit_insn (gen_exception_receiver ());
1850       else
1851 #endif
1852 #ifdef HAVE_nonlocal_goto_receiver
1853 	if (HAVE_nonlocal_goto_receiver)
1854 	  emit_insn (gen_nonlocal_goto_receiver ());
1855 	else
1856 #endif
1857 	  { /* Nothing */ }
1858 
1859       /* If the eh_return data registers are call-saved, then we
1860 	 won't have considered them clobbered from the call that
1861 	 threw.  Kill them now.  */
1862       for (j = 0; ; ++j)
1863 	{
1864 	  unsigned r = EH_RETURN_DATA_REGNO (j);
1865 	  if (r == INVALID_REGNUM)
1866 	    break;
1867 	  if (! call_used_regs[r])
1868 	    {
1869 	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1870 	      clobbers_hard_regs = true;
1871 	    }
1872 	}
1873 
1874       if (clobbers_hard_regs)
1875 	{
1876 	  /* @@@ This is a kludge.  Not all machine descriptions define a
1877 	     blockage insn, but we must not allow the code we just generated
1878 	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
1879 	     blockage insn.  */
1880 	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1881 	}
1882 
1883       emit_move_insn (cfun->eh->exc_ptr,
1884 		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1885       emit_move_insn (cfun->eh->filter,
1886 		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1887 
1888       seq = get_insns ();
1889       end_sequence ();
1890 
1891       emit_insn_before (seq, region->post_landing_pad);
1892     }
1893 }
1894 
1895 
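/* Support for sjlj exceptions.  Every throw in the function funnels
   through one setjmp buffer: the runtime transfers control back to a
   single dispatch label, which switches on a value left in the
   function context and jumps to the matching post-landing pad.  The
   structure below holds the per-region data needed to build that
   dispatch.  */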
1896 struct sjlj_lp_info
1897 {
1898   int directly_reachable;	/* Nonzero if some insn can reach this region.  */
1899   int action_index;		/* Action chain index from collect_one_action_chain.  */
1900   int dispatch_index;		/* Index tested at the common dispatch landing pad.  */
1901   int call_site_index;		/* Value stored in the function context before calls.  */
1902 };
1903 
1904 static bool
1905 sjlj_find_directly_reachable_regions (lp_info)
1906      struct sjlj_lp_info *lp_info;
1907 {
1908   rtx insn;
1909   bool found_one = false;
1910 
1911   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1912     {
1913       struct eh_region *region;
1914       enum reachable_code rc;
1915       tree type_thrown;
1916       rtx note;
1917 
1918       if (! INSN_P (insn))
1919 	continue;
1920 
1921       note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1922       if (!note || INTVAL (XEXP (note, 0)) <= 0)
1923 	continue;
1924 
1925       region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1926 
1927       type_thrown = NULL_TREE;
1928       if (region->type == ERT_THROW)
1929 	{
1930 	  type_thrown = region->u.throw.type;
1931 	  region = region->outer;
1932 	}
1933 
1934       /* Find the first containing region that might handle the exception.
1935 	 That's the landing pad to which we will transfer control.  */
1936       rc = RNL_NOT_CAUGHT;
1937       for (; region; region = region->outer)
1938 	{
1939 	  rc = reachable_next_level (region, type_thrown, 0);
1940 	  if (rc != RNL_NOT_CAUGHT)
1941 	    break;
1942 	}
1943       if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1944 	{
1945 	  lp_info[region->region_number].directly_reachable = 1;
1946 	  found_one = true;
1947 	}
1948     }
1949 
1950   return found_one;
1951 }
1952 
1953 static void
1954 sjlj_assign_call_site_values (dispatch_label, lp_info)
1955      rtx dispatch_label;
1956      struct sjlj_lp_info *lp_info;
1957 {
1958   htab_t ar_hash;
1959   int i, index;
1960 
1961   /* First task: build the action table.  */
1962 
1963   VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1964   ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1965 
1966   for (i = cfun->eh->last_region_number; i > 0; --i)
1967     if (lp_info[i].directly_reachable)
1968       {
1969 	struct eh_region *r = cfun->eh->region_array[i];
1970 	r->landing_pad = dispatch_label;
1971 	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1972 	if (lp_info[i].action_index != -1)
1973 	  cfun->uses_eh_lsda = 1;
1974       }
1975 
1976   htab_delete (ar_hash);
1977 
1978   /* Next: assign dispatch values.  In dwarf2 terms, this would be the
1979      landing pad label for the region.  For sjlj though, there is one
1980      common landing pad from which we dispatch to the post-landing pads.
1981 
1982      A region receives a dispatch index if it is directly reachable
1983      and requires in-function processing.  Regions that share post-landing
1984      pads may share dispatch indices.  */
1985   /* ??? Post-landing pad sharing doesn't actually happen at the moment
1986      (see build_post_landing_pads) so we don't bother checking for it.  */
1987 
1988   index = 0;
1989   for (i = cfun->eh->last_region_number; i > 0; --i)
1990     if (lp_info[i].directly_reachable)
1991       lp_info[i].dispatch_index = index++;
1992 
1993   /* Finally: assign call-site values.  In dwarf2 terms, this would be
1994      the region number assigned by convert_to_eh_region_ranges, but
1995      handles no-action and must-not-throw differently.  */
1996 
1997   call_site_base = 1;
1998   for (i = cfun->eh->last_region_number; i > 0; --i)
1999     if (lp_info[i].directly_reachable)
2000       {
2001 	int action = lp_info[i].action_index;
2002 
2003 	/* Map must-not-throw to otherwise unused call-site index 0.  */
2004 	if (action == -2)
2005 	  index = 0;
2006 	/* Map no-action to otherwise unused call-site index -1.  */
2007 	else if (action == -1)
2008 	  index = -1;
2009 	/* Otherwise, look it up in the table.  */
2010 	else
2011 	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2012 
2013 	lp_info[i].call_site_index = index;
2014       }
2015 }
2016 
2017 static void
2018 sjlj_mark_call_sites (lp_info)
2019      struct sjlj_lp_info *lp_info;
2020 {
2021   int last_call_site = -2;
2022   rtx insn, mem;
2023 
2024   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2025     {
2026       struct eh_region *region;
2027       int this_call_site;
2028       rtx note, before, p;
2029 
2030       /* Reset value tracking at extended basic block boundaries.  */
2031       if (GET_CODE (insn) == CODE_LABEL)
2032 	last_call_site = -2;
2033 
2034       if (! INSN_P (insn))
2035 	continue;
2036 
2037       note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2038       if (!note)
2039 	{
2040 	  /* Calls (and trapping insns) without notes are outside any
2041 	     exception handling region in this function.  Mark them as
2042 	     no action.  */
2043 	  if (GET_CODE (insn) == CALL_INSN
2044 	      || (flag_non_call_exceptions
2045 		  && may_trap_p (PATTERN (insn))))
2046 	    this_call_site = -1;
2047 	  else
2048 	    continue;
2049 	}
2050       else
2051 	{
2052 	  /* Calls that are known to not throw need not be marked.  */
2053 	  if (INTVAL (XEXP (note, 0)) <= 0)
2054 	    continue;
2055 
2056 	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2057 	  this_call_site = lp_info[region->region_number].call_site_index;
2058 	}
2059 
2060       if (this_call_site == last_call_site)
2061 	continue;
2062 
2063       /* Don't separate a call from its argument loads.  */
2064       before = insn;
2065       if (GET_CODE (insn) == CALL_INSN)
2066 	before = find_first_parameter_load (insn, NULL_RTX);
2067 
2068       start_sequence ();
2069       mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2070 			    sjlj_fc_call_site_ofs);
2071       emit_move_insn (mem, GEN_INT (this_call_site));
2072       p = get_insns ();
2073       end_sequence ();
2074 
2075       emit_insn_before (p, before);
2076       last_call_site = this_call_site;
2077     }
2078 }
2079 
2080 /* Construct the SjLj_Function_Context.  */
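/* A rough sketch of the code emitted below, where fc is the function
   context allocated on the stack:

       fc.personality = <personality routine>;
       fc.lsda = &LLSDAxxx;		-- or 0 when no lsda is required
       if (setjmp (fc.jbuf) != 0)
	 goto dispatch_label;
       _Unwind_SjLj_Register (&fc);  */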
2081 
2082 static void
2083 sjlj_emit_function_enter (dispatch_label)
2084      rtx dispatch_label;
2085 {
2086   rtx fn_begin, fc, mem, seq;
2087 
2088   fc = cfun->eh->sjlj_fc;
2089 
2090   start_sequence ();
2091 
2092   /* We're storing this libcall's address into memory instead of
2093      calling it directly.  Thus, we must call assemble_external_libcall
2094      here, as we cannot depend on emit_library_call to do it for us.  */
2095   assemble_external_libcall (eh_personality_libfunc);
2096   mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2097   emit_move_insn (mem, eh_personality_libfunc);
2098 
2099   mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2100   if (cfun->uses_eh_lsda)
2101     {
2102       char buf[20];
2103       ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2104       emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2105     }
2106   else
2107     emit_move_insn (mem, const0_rtx);
2108 
2109 #ifdef DONT_USE_BUILTIN_SETJMP
2110   {
2111     rtx x, note;
2112     x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2113 				 TYPE_MODE (integer_type_node), 1,
2114 				 plus_constant (XEXP (fc, 0),
2115 						sjlj_fc_jbuf_ofs), Pmode);
2116 
2117     note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2118     NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2119 
2120     emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2121 			     TYPE_MODE (integer_type_node), 0, dispatch_label);
2122   }
2123 #else
2124   expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2125 			       dispatch_label);
2126 #endif
2127 
2128   emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2129 		     1, XEXP (fc, 0), Pmode);
2130 
2131   seq = get_insns ();
2132   end_sequence ();
2133 
2134   /* ??? Instead of doing this at the beginning of the function,
2135      do this in a block that is at loop level 0 and dominates all
2136      can_throw_internal instructions.  */
2137 
2138   for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2139     if (GET_CODE (fn_begin) == NOTE
2140 	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2141       break;
2142   emit_insn_after (seq, fn_begin);
2143 }
2144 
2145 /* Call back from expand_function_end to know where we should put
2146    the call to unwind_sjlj_unregister_libfunc if needed.  */
2147 
2148 void
2149 sjlj_emit_function_exit_after (after)
2150      rtx after;
2151 {
2152   cfun->eh->sjlj_exit_after = after;
2153 }
2154 
2155 static void
2156 sjlj_emit_function_exit ()
2157 {
2158   rtx seq;
2159 
2160   start_sequence ();
2161 
2162   emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2163 		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2164 
2165   seq = get_insns ();
2166   end_sequence ();
2167 
2168   /* ??? Really this can be done in any block at loop level 0 that
2169      post-dominates all can_throw_internal instructions.  This is
2170      the last possible moment.  */
2171 
2172   emit_insn_after (seq, cfun->eh->sjlj_exit_after);
2173 }
2174 
2175 static void
2176 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2177      rtx dispatch_label;
2178      struct sjlj_lp_info *lp_info;
2179 {
2180   int i, first_reachable;
2181   rtx mem, dispatch, seq, fc;
2182 
2183   fc = cfun->eh->sjlj_fc;
2184 
2185   start_sequence ();
2186 
2187   emit_label (dispatch_label);
2188 
2189 #ifndef DONT_USE_BUILTIN_SETJMP
2190   expand_builtin_setjmp_receiver (dispatch_label);
2191 #endif
2192 
2193   /* Load up dispatch index, exc_ptr and filter values from the
2194      function context.  */
2195   mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2196 			sjlj_fc_call_site_ofs);
2197   dispatch = copy_to_reg (mem);
2198 
2199   mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2200   if (word_mode != Pmode)
2201     {
2202 #ifdef POINTERS_EXTEND_UNSIGNED
2203       mem = convert_memory_address (Pmode, mem);
2204 #else
2205       mem = convert_to_mode (Pmode, mem, 0);
2206 #endif
2207     }
2208   emit_move_insn (cfun->eh->exc_ptr, mem);
2209 
2210   mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2211   emit_move_insn (cfun->eh->filter, mem);
2212 
2213   /* Jump to one of the directly reachable regions.  */
2214   /* ??? This really ought to be using a switch statement.  */
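  /* Morally, the code below is

       switch (dispatch)
	 {
	 case D2: goto region2->post_landing_pad;
	 ...
	 default: goto first_reachable->post_landing_pad;
	 }

     except that the default case is free: the whole sequence is
     emitted immediately before the first reachable region's
     post-landing pad, so it simply falls through.  */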
2215 
2216   first_reachable = 0;
2217   for (i = cfun->eh->last_region_number; i > 0; --i)
2218     {
2219       if (! lp_info[i].directly_reachable)
2220 	continue;
2221 
2222       if (! first_reachable)
2223 	{
2224 	  first_reachable = i;
2225 	  continue;
2226 	}
2227 
2228       emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2229 			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2230 			       cfun->eh->region_array[i]->post_landing_pad);
2231     }
2232 
2233   seq = get_insns ();
2234   end_sequence ();
2235 
2236   emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2237 			  ->post_landing_pad));
2238 }
2239 
2240 static void
2241 sjlj_build_landing_pads ()
2242 {
2243   struct sjlj_lp_info *lp_info;
2244 
2245   lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2246 					     sizeof (struct sjlj_lp_info));
2247 
2248   if (sjlj_find_directly_reachable_regions (lp_info))
2249     {
2250       rtx dispatch_label = gen_label_rtx ();
2251 
2252       cfun->eh->sjlj_fc
2253 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2254 			      int_size_in_bytes (sjlj_fc_type_node),
2255 			      TYPE_ALIGN (sjlj_fc_type_node));
2256 
2257       sjlj_assign_call_site_values (dispatch_label, lp_info);
2258       sjlj_mark_call_sites (lp_info);
2259 
2260       sjlj_emit_function_enter (dispatch_label);
2261       sjlj_emit_dispatch_table (dispatch_label, lp_info);
2262       sjlj_emit_function_exit ();
2263     }
2264 
2265   free (lp_info);
2266 }
2267 
2268 void
2269 finish_eh_generation ()
2270 {
2271   /* Nothing to do if no regions created.  */
2272   if (cfun->eh->region_tree == NULL)
2273     return;
2274 
2275   /* The object here is to provide find_basic_blocks with detailed
2276      information (via reachable_handlers) on how exception control
2277      flows within the function.  In this first pass, we can include
2278      type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2279      regions, and hope that it will be useful in deleting unreachable
2280      handlers.  Subsequently, we will generate landing pads which will
2281      connect many of the handlers, and then type information will not
2282      be effective.  Still, this is a win over previous implementations.  */
2283 
2284   cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2285 
2286   /* These registers are used by the landing pads.  Make sure they
2287      have been generated.  */
2288   get_exception_pointer (cfun);
2289   get_exception_filter (cfun);
2290 
2291   /* Construct the landing pads.  */
2292 
2293   assign_filter_values ();
2294   build_post_landing_pads ();
2295   connect_post_landing_pads ();
2296   if (USING_SJLJ_EXCEPTIONS)
2297     sjlj_build_landing_pads ();
2298   else
2299     dw2_build_landing_pads ();
2300 
2301   cfun->eh->built_landing_pads = 1;
2302 
2303   /* We've totally changed the CFG.  Start over.  */
2304   find_exception_handler_labels ();
2305   rebuild_jump_labels (get_insns ());
2306   find_basic_blocks (get_insns (), max_reg_num (), 0);
2307   cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2308 }
2309 
2310 static hashval_t
2311 ehl_hash (pentry)
2312      const PTR pentry;
2313 {
2314   struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2315 
2316   /* 2^32 * ((sqrt(5) - 1) / 2); multiplicative hashing a la Knuth.  */
2317   const hashval_t scaled_golden_ratio = 0x9e3779b9;
2318   return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2319 }
2320 
2321 static int
2322 ehl_eq (pentry, pdata)
2323      const PTR pentry;
2324      const PTR pdata;
2325 {
2326   struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2327   struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2328 
2329   return entry->label == data->label;
2330 }
2331 
2332 /* This section handles removing dead code for flow.  */
2333 
2334 /* Remove LABEL from exception_handler_label_map.  */
2335 
2336 static void
2337 remove_exception_handler_label (label)
2338      rtx label;
2339 {
2340   struct ehl_map_entry **slot, tmp;
2341 
2342   /* If exception_handler_label_map was not built yet,
2343      there is nothing to do.  */
2344   if (cfun->eh->exception_handler_label_map == NULL)
2345     return;
2346 
2347   tmp.label = label;
2348   slot = (struct ehl_map_entry **)
2349     htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2350   if (! slot)
2351     abort ();
2352 
2353   htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2354 }
2355 
2356 /* Splice REGION from the region tree etc.  */
2357 
2358 static void
2359 remove_eh_handler (region)
2360      struct eh_region *region;
2361 {
2362   struct eh_region **pp, **pp_start, *p, *outer, *inner;
2363   rtx lab;
2364 
2365   /* For the benefit of efficiently handling REG_EH_REGION notes,
2366      replace this region in the region array with its containing
2367      region.  Note that previous region deletions may result in
2368      multiple copies of this region in the array, so we have a
2369      list of alternate numbers by which we are known.  */
2370 
2371   outer = region->outer;
2372   cfun->eh->region_array[region->region_number] = outer;
2373   if (region->aka)
2374     {
2375       int i;
2376       EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2377 	{ cfun->eh->region_array[i] = outer; });
2378     }
2379 
2380   if (outer)
2381     {
2382       if (!outer->aka)
2383         outer->aka = BITMAP_GGC_ALLOC ();
2384       if (region->aka)
2385 	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2386       bitmap_set_bit (outer->aka, region->region_number);
2387     }
2388 
2389   if (cfun->eh->built_landing_pads)
2390     lab = region->landing_pad;
2391   else
2392     lab = region->label;
2393   if (lab)
2394     remove_exception_handler_label (lab);
2395 
2396   if (outer)
2397     pp_start = &outer->inner;
2398   else
2399     pp_start = &cfun->eh->region_tree;
2400   for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2401     continue;
2402   *pp = region->next_peer;
2403 
2404   inner = region->inner;
2405   if (inner)
2406     {
2407       for (p = inner; p->next_peer ; p = p->next_peer)
2408 	p->outer = outer;
2409       p->outer = outer;
2410 
2411       p->next_peer = *pp_start;
2412       *pp_start = inner;
2413     }
2414 
2415   if (region->type == ERT_CATCH)
2416     {
2417       struct eh_region *try, *next, *prev;
2418 
2419       for (try = region->next_peer;
2420 	   try->type == ERT_CATCH;
2421 	   try = try->next_peer)
2422 	continue;
2423       if (try->type != ERT_TRY)
2424 	abort ();
2425 
2426       next = region->u.catch.next_catch;
2427       prev = region->u.catch.prev_catch;
2428 
2429       if (next)
2430 	next->u.catch.prev_catch = prev;
2431       else
2432 	try->u.try.last_catch = prev;
2433       if (prev)
2434 	prev->u.catch.next_catch = next;
2435       else
2436 	{
2437 	  try->u.try.catch = next;
2438 	  if (! next)
2439 	    remove_eh_handler (try);
2440 	}
2441     }
2442 }
2443 
2444 /* LABEL heads a basic block that is about to be deleted.  If this
2445    label corresponds to an exception region, we may be able to
2446    delete the region.  */
2447 
2448 void
2449 maybe_remove_eh_handler (label)
2450      rtx label;
2451 {
2452   struct ehl_map_entry **slot, tmp;
2453   struct eh_region *region;
2454 
2455   /* ??? After generating landing pads, it's not so simple to determine
2456      if the region data is completely unused.  One must examine the
2457      landing pad and the post landing pad, and whether an inner try block
2458      is referencing the catch handlers directly.  */
2459   if (cfun->eh->built_landing_pads)
2460     return;
2461 
2462   tmp.label = label;
2463   slot = (struct ehl_map_entry **)
2464     htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2465   if (! slot)
2466     return;
2467   region = (*slot)->region;
2468   if (! region)
2469     return;
2470 
2471   /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2472      because there is no path to the fallback call to terminate.
2473      But the region continues to affect call-site data until there
2474      are no more contained calls, which we don't see here.  */
2475   if (region->type == ERT_MUST_NOT_THROW)
2476     {
2477       htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2478       region->label = NULL_RTX;
2479     }
2480   else
2481     remove_eh_handler (region);
2482 }
2483 
2484 /* Invokes CALLBACK for every exception handler label.  Only used by old
2485    loop hackery; should not be used by new code.  */
2486 
2487 void
2488 for_each_eh_label (callback)
2489      void (*callback) PARAMS ((rtx));
2490 {
2491   htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2492 		 (void *)callback);
2493 }
2494 
2495 static int
2496 for_each_eh_label_1 (pentry, data)
2497      PTR *pentry;
2498      PTR data;
2499 {
2500   struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2501   void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
2502 
2503   (*callback) (entry->label);
2504   return 1;
2505 }
2506 
2507 /* This section describes CFG exception edges for flow.  */
2508 
2509 /* For communicating between calls to reachable_next_level.  */
2510 struct reachable_info GTY(())
2511 {
2512   tree types_caught;	/* List of types we know to have been caught.  */
2513   tree types_allowed;	/* List of lists of allowed types seen so far.  */
2514   rtx handlers;		/* INSN_LIST of reachable handler labels.  */
2515 };
2516 
2517 /* A subroutine of reachable_next_level.  Return true if TYPE, or a
2518    base class of TYPE, is in HANDLED.  */
2519 
2520 static int
2521 check_handled (handled, type)
2522      tree handled, type;
2523 {
2524   tree t;
2525 
2526   /* We can check for exact matches without front-end help.  */
2527   if (! lang_eh_type_covers)
2528     {
2529       for (t = handled; t ; t = TREE_CHAIN (t))
2530 	if (TREE_VALUE (t) == type)
2531 	  return 1;
2532     }
2533   else
2534     {
2535       for (t = handled; t ; t = TREE_CHAIN (t))
2536 	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2537 	  return 1;
2538     }
2539 
2540   return 0;
2541 }
2542 
2543 /* A subroutine of reachable_next_level.  If we are collecting a list
2544    of handlers, add one.  After landing pad generation, reference
2545    it instead of the handlers themselves.  Further, the handlers are
2546    all wired together, so by referencing one, we've got them all.
2547    Before landing pad generation we reference each handler individually.
2548 
2549    LP_REGION contains the landing pad; REGION is the handler.  */
2550 
2551 static void
2552 add_reachable_handler (info, lp_region, region)
2553      struct reachable_info *info;
2554      struct eh_region *lp_region;
2555      struct eh_region *region;
2556 {
2557   if (! info)
2558     return;
2559 
2560   if (cfun->eh->built_landing_pads)
2561     {
2562       if (! info->handlers)
2563 	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2564     }
2565   else
2566     info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2567 }
2568 
2569 /* Process one level of exception regions for reachability.
2570    If TYPE_THROWN is non-null, then it is the *exact* type being
2571    propagated.  If INFO is non-null, then collect handler labels
2572    and caught/allowed type information between invocations.  */
2573 
2574 static enum reachable_code
2575 reachable_next_level (region, type_thrown, info)
2576      struct eh_region *region;
2577      tree type_thrown;
2578      struct reachable_info *info;
2579 {
2580   switch (region->type)
2581     {
2582     case ERT_CLEANUP:
2583       /* Before landing-pad generation, we model control flow
2584 	 directly to the individual handlers.  In this way we can
2585 	 see that catch handler types may shadow one another.  */
2586       add_reachable_handler (info, region, region);
2587       return RNL_MAYBE_CAUGHT;
2588 
2589     case ERT_TRY:
2590       {
2591 	struct eh_region *c;
2592 	enum reachable_code ret = RNL_NOT_CAUGHT;
2593 
2594 	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2595 	  {
2596 	    /* A catch-all handler ends the search.  */
2597 	    if (c->u.catch.type_list == NULL)
2598 	      {
2599 		add_reachable_handler (info, region, c);
2600 		return RNL_CAUGHT;
2601 	      }
2602 
2603 	    if (type_thrown)
2604 	      {
2605 		/* If we have at least one type match, end the search.  */
2606 		tree tp_node = c->u.catch.type_list;
2607 
2608 		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2609 		  {
2610 		    tree type = TREE_VALUE (tp_node);
2611 
2612 		    if (type == type_thrown
2613 			|| (lang_eh_type_covers
2614 			    && (*lang_eh_type_covers) (type, type_thrown)))
2615 		      {
2616 			add_reachable_handler (info, region, c);
2617 			return RNL_CAUGHT;
2618 		      }
2619 		  }
2620 
2621 		/* If we have definitive information of a match failure,
2622 		   the catch won't trigger.  */
2623 		if (lang_eh_type_covers)
2624 		  return RNL_NOT_CAUGHT;
2625 	      }
2626 
2627 	    /* At this point, we either don't know what type is thrown or
2628 	       don't have front-end assistance to help deciding if it is
2629 	       covered by one of the types in the list for this region.
2630 
2631 	       We'd then like to add this region to the list of reachable
2632 	       handlers since it is indeed potentially reachable based on the
2633 	       information we have.
2634 
2635 	       Actually, this handler is for sure not reachable if all the
2636 	       types it matches have already been caught. That is, it is only
2637 	       potentially reachable if at least one of the types it catches
2638 	       has not been previously caught.  */
2639 
2640 	    if (! info)
2641 	      ret = RNL_MAYBE_CAUGHT;
2642 	    else
2643 	      {
2644 		tree tp_node = c->u.catch.type_list;
2645 		bool maybe_reachable = false;
2646 
2647 		/* Compute the potential reachability of this handler and
2648 		   update the list of types caught at the same time.  */
2649 		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2650 		  {
2651 		    tree type = TREE_VALUE (tp_node);
2652 
2653 		    if (! check_handled (info->types_caught, type))
2654 		      {
2655 			info->types_caught
2656 			  = tree_cons (NULL, type, info->types_caught);
2657 
2658 			maybe_reachable = true;
2659 		      }
2660 		  }
2661 
2662 		if (maybe_reachable)
2663 		  {
2664 		    add_reachable_handler (info, region, c);
2665 
2666 		    /* ??? If the catch type is a base class of every allowed
2667 		       type, then we know we can stop the search.  */
2668 		    ret = RNL_MAYBE_CAUGHT;
2669 		  }
2670 	      }
2671 	  }
2672 
2673 	return ret;
2674       }
2675 
2676     case ERT_ALLOWED_EXCEPTIONS:
2677       /* An empty list of types definitely ends the search.  */
2678       if (region->u.allowed.type_list == NULL_TREE)
2679 	{
2680 	  add_reachable_handler (info, region, region);
2681 	  return RNL_CAUGHT;
2682 	}
2683 
2684       /* Collect a list of lists of allowed types for use in detecting
2685 	 when a catch may be transformed into a catch-all.  */
2686       if (info)
2687 	info->types_allowed = tree_cons (NULL_TREE,
2688 					 region->u.allowed.type_list,
2689 					 info->types_allowed);
2690 
2691       /* If we have definitive information about the type hierarchy,
2692 	 then we can tell if the thrown type will pass through the
2693 	 filter.  */
2694       if (type_thrown && lang_eh_type_covers)
2695 	{
2696 	  if (check_handled (region->u.allowed.type_list, type_thrown))
2697 	    return RNL_NOT_CAUGHT;
2698 	  else
2699 	    {
2700 	      add_reachable_handler (info, region, region);
2701 	      return RNL_CAUGHT;
2702 	    }
2703 	}
2704 
2705       add_reachable_handler (info, region, region);
2706       return RNL_MAYBE_CAUGHT;
2707 
2708     case ERT_CATCH:
2709       /* Catch regions are handled by their controlling try region.  */
2710       return RNL_NOT_CAUGHT;
2711 
2712     case ERT_MUST_NOT_THROW:
2713       /* Here we end our search, since no exceptions may propagate.
2714 	 If we've touched down at some landing pad previously, then the
2715 	 explicit function call we generated may be used.  Otherwise
2716 	 the call is made by the runtime.  */
2717       if (info && info->handlers)
2718 	{
2719 	  add_reachable_handler (info, region, region);
2720 	  return RNL_CAUGHT;
2721 	}
2722       else
2723 	return RNL_BLOCKED;
2724 
2725     case ERT_THROW:
2726     case ERT_FIXUP:
2727     case ERT_UNKNOWN:
2728       /* Shouldn't see these here.  */
2729       break;
2730     }
2731 
2732   abort ();
2733 }
2734 
2735 /* Retrieve a list of labels of exception handlers which can be
2736    reached by a given insn.  */
2737 
2738 rtx
2739 reachable_handlers (insn)
2740      rtx insn;
2741 {
2742   struct reachable_info info;
2743   struct eh_region *region;
2744   tree type_thrown;
2745   int region_number;
2746 
2747   if (GET_CODE (insn) == JUMP_INSN
2748       && GET_CODE (PATTERN (insn)) == RESX)
2749     region_number = XINT (PATTERN (insn), 0);
2750   else
2751     {
2752       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2753       if (!note || INTVAL (XEXP (note, 0)) <= 0)
2754 	return NULL;
2755       region_number = INTVAL (XEXP (note, 0));
2756     }
2757 
2758   memset (&info, 0, sizeof (info));
2759 
2760   region = cfun->eh->region_array[region_number];
2761 
2762   type_thrown = NULL_TREE;
2763   if (GET_CODE (insn) == JUMP_INSN
2764       && GET_CODE (PATTERN (insn)) == RESX)
2765     {
2766       /* A RESX leaves a region instead of entering it.  Thus the
2767 	 region itself may have been deleted out from under us.  */
2768       if (region == NULL)
2769 	return NULL;
2770       region = region->outer;
2771     }
2772   else if (region->type == ERT_THROW)
2773     {
2774       type_thrown = region->u.throw.type;
2775       region = region->outer;
2776     }
2777 
2778   while (region)
2779     {
2780       if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2781 	break;
2782       /* If we have processed one cleanup, there is no point in
2783 	 processing any more of them.  Each cleanup will have an edge
2784 	 to the next outer cleanup region, so the flow graph will be
2785 	 accurate.  */
2786       if (region->type == ERT_CLEANUP)
2787 	region = region->u.cleanup.prev_try;
2788       else
2789 	region = region->outer;
2790     }
2791 
2792   return info.handlers;
2793 }
2794 
2795 /* Determine if the given INSN can throw an exception that is caught
2796    within the function.  */
2797 
2798 bool
2799 can_throw_internal (insn)
2800      rtx insn;
2801 {
2802   struct eh_region *region;
2803   tree type_thrown;
2804   rtx note;
2805 
2806   if (! INSN_P (insn))
2807     return false;
2808 
2809   if (GET_CODE (insn) == INSN
2810       && GET_CODE (PATTERN (insn)) == SEQUENCE)
2811     insn = XVECEXP (PATTERN (insn), 0, 0);
2812 
2813   if (GET_CODE (insn) == CALL_INSN
2814       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2815     {
2816       int i;
2817       for (i = 0; i < 3; ++i)
2818 	{
2819 	  rtx sub = XEXP (PATTERN (insn), i);
2820 	  for (; sub ; sub = NEXT_INSN (sub))
2821 	    if (can_throw_internal (sub))
2822 	      return true;
2823 	}
2824       return false;
2825     }
2826 
2827   /* Every insn that might throw has an EH_REGION note.  */
2828   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2829   if (!note || INTVAL (XEXP (note, 0)) <= 0)
2830     return false;
2831 
2832   region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2833 
2834   type_thrown = NULL_TREE;
2835   if (region->type == ERT_THROW)
2836     {
2837       type_thrown = region->u.throw.type;
2838       region = region->outer;
2839     }
2840 
2841   /* If this exception is ignored by each and every containing region,
2842      then control passes straight out.  The runtime may handle some
2843      regions, which also do not require processing internally.  */
2844   for (; region; region = region->outer)
2845     {
2846       enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2847       if (how == RNL_BLOCKED)
2848 	return false;
2849       if (how != RNL_NOT_CAUGHT)
2850 	return true;
2851     }
2852 
2853   return false;
2854 }
2855 
2856 /* Determine if the given INSN can throw an exception that is
2857    visible outside the function.  */
2858 
2859 bool
2860 can_throw_external (insn)
2861      rtx insn;
2862 {
2863   struct eh_region *region;
2864   tree type_thrown;
2865   rtx note;
2866 
2867   if (! INSN_P (insn))
2868     return false;
2869 
2870   if (GET_CODE (insn) == INSN
2871       && GET_CODE (PATTERN (insn)) == SEQUENCE)
2872     insn = XVECEXP (PATTERN (insn), 0, 0);
2873 
2874   if (GET_CODE (insn) == CALL_INSN
2875       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2876     {
2877       int i;
2878       for (i = 0; i < 3; ++i)
2879 	{
2880 	  rtx sub = XEXP (PATTERN (insn), i);
2881 	  for (; sub ; sub = NEXT_INSN (sub))
2882 	    if (can_throw_external (sub))
2883 	      return true;
2884 	}
2885       return false;
2886     }
2887 
2888   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2889   if (!note)
2890     {
2891       /* Calls (and trapping insns) without notes are outside any
2892 	 exception handling region in this function.  We have to
2893 	 assume it might throw.  Given that the front end and middle
2894 	 ends mark known NOTHROW functions, this isn't so wildly
2895 	 inaccurate.  */
2896       return (GET_CODE (insn) == CALL_INSN
2897 	      || (flag_non_call_exceptions
2898 		  && may_trap_p (PATTERN (insn))));
2899     }
2900   if (INTVAL (XEXP (note, 0)) <= 0)
2901     return false;
2902 
2903   region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2904 
2905   type_thrown = NULL_TREE;
2906   if (region->type == ERT_THROW)
2907     {
2908       type_thrown = region->u.throw.type;
2909       region = region->outer;
2910     }
2911 
2912   /* If the exception is caught or blocked by any containing region,
2913      then it is not seen by any calling function.  */
2914   for (; region ; region = region->outer)
2915     if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2916       return false;
2917 
2918   return true;
2919 }
2920 
2921 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */
2922 
2923 void
2924 set_nothrow_function_flags ()
2925 {
2926   rtx insn;
2927 
2928   current_function_nothrow = 1;
2929 
2930   /* Assume cfun->all_throwers_are_sibcalls until we encounter
2931      something that can throw an exception.  We specifically exempt
2932      CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2933      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
2934      is optimistic.  */
2935 
2936   cfun->all_throwers_are_sibcalls = 1;
2937 
2938   if (! flag_exceptions)
2939     return;
2940 
2941   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2942     if (can_throw_external (insn))
2943       {
2944 	current_function_nothrow = 0;
2945 
2946 	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2947 	  {
2948 	    cfun->all_throwers_are_sibcalls = 0;
2949 	    return;
2950 	  }
2951       }
2952 
2953   for (insn = current_function_epilogue_delay_list; insn;
2954        insn = XEXP (insn, 1))
2955     if (can_throw_external (insn))
2956       {
2957 	current_function_nothrow = 0;
2958 
2959 	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2960 	  {
2961 	    cfun->all_throwers_are_sibcalls = 0;
2962 	    return;
2963 	  }
2964       }
2965 }
2966 
2967 
2968 /* Various hooks for unwind library.  */
2969 
2970 /* Do any necessary initialization to access arbitrary stack frames.
2971    On the SPARC, this means flushing the register windows.  */
2972 
2973 void
2974 expand_builtin_unwind_init ()
2975 {
2976   /* Set this so all the registers get saved in our frame; we need to be
2977      able to copy the saved values for any registers from frames we unwind.  */
2978   current_function_has_nonlocal_label = 1;
2979 
2980 #ifdef SETUP_FRAME_ADDRESSES
2981   SETUP_FRAME_ADDRESSES ();
2982 #endif
2983 }
2984 
2985 rtx
2986 expand_builtin_eh_return_data_regno (arglist)
2987      tree arglist;
2988 {
2989   tree which = TREE_VALUE (arglist);
2990   unsigned HOST_WIDE_INT iwhich;
2991 
2992   if (TREE_CODE (which) != INTEGER_CST)
2993     {
2994       error ("argument of `__builtin_eh_return_regno' must be constant");
2995       return constm1_rtx;
2996     }
2997 
2998   iwhich = tree_low_cst (which, 1);
2999   iwhich = EH_RETURN_DATA_REGNO (iwhich);
3000   if (iwhich == INVALID_REGNUM)
3001     return constm1_rtx;
3002 
3003 #ifdef DWARF_FRAME_REGNUM
3004   iwhich = DWARF_FRAME_REGNUM (iwhich);
3005 #else
3006   iwhich = DBX_REGISTER_NUMBER (iwhich);
3007 #endif
3008 
3009   return GEN_INT (iwhich);
3010 }
3011 
3012 /* Given a value extracted from the return address register or stack slot,
3013    return the actual address encoded in that value.  */
3014 
3015 rtx
3016 expand_builtin_extract_return_addr (addr_tree)
3017      tree addr_tree;
3018 {
3019   rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3020 
3021   if (GET_MODE (addr) != Pmode
3022       && GET_MODE (addr) != VOIDmode)
3023     {
3024 #ifdef POINTERS_EXTEND_UNSIGNED
3025       addr = convert_memory_address (Pmode, addr);
3026 #else
3027       addr = convert_to_mode (Pmode, addr, 0);
3028 #endif
3029     }
3030 
3031   /* First mask out any unwanted bits.  */
3032 #ifdef MASK_RETURN_ADDR
3033   expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3034 #endif
3035 
3036   /* Then adjust to find the real return address.  */
3037 #if defined (RETURN_ADDR_OFFSET)
3038   addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3039 #endif
3040 
3041   return addr;
3042 }
3043 
3044 /* Given an actual address in addr_tree, do any necessary encoding
3045    and return the value to be stored in the return address register or
3046    stack slot so the epilogue will return to that address.  */
3047 
3048 rtx
3049 expand_builtin_frob_return_addr (addr_tree)
3050      tree addr_tree;
3051 {
3052   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3053 
3054 #ifdef POINTERS_EXTEND_UNSIGNED
3055   if (GET_MODE (addr) != Pmode)
3056     addr = convert_memory_address (Pmode, addr);
3057 #endif
3058 
3059 #ifdef RETURN_ADDR_OFFSET
3060   addr = force_reg (Pmode, addr);
3061   addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3062 #endif
3063 
3064   return addr;
3065 }
3066 
3067 /* Set up the epilogue with the magic bits we'll need to return to the
3068    exception handler.  */
3069 
3070 void
3071 expand_builtin_eh_return (stackadj_tree, handler_tree)
3072     tree stackadj_tree ATTRIBUTE_UNUSED;
3073     tree handler_tree;
3074 {
3075   rtx tmp;
3076 
3077 #ifdef EH_RETURN_STACKADJ_RTX
3078   tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3079 #ifdef POINTERS_EXTEND_UNSIGNED
3080   if (GET_MODE (tmp) != Pmode)
3081     tmp = convert_memory_address (Pmode, tmp);
3082 #endif
3083   if (!cfun->eh->ehr_stackadj)
3084     cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3085   else if (tmp != cfun->eh->ehr_stackadj)
3086     emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3087 #endif
3088 
3089   tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3090 #ifdef POINTERS_EXTEND_UNSIGNED
3091   if (GET_MODE (tmp) != Pmode)
3092     tmp = convert_memory_address (Pmode, tmp);
3093 #endif
3094   if (!cfun->eh->ehr_handler)
3095     cfun->eh->ehr_handler = copy_to_reg (tmp);
3096   else if (tmp != cfun->eh->ehr_handler)
3097     emit_move_insn (cfun->eh->ehr_handler, tmp);
3098 
3099   if (!cfun->eh->ehr_label)
3100     cfun->eh->ehr_label = gen_label_rtx ();
3101   emit_jump (cfun->eh->ehr_label);
3102 }
3103 
3104 void
3105 expand_eh_return ()
3106 {
3107   rtx around_label;
3108 
3109   if (! cfun->eh->ehr_label)
3110     return;
3111 
3112   current_function_calls_eh_return = 1;
3113 
3114 #ifdef EH_RETURN_STACKADJ_RTX
3115   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3116 #endif
3117 
3118   around_label = gen_label_rtx ();
3119   emit_jump (around_label);
3120 
3121   emit_label (cfun->eh->ehr_label);
3122   clobber_return_register ();
3123 
3124 #ifdef EH_RETURN_STACKADJ_RTX
3125   emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3126 #endif
3127 
3128 #ifdef HAVE_eh_return
3129   if (HAVE_eh_return)
3130     emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3131   else
3132 #endif
3133     {
3134 #ifdef EH_RETURN_HANDLER_RTX
3135       emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3136 #else
3137       error ("__builtin_eh_return not supported on this target");
3138 #endif
3139     }
3140 
3141   emit_label (around_label);
3142 }
3143 
3144 /* In the following functions, we represent entries in the action table
3145    as 1-based indices.  Special cases are:
3146 
3147 	 0:	null action record, non-null landing pad; implies cleanups
3148 	-1:	null action record, null landing pad; implies no action
3149 	-2:	no call-site entry; implies must_not_throw
3150 	-3:	we have yet to process outer regions
3151 
3152    Further, no special cases apply to the "next" field of the record.
3153    For next, 0 means end of list.  */
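/* A worked example (illustrative, not normative): a catch handler with
   filter 1 whose outer regions are all cleanups produces two records --
   { filter 0, next 0 } for the compressed cleanups, then { filter 1,
   next -> the cleanup record } -- and the call-site entry carries the
   catch record's 1-based index.  */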
3154 
3155 struct action_record
3156 {
3157   int offset;	/* 1-based offset of this record in action_record_data.  */
3158   int filter;	/* Type filter value; 0 means a cleanup.  */
3159   int next;	/* 1-based index of the next record, or 0 for end of list.  */
3160 };
3161 
3162 static int
3163 action_record_eq (pentry, pdata)
3164      const PTR pentry;
3165      const PTR pdata;
3166 {
3167   const struct action_record *entry = (const struct action_record *) pentry;
3168   const struct action_record *data = (const struct action_record *) pdata;
3169   return entry->filter == data->filter && entry->next == data->next;
3170 }
3171 
3172 static hashval_t
3173 action_record_hash (pentry)
3174      const PTR pentry;
3175 {
3176   const struct action_record *entry = (const struct action_record *) pentry;
3177   return entry->next * 1009 + entry->filter;
3178 }
3179 
3180 static int
3181 add_action_record (ar_hash, filter, next)
3182      htab_t ar_hash;
3183      int filter, next;
3184 {
3185   struct action_record **slot, *new, tmp;
3186 
3187   tmp.filter = filter;
3188   tmp.next = next;
3189   slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3190 
3191   if ((new = *slot) == NULL)
3192     {
3193       new = (struct action_record *) xmalloc (sizeof (*new));
3194       new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3195       new->filter = filter;
3196       new->next = next;
3197       *slot = new;
3198 
3199       /* The filter value goes in untouched.  The link to the next
3200 	 record is a "self-relative" byte offset, or zero to indicate
3201 	 that there is no next record.  So convert the absolute 1-based
3202 	 indices we've been carrying around into a displacement.  */
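      /* For instance, if this record's link byte lands at position 4
	 and it chains to a record at offset 1, we store the sleb128
	 encoding of 1 - 4 = -3.  */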
3203 
3204       push_sleb128 (&cfun->eh->action_record_data, filter);
3205       if (next)
3206 	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3207       push_sleb128 (&cfun->eh->action_record_data, next);
3208     }
3209 
3210   return new->offset;
3211 }
3212 
3213 static int
3214 collect_one_action_chain (ar_hash, region)
3215      htab_t ar_hash;
3216      struct eh_region *region;
3217 {
3218   struct eh_region *c;
3219   int next;
3220 
3221   /* If we've reached the top of the region chain, then we have
3222      no actions, and require no landing pad.  */
3223   if (region == NULL)
3224     return -1;
3225 
3226   switch (region->type)
3227     {
3228     case ERT_CLEANUP:
3229       /* A cleanup adds a zero filter to the beginning of the chain, but
3230 	 there are special cases to look out for.  If there are *only*
3231 	 cleanups along a path, then it compresses to a zero action.
3232 	 Further, if there are multiple cleanups along a path, we only
3233 	 need to represent one of them, as that is enough to trigger
3234 	 entry to the landing pad at runtime.  */
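      /* In particular, for a cleanup nested directly inside another
	 cleanup, the loop below simply returns the outer chain rather
	 than adding a second zero-filter record.  */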
3235       next = collect_one_action_chain (ar_hash, region->outer);
3236       if (next <= 0)
3237 	return 0;
3238       for (c = region->outer; c ; c = c->outer)
3239 	if (c->type == ERT_CLEANUP)
3240 	  return next;
3241       return add_action_record (ar_hash, 0, next);
3242 
3243     case ERT_TRY:
3244       /* Process the associated catch regions in reverse order.
3245 	 If there's a catch-all handler, then we don't need to
3246 	 search outer regions.  Use a magic -3 value to record
3247 	 that we haven't done the outer search.  */
3248       next = -3;
3249       for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3250 	{
3251 	  if (c->u.catch.type_list == NULL)
3252 	    {
3253 	      /* Retrieve the filter from the head of the filter list
3254 		 where we have stored it (see assign_filter_values).  */
3255 	      int filter
3256 		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3257 
3258 	      next = add_action_record (ar_hash, filter, 0);
3259 	    }
3260 	  else
3261 	    {
3262 	      /* Once the outer search is done, trigger an action record for
3263                  each filter we have.  */
3264 	      tree flt_node;
3265 
3266 	      if (next == -3)
3267 		{
3268 		  next = collect_one_action_chain (ar_hash, region->outer);
3269 
3270 		  /* If there is no next action, terminate the chain.  */
3271 		  if (next == -1)
3272 		    next = 0;
3273 		  /* If all outer actions are cleanups or must_not_throw,
3274 		     we'll have no action record for it, since we had wanted
3275 		     to encode these states in the call-site record directly.
3276 		     Add a cleanup action to the chain to catch these.  */
3277 		  else if (next <= 0)
3278 		    next = add_action_record (ar_hash, 0, 0);
3279 		}
3280 
3281 	      flt_node = c->u.catch.filter_list;
3282 	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3283 		{
3284 		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3285 		  next = add_action_record (ar_hash, filter, next);
3286 		}
3287 	    }
3288 	}
3289       return next;
3290 
3291     case ERT_ALLOWED_EXCEPTIONS:
3292       /* An exception specification adds its filter to the
3293 	 beginning of the chain.  */
3294       next = collect_one_action_chain (ar_hash, region->outer);
3295 
3296       /* If there is no next action, terminate the chain.  */
3297       if (next == -1)
3298 	next = 0;
3299       /* If all outer actions are cleanups or must_not_throw,
3300 	 we'll have no action record for it, since we had wanted
3301 	 to encode these states in the call-site record directly.
3302 	 Add a cleanup action to the chain to catch these.  */
3303       else if (next <= 0)
3304 	next = add_action_record (ar_hash, 0, 0);
3305 
3306       return add_action_record (ar_hash, region->u.allowed.filter, next);
3307 
3308     case ERT_MUST_NOT_THROW:
3309       /* A must-not-throw region with no inner handlers or cleanups
3310 	 requires no call-site entry.  Note that this differs from
3311 	 the no handler or cleanup case in that we do require an lsda
3312 	 to be generated.  Return a magic -2 value to record this.  */
3313       return -2;
3314 
3315     case ERT_CATCH:
3316     case ERT_THROW:
3317       /* CATCH regions are handled in TRY above.  THROW regions are
3318 	 for optimization information only and produce no output.  */
3319       return collect_one_action_chain (ar_hash, region->outer);
3320 
3321     default:
3322       abort ();
3323     }
3324 }
3325 
3326 static int
3327 add_call_site (landing_pad, action)
3328      rtx landing_pad;
3329      int action;
3330 {
3331   struct call_site_record *data = cfun->eh->call_site_data;
3332   int used = cfun->eh->call_site_data_used;
3333   int size = cfun->eh->call_site_data_size;
3334 
3335   if (used >= size)
3336     {
3337       size = (size ? size * 2 : 64);
3338       data = (struct call_site_record *)
3339 	ggc_realloc (data, sizeof (*data) * size);
3340       cfun->eh->call_site_data = data;
3341       cfun->eh->call_site_data_size = size;
3342     }
3343 
3344   data[used].landing_pad = landing_pad;
3345   data[used].action = action;
3346 
3347   cfun->eh->call_site_data_used = used + 1;
3348 
3349   return used + call_site_base;
3350 }
3351 
3352 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3353    The new note numbers will not refer to region numbers, but
3354    instead to call site entries.  */
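/* Schematically, each maximal run of insns that shares one action
   chain and landing pad becomes

     NOTE_INSN_EH_REGION_BEG	(call site N)
       ... insns ...
     NOTE_INSN_EH_REGION_END	(call site N)

   while runs with no action get their notes lazily, once it is clear
   the function will need an lsda at all.  */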
3355 
3356 void
3357 convert_to_eh_region_ranges ()
3358 {
3359   rtx insn, iter, note;
3360   htab_t ar_hash;
3361   int last_action = -3;
3362   rtx last_action_insn = NULL_RTX;
3363   rtx last_landing_pad = NULL_RTX;
3364   rtx first_no_action_insn = NULL_RTX;
3365   int call_site = 0;
3366 
3367   if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3368     return;
3369 
3370   VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3371 
3372   ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3373 
3374   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3375     if (INSN_P (iter))
3376       {
3377 	struct eh_region *region;
3378 	int this_action;
3379 	rtx this_landing_pad;
3380 
3381 	insn = iter;
3382 	if (GET_CODE (insn) == INSN
3383 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
3384 	  insn = XVECEXP (PATTERN (insn), 0, 0);
3385 
3386 	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3387 	if (!note)
3388 	  {
3389 	    if (! (GET_CODE (insn) == CALL_INSN
3390 		   || (flag_non_call_exceptions
3391 		       && may_trap_p (PATTERN (insn)))))
3392 	      continue;
3393 	    this_action = -1;
3394 	    region = NULL;
3395 	  }
3396 	else
3397 	  {
3398 	    if (INTVAL (XEXP (note, 0)) <= 0)
3399 	      continue;
3400 	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3401 	    this_action = collect_one_action_chain (ar_hash, region);
3402 	  }
3403 
3404 	/* The existence of catch handlers or must-not-throw regions
3405 	   implies that an lsda is needed (even if empty).  */
3406 	if (this_action != -1)
3407 	  cfun->uses_eh_lsda = 1;
3408 
3409 	/* Delay creation of region notes for no-action regions
3410 	   until we're sure that an lsda will be required.  */
3411 	else if (last_action == -3)
3412 	  {
3413 	    first_no_action_insn = iter;
3414 	    last_action = -1;
3415 	  }
3416 
3417 	/* Cleanups and handlers may share action chains but not
3418 	   landing pads.  Collect the landing pad for this region.  */
3419 	if (this_action >= 0)
3420 	  {
3421 	    struct eh_region *o;
3422 	    for (o = region; ! o->landing_pad ; o = o->outer)
3423 	      continue;
3424 	    this_landing_pad = o->landing_pad;
3425 	  }
3426 	else
3427 	  this_landing_pad = NULL_RTX;
3428 
3429 	/* Differing actions or landing pads imply a change in call-site
3430 	   info, which implies some EH_REGION note should be emitted.  */
3431 	if (last_action != this_action
3432 	    || last_landing_pad != this_landing_pad)
3433 	  {
3434 	    /* If we'd not seen a previous action (-3) or the previous
3435 	       action was must-not-throw (-2), then we do not need an
3436 	       end note.  */
3437 	    if (last_action >= -1)
3438 	      {
3439 		/* If we delayed the creation of the begin, do it now.  */
3440 		if (first_no_action_insn)
3441 		  {
3442 		    call_site = add_call_site (NULL_RTX, 0);
3443 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3444 					     first_no_action_insn);
3445 		    NOTE_EH_HANDLER (note) = call_site;
3446 		    first_no_action_insn = NULL_RTX;
3447 		  }
3448 
3449 		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3450 					last_action_insn);
3451 		NOTE_EH_HANDLER (note) = call_site;
3452 	      }
3453 
3454 	    /* If the new action is must-not-throw, then no region notes
3455 	       are created.  */
3456 	    if (this_action >= -1)
3457 	      {
3458 		call_site = add_call_site (this_landing_pad,
3459 					   this_action < 0 ? 0 : this_action);
3460 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3461 		NOTE_EH_HANDLER (note) = call_site;
3462 	      }
3463 
3464 	    last_action = this_action;
3465 	    last_landing_pad = this_landing_pad;
3466 	  }
3467 	last_action_insn = iter;
3468       }
3469 
3470   if (last_action >= -1 && ! first_no_action_insn)
3471     {
3472       note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3473       NOTE_EH_HANDLER (note) = call_site;
3474     }
3475 
3476   htab_delete (ar_hash);
3477 }
3478 
3479 
3480 static void
3481 push_uleb128 (data_area, value)
3482      varray_type *data_area;
3483      unsigned int value;
3484 {
3485   do
3486     {
3487       unsigned char byte = value & 0x7f;
3488       value >>= 7;
3489       if (value)
3490 	byte |= 0x80;
3491       VARRAY_PUSH_UCHAR (*data_area, byte);
3492     }
3493   while (value);
3494 }
3495 
3496 static void
3497 push_sleb128 (data_area, value)
3498      varray_type *data_area;
3499      int value;
3500 {
3501   unsigned char byte;
3502   int more;
3503 
3504   do
3505     {
3506       byte = value & 0x7f;
3507       value >>= 7;
3508       more = ! ((value == 0 && (byte & 0x40) == 0)
3509 		|| (value == -1 && (byte & 0x40) != 0));
3510       if (more)
3511 	byte |= 0x80;
3512       VARRAY_PUSH_UCHAR (*data_area, byte);
3513     }
3514   while (more);
3515 }
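/* push_sleb128 stops once the remaining bits are pure sign extension:
   VALUE is 0 with the 0x40 bit of the last byte clear, or -1 with it
   set.  Note that it relies on >> acting as an arithmetic shift on
   negative values, which is implementation-defined in ISO C but
   assumed throughout GCC.  Worked example: -129 encodes as 0xff 0x7e.
   A matching decoder (hypothetical helper, not part of GCC):  */
#if 0
static int
sketch_read_sleb128 (buf, pos)
     const unsigned char *buf;
     int *pos;
{
  int result = 0;
  int shift = 0;
  unsigned char byte;

  do
    {
      byte = buf[(*pos)++];
      result |= (int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  /* Sign-extend from the last bit read if the final byte had its
     0x40 bit set.  Decoding 0xff 0x7e: 0x7f | (0x7e << 7) == 0x3f7f,
     0x40 set in 0x7e, so extend from bit 14: 0x3f7f - 0x4000 == -129.  */
  if (shift < (int) (sizeof (int) * 8) && (byte & 0x40))
    result |= - (1 << shift);

  return result;
}
#endif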
3516 
3517 
3518 #ifndef HAVE_AS_LEB128
3519 static int
3520 dw2_size_of_call_site_table ()
3521 {
3522   int n = cfun->eh->call_site_data_used;
3523   int size = n * (4 + 4 + 4);
3524   int i;
3525 
3526   for (i = 0; i < n; ++i)
3527     {
3528       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3529       size += size_of_uleb128 (cs->action);
3530     }
3531 
3532   return size;
3533 }
3534 
3535 static int
3536 sjlj_size_of_call_site_table ()
3537 {
3538   int n = cfun->eh->call_site_data_used;
3539   int size = 0;
3540   int i;
3541 
3542   for (i = 0; i < n; ++i)
3543     {
3544       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3545       size += size_of_uleb128 (INTVAL (cs->landing_pad));
3546       size += size_of_uleb128 (cs->action);
3547     }
3548 
3549   return size;
3550 }
3551 #endif
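/* size_of_uleb128 is provided by dwarf2asm and behaves, for our
   purposes, like the sketch below: one byte per started 7-bit group.
   Thus each dw2 call-site record costs 12 fixed bytes plus 1-5 bytes
   for the action index (action 0 adds one byte, action 200 adds two),
   while the sjlj variant is fully variable-length.  */
#if 0
static int
sketch_size_of_uleb128 (value)
     unsigned int value;
{
  int size = 0;

  do
    {
      value >>= 7;
      size += 1;
    }
  while (value != 0);

  return size;
}
#endif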
3552 
3553 static void
3554 dw2_output_call_site_table ()
3555 {
3556   const char *const function_start_lab
3557     = IDENTIFIER_POINTER (current_function_func_begin_label);
3558   int n = cfun->eh->call_site_data_used;
3559   int i;
3560 
3561   for (i = 0; i < n; ++i)
3562     {
3563       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3564       char reg_start_lab[32];
3565       char reg_end_lab[32];
3566       char landing_pad_lab[32];
3567 
3568       ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3569       ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3570 
3571       if (cs->landing_pad)
3572 	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3573 				     CODE_LABEL_NUMBER (cs->landing_pad));
3574 
3575       /* ??? Perhaps use insn length scaling if the assembler supports
3576 	 generic arithmetic.  */
3577       /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3578 	 data4 if the function is small enough.  */
3579 #ifdef HAVE_AS_LEB128
3580       dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3581 				    "region %d start", i);
3582       dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3583 				    "length");
3584       if (cs->landing_pad)
3585 	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3586 				      "landing pad");
3587       else
3588 	dw2_asm_output_data_uleb128 (0, "landing pad");
3589 #else
3590       dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3591 			    "region %d start", i);
3592       dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3593       if (cs->landing_pad)
3594 	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3595 			      "landing pad");
3596       else
3597 	dw2_asm_output_data (4, 0, "landing pad");
3598 #endif
3599       dw2_asm_output_data_uleb128 (cs->action, "action");
3600     }
3601 
3602   call_site_base += n;
3603 }
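/* Illustrative output for one record on a typical ELF target with
   HAVE_AS_LEB128 (actual label names and directives vary by target):

	.uleb128 .LEHB0-.LFB1		region 0 start
	.uleb128 .LEHE0-.LEHB0		length
	.uleb128 .L5-.LFB1		landing pad
	.uleb128 0x1			action

   All offsets are relative to the function start label, matching the
   omitted @LPStart (== @Start) in the LSDA header emitted by
   output_function_exception_table below.  */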
3604 
3605 static void
3606 sjlj_output_call_site_table ()
3607 {
3608   int n = cfun->eh->call_site_data_used;
3609   int i;
3610 
3611   for (i = 0; i < n; ++i)
3612     {
3613       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3614 
3615       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3616 				   "region %d landing pad", i);
3617       dw2_asm_output_data_uleb128 (cs->action, "action");
3618     }
3619 
3620   call_site_base += n;
3621 }
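/* With sjlj exceptions there are no code ranges to describe:
   cs->landing_pad is a CONST_INT holding the region's dispatch index
   (hence the INTVAL above) rather than a CODE_LABEL, since control
   always reenters the function at a single dispatch point that
   switches on this index to reach the proper handler.  */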
3622 
3623 /* Tell assembler to switch to the section for the exception handling
3624    table.  */
3625 
3626 void
3627 default_exception_section ()
3628 {
3629   if (targetm.have_named_sections)
3630     {
3631       int flags;
3632 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3633       int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3634 
3635       flags = (! flag_pic
3636 	       || ((tt_format & 0x70) != DW_EH_PE_absptr
3637 		   && (tt_format & 0x70) != DW_EH_PE_aligned))
3638 	      ? 0 : SECTION_WRITE;
3639 #else
3640       flags = SECTION_WRITE;
3641 #endif
3642       named_section_flags (".gcc_except_table", flags);
3643     }
3644   else if (flag_pic)
3645     data_section ();
3646   else
3647     readonly_data_section ();
3648 }
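/* Rationale for the flags above: the exception table may live in a
   read-only section unless we are generating PIC and the @TType
   entries use absolute or aligned pointers, in which case the dynamic
   linker must patch them at load time and the section must be
   writable.  Without HAVE_LD_RO_RW_SECTION_MIXING we conservatively
   always make it writable.  */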
3649 
3650 void
3651 output_function_exception_table ()
3652 {
3653   int tt_format, cs_format, lp_format, i, n;
3654 #ifdef HAVE_AS_LEB128
3655   char ttype_label[32];
3656   char cs_after_size_label[32];
3657   char cs_end_label[32];
3658 #else
3659   int call_site_len;
3660 #endif
3661   int have_tt_data;
3662   int tt_format_size = 0;
3663 
3664   /* Not all functions need anything.  */
3665   if (! cfun->uses_eh_lsda)
3666     return;
3667 
3668 #ifdef IA64_UNWIND_INFO
3669   fputs ("\t.personality\t", asm_out_file);
3670   output_addr_const (asm_out_file, eh_personality_libfunc);
3671   fputs ("\n\t.handlerdata\n", asm_out_file);
3672   /* Note that varasm still thinks we're in the function's code section.
3673      The ".endp" directive that will immediately follow will take us back.  */
3674 #else
3675   (*targetm.asm_out.exception_section) ();
3676 #endif
3677 
3678   have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3679 		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3680 
3681   /* Indicate the format of the @TType entries.  */
3682   if (! have_tt_data)
3683     tt_format = DW_EH_PE_omit;
3684   else
3685     {
3686       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3687 #ifdef HAVE_AS_LEB128
3688       ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3689 				   current_function_funcdef_no);
3690 #endif
3691       tt_format_size = size_of_encoded_value (tt_format);
3692 
3693       assemble_align (tt_format_size * BITS_PER_UNIT);
3694     }
3695 
3696   ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA",
3697 			     current_function_funcdef_no);
3698 
3699   /* The LSDA header.  */
3700 
3701   /* Indicate the format of the landing pad start pointer.  An omitted
3702      field implies @LPStart == @Start.  */
3703   /* Currently we always put @LPStart == @Start.  This field would
3704      be most useful in moving the landing pads completely out of
3705      line to another section, but it could also be used to minimize
3706      the size of uleb128 landing pad offsets.  */
3707   lp_format = DW_EH_PE_omit;
3708   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3709 		       eh_data_format_name (lp_format));
3710 
3711   /* @LPStart pointer would go here.  */
3712 
3713   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3714 		       eh_data_format_name (tt_format));
3715 
3716 #ifndef HAVE_AS_LEB128
3717   if (USING_SJLJ_EXCEPTIONS)
3718     call_site_len = sjlj_size_of_call_site_table ();
3719   else
3720     call_site_len = dw2_size_of_call_site_table ();
3721 #endif
3722 
3723   /* A pc-relative 4-byte displacement to the @TType data.  */
3724   if (have_tt_data)
3725     {
3726 #ifdef HAVE_AS_LEB128
3727       char ttype_after_disp_label[32];
3728       ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3729 				   current_function_funcdef_no);
3730       dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3731 				    "@TType base offset");
3732       ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3733 #else
3734       /* Ugh.  Alignment complicates things.  */
3735       unsigned int before_disp, after_disp, last_disp, disp;
3736 
3737       before_disp = 1 + 1;	/* @LPStart and @TType format bytes.  */
3738       after_disp = (1 + size_of_uleb128 (call_site_len)
3739 		    + call_site_len
3740 		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3741 		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3742 		       * tt_format_size));
3743 
3744       disp = after_disp;
3745       do
3746 	{
3747 	  unsigned int disp_size, pad;
3748 
3749 	  last_disp = disp;
3750 	  disp_size = size_of_uleb128 (disp);
3751 	  pad = before_disp + disp_size + after_disp;
3752 	  if (pad % tt_format_size)
3753 	    pad = tt_format_size - (pad % tt_format_size);
3754 	  else
3755 	    pad = 0;
3756 	  disp = after_disp + pad;
3757 	}
3758       while (disp != last_disp);
3759 
3760       dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3761 #endif
3762     }
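  /* Worked example of the alignment iteration above: with
     tt_format_size == 4, before_disp == 2 and after_disp == 417, the
     first pass finds disp_size == size_of_uleb128 (417) == 2, so
     2 + 2 + 417 == 421, 421 % 4 == 1, pad == 3, and disp becomes 420.
     The second pass sees size_of_uleb128 (420) still equal to 2,
     recomputes the same padding, and terminates with disp ==
     last_disp == 420.  The iteration is necessary because the uleb128
     encoding of DISP can itself change size and thereby change the
     padding.  */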
3763 
3764   /* Indicate the format of the call-site offsets.  */
3765 #ifdef HAVE_AS_LEB128
3766   cs_format = DW_EH_PE_uleb128;
3767 #else
3768   cs_format = DW_EH_PE_udata4;
3769 #endif
3770   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3771 		       eh_data_format_name (cs_format));
3772 
3773 #ifdef HAVE_AS_LEB128
3774   ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3775 			       current_function_funcdef_no);
3776   ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3777 			       current_function_funcdef_no);
3778   dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3779 				"Call-site table length");
3780   ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3781   if (USING_SJLJ_EXCEPTIONS)
3782     sjlj_output_call_site_table ();
3783   else
3784     dw2_output_call_site_table ();
3785   ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3786 #else
3787   dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3788   if (USING_SJLJ_EXCEPTIONS)
3789     sjlj_output_call_site_table ();
3790   else
3791     dw2_output_call_site_table ();
3792 #endif
3793 
3794   /* ??? Decode and interpret the data for flag_debug_asm.  */
3795   n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3796   for (i = 0; i < n; ++i)
3797     dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3798 			 (i ? NULL : "Action record table"));
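  /* Each action record dumped above is a pair of sleb128 values: a
     type filter (positive: an index into the @TType table; negative:
     an index into the exception specification table; zero: a cleanup)
     followed by a self-relative link to the next record in the chain,
     zero meaning end of chain.  A linear dump in the spirit of the ???
     note above (hypothetical, not part of GCC; it reuses
     sketch_read_sleb128 from earlier in this file):  */
#if 0
  {
    unsigned char buf[256];
    int pos, len = n;

    for (pos = 0; pos < len && pos < (int) sizeof buf; ++pos)
      buf[pos] = VARRAY_UCHAR (cfun->eh->action_record_data, pos);

    pos = 0;
    while (pos < len)
      {
	int at = pos;
	int filter = sketch_read_sleb128 (buf, &pos);
	int next = sketch_read_sleb128 (buf, &pos);

	printf ("record %d: filter %d, %s %d\n", at, filter,
		next ? "self-relative link" : "end of chain", next);
      }
  }
#endif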
3799 
3800   if (have_tt_data)
3801     assemble_align (tt_format_size * BITS_PER_UNIT);
3802 
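  /* The @TType table is emitted in reverse: a positive filter value N
     names the Nth entry counting backward from the @TType base (the
     LLSDATT label below when the assembler supports leb128), so the
     filter values assigned when the table was built stay valid no
     matter how many entries it grows to.  */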
3803   i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3804   while (i-- > 0)
3805     {
3806       tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3807       rtx value;
3808 
3809       if (type == NULL_TREE)
3810 	type = integer_zero_node;
3811       else
3812 	type = lookup_type_for_runtime (type);
3813 
3814       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3815       if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3816 	assemble_integer (value, tt_format_size,
3817 			  tt_format_size * BITS_PER_UNIT, 1);
3818       else
3819 	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3820     }
3821 
3822 #ifdef HAVE_AS_LEB128
3823   if (have_tt_data)
3824     ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3825 #endif
3826 
3827   /* ??? Decode and interpret the data for flag_debug_asm.  */
3828   n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3829   for (i = 0; i < n; ++i)
3830     dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3831 			 (i ? NULL : "Exception specification table"));
3832 
3833   function_section (current_function_decl);
3834 }
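/* Sketch of the LSDA laid out by output_function_exception_table, in
   emission order (field sizes depend on the format bytes chosen above):

	.LLSDAnnn:
	  1 byte	@LPStart format (DW_EH_PE_omit here)
	  1 byte	@TType format
	  uleb128	@TType base offset	(only if @TType present)
	  1 byte	call-site format
	  uleb128	call-site table length
	  ...		call-site table
	  ...		action record table
	  (padding to tt_format_size alignment)
	  ...		@TType entries, in reverse
	.LLSDATTnnn:				(only if @TType present)
	  ...		exception specification table  */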
3835 
3836 #include "gt-except.h"
3837