/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
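
/* For illustration only: a language frontend installs these hooks at
   initialization time.  A hypothetical C++-like frontend might do
   (function names invented for this sketch):

     lang_protect_cleanup_actions = my_cxx_protect_cleanup_actions;
     lang_eh_type_covers = my_cxx_eh_type_covers;
     lang_eh_runtime_type = my_cxx_eh_runtime_type;

   so that the language-independent code below can query language
   semantics without linking against any particular frontend.  */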

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
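
/* Illustrative sketch (not part of the compiler proper): for a source
   fragment like

     try { f (); } catch (A) { ... } catch (B) { ... }

   the region tree contains an ERT_TRY region whose u.try.catch field
   points at the first of two ERT_CATCH regions; the catches link to
   each other through u.catch.next_catch/u.catch.prev_catch, and each
   reaches the enclosing try through its outer pointer.  */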

typedef struct eh_region *eh_region;

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);


/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
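
/* Typical usage (a sketch): callers that are about to emit
   exception-specific code guard themselves with

     if (! doing_eh (1))
       return;

   so that the "exception handling disabled" diagnostic is issued at
   most once per compilation.  */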

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
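  /* For reference, the C-level layout being mirrored is roughly the
     following (a sketch; the authoritative definition lives in
     unwind-sjlj.c):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;
         int call_site;
         _Unwind_Word data[4];
         _Unwind_Personality_Fn personality;
         void *lsda;
         ... jbuf, sized per DONT_USE_BUILTIN_SETJMP below ...
       };  */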
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
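
/* Illustrative usage (a sketch of what a caller such as tree-eh.c
   might do; the variable names are invented):

     struct eh_region *try_r = gen_eh_region_try (cfun->eh->cur_region);
     struct eh_region *catch_r = gen_eh_region_catch (try_r, catch_types);

   gen_eh_region_catch appends the new ERT_CATCH region to try_r's
   catch list via last_catch, so repeated calls record the handlers
   in source order.  */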

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
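
/* For illustration: the loop above is a non-recursive preorder walk.
   Given a region tree with root R whose peer-linked children are A and
   B, where A itself contains C, the visitation order is R, A, C, B;
   each region is recorded in region_array before its subtree.  */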

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->label)]);
          uid_region_num[INSN_UID (r->label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && !reachable[i])
        {
          bool kill_it = true;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && reachable[r->outer->region_number])
                kill_it = false;
              break;

            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but their existence continues to affect calls
                 within that region.  Never delete them here.  */
              kill_it = false;
              break;

            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   is reachable.  */
                struct eh_region *c;
                for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
                  if (reachable[c->region_number])
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            remove_eh_handler (r);
        }
    }

  free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (cfun->eh->built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}

/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = ggc_alloc (sizeof (struct eh_region));

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  gcc_assert (!old->aka);

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the
   current function and root the tree below OUTER_REGION.  Remap labels
   using the MAP callback.  The special case of COPY_REGION of 0 means
   all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh->region_tree)
    return 0;

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find the new number from
     the old, which means we must look at the numbers present, instead
     of the count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    min_region = 1, max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;
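
  /* Worked example (illustrative): if the copied subtree uses region
     numbers 5..9 (min_region == 5) and this function already has
     regions 1..10 (cfun_last_region_number == 10), then eh_offset is
     10 + 1 - 5 == 6, so old region N becomes new region N + 6 and
     5..9 map onto 11..15, just past the existing numbers.  */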

  /* If we've not yet created a region array, do so now.  */
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun_last_region_number + 1 + num_regions);
  cfun->eh->last_region_number = max_region + eh_offset;

  /* We may have just allocated the array for the first time.
     Make sure that element zero is null.  */
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  /* Zero all entries in the range allocated.  */
  memset (VEC_address (eh_region, cfun->eh->region_array)
          + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      splice = &outer->inner;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
         prev_try && prev_try->type != ERT_TRY;
         prev_try = prev_try->outer)
      if (prev_try->type == ERT_MUST_NOT_THROW)
        {
          prev_try = NULL;
          break;
        }

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      if (cur == NULL)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.try.catch)
            REMAP (cur->u.try.catch);
          if (cur->u.try.last_catch)
            REMAP (cur->u.try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.catch.next_catch)
            REMAP (cur->u.catch.next_catch);
          if (cur->u.catch.prev_catch)
            REMAP (cur->u.catch.prev_catch);
          break;

        case ERT_CLEANUP:
          if (cur->u.cleanup.prev_try)
            REMAP (cur->u.cleanup.prev_try);
          else
            cur->u.cleanup.prev_try = prev_try;
          break;

        default:
          break;
        }

#undef REMAP
    }

  return eh_offset;
}

/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
        return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}

/* Return the number of a region that is outer to both REGION_A and
   REGION_B in IFUN, or -1 if there is no such region.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
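
/* For illustration: the function above returns the innermost region
   enclosing both arguments.  With a nesting R1 { R2 { R3 } R4 },
   querying R3 against R4 first marks R4 and R1 in B_OUTER, then walks
   outward from R3 (R3, R2, R1) and returns R1, the first region found
   on both outer chains.  */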
1128*404b540aSrobert
1129*404b540aSrobert static int
t2r_eq(const void * pentry,const void * pdata)1130*404b540aSrobert t2r_eq (const void *pentry, const void *pdata)
1131*404b540aSrobert {
1132*404b540aSrobert tree entry = (tree) pentry;
1133*404b540aSrobert tree data = (tree) pdata;
1134*404b540aSrobert
1135*404b540aSrobert return TREE_PURPOSE (entry) == data;
1136*404b540aSrobert }
1137*404b540aSrobert
1138*404b540aSrobert static hashval_t
t2r_hash(const void * pentry)1139*404b540aSrobert t2r_hash (const void *pentry)
1140*404b540aSrobert {
1141*404b540aSrobert tree entry = (tree) pentry;
1142*404b540aSrobert return TREE_HASH (TREE_PURPOSE (entry));
1143*404b540aSrobert }
1144*404b540aSrobert
1145*404b540aSrobert static void
add_type_for_runtime(tree type)1146*404b540aSrobert add_type_for_runtime (tree type)
1147*404b540aSrobert {
1148*404b540aSrobert tree *slot;
1149*404b540aSrobert
1150*404b540aSrobert slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1151*404b540aSrobert TREE_HASH (type), INSERT);
1152*404b540aSrobert if (*slot == NULL)
1153*404b540aSrobert {
1154*404b540aSrobert tree runtime = (*lang_eh_runtime_type) (type);
1155*404b540aSrobert *slot = tree_cons (type, runtime, NULL_TREE);
1156*404b540aSrobert }
1157*404b540aSrobert }
1158*404b540aSrobert
1159*404b540aSrobert static tree
lookup_type_for_runtime(tree type)1160*404b540aSrobert lookup_type_for_runtime (tree type)
1161*404b540aSrobert {
1162*404b540aSrobert tree *slot;
1163*404b540aSrobert
1164*404b540aSrobert slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1165*404b540aSrobert TREE_HASH (type), NO_INSERT);
1166*404b540aSrobert
1167*404b540aSrobert /* We should have always inserted the data earlier. */
1168*404b540aSrobert return TREE_VALUE (*slot);
1169*404b540aSrobert }


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
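
/* Standalone sketch (not part of the build): assuming hashval_t is a
   32-bit unsigned type, the combining step above is exactly a rotate
   left by 5 followed by an add -- the low five bits of (h << 5) are
   zero, so the two halves cannot interfere.  */
#if 0
#include <stdint.h>

static uint32_t
combine_hash (uint32_t h, uint32_t v)
{
  /* Equivalent to (h << 5) + (h >> 27) + v for 32-bit h.  */
  return ((h << 5) | (h >> 27)) + v;
}
#endif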

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Generate a 0-terminated list of filter values.  */
      for (; list; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data,
                            add_ttypes_entry (ttypes_hash,
                                              TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
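
/* Standalone sketch (not part of the build) of the ULEB128 encoding
   that push_uleb128 is assumed to perform: seven payload bits per
   byte, least significant group first, high bit set on every byte
   except the last.  */
#if 0
#include <stddef.h>
#include <stdint.h>

static size_t
encode_uleb128 (uint64_t value, unsigned char *buf)
{
  size_t n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;   /* More bytes follow.  */
      buf[n++] = byte;
    }
  while (value != 0);
  return n;   /* Bytes written; e.g. 624485 encodes as E5 8E 26.  */
}
#endif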

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.catch.filter_list = NULL_TREE;

          if (r->u.catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.catch.type_list;

              for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
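
/* Worked example (illustrative, not compiled): for one ERT_CATCH region
   handling hypothetical types A then B, the calls above behave as:  */
#if 0
  add_ttypes_entry (ttypes, A);   /* returns 1; ttype_data = {A}     */
  add_ttypes_entry (ttypes, B);   /* returns 2; ttype_data = {A, B}  */
  /* filter_list is built by prepending with tree_cons, so the region
     ends up with its filters in reverse order: (2, 1).  */
#endif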

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
             all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.try.catch; c; c = c->u.catch.next_catch)
              {
                if (c->u.catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.catch.type_list;
                    tree flt_node = c->u.catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (cfun->eh->filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.try.catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (cfun->eh->filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0,
                                   region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}
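
/* Illustrative sketch (hypothetical filter values): for a try region
   with "catch (TypeA)", "catch (TypeB)" and a final catch-all, the
   compare/jump chain emitted above behaves like this C dispatch over
   the filter value the landing pad loads into cfun->eh->filter.  */
#if 0
static void
post_landing_pad_dispatch_sketch (int filter)
{
  if (filter == 1)
    { /* handler for TypeA */ return; }
  if (filter == 2)
    { /* handler for TypeB */ return; }
  /* A catch-all is an unconditional jump; without one, control falls
     through to the RESX marker and the exception is resumed in the
     next enclosing region (see connect_post_landing_pads).  */
}
#endif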

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}


static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
      if (HAVE_nonlocal_goto_receiver)
        emit_insn (gen_nonlocal_goto_receiver ());
      else
#endif
        { /* Nothing */ }

      emit_move_insn (cfun->eh->exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}


struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array,
                          INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}
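
/* Standalone sketch (not part of the build) of the action-index to
   call-site-index mapping applied above; TABLE_INDEX stands for the
   value add_call_site would return.  */
#if 0
static int
call_site_index_sketch (int action_index, int table_index)
{
  if (action_index == -2)
    return 0;           /* Must-not-throw: reserved index 0.   */
  if (action_index == -1)
    return -1;          /* No action: reserved index -1.       */
  return table_index;   /* Real entry in the call-site table.  */
}
#endif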

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          if (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))))
            this_call_site = -1;
          else
            continue;
        }
      else
        {
          /* Calls that are known to not throw need not be marked.  */
          if (INTVAL (XEXP (note, 0)) <= 0)
            continue;

          region = VEC_index (eh_region, cfun->eh->region_array,
                              INTVAL (XEXP (note, 0)));
          this_call_site = lp_info[region->region_number].call_site_index;
        }

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
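
/* Illustrative sketch (hypothetical names, not compiled): the
   instrumentation emitted above gives each potentially-throwing call
   the shape below, with the store elided when the previous call in
   the same extended basic block already wrote the same index.  */
#if 0
  fc.call_site = this_call_site;   /* store into the function context  */
  result = foo (x);                /* the call that may throw          */
#endif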

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                 TYPE_MODE (integer_type_node), 1,
                                 plus_constant (XEXP (fc, 0),
                                                sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                             TYPE_MODE (integer_type_node), 0,
                             dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
                               dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
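
/* Rough shape of the runtime record filled in above, inferred from the
   sjlj_fc_*_ofs offsets; the authoritative layout lives in the unwinder
   (unwind-sjlj.c), and the field names and types here are assumptions.  */
#if 0
struct sjlj_function_context_sketch
{
  void *prev;         /* Chain link maintained by _Unwind_SjLj_Register.   */
  int call_site;      /* Index stored by sjlj_mark_call_sites.             */
  void *data[4];      /* Slots for the exception pointer and filter.       */
  void *personality;  /* eh_personality_libfunc address stored above.      */
  void *lsda;         /* The LLSDA label, or zero.                         */
  void *jbuf[5];      /* setjmp-style buffer; real size is target-defined. */
};
#endif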

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
         libcall is inside the last basic block or after it.  In the
         latter case we need to emit the sequence onto the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
        {
          if (insn == cfun->eh->sjlj_exit_after)
            {
              if (LABEL_P (insn))
                insn = NEXT_INSN (insn);
              emit_insn_after (seq, insn);
              return;
            }
          if (insn == BB_END (e->src))
            break;
        }
      insert_insn_on_edge (seq, e);
    }
}

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
                        sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
        continue;

      if (! first_reachable)
        {
          first_reachable = i;
          continue;
        }

      emit_cmp_and_jump_insns
        (dispatch, GEN_INT (lp_info[i].dispatch_index),
         EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
         ((struct eh_region *) VEC_index (eh_region,
                                          cfun->eh->region_array, i))
           ->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = ((struct eh_region *) VEC_index (eh_region,
                                            cfun->eh->region_array,
                                            first_reachable))
             ->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
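
/* Illustrative sketch (hypothetical indices, not compiled): the
   compare/jump chain emitted above dispatches on the call-site value
   reloaded from the function context, with the first reachable region
   as the fall-through target -- morally the switch statement the ???
   note above asks for.  */
#if 0
static void
sjlj_dispatch_sketch (int dispatch)
{
  switch (dispatch)
    {
    case 0:  /* goto post-landing pad of the region with index 0 */ break;
    case 1:  /* goto post-landing pad of the region with index 1 */ break;
    default: /* fall through to first_reachable's post-landing pad */ break;
    }
}
#endif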

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->flags & EDGE_EH)
            {
              remove_edge (e);
              eh = true;
            }
          else
            ei_next (&ei);
        }
      if (eh)
        rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}

static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
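
/* Standalone sketch (not part of the build): Knuth-style multiplicative
   hashing as used above.  Multiplying by floor (2^32 * (sqrt(5) - 1) / 2)
   = 0x9e3779b9 spreads consecutive label numbers across the full 32-bit
   hash range.  */
#if 0
#include <stdint.h>

static uint32_t
golden_ratio_hash (uint32_t key)
{
  /* hash (n) and hash (n + 1) differ in many high bits.  */
  return key * 0x9e3779b9u;
}
#endif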

static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}

/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}

/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
               outer);
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
        {
          VEC_replace (eh_region, cfun->eh->region_array, i, outer);
        }
    }

  if (outer)
    {
      if (!outer->aka)
        outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
        bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
        p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }
}

/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}

/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
                 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}

/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region)
        (*callback) (region);
    }
}

/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  bool saw_any_handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}

/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler
   individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
                       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  info->saw_any_handlers = true;

  if (cfun->eh->built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}
2398*404b540aSrobert
2399*404b540aSrobert /* Process one level of exception regions for reachability.
2400*404b540aSrobert If TYPE_THROWN is non-null, then it is the *exact* type being
2401*404b540aSrobert propagated. If INFO is non-null, then collect handler labels
2402*404b540aSrobert and caught/allowed type information between invocations. */
2403*404b540aSrobert
2404*404b540aSrobert static enum reachable_code
reachable_next_level(struct eh_region * region,tree type_thrown,struct reachable_info * info)2405*404b540aSrobert reachable_next_level (struct eh_region *region, tree type_thrown,
2406*404b540aSrobert struct reachable_info *info)
2407*404b540aSrobert {
2408*404b540aSrobert switch (region->type)
2409*404b540aSrobert {
2410*404b540aSrobert case ERT_CLEANUP:
2411*404b540aSrobert /* Before landing-pad generation, we model control flow
2412*404b540aSrobert directly to the individual handlers. In this way we can
2413*404b540aSrobert see that catch handler types may shadow one another. */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
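      /* This region models an exception specification, e.g. a C++
         "throw (A, B)" clause: only the listed types may propagate
         through it.  */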
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previously, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.

         Before inlining, do not perform this optimization.  We may
         inline a subroutine that contains handlers, and that will
         change the value of saw_any_handlers.  */

      if ((info && info->saw_any_handlers) || !cfun->after_inlining)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}

/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
                           void (*callback) (struct eh_region *, void *),
                           void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
        break;
      /* If we have processed one cleanup, there is no point in
         processing any more of them.  Each cleanup will have an edge
         to the next outer cleanup region, so the flow graph will be
         accurate.  */
      if (region->type == ERT_CLEANUP)
        region = region->u.cleanup.prev_try;
      else
        region = region->outer;
    }
}

/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}

rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx,
                             (cfun->eh->built_landing_pads
                              ? arh_to_landing_pad
                              : arh_to_label),
                             &handlers);

  return handlers;
}

/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}

bool
can_throw_internal (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
}

/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}

bool
can_throw_external (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume they might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
}

/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */

unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (DECL_REPLACEABLE_P (current_function_decl))
    return 0;

  TREE_NOTHROW (current_function_decl) = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return 0;
          }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return 0;
          }
      }
  return 0;
}

struct tree_opt_pass pass_set_nothrow_function_flags =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};


/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}

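/* Expand __builtin_eh_return_data_regno: map the index of an EH
   return data register to the DWARF column number the unwinder
   expects for it.  The argument must be a compile-time constant.  */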
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}

/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}

/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}

/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
                          tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}

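/* Emit the epilogue-time dispatch to the exception handler recorded
   by expand_builtin_eh_return above, if one was recorded for this
   function.  */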
void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}

/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}

/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

         0:  null action record, non-null landing pad; implies cleanups
        -1:  null action record, null landing pad; implies no action
        -2:  no call-site entry; implies must_not_throw
        -3:  we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

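/* htab_t equality callback for the action record hash table: two
   records are interchangeable iff their filter and next fields match;
   the offset is derived from insertion order, not compared.  */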
static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

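/* The matching htab_t hash callback, mixing the same two fields that
   action_record_eq compares (1009 is just a convenient prime).  */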
static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1-based
         indices we've been carrying around into a displacement.  */
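      /* Worked example (assuming one-byte sleb128 encodings): the
         first record created occupies bytes 1 and 2 (its filter, then
         a next of 0).  A second record chaining to it (next == 1)
         pushes its filter at byte 3; its next field lands at byte 4,
         so it stores 1 - 4 = -3, the displacement from the next
         field's own position back to the target record.  */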

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}

static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              int filter
                = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

              next = add_action_record (ar_hash, filter, 0);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;
                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
        next = 0;
      /* If all outer actions are cleanups or must_not_throw,
         we'll have no action record for it, since we had wanted
         to encode these states in the call-site record directly.
         Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
        next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}

static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}

/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return 0;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (CALL_P (insn)
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
            region = NULL;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = VEC_index (eh_region, cfun->eh->region_array,
                                INTVAL (XEXP (note, 0)));
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
  return 0;
}

struct tree_opt_pass pass_convert_to_eh_region_ranges =
{
  "eh-ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};


static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}
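
/* For example, the DWARF specification's sample value 624485 (0x98765)
   is pushed as the three bytes 0xe5 0x8e 0x26: seven low-order bits
   per byte, least significant group first, with the high bit of each
   byte marking a continuation.  */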

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
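
/* The signed variant stops once the remaining value is all sign bits
   and bit 0x40 of the final byte matches the sign: e.g. the DWARF
   specification's sample value -624485 comes out as the bytes
   0x9b 0xf1 0x59.  */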

#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif

static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
                                    current_function_func_begin_label,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab,
                                      current_function_func_begin_label,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
                            current_function_func_begin_label,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab,
                              current_function_func_begin_label,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

#ifndef TARGET_UNWIND_INFO
/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (void)
{
  if (exception_section == 0)
    {
      if (targetm.have_named_sections)
        {
          int flags;

          if (EH_TABLES_CAN_BE_READ_ONLY)
            {
              int tt_format =
                ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
              flags = ((! flag_pic
                        || ((tt_format & 0x70) != DW_EH_PE_absptr
                            && (tt_format & 0x70) != DW_EH_PE_aligned))
                       ? 0 : SECTION_WRITE);
            }
          else
            flags = SECTION_WRITE;
          exception_section = get_section (".gcc_except_table", flags, NULL);
        }
      else
        exception_section = flag_pic ? data_section : readonly_data_section;
    }
  switch_to_section (exception_section);
}
#endif


/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct cgraph_varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
         paths below go through assemble_integer, which would take
         care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              node = cgraph_varpool_node (type);
              if (node)
                cgraph_varpool_mark_needed_node (node);
              public = TREE_PUBLIC (type);
            }
        }
      else
        gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
                      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
}

void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section ();
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
                                  current_function_funcdef_no);

  /* The LSDA header.  */

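  /* Schematically, the data emitted from here on is:

        @LPStart format         (1 byte; DW_EH_PE_omit below)
        @TType format           (1 byte)
        @TType base offset      (uleb128, only if @TType data is present)
        call-site format        (1 byte)
        call-site table length  (uleb128)
        call-site table
        action table
        @TType table

     per the Itanium C++ ABI LSDA layout.  */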
3666*404b540aSrobert /* Indicate the format of the landing pad start pointer. An omitted
3667*404b540aSrobert field implies @LPStart == @Start. */
3668*404b540aSrobert /* Currently we always put @LPStart == @Start. This field would
3669*404b540aSrobert be most useful in moving the landing pads completely out of
3670*404b540aSrobert line to another section, but it could also be used to minimize
3671*404b540aSrobert the size of uleb128 landing pad offsets. */
3672*404b540aSrobert lp_format = DW_EH_PE_omit;
3673*404b540aSrobert dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3674*404b540aSrobert eh_data_format_name (lp_format));
3675*404b540aSrobert
3676*404b540aSrobert /* @LPStart pointer would go here. */
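
  /* For reference (a sketch, not from the original source), the LSDA
     emitted by the remainder of this function has the overall shape:

         .byte    @LPStart format        (DW_EH_PE_omit here)
         [@LPStart pointer, omitted]
         .byte    @TType format
         [.uleb128 @TType base offset]   (only if have_tt_data)
         .byte    call-site format
         .uleb128 call-site table length
         <call-site table>
         <action record table>
         <@TType table, aligned, indexed backwards from its end>

     The exact encodings are the tt_format/cs_format values chosen in
     this function.  */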

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A uleb128 displacement, measured from the end of this field, to the
     base of the @TType data (the label that follows the table, from
     which entries are indexed backwards).  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Alignment complicates the computation: the number of bytes in
         the uleb128 displacement depends on the padding needed to align
         the @TType table, and that padding depends on the total header
         size, which includes the displacement itself.  Iterate until
         the value is stable.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VEC_length (tree, cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }
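
  /* Worked example of the iteration above (illustrative numbers, not
     from the original source): with call_site_len == 130, a 10-byte
     action table, and three 4-byte @TType entries (tt_format_size == 4):

         after_disp = 1 + size_of_uleb128 (130) + 130 + 10 + 12
                    = 1 + 2 + 130 + 10 + 12 = 155.

     First pass: size_of_uleb128 (155) == 2, so 2 + 2 + 155 = 159 bytes
     precede the @TType table; 159 % 4 == 3, so pad == 1 and disp becomes
     156.  Second pass: size_of_uleb128 (156) is still 2, so disp stays
     156 and the loop terminates.  Another pass would be needed only if
     the padding pushed disp across a uleb128 size boundary.  */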

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif
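
  /* (Illustrative note, not in the original: with assembler LEB128
     support the table length is emitted as a label difference, e.g.
     ".uleb128 .LLSDACSE1-.LLSDACSB1", and the assembler sizes it;
     without that support the length must be known up front, which is
     why call_site_len was precomputed above from fixed-size udata4
     entries.)  */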

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

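  /* (Illustrative note, not in the original: the @TType entries are
     emitted in reverse so that a positive type filter N found in an
     action record selects the Nth entry counting backwards from the
     label at the end of the table.)  */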
  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
        {
          tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
          output_ttype (type, tt_format, tt_format_size);
        }
      else
        dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                             (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */
void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
                                           "allowed_exceptions", "must_not_throw",
                                           "throw"};

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
               i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
        {
          fprintf (out, " tree_label:");
          print_generic_expr (out, i->tree_label, 0);
        }
      fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
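
/* Illustrative sketch (not part of the original source): the loop in
   dump_eh_tree above, and the one in verify_eh_tree below, are the same
   nonrecursive pre-order walk over the inner/next_peer/outer links.  In
   isolation the pattern is:

       for (i = root; i != NULL; )
         {
           visit (i);
           if (i->inner)
             i = i->inner;
           else if (i->next_peer)
             i = i->next_peer;
           else
             {
               do
                 i = i->outer;
               while (i && i->next_peer == NULL);
               if (i)
                 i = i->next_peer;
             }
         }

   where visit stands for whatever is done per region.  */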

/* Verify some basic invariants on EH datastructures.  Could be extended to
   catch more.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  i = fun->eh->region_tree;
  if (! i)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, cfun->eh->region_array, j)))
      {
        count++;
        if (i->region_number != j)
          {
            error ("region_array is corrupted for region %i", i->region_number);
            err = true;
          }
      }

  /* Restart the walk at the root of the region tree; the array scan
     above reused I as a scratch variable, so it no longer points at
     the root (and may even be NULL).  */
  i = fun->eh->region_tree;

  while (1)
    {
      if (VEC_index (eh_region, cfun->eh->region_array, i->region_number) != i)
        {
          error ("region_array is corrupted for region %i", i->region_number);
          err = true;
        }
      if (i->outer != outer)
        {
          error ("outer block of region %i is wrong", i->region_number);
          err = true;
        }
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
        {
          error ("region %i may contain throw and is contained in region that may not",
                 i->region_number);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", i->region_number);
          err = true;
        }
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
        outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              {
                if (depth != -1)
                  {
                    error ("tree list ends on depth %i", depth + 1);
                    err = true;
                  }
                if (count != nvisited)
                  {
                    error ("array does not match the region tree");
                    err = true;
                  }
                if (err)
                  {
                    dump_eh_tree (stderr, fun);
                    internal_error ("verify_eh_tree failed");
                  }
                return;
              }
            outer = i->outer;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
                      : "_Unwind_Resume");
}
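
/* (Illustrative note, not in the original: _Unwind_Resume is the
   routine defined by the Itanium C++ ABI's unwinding interface, and
   provided by libgcc, that resumes propagation of a pending exception
   after a cleanup has run; _Unwind_SjLj_Resume is its setjmp/longjmp
   counterpart.)  */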


static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}

struct tree_opt_pass pass_rtl_eh =
{
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'h'                                   /* letter */
};

#include "gt-except.h"