; xref: /llvm-project/llvm/test/CodeGen/AArch64/seh-finally.ll (revision c65b4d64d4b09795fe237b62a4226121c5b13248)
; RUN: llc -mtriple arm64-windows -o - %s | FileCheck %s

; struct S { int x; };
; void foo(int n);
; void foo(struct S o);
; void simple_seh() {
;   struct S o;
;
;   __try { foo(o.x); }
;   __finally { foo(o.x); }
; }
; void stack_realign() {
;   struct S __declspec(align(32)) o;
;
;   __try { foo(o.x); }
;   __finally { foo(o.x); }
; }
; void vla_present(int n) {
;   int vla[n];
;
;   __try { foo(n); }
;   __finally { foo(n); }
; }
; void vla_and_realign(int n) {
;   struct S __declspec(align(32)) o;
;   int vla[n];
;
;   __try { foo(o.x); }
;   __finally { foo(o.x); }
; }

; Single-field aggregate used by the realignment tests below.
%struct.S = type { i32 }

; Test simple SEH (__try/__finally).
define void @simple_seh() #0 personality ptr @__C_specific_handler {
entry:
; CHECK-LABEL: simple_seh
; CHECK: add     x29, sp, #16
; CHECK: mov     x0, #-2
; CHECK: stur    x0, [x29, #16]
; CHECK: .set .Lsimple_seh$frame_escape_0, -8
; CHECK: ldur    w0, [x29, #-8]
; CHECK: bl      foo

  ; Escape %o so the __finally funclet can address it via llvm.localrecover.
  %o = alloca %struct.S, align 8
  call void (...) @llvm.localescape(ptr %o)
  %0 = load i32, ptr %o, align 4
  invoke void @foo(i32 %0) #5
          to label %invoke.cont unwind label %ehcleanup

invoke.cont:                                      ; preds = %entry
  ; Normal path: run the __finally body (abnormal_termination = 0).
  %1 = call ptr @llvm.localaddress()
  call void @fin_simple_seh(i8 0, ptr %1)
  ret void

ehcleanup:                                        ; preds = %entry
  ; Unwind path: run the __finally body (abnormal_termination = 1).
  %2 = cleanuppad within none []
  %3 = call ptr @llvm.localaddress()
  call void @fin_simple_seh(i8 1, ptr %3) [ "funclet"(token %2) ]
  cleanupret from %2 unwind to caller
}

; __finally helper for simple_seh; %frame_pointer is the parent frame.
define void @fin_simple_seh(i8 %abnormal_termination, ptr %frame_pointer) {
entry:
; CHECK-LABEL: fin_simple_seh
; CHECK: movz    x8, #:abs_g1_s:.Lsimple_seh$frame_escape_0
; CHECK: movk    x8, #:abs_g0_nc:.Lsimple_seh$frame_escape_0
; CHECK: strb    w0, [sp, #15]
; CHECK: ldr     w8, [x1, x8]
; CHECK: bl      foo

  %frame_pointer.addr = alloca ptr, align 8
  %abnormal_termination.addr = alloca i8, align 1
  ; Recover the escaped local %o from simple_seh's frame.
  %0 = call ptr @llvm.localrecover(ptr @simple_seh, ptr %frame_pointer, i32 0)
  store ptr %frame_pointer, ptr %frame_pointer.addr, align 8
  store i8 %abnormal_termination, ptr %abnormal_termination.addr, align 1
  %1 = load i32, ptr %0, align 4
  call void @foo(i32 %1)
  ret void
}

; Test SEH when stack realignment is needed because highly aligned stack objects are present.
define void @stack_realign() #0 personality ptr @__C_specific_handler {
entry:
; CHECK-LABEL: stack_realign
; CHECK: add     x29, sp, #8
; CHECK: sub     x9, sp, #16
; CHECK: and     sp, x9, #0xffffffffffffffe0
; CHECK: mov     x19, sp
; CHECK: mov     x0, #-2
; CHECK: stur    x0, [x29, #24]
; CHECK: .set .Lstack_realign$frame_escape_0, 0
; CHECK: ldr     w0, [x19]
; CHECK: bl      foo

  ; 32-byte-aligned object forces stack realignment; escape it for the funclet.
  %o = alloca %struct.S, align 32
  call void (...) @llvm.localescape(ptr %o)
  %0 = load i32, ptr %o, align 32
  invoke void @foo(i32 %0) #5
          to label %invoke.cont unwind label %ehcleanup

invoke.cont:                                      ; preds = %entry
  %1 = call ptr @llvm.localaddress()
  call void @fin_stack_realign(i8 0, ptr %1)
  ret void

ehcleanup:                                        ; preds = %entry
  %2 = cleanuppad within none []
  %3 = call ptr @llvm.localaddress()
  call void @fin_stack_realign(i8 1, ptr %3) [ "funclet"(token %2) ]
  cleanupret from %2 unwind to caller
}

; __finally helper for stack_realign; %frame_pointer is the parent frame.
define void @fin_stack_realign(i8 %abnormal_termination, ptr %frame_pointer) {
entry:
; CHECK-LABEL: fin_stack_realign
; CHECK: movz    x8, #:abs_g1_s:.Lstack_realign$frame_escape_0
; CHECK: movk    x8, #:abs_g0_nc:.Lstack_realign$frame_escape_0
; CHECK: strb    w0, [sp, #15]
; CHECK: ldr     w8, [x1, x8]
; CHECK: bl      foo

  %frame_pointer.addr = alloca ptr, align 8
  %abnormal_termination.addr = alloca i8, align 1
  ; Recover the escaped 32-byte-aligned local from stack_realign's frame.
  %0 = call ptr @llvm.localrecover(ptr @stack_realign, ptr %frame_pointer, i32 0)
  store ptr %frame_pointer, ptr %frame_pointer.addr, align 8
  store i8 %abnormal_termination, ptr %abnormal_termination.addr, align 1
  %1 = load i32, ptr %0, align 32
  call void @foo(i32 %1)
  ret void
}

; Test SEH when variable-size objects are present on the stack. Note: escaped VLAs are currently not supported by SEH.
define void @vla_present(i32 %n) #0 personality ptr @__C_specific_handler {
entry:
; CHECK-LABEL: vla_present
; CHECK: add     x29, sp, #32
; CHECK: mov     x1, #-2
; CHECK: stur    x1, [x29, #16]
; CHECK: .set .Lvla_present$frame_escape_0, -4
; CHECK: stur    w0, [x29, #-4]
; CHECK: ldur    w8, [x29, #-4]
; CHECK: mov     x9, sp
; CHECK: stur    x9, [x29, #-16]
; CHECK: stur    x8, [x29, #-24]
; CHECK: ldur    w0, [x29, #-4]
; CHECK: bl      foo

  ; Only the fixed-size %n.addr is escaped; the VLA itself is not.
  %n.addr = alloca i32, align 4
  %saved_stack = alloca ptr, align 8
  %__vla_expr0 = alloca i64, align 8
  call void (...) @llvm.localescape(ptr %n.addr)
  store i32 %n, ptr %n.addr, align 4
  %0 = load i32, ptr %n.addr, align 4
  %1 = zext i32 %0 to i64
  ; Save SP before the dynamic alloca so it can be restored on the normal path.
  %2 = call ptr @llvm.stacksave()
  store ptr %2, ptr %saved_stack, align 8
  %vla = alloca i32, i64 %1, align 4
  store i64 %1, ptr %__vla_expr0, align 8
  %3 = load i32, ptr %n.addr, align 4
  invoke void @foo(i32 %3) #5
          to label %invoke.cont unwind label %ehcleanup

invoke.cont:                                      ; preds = %entry
  %4 = call ptr @llvm.localaddress()
  call void @fin_vla_present(i8 0, ptr %4)
  %5 = load ptr, ptr %saved_stack, align 8
  call void @llvm.stackrestore(ptr %5)
  ret void

ehcleanup:                                        ; preds = %entry
  %6 = cleanuppad within none []
  %7 = call ptr @llvm.localaddress()
  call void @fin_vla_present(i8 1, ptr %7) [ "funclet"(token %6) ]
  cleanupret from %6 unwind to caller
}

; __finally helper for vla_present; %frame_pointer is the parent frame.
define void @fin_vla_present(i8 %abnormal_termination, ptr %frame_pointer) {
entry:
; CHECK-LABEL: fin_vla_present
; CHECK: movz    x8, #:abs_g1_s:.Lvla_present$frame_escape_0
; CHECK: movk    x8, #:abs_g0_nc:.Lvla_present$frame_escape_0
; CHECK: strb    w0, [sp, #15]
; CHECK: ldr     w8, [x1, x8]
; CHECK: bl      foo

  %frame_pointer.addr = alloca ptr, align 8
  %abnormal_termination.addr = alloca i8, align 1
  ; Recover the escaped %n.addr from vla_present's frame.
  %0 = call ptr @llvm.localrecover(ptr @vla_present, ptr %frame_pointer, i32 0)
  store ptr %frame_pointer, ptr %frame_pointer.addr, align 8
  store i8 %abnormal_termination, ptr %abnormal_termination.addr, align 1
  %1 = load i32, ptr %0, align 4
  call void @foo(i32 %1)
  ret void
}

; Test when both VLAs and highly aligned objects are present on the stack.
define void @vla_and_realign(i32 %n) #0 personality ptr @__C_specific_handler {
entry:
; CHECK-LABEL: vla_and_realign
; CHECK: add     x29, sp, #8
; CHECK: sub     x9, sp, #48
; CHECK: and     sp, x9, #0xffffffffffffffe0
; CHECK: mov     x19, sp
; CHECK: mov     x1, #-2
; CHECK: stur    x1, [x29, #24]
; CHECK: .set .Lvla_and_realign$frame_escape_0, 32
; CHECK: str     w0, [x29, #36]
; CHECK: ldr     w8, [x29, #36]
; CHECK: mov     x9, sp
; CHECK: str     x9, [x29, #16]
; CHECK: str     x8, [x19, #24]
; CHECK: ldr     w0, [x19, #32]
; CHECK: bl      foo

  ; 32-byte-aligned %o forces realignment; the VLA forces a saved/restored SP.
  %n.addr = alloca i32, align 4
  %o = alloca %struct.S, align 32
  %saved_stack = alloca ptr, align 8
  %__vla_expr0 = alloca i64, align 8
  call void (...) @llvm.localescape(ptr %o)
  store i32 %n, ptr %n.addr, align 4
  %0 = load i32, ptr %n.addr, align 4
  %1 = zext i32 %0 to i64
  %2 = call ptr @llvm.stacksave()
  store ptr %2, ptr %saved_stack, align 8
  %vla = alloca i32, i64 %1, align 4
  store i64 %1, ptr %__vla_expr0, align 8
  %3 = load i32, ptr %o, align 32
  invoke void @foo(i32 %3) #5
          to label %invoke.cont unwind label %ehcleanup

invoke.cont:                                      ; preds = %entry
  %4 = call ptr @llvm.localaddress()
  call void @fin_vla_and_realign(i8 0, ptr %4)
  %5 = load ptr, ptr %saved_stack, align 8
  call void @llvm.stackrestore(ptr %5)
  ret void

ehcleanup:                                        ; preds = %entry
  %6 = cleanuppad within none []
  %7 = call ptr @llvm.localaddress()
  call void @fin_vla_and_realign(i8 1, ptr %7) [ "funclet"(token %6) ]
  cleanupret from %6 unwind to caller
}

; __finally helper for vla_and_realign; %frame_pointer is the parent frame.
define void @fin_vla_and_realign(i8 %abnormal_termination, ptr %frame_pointer) {
entry:
; CHECK-LABEL: fin_vla_and_realign
; CHECK: movz    x8, #:abs_g1_s:.Lvla_and_realign$frame_escape_0
; CHECK: movk    x8, #:abs_g0_nc:.Lvla_and_realign$frame_escape_0
; CHECK: strb    w0, [sp, #15]
; CHECK: ldr     w8, [x1, x8]
; CHECK: bl      foo

  %frame_pointer.addr = alloca ptr, align 8
  %abnormal_termination.addr = alloca i8, align 1
  ; Recover the escaped %o from vla_and_realign's frame.
  %0 = call ptr @llvm.localrecover(ptr @vla_and_realign, ptr %frame_pointer, i32 0)
  store ptr %frame_pointer, ptr %frame_pointer.addr, align 8
  store i8 %abnormal_termination, ptr %abnormal_termination.addr, align 1
  %1 = load i32, ptr %0, align 32
  call void @foo(i32 %1)
  ret void
}

declare void @foo(i32)
declare void @llvm.stackrestore(ptr)
declare ptr @llvm.stacksave()
declare ptr @llvm.localrecover(ptr, ptr, i32)
declare ptr @llvm.localaddress()
declare void @llvm.localescape(...)
declare i32 @__C_specific_handler(...)

; NOTE(review): the invokes above reference attribute group #5, which is not
; defined in this chunk — confirm it exists in the full source file.
attributes #0 = { noinline optnone }
