; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
; RUN: llc -o - -mtriple=arm64e-apple-macosx %s | FileCheck %s

target datalayout = "e-m:o-i64:64-i128:128-n32:64-S128"

; x16 is not available, so shrink-wrapping cannot happen because
; StoreSwiftAsyncContext needs it.
define swifttailcc void @test_async_with_jumptable_x16_clobbered(ptr %src, ptr swiftasync %as) #0 {
; CHECK-LABEL: test_async_with_jumptable_x16_clobbered:
; CHECK:       ; %bb.0: ; %entry
; CHECK-NEXT:    orr x29, x29, #0x1000000000000000
; CHECK-NEXT:    sub sp, sp, #32
; CHECK-NEXT:    stp x29, x30, [sp, #16] ; 16-byte Folded Spill
; CHECK-NEXT:    add x16, sp, #8
; CHECK-NEXT:    movk x16, #49946, lsl #48
; CHECK-NEXT:    mov x17, x22
; CHECK-NEXT:    pacdb x17, x16
; CHECK-NEXT:    str x17, [sp, #8]
; CHECK-NEXT:    add x29, sp, #16
; CHECK-NEXT:    .cfi_def_cfa w29, 16
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    mov x20, x22
; CHECK-NEXT:    mov x22, x0
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ldr x8, [x0]
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    cbnz x8, LBB0_2
; CHECK-NEXT:  ; %bb.1: ; %then.1
; CHECK-NEXT:    str xzr, [x22]
; CHECK-NEXT:    mov x0, x22
; CHECK-NEXT:  LBB0_2: ; %exit
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    bl _foo
; CHECK-NEXT:    mov x1, x0
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
; CHECK-NEXT:    and x29, x29, #0xefffffffffffffff
; CHECK-NEXT:    add sp, sp, #32
; CHECK-NEXT:    br x1
entry:
  %x16 = tail call i64 asm "", "={x16}"()
  %l = load i64, ptr %src, align 8
  %c = icmp eq i64 %l, 0
  br i1 %c, label %then.1, label %exit

then.1:
  store i64 0, ptr %src
  br label %exit

exit:
  %p = phi ptr [ %src, %then.1 ], [ %as, %entry ]
  tail call void asm sideeffect "", "{x16}"(i64 %x16)
  %r = call i64 @foo(ptr %p)
  %fn = inttoptr i64 %r to ptr
  musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %as)
  ret void
}

; x17 is not available, so shrink-wrapping cannot happen because
; StoreSwiftAsyncContext needs it.
define swifttailcc void @test_async_with_jumptable_x17_clobbered(ptr %src, ptr swiftasync %as) #0 {
; CHECK-LABEL: test_async_with_jumptable_x17_clobbered:
; CHECK:       ; %bb.0: ; %entry
; CHECK-NEXT:    orr x29, x29, #0x1000000000000000
; CHECK-NEXT:    sub sp, sp, #32
; CHECK-NEXT:    stp x29, x30, [sp, #16] ; 16-byte Folded Spill
; CHECK-NEXT:    add x16, sp, #8
; CHECK-NEXT:    movk x16, #49946, lsl #48
; CHECK-NEXT:    mov x17, x22
; CHECK-NEXT:    pacdb x17, x16
; CHECK-NEXT:    str x17, [sp, #8]
; CHECK-NEXT:    add x29, sp, #16
; CHECK-NEXT:    .cfi_def_cfa w29, 16
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    mov x20, x22
; CHECK-NEXT:    mov x22, x0
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ldr x8, [x0]
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    cbnz x8, LBB1_2
; CHECK-NEXT:  ; %bb.1: ; %then.1
; CHECK-NEXT:    str xzr, [x22]
; CHECK-NEXT:    mov x0, x22
; CHECK-NEXT:  LBB1_2: ; %exit
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    bl _foo
; CHECK-NEXT:    mov x1, x0
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
; CHECK-NEXT:    and x29, x29, #0xefffffffffffffff
; CHECK-NEXT:    add sp, sp, #32
; CHECK-NEXT:    br x1
entry:
  %x17 = tail call i64 asm "", "={x17}"()
  %l = load i64, ptr %src, align 8
  %c = icmp eq i64 %l, 0
  br i1 %c, label %then.1, label %exit

then.1:
  store i64 0, ptr %src
  br label %exit

exit:
  %p = phi ptr [ %src, %then.1 ], [ %as, %entry ]
  tail call void asm sideeffect "", "{x17}"(i64 %x17)
  %r = call i64 @foo(ptr %p)
  %fn = inttoptr i64 %r to ptr
  musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %as)
  ret void
}

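; x1 is live but not needed by StoreSwiftAsyncContext, so shrink-wrapping can
; happen.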
define swifttailcc void @test_async_with_jumptable_x1_clobbered(ptr %src, ptr swiftasync %as) #0 {
; CHECK-LABEL: test_async_with_jumptable_x1_clobbered:
; CHECK:       ; %bb.0: ; %entry
; CHECK-NEXT:    mov x20, x22
; CHECK-NEXT:    mov x22, x0
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ldr x8, [x0]
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    cbnz x8, LBB2_2
; CHECK-NEXT:  ; %bb.1: ; %then.1
; CHECK-NEXT:    str xzr, [x22]
; CHECK-NEXT:    mov x0, x22
; CHECK-NEXT:  LBB2_2: ; %exit
; CHECK-NEXT:    orr x29, x29, #0x1000000000000000
; CHECK-NEXT:    sub sp, sp, #32
; CHECK-NEXT:    stp x29, x30, [sp, #16] ; 16-byte Folded Spill
; CHECK-NEXT:    add x16, sp, #8
; CHECK-NEXT:    movk x16, #49946, lsl #48
; CHECK-NEXT:    mov x17, x22
; CHECK-NEXT:    pacdb x17, x16
; CHECK-NEXT:    str x17, [sp, #8]
; CHECK-NEXT:    add x29, sp, #16
; CHECK-NEXT:    .cfi_def_cfa w29, 16
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    bl _foo
; CHECK-NEXT:    mov x1, x0
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
; CHECK-NEXT:    and x29, x29, #0xefffffffffffffff
; CHECK-NEXT:    add sp, sp, #32
; CHECK-NEXT:    br x1
entry:
  %x1 = tail call i64 asm "", "={x1}"()
  %l = load i64, ptr %src, align 8
  %c = icmp eq i64 %l, 0
  br i1 %c, label %then.1, label %exit

then.1:
  store i64 0, ptr %src
  br label %exit

exit:
  %p = phi ptr [ %src, %then.1 ], [ %as, %entry ]
  tail call void asm sideeffect "", "{x1}"(i64 %x1)
  %r = call i64 @foo(ptr %p)
  %fn = inttoptr i64 %r to ptr
  musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %as)
  ret void
}

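; Neither x1 nor x9 is needed by StoreSwiftAsyncContext, so shrink-wrapping can
; happen.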
define swifttailcc void @test_async_with_jumptable_x1_x9_clobbered(ptr %src, ptr swiftasync %as) #0 {
; CHECK-LABEL: test_async_with_jumptable_x1_x9_clobbered:
; CHECK:       ; %bb.0: ; %entry
; CHECK-NEXT:    mov x20, x22
; CHECK-NEXT:    mov x22, x0
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ldr x8, [x0]
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    cbnz x8, LBB3_2
; CHECK-NEXT:  ; %bb.1: ; %then.1
; CHECK-NEXT:    str xzr, [x22]
; CHECK-NEXT:    mov x0, x22
; CHECK-NEXT:  LBB3_2: ; %exit
; CHECK-NEXT:    orr x29, x29, #0x1000000000000000
; CHECK-NEXT:    sub sp, sp, #32
; CHECK-NEXT:    stp x29, x30, [sp, #16] ; 16-byte Folded Spill
; CHECK-NEXT:    add x16, sp, #8
; CHECK-NEXT:    movk x16, #49946, lsl #48
; CHECK-NEXT:    mov x17, x22
; CHECK-NEXT:    pacdb x17, x16
; CHECK-NEXT:    str x17, [sp, #8]
; CHECK-NEXT:    add x29, sp, #16
; CHECK-NEXT:    .cfi_def_cfa w29, 16
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    bl _foo
; CHECK-NEXT:    mov x1, x0
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
; CHECK-NEXT:    and x29, x29, #0xefffffffffffffff
; CHECK-NEXT:    add sp, sp, #32
; CHECK-NEXT:    br x1
entry:
  %x1 = tail call i64 asm "", "={x1}"()
  %x9 = tail call i64 asm "", "={x9}"()
  %l = load i64, ptr %src, align 8
  %c = icmp eq i64 %l, 0
  br i1 %c, label %then.1, label %exit

then.1:
  store i64 0, ptr %src
  br label %exit

exit:
  %p = phi ptr [ %src, %then.1 ], [ %as, %entry ]
  tail call void asm sideeffect "", "{x1}"(i64 %x1)
  tail call void asm sideeffect "", "{x9}"(i64 %x9)
  %r = call i64 @foo(ptr %p)
  %fn = inttoptr i64 %r to ptr
  musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %as)
  ret void
}

; There are 2 available scratch registers left, so shrink-wrapping can happen.
define swifttailcc void @test_async_with_jumptable_2_available_regs_left(ptr %src, ptr swiftasync %as) #0 {
; CHECK-LABEL: test_async_with_jumptable_2_available_regs_left:
; CHECK:       ; %bb.0: ; %entry
; CHECK-NEXT:    mov x20, x22
; CHECK-NEXT:    mov x22, x0
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldr x10, [x22]
; CHECK-NEXT:    cbnz x10, LBB4_2
; CHECK-NEXT:  ; %bb.1: ; %then.1
; CHECK-NEXT:    str xzr, [x22]
; CHECK-NEXT:    mov x0, x22
; CHECK-NEXT:  LBB4_2: ; %exit
; CHECK-NEXT:    orr x29, x29, #0x1000000000000000
; CHECK-NEXT:    sub sp, sp, #32
; CHECK-NEXT:    stp x29, x30, [sp, #16] ; 16-byte Folded Spill
; CHECK-NEXT:    add x16, sp, #8
; CHECK-NEXT:    movk x16, #49946, lsl #48
; CHECK-NEXT:    mov x17, x22
; CHECK-NEXT:    pacdb x17, x16
; CHECK-NEXT:    str x17, [sp, #8]
; CHECK-NEXT:    add x29, sp, #16
; CHECK-NEXT:    .cfi_def_cfa w29, 16
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    bl _foo
; CHECK-NEXT:    mov x1, x0
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
; CHECK-NEXT:    and x29, x29, #0xefffffffffffffff
; CHECK-NEXT:    add sp, sp, #32
; CHECK-NEXT:    br x1
entry:
  %x1 = tail call i64 asm "", "={x1}"()
  %x2 = tail call i64 asm "", "={x2}"()
  %x3 = tail call i64 asm "", "={x3}"()
  %x4 = tail call i64 asm "", "={x4}"()
  %x5 = tail call i64 asm "", "={x5}"()
  %x6 = tail call i64 asm "", "={x6}"()
  %x7 = tail call i64 asm "", "={x7}"()
  %x8 = tail call i64 asm "", "={x8}"()
  %x9 = tail call i64 asm "", "={x9}"()
  %x11 = tail call i64 asm "", "={x11}"()
  %x12 = tail call i64 asm "", "={x12}"()
  %x13 = tail call i64 asm "", "={x13}"()
  %x14 = tail call i64 asm "", "={x14}"()
  %x15 = tail call i64 asm "", "={x15}"()
  %l = load i64, ptr %src, align 8
  %c = icmp eq i64 %l, 0
  br i1 %c, label %then.1, label %exit

then.1:
  store i64 0, ptr %src
  br label %exit

exit:
  %p = phi ptr [ %src, %then.1 ], [ %as, %entry ]
  tail call void asm sideeffect "", "{x1}"(i64 %x1)
  tail call void asm sideeffect "", "{x2}"(i64 %x2)
  tail call void asm sideeffect "", "{x3}"(i64 %x3)
  tail call void asm sideeffect "", "{x4}"(i64 %x4)
  tail call void asm sideeffect "", "{x5}"(i64 %x5)
  tail call void asm sideeffect "", "{x6}"(i64 %x6)
  tail call void asm sideeffect "", "{x7}"(i64 %x7)
  tail call void asm sideeffect "", "{x8}"(i64 %x8)
  tail call void asm sideeffect "", "{x9}"(i64 %x9)
  tail call void asm sideeffect "", "{x11}"(i64 %x11)
  tail call void asm sideeffect "", "{x12}"(i64 %x12)
  tail call void asm sideeffect "", "{x13}"(i64 %x13)
  tail call void asm sideeffect "", "{x14}"(i64 %x14)
  tail call void asm sideeffect "", "{x15}"(i64 %x15)
  %r = call i64 @foo(ptr %p)
  %fn = inttoptr i64 %r to ptr
  musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %as)
  ret void
}


; There is only 1 available scratch register left, so shrink-wrapping cannot
; happen because StoreSwiftAsyncContext needs 2 free scratch registers.
define swifttailcc void @test_async_with_jumptable_1_available_reg_left(ptr %src, ptr swiftasync %as) #0 {
; CHECK-LABEL: test_async_with_jumptable_1_available_reg_left:
; CHECK:       ; %bb.0: ; %entry
; CHECK-NEXT:    orr x29, x29, #0x1000000000000000
; CHECK-NEXT:    sub sp, sp, #32
; CHECK-NEXT:    stp x29, x30, [sp, #16] ; 16-byte Folded Spill
; CHECK-NEXT:    add x16, sp, #8
; CHECK-NEXT:    movk x16, #49946, lsl #48
; CHECK-NEXT:    mov x17, x22
; CHECK-NEXT:    pacdb x17, x16
; CHECK-NEXT:    str x17, [sp, #8]
; CHECK-NEXT:    add x29, sp, #16
; CHECK-NEXT:    .cfi_def_cfa w29, 16
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    mov x20, x22
; CHECK-NEXT:    mov x22, x0
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldr x10, [x22]
; CHECK-NEXT:    cbnz x10, LBB5_2
; CHECK-NEXT:  ; %bb.1: ; %then.1
; CHECK-NEXT:    str xzr, [x22]
; CHECK-NEXT:    mov x0, x22
; CHECK-NEXT:  LBB5_2: ; %exit
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    ; InlineAsm Start
; CHECK-NEXT:    ; InlineAsm End
; CHECK-NEXT:    bl _foo
; CHECK-NEXT:    mov x1, x0
; CHECK-NEXT:    mov x0, x20
; CHECK-NEXT:    ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
; CHECK-NEXT:    and x29, x29, #0xefffffffffffffff
; CHECK-NEXT:    add sp, sp, #32
; CHECK-NEXT:    br x1
entry:
  %x1 = tail call i64 asm "", "={x1}"()
  %x2 = tail call i64 asm "", "={x2}"()
  %x3 = tail call i64 asm "", "={x3}"()
  %x4 = tail call i64 asm "", "={x4}"()
  %x5 = tail call i64 asm "", "={x5}"()
  %x6 = tail call i64 asm "", "={x6}"()
  %x7 = tail call i64 asm "", "={x7}"()
  %x8 = tail call i64 asm "", "={x8}"()
  %x9 = tail call i64 asm "", "={x9}"()
  %x11 = tail call i64 asm "", "={x11}"()
  %x12 = tail call i64 asm "", "={x12}"()
  %x13 = tail call i64 asm "", "={x13}"()
  %x14 = tail call i64 asm "", "={x14}"()
  %x15 = tail call i64 asm "", "={x15}"()
  %x16 = tail call i64 asm "", "={x16}"()
  %l = load i64, ptr %src, align 8
  %c = icmp eq i64 %l, 0
  br i1 %c, label %then.1, label %exit

then.1:
  store i64 0, ptr %src
  br label %exit

exit:
  %p = phi ptr [ %src, %then.1 ], [ %as, %entry ]
  tail call void asm sideeffect "", "{x1}"(i64 %x1)
  tail call void asm sideeffect "", "{x2}"(i64 %x2)
  tail call void asm sideeffect "", "{x3}"(i64 %x3)
  tail call void asm sideeffect "", "{x4}"(i64 %x4)
  tail call void asm sideeffect "", "{x5}"(i64 %x5)
  tail call void asm sideeffect "", "{x6}"(i64 %x6)
  tail call void asm sideeffect "", "{x7}"(i64 %x7)
  tail call void asm sideeffect "", "{x8}"(i64 %x8)
  tail call void asm sideeffect "", "{x9}"(i64 %x9)
  tail call void asm sideeffect "", "{x11}"(i64 %x11)
  tail call void asm sideeffect "", "{x12}"(i64 %x12)
  tail call void asm sideeffect "", "{x13}"(i64 %x13)
  tail call void asm sideeffect "", "{x14}"(i64 %x14)
  tail call void asm sideeffect "", "{x15}"(i64 %x15)
  tail call void asm sideeffect "", "{x16}"(i64 %x16)
  %r = call i64 @foo(ptr %p)
  %fn = inttoptr i64 %r to ptr
  musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %as)
  ret void
}

declare i64 @foo(ptr)

attributes #0 = { "frame-pointer"="non-leaf" }