; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
; RUN: llc -verify-machineinstrs -stop-before=ppc-vsx-copy -vec-extabi \
; RUN:     -mcpu=pwr7  -mtriple powerpc64-ibm-aix-xcoff < %s | \
; RUN: FileCheck %s

; Test passing a <4 x i32> vector through the ellipsis of a variadic function.
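;
; For reference, a rough C-level analogue of this callee, assuming the AltiVec
; "vector int" type for <4 x i32> and a compiler with AltiVec support enabled
; (an illustrative sketch, not the original source of this test):
;
;   #include <stdarg.h>
;
;   vector int callee(int count, ...) {
;     va_list ap;
;     va_start(ap, count);
;     vector int v = va_arg(ap, vector int); /* 16-byte aligned slot in the arg area */
;     va_end(ap);
;     return v;
;   }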
define <4 x i32> @callee(i32 signext %count, ...) {
  ; CHECK-LABEL: name: callee
  ; CHECK: bb.0.entry:
  ; CHECK:   liveins: $x4, $x5, $x6, $x7, $x8, $x9, $x10
  ; CHECK:   [[COPY:%[0-9]+]]:g8rc = COPY $x10
  ; CHECK:   [[COPY1:%[0-9]+]]:g8rc = COPY $x9
  ; CHECK:   [[COPY2:%[0-9]+]]:g8rc = COPY $x8
  ; CHECK:   [[COPY3:%[0-9]+]]:g8rc = COPY $x7
  ; CHECK:   [[COPY4:%[0-9]+]]:g8rc = COPY $x6
  ; CHECK:   [[COPY5:%[0-9]+]]:g8rc = COPY $x5
  ; CHECK:   [[COPY6:%[0-9]+]]:g8rc = COPY $x4
  ; CHECK:   STD [[COPY6]], 0, %fixed-stack.0 :: (store (s64) into %fixed-stack.0)
  ; CHECK:   STD [[COPY5]], 8, %fixed-stack.0 :: (store (s64) into %fixed-stack.0 + 8)
  ; CHECK:   STD [[COPY4]], 16, %fixed-stack.0 :: (store (s64))
  ; CHECK:   STD [[COPY3]], 24, %fixed-stack.0 :: (store (s64))
  ; CHECK:   STD [[COPY2]], 32, %fixed-stack.0 :: (store (s64))
  ; CHECK:   STD [[COPY1]], 40, %fixed-stack.0 :: (store (s64))
  ; CHECK:   STD [[COPY]], 48, %fixed-stack.0 :: (store (s64))
  ; CHECK:   LIFETIME_START %stack.0.arg_list
  ; CHECK:   [[ADDI8_:%[0-9]+]]:g8rc = ADDI8 %fixed-stack.0, 0
  ; CHECK:   STD killed [[ADDI8_]], 0, %stack.0.arg_list :: (store (s64) into %ir.arg_list)
  ; CHECK:   [[ADDI8_1:%[0-9]+]]:g8rc = ADDI8 %fixed-stack.0, 15
  ; CHECK:   [[RLDICR:%[0-9]+]]:g8rc = RLDICR killed [[ADDI8_1]], 0, 59
  ; CHECK:   [[LXVW4X:%[0-9]+]]:vsrc = LXVW4X $zero8, killed [[RLDICR]] :: (load (s128) from %ir.argp.cur.aligned)
  ; CHECK:   LIFETIME_END %stack.0.arg_list
  ; CHECK:   $v2 = COPY [[LXVW4X]]
  ; CHECK:   BLR8 implicit $lr8, implicit $rm, implicit $v2
entry:
  %arg_list = alloca ptr, align 8
  call void @llvm.lifetime.start.p0(i64 8, ptr nonnull %arg_list)
  call void @llvm.va_start(ptr nonnull %arg_list)
  ; Round the current va_list pointer up to a 16-byte boundary: (p + 15) & ~15.
  %argp.cur = load ptr, ptr %arg_list, align 8
  %0 = ptrtoint ptr %argp.cur to i64
  %1 = add i64 %0, 15
  %2 = and i64 %1, -16
  %argp.cur.aligned = inttoptr i64 %2 to ptr
  ; Advance the va_list pointer past the 16-byte vector slot, then load the vector.
  %argp.next = getelementptr inbounds i8, ptr %argp.cur.aligned, i64 16
  store ptr %argp.next, ptr %arg_list, align 8
  %3 = inttoptr i64 %2 to ptr
  %4 = load <4 x i32>, ptr %3, align 16
  call void @llvm.va_end(ptr nonnull %arg_list)
  call void @llvm.lifetime.end.p0(i64 8, ptr nonnull %arg_list)
  ret <4 x i32> %4
}

declare void @llvm.lifetime.start.p0(i64 immarg, ptr nocapture)

declare void @llvm.va_start(ptr)

declare void @llvm.va_end(ptr)

declare void @llvm.lifetime.end.p0(i64 immarg, ptr nocapture)