xref: /llvm-project/clang/test/CodeGenCXX/ptrauth-member-function-pointer.cpp (revision 485c80e1188192a4bb2a8cbddccdca82a6e33b81)
1 // RUN: %clang_cc1 -triple arm64-apple-ios   -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -o - %s | FileCheck -check-prefixes=CHECK,NODEBUG,DARWIN %s
2 // RUN: %clang_cc1 -triple arm64-apple-ios   -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++17 -O1 -disable-llvm-passes -o - %s | FileCheck -check-prefixes=CHECK,NODEBUG,DARWIN,CXX17 %s
3 // RUN: %clang_cc1 -triple arm64-apple-ios   -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -debug-info-kind=limited -o - %s | FileCheck -check-prefixes=CHECK,DARWIN %s
4 // RUN: %clang_cc1 -triple arm64-apple-ios   -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -stack-protector 1 -o - %s | FileCheck %s -check-prefix=STACK-PROT
5 // RUN: %clang_cc1 -triple arm64-apple-ios   -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -stack-protector 2 -o - %s | FileCheck %s -check-prefix=STACK-PROT
6 // RUN: %clang_cc1 -triple arm64-apple-ios   -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -stack-protector 3 -o - %s | FileCheck %s -check-prefix=STACK-PROT
7 
8 // RUN: %clang_cc1 -triple aarch64-linux-gnu -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -o - %s | FileCheck -check-prefixes=CHECK,NODEBUG,ELF %s
9 // RUN: %clang_cc1 -triple aarch64-linux-gnu -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++17 -O1 -disable-llvm-passes -o - %s | FileCheck -check-prefixes=CHECK,NODEBUG,ELF,CXX17 %s
10 // RUN: %clang_cc1 -triple aarch64-linux-gnu -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -debug-info-kind=limited -o - %s | FileCheck -check-prefixes=CHECK,ELF %s
11 // RUN: %clang_cc1 -triple aarch64-linux-gnu -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -stack-protector 1 -o - %s | FileCheck %s -check-prefix=STACK-PROT
12 // RUN: %clang_cc1 -triple aarch64-linux-gnu -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -stack-protector 2 -o - %s | FileCheck %s -check-prefix=STACK-PROT
13 // RUN: %clang_cc1 -triple aarch64-linux-gnu -fptrauth-calls -fptrauth-intrinsics -emit-llvm -std=c++11 -O1 -disable-llvm-passes -stack-protector 3 -o - %s | FileCheck %s -check-prefix=STACK-PROT
14 
15 
16 // CHECK: @gmethod0 = global { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base011nonvirtual0Ev, i32 0, i64 [[TYPEDISC1:35591]]) to i64), i64 0 }, align 8
17 // CHECK: @gmethod1 = global { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived011nonvirtual5Ev, i32 0, i64 [[TYPEDISC0:22163]]) to i64), i64 0 }, align 8
18 // CHECK: @gmethod2 = global { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 [[TYPEDISC0]]) to i64), i64 0 }, align 8
19 
20 // CHECK: @__const._Z13testArrayInitv.p0 = private unnamed_addr constant [1 x { i64, i64 }] [{ i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base011nonvirtual0Ev, i32 0, i64 35591) to i64), i64 0 }], align 8
21 // CHECK: @__const._Z13testArrayInitv.p1 = private unnamed_addr constant [1 x { i64, i64 }] [{ i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 35591) to i64), i64 0 }], align 8
22 // CHECK: @__const._Z13testArrayInitv.c0 = private unnamed_addr constant %struct.Class0 { { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base011nonvirtual0Ev, i32 0, i64 35591) to i64), i64 0 } }, align 8
23 // CHECK: @__const._Z13testArrayInitv.c1 = private unnamed_addr constant %struct.Class0 { { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 35591) to i64), i64 0 } }, align 8
24 
25 // CHECK: @_ZN22testNoexceptConversion6mfptr1E = global { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN22testNoexceptConversion1S19nonvirtual_noexceptEv, i32 0, i64 [[TYPEDISC3:.*]]) to i64), i64 0 },
26 // CHECK: @_ZN22testNoexceptConversion6mfptr2E = global { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN22testNoexceptConversion1S16virtual_noexceptEv_vfpthunk_, i32 0, i64 [[TYPEDISC3]]) to i64), i64 0 },
27 // CHECK: @_ZN22testNoexceptConversion15mfptr3_noexceptE = global { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN22testNoexceptConversion1S19nonvirtual_noexceptEv, i32 0, i64 [[TYPEDISC3]]) to i64), i64 0 },
28 
29 // CHECK: @_ZTV5Base0 = unnamed_addr constant { [5 x ptr] } { [5 x ptr] [ptr null, ptr @_ZTI5Base0,
30 // CHECK-SAME: ptr ptrauth (ptr @_ZN5Base08virtual1Ev, i32 0, i64 55600, ptr getelementptr inbounds ({ [5 x ptr] }, ptr @_ZTV5Base0, i32 0, i32 0, i32 2)),
31 // CHECK-SAME: ptr ptrauth (ptr @_ZN5Base08virtual3Ev, i32 0, i64 53007, ptr getelementptr inbounds ({ [5 x ptr] }, ptr @_ZTV5Base0, i32 0, i32 0, i32 3)),
32 // CHECK-SAME: ptr ptrauth (ptr @_ZN5Base016virtual_variadicEiz, i32 0, i64 7464, ptr getelementptr inbounds ({ [5 x ptr] }, ptr @_ZTV5Base0, i32 0, i32 0, i32 4))] }, align 8
33 
34 typedef __SIZE_TYPE__ size_t;
35 
// Minimal stand-in for std::initializer_list so the braced-init-list calls to
// initList() in testArrayInit() compile without a real <initializer_list>
// header (lit tests avoid depending on the host standard library).
36 namespace std {
37 template <typename _Ep>
38 class initializer_list {
39   const _Ep *__begin_;
40   size_t __size_;
41 
42   initializer_list(const _Ep *__b, size_t __s);
43 };
44 } // namespace std
45 
// Polymorphic base class. Its vtable (@_ZTV5Base0, checked above) carries one
// signed pointer per virtual method, each with its own discriminator and an
// address-discriminating getelementptr into the vtable itself.
46 struct Base0 {
47   void nonvirtual0();
48   virtual void virtual1();
49   virtual void virtual3();
50   virtual void virtual_variadic(int, ...);
51 };
52 
// 16-byte aggregate: small enough to be returned directly in registers as
// [2 x i64] — exercised by Derived0::return_agg and its vfpthunk, which must
// forward the return value without a temporary.
53 struct A0 {
54   int d[4];
55 };
56 
// 32-byte aggregate: returned indirectly via an sret pointer — exercised by
// Derived0::sret, whose vfpthunk must forward the incoming sret pointer to
// the musttail call (see the _ZN8Derived04sretEv_vfpthunk_ checks).
57 struct A1 {
58   int d[8];
59 };
60 
// trivial_abi type: passed in registers ([2 x i64]) despite its user-provided
// copy constructor and destructor. The vfpthunk for Derived0::trivial_abi
// must forward the argument without invoking the destructor (checked above).
61 struct __attribute__((trivial_abi)) TrivialS {
62   TrivialS(const TrivialS &);
63   ~TrivialS();
64   int p[4];
65 };
66 
// Derived class used to test the member-function-pointer type discriminator
// for `void (Derived0::*)()` ([[TYPEDISC1]]) and vfpthunks for both inherited
// and newly introduced virtual methods with varied return/argument ABIs.
67 struct Derived0 : Base0 {
68   void virtual1() override;
69   void nonvirtual5();
70   virtual void virtual6();
71   virtual A0 return_agg();
72   virtual A1 sret();
73   virtual void trivial_abi(TrivialS);
74 };
75 
// Second polymorphic base, used below as the non-primary base of Derived1.
76 struct Base1 {
77   virtual void virtual7();
78 };
79 
// Multiple inheritance: Derived1::virtual7 overrides a method of the
// non-primary base Base1, so its member-pointer thunk dispatches through
// vtable slot 3 (see the _ZN8Derived18virtual7Ev_vfpthunk_ checks above).
80 struct Derived1 : Base0, Base1 {
81   void virtual1() override;
82   void virtual7() override;
83 };
84 
// Member-function-pointer typedefs. Each distinct member-pointer type signs
// with its own type discriminator: [[TYPEDISC0]] for MethodTy0, [[TYPEDISC1]]
// for MethodTy1, and a separate one for the variadic type (see the stores in
// test0 above). The noexcept variant is only available in C++17 mode.
85 typedef void (Base0::*MethodTy0)();
86 #if __cplusplus >= 201703L
87 typedef void (Base0::*NoExceptMethodTy0)() noexcept;
88 #endif
89 typedef void (Base0::*VariadicMethodTy0)(int, ...);
90 typedef void (Derived0::*MethodTy1)();
91 
// Aggregate wrapping a member pointer, used by testArrayInit to check signed
// member pointers inside constant struct initializers.
92 struct Class0 {
93   MethodTy1 m0;
94 };
95 
96 // CHECK: define{{.*}} void @_ZN5Base08virtual1Ev(
97 
98 // CHECK: define{{.*}} void @_Z5test0v()
99 // CHECK: %[[METHOD0:.*]] = alloca { i64, i64 }, align 8
100 // CHECK-NEXT: %[[VARMETHOD1:.*]] = alloca { i64, i64 }, align 8
101 // CHECK-NEXT: %[[METHOD2:.*]] = alloca { i64, i64 }, align 8
102 // CHECK-NEXT: %[[METHOD3:.*]] = alloca { i64, i64 }, align 8
103 // CHECK-NEXT: %[[METHOD4:.*]] = alloca { i64, i64 }, align 8
104 // CHECK-NEXT: %[[METHOD5:.*]] = alloca { i64, i64 }, align 8
105 // CHECK-NEXT: %[[METHOD6:.*]] = alloca { i64, i64 }, align 8
106 // CHECK-NEXT: %[[METHOD7:.*]] = alloca { i64, i64 }, align 8
107 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base011nonvirtual0Ev, i32 0, i64 [[TYPEDISC0]]) to i64), i64 0 }, ptr %[[METHOD0]], align 8
108 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 [[TYPEDISC0]]) to i64), i64 0 }, ptr %[[METHOD0]], align 8
109 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual3Ev_vfpthunk_, i32 0, i64 [[TYPEDISC0]]) to i64), i64 0 }, ptr %[[METHOD0]], align 8
110 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base016virtual_variadicEiz_vfpthunk_, i32 0, i64 34368) to i64), i64 0 }, ptr %[[VARMETHOD1]], align 8
111 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base011nonvirtual0Ev, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %[[METHOD2]], align 8
112 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %[[METHOD2]], align 8
113 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual3Ev_vfpthunk_, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %[[METHOD2]], align 8
114 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived011nonvirtual5Ev, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %[[METHOD2]], align 8
115 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived08virtual6Ev_vfpthunk_, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %[[METHOD2]], align 8
116 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived010return_aggEv_vfpthunk_, i32 0, i64 64418) to i64), i64 0 }, ptr %[[METHOD3]], align 8
117 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived04sretEv_vfpthunk_, i32 0, i64 28187) to i64), i64 0 }, ptr %[[METHOD4]], align 8
118 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived011trivial_abiE8TrivialS_vfpthunk_, i32 0, i64 8992) to i64), i64 0 }, ptr %[[METHOD5]], align 8
119 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base18virtual7Ev_vfpthunk_, i32 0, i64 [[TYPEDISC2:61596]]) to i64), i64 0 }, ptr %[[METHOD6]], align 8
120 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN8Derived18virtual7Ev_vfpthunk_, i32 0, i64 25206) to i64), i64 0 }, ptr %[[METHOD7]], align 8
121 // CHECK-NEXT: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 25206) to i64), i64 0 }, ptr %[[METHOD7]], align 8
122 // CHECK: ret void
123 
124 // CHECK: define linkonce_odr hidden void @_ZN5Base08virtual1Ev_vfpthunk_(ptr noundef %[[THIS:.*]])
125 // CHECK: %[[THIS_ADDR:.*]] = alloca ptr, align 8
126 // CHECK: store ptr %[[THIS]], ptr %[[THIS_ADDR]], align 8
127 // CHECK: %[[THIS1:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
128 // CHECK-NEXT: %[[V0:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
129 // CHECK-NEXT: %[[VTABLE:.*]] = load ptr, ptr %[[THIS1]], align 8
130 // CHECK-NEXT: %[[V2:.*]] = ptrtoint ptr %[[VTABLE]] to i64
131 // CHECK-NEXT: %[[V3:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V2]], i32 2, i64 0)
132 // CHECK-NEXT: %[[V4:.*]] = inttoptr i64 %[[V3]] to ptr
133 // CHECK-NEXT: %[[VFN:.*]] = getelementptr inbounds ptr, ptr %[[V4]], i64 0
134 // CHECK-NEXT: %[[V5:.*]] = load ptr, ptr %[[VFN]], align 8
135 // CHECK-NEXT: %[[V6:.*]] = ptrtoint ptr %[[VFN]] to i64
136 // CHECK-NEXT: %[[V7:.*]] = call i64 @llvm.ptrauth.blend(i64 %[[V6]], i64 55600)
137 // CHECK-NEXT: musttail call void %[[V5]](ptr noundef nonnull align {{[0-9]+}} dereferenceable(8) %[[V0]]) [ "ptrauth"(i32 0, i64 %[[V7]]) ]
138 // CHECK-NEXT: ret void
139 
140 // CHECK: define linkonce_odr hidden void @_ZN5Base08virtual3Ev_vfpthunk_(ptr noundef %{{.*}})
141 // CHECK: load ptr, ptr %{{.*}}, align 8
142 // CHECK: load ptr, ptr %{{.*}}, align 8
143 // CHECK: %[[VTABLE:.*]] = load ptr, ptr %{{.*}}, align 8
144 // CHECK: %[[V2:.*]] = ptrtoint ptr %[[VTABLE]] to i64
145 // CHECK: %[[V3:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V2]], i32 2, i64 0)
146 // CHECK: %[[V4:.*]] = inttoptr i64 %[[V3]] to ptr
147 // CHECK: getelementptr inbounds ptr, ptr %[[V4]], i64 1
148 // CHECK: call i64 @llvm.ptrauth.blend(i64 %{{.*}}, i64 53007)
149 
150 // CHECK: define linkonce_odr hidden void @_ZN5Base016virtual_variadicEiz_vfpthunk_(ptr noundef %[[THIS:.*]], i32 noundef %0, ...)
151 // CHECK: %[[THIS_ADDR:.*]] = alloca ptr, align 8
152 // CHECK-NEXT: %[[_ADDR:.*]] = alloca i32, align 4
153 // CHECK-NEXT: store ptr %[[THIS]], ptr %[[THIS_ADDR]], align 8
154 // CHECK: store i32 %0, ptr %[[_ADDR]], align 4
155 // CHECK: %[[THIS1:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
156 // CHECK-NEXT: %[[V1:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
157 // CHECK-NEXT: %[[V2:.*]] = load i32, ptr %[[_ADDR]], align 4
158 // CHECK-NEXT: %[[VTABLE:.*]] = load ptr, ptr %[[THIS1]], align 8
159 // CHECK-NEXT: %[[V4:.*]] = ptrtoint ptr %[[VTABLE]] to i64
160 // CHECK-NEXT: %[[V5:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V4]], i32 2, i64 0)
161 // CHECK-NEXT: %[[V6:.*]] = inttoptr i64 %[[V5]] to ptr
162 // CHECK-NEXT: %[[VFN:.*]] = getelementptr inbounds ptr, ptr %[[V6]], i64 2
163 // CHECK-NEXT: %[[V7:.*]] = load ptr, ptr %[[VFN]], align 8
164 // CHECK-NEXT: %[[V8:.*]] = ptrtoint ptr %[[VFN]] to i64
165 // CHECK-NEXT: %[[V9:.*]] = call i64 @llvm.ptrauth.blend(i64 %[[V8]], i64 7464)
166 // CHECK-NEXT: musttail call void (ptr, i32, ...) %[[V7]](ptr noundef nonnull align {{[0-9]+}} dereferenceable(8) %[[V1]], i32 noundef %[[V2]], ...) [ "ptrauth"(i32 0, i64 %[[V9]]) ]
167 // CHECK-NEXT: ret void
168 
169 // CHECK: define linkonce_odr hidden void @_ZN8Derived08virtual6Ev_vfpthunk_(ptr noundef %[[THIS:.*]])
170 // CHECK: %[[THIS_ADDR:.*]] = alloca ptr, align 8
171 // CHECK: store ptr %[[THIS]], ptr %[[THIS_ADDR]], align 8
172 // CHECK: %[[THIS1:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
173 // CHECK: %[[V0:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
174 // CHECK: %[[VTABLE:.*]] = load ptr, ptr %[[THIS1]], align 8
175 // CHECK: %[[V1:.*]] = ptrtoint ptr %[[VTABLE]] to i64
176 // CHECK: %[[V2:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V1]], i32 2, i64 0)
177 // CHECK: %[[V3:.*]] = inttoptr i64 %[[V2]] to ptr
178 // CHECK: %[[VFN:.*]] = getelementptr inbounds ptr, ptr %[[V3]], i64 3
179 // CHECK: %[[V5:.*]] = ptrtoint ptr %[[VFN]] to i64
180 // CHECK: call i64 @llvm.ptrauth.blend(i64 %[[V5]], i64 55535)
181 
182 // Check that the return value of the musttail call isn't copied to a temporary.
183 
184 // CHECK: define linkonce_odr hidden [2 x i64] @_ZN8Derived010return_aggEv_vfpthunk_(ptr noundef %{{.*}})
185 // CHECK: %[[CALL:.*]] = musttail call [2 x i64] %{{.*}}(ptr noundef nonnull align {{[0-9]+}} dereferenceable(8) %{{.*}}) [ "ptrauth"(i32 0, i64 %{{.*}}) ]
186 // CHECK-NEXT: ret [2 x i64] %[[CALL]]
187 
188 // Check that the sret pointer passed to the caller is forwarded to the musttail
189 // call.
190 
191 // CHECK: define linkonce_odr hidden void @_ZN8Derived04sretEv_vfpthunk_(ptr dead_on_unwind noalias writable sret(%struct.A1) align 4 %[[AGG_RESULT:.*]], ptr noundef %{{.*}})
192 // CHECK: musttail call void %{{.*}}(ptr dead_on_unwind writable  sret(%struct.A1) align 4 %[[AGG_RESULT]], ptr noundef nonnull align {{[0-9]+}} dereferenceable(8) %{{.*}}) [ "ptrauth"(i32 0, i64 %{{.*}}) ]
193 // CHECK-NEXT: ret void
194 
195 // Check that the thunk function doesn't destruct the trivial_abi argument.
196 
197 // CHECK: define linkonce_odr hidden void @_ZN8Derived011trivial_abiE8TrivialS_vfpthunk_(ptr noundef %{{.*}}, [2 x i64] %{{.*}})
198 // NODEBUG-NOT: call
199 // CHECK: call i64 @llvm.ptrauth.auth(
200 // NODEBUG-NOT: call
201 // CHECK: call i64 @llvm.ptrauth.blend(
202 // NODEBUG-NOT: call
203 // CHECK: musttail call void
204 // CHECK-NEXT: ret void
205 
206 // CHECK: define linkonce_odr hidden void @_ZN5Base18virtual7Ev_vfpthunk_(ptr noundef %[[THIS:.*]])
207 // CHECK: entry:
208 // CHECK: %[[THIS_ADDR:.*]] = alloca ptr, align 8
209 // CHECK: store ptr %[[THIS]], ptr %[[THIS_ADDR]], align 8
210 // CHECK: %[[THIS1:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
211 // CHECK: %[[V0:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
212 // CHECK: %[[VTABLE:.*]] = load ptr, ptr %[[THIS1]], align 8
213 // CHECK: %[[V1:.*]] = ptrtoint ptr %[[VTABLE]] to i64
214 // CHECK: %[[V2:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V1]], i32 2, i64 0)
215 // CHECK: %[[V3:.*]] = inttoptr i64 %[[V2]] to ptr
216 // CHECK: getelementptr inbounds ptr, ptr %[[V3]], i64 0
217 
218 // CHECK: define linkonce_odr hidden void @_ZN8Derived18virtual7Ev_vfpthunk_(ptr noundef %[[THIS:.*]])
219 // CHECK: %[[THIS_ADDR:.*]] = alloca ptr, align 8
220 // CHECK: store ptr %[[THIS]], ptr %[[THIS_ADDR]], align 8
221 // CHECK: %[[THIS1:.*]] = load ptr, ptr %[[THIS_ADDR]], align 8
222 // CHECK: load ptr, ptr %[[THIS_ADDR]], align 8
223 // CHECK: %[[VTABLE:.*]] = load ptr, ptr %[[THIS1]], align 8
224 // CHECK: %[[V1:.*]] = ptrtoint ptr %[[VTABLE]] to i64
225 // CHECK: %[[V2:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V1]], i32 2, i64 0)
226 // CHECK: %[[V3:.*]] = inttoptr i64 %[[V2]] to ptr
227 // CHECK: getelementptr inbounds ptr, ptr %[[V3]], i64 3
228 
// Out-of-line definition: virtual1 is Base0's key function, so this TU emits
// @_ZTV5Base0 with its signed slots (checked at the top of the file).
229 void Base0::virtual1() {}
230 
// Exercises constant member-function-pointer emission: non-virtual methods
// are signed directly with the member-pointer type discriminator, virtual
// methods are signed via a _vfpthunk_ wrapper that performs the vtable
// dispatch. Each store below is matched line-by-line by the checks above.
231 void test0() {
232   MethodTy0 method0;
233   method0 = &Base0::nonvirtual0;
234   method0 = &Base0::virtual1;
235   method0 = &Base0::virtual3;
236 
237   VariadicMethodTy0 varmethod1;
238   varmethod1 = &Base0::virtual_variadic;
239 
240   MethodTy1 method2;
241   method2 = &Derived0::nonvirtual0;
242   method2 = &Derived0::virtual1;
243   method2 = &Derived0::virtual3;
244   method2 = &Derived0::nonvirtual5;
245   method2 = &Derived0::virtual6;
246 
247   A0 (Derived0::*method3)();
248   method3 = &Derived0::return_agg;
249 
250   A1 (Derived0::*method4)();
251   method4 = &Derived0::sret;
252 
253   void (Derived0::*method5)(TrivialS);
254   method5 = &Derived0::trivial_abi;
255 
256   void (Base1::*method6)();
257   method6 = &Base1::virtual7;
258 
259   void (Derived1::*method7)();
260   method7 = &Derived1::virtual7;
261   method7 = &Derived1::virtual1;
262 }
263 
264 // CHECK: define{{.*}} void @_Z5test1P5Base0MS_FvvE(ptr noundef %[[A0:.*]], [2 x i64] %[[A1_COERCE:.*]])
265 // CHECK: %[[A1:.*]] = alloca { i64, i64 }, align 8
266 // CHECK: %[[A0_ADDR:.*]] = alloca ptr, align 8
267 // CHECK: %[[A1_ADDR:.*]] = alloca { i64, i64 }, align 8
268 // CHECK: store [2 x i64] %[[A1_COERCE]], ptr %[[A1]], align 8
269 // CHECK: %[[A11:.*]] = load { i64, i64 }, ptr %[[A1]], align 8
270 // CHECK: store ptr %[[A0]], ptr %[[A0_ADDR]], align 8
271 // CHECK: store { i64, i64 } %[[A11]], ptr %[[A1_ADDR]], align 8
272 // CHECK: %[[V1:.*]] = load ptr, ptr %[[A0_ADDR]], align 8
273 // CHECK: %[[V2:.*]] = load { i64, i64 }, ptr %[[A1_ADDR]], align 8
274 // CHECK: %[[MEMPTR_ADJ:.*]] = extractvalue { i64, i64 } %[[V2]], 1
275 // CHECK: %[[MEMPTR_ADJ_SHIFTED:.*]] = ashr i64 %[[MEMPTR_ADJ]], 1
276 // CHECK: %[[V4:.*]] = getelementptr inbounds i8, ptr %[[V1]], i64 %[[MEMPTR_ADJ_SHIFTED]]
277 // CHECK: %[[MEMPTR_PTR:.*]] = extractvalue { i64, i64 } %[[V2]], 0
278 // CHECK: %[[V5:.*]] = and i64 %[[MEMPTR_ADJ]], 1
279 // CHECK: %[[MEMPTR_ISVIRTUAL:.*]] = icmp ne i64 %[[V5]], 0
280 // CHECK: br i1 %[[MEMPTR_ISVIRTUAL]]
281 
282 // CHECK:  %[[VTABLE:.*]] = load ptr, ptr %[[V4]], align 8
283 // CHECK:  %[[V7:.*]] = ptrtoint ptr %[[VTABLE]] to i64
284 // CHECK:  %[[V8:.*]] = call i64 @llvm.ptrauth.auth(i64 %[[V7]], i32 2, i64 0)
285 // CHECK:  %[[V9:.*]] = inttoptr i64 %[[V8]] to ptr
286 // DARWIN: %[[V10:.*]] = trunc i64 %[[MEMPTR_PTR]] to i32
287 // DARWIN: %[[V11:.*]] = zext i32 %[[V10]] to i64
288 // DARWIN: %[[V12:.*]] = getelementptr i8, ptr %[[V9]], i64 %[[V11]]
289 // ELF:    %[[V12:.*]] = getelementptr i8, ptr %[[V9]], i64 %[[MEMPTR_PTR]]
290 // CHECK:  %[[MEMPTR_VIRTUALFN:.*]] = load ptr, ptr %[[V12]], align 8
291 // CHECK:  br
292 
293 // CHECK: %[[MEMPTR_NONVIRTUALFN:.*]] = inttoptr i64 %[[MEMPTR_PTR]] to ptr
294 // CHECK: br
295 
296 // CHECK: %[[V14:.*]] = phi ptr [ %[[MEMPTR_VIRTUALFN]], {{.*}} ], [ %[[MEMPTR_NONVIRTUALFN]], {{.*}} ]
297 // CHECK: %[[V15:.*]] = phi i64 [ 0, {{.*}} ], [ [[TYPEDISC0]], {{.*}} ]
298 // CHECK: call void %[[V14]](ptr noundef nonnull align {{[0-9]+}} dereferenceable(8) %[[V4]]) [ "ptrauth"(i32 0, i64 %[[V15]]) ]
299 // CHECK: ret void
300 
// Indirect call through a member function pointer: the callee is invoked with
// a "ptrauth" operand bundle whose discriminator is 0 on the virtual branch
// (the vtable slot was already authenticated) and [[TYPEDISC0]] on the
// non-virtual branch (see the phi nodes in the checks above).
301 void test1(Base0 *a0, MethodTy0 a1) {
302   (a0->*a1)();
303 }
304 
305 // CXX17: define{{.*}} void @_Z14test1_noexceptP5Base0MS_DoFvvE(
306 // CXX17: %[[V14:.*]] = phi ptr [ %{{.*}}, {{.*}} ], [ %{{.*}}, {{.*}} ]
307 // CXX17: %[[V15:.*]] = phi i64 [ 0, {{.*}} ], [ [[TYPEDISC0]], {{.*}} ]
308 // CXX17: call void %[[V14]](ptr noundef nonnull align {{[0-9]+}} dereferenceable(8) %{{.*}}) {{.*}}[ "ptrauth"(i32 0, i64 %[[V15]]) ]
309 #if __cplusplus >= 201703L
// A noexcept member-pointer call signs with the same discriminator
// ([[TYPEDISC0]]) as the non-noexcept equivalent (checked by the CXX17 lines
// above).
310 void test1_noexcept(Base0 *a0, NoExceptMethodTy0 a1) {
311   (a0->*a1)();
312 }
313 #endif
314 
315 // CHECK: define{{.*}} void @_Z15testConversion0M5Base0FvvEM8Derived0FvvE([2 x i64] %[[METHOD0_COERCE:.*]], [2 x i64] %[[METHOD1_COERCE:.*]])
316 // CHECK: %[[METHOD0:.*]] = alloca { i64, i64 }, align 8
317 // CHECK: %[[METHOD1:.*]] = alloca { i64, i64 }, align 8
318 // CHECK: %[[METHOD0_ADDR:.*]] = alloca { i64, i64 }, align 8
319 // CHECK: %[[METHOD1_ADDR:.*]] = alloca { i64, i64 }, align 8
320 // CHECK: store [2 x i64] %[[METHOD0_COERCE]], ptr %[[METHOD0]], align 8
321 // CHECK: %[[METHOD01:.*]] = load { i64, i64 }, ptr %[[METHOD0]], align 8
322 // CHECK: store [2 x i64] %[[METHOD1_COERCE]], ptr %[[METHOD1]], align 8
323 // CHECK: %[[METHOD12:.*]] = load { i64, i64 }, ptr %[[METHOD1]], align 8
324 // CHECK: store { i64, i64 } %[[METHOD01]], ptr %[[METHOD0_ADDR]], align 8
325 // CHECK: store { i64, i64 } %[[METHOD12]], ptr %[[METHOD1_ADDR]], align 8
326 // CHECK: %[[V2:.*]] = load { i64, i64 }, ptr %[[METHOD0_ADDR]], align 8
327 // CHECK: %[[MEMPTR_PTR:.*]] = extractvalue { i64, i64 } %[[V2]], 0
328 // CHECK: %[[MEMPTR_ADJ:.*]] = extractvalue { i64, i64 } %[[V2]], 1
329 // CHECK: %[[V3:.*]] = and i64 %[[MEMPTR_ADJ]], 1
330 // CHECK: %[[IS_VIRTUAL_OFFSET:.*]] = icmp ne i64 %[[V3]], 0
331 // CHECK: br i1 %[[IS_VIRTUAL_OFFSET]]
332 
333 // CHECK: %[[V4:.*]] = inttoptr i64 %[[MEMPTR_PTR]] to ptr
334 // CHECK: %[[V5:.*]] = icmp ne ptr %[[V4]], null
335 // CHECK: br i1 %[[V5]]
336 
337 // CHECK: %[[V6:.*]] = ptrtoint ptr %[[V4]] to i64
338 // CHECK: %[[V7:.*]] = call i64 @llvm.ptrauth.resign(i64 %[[V6]], i32 0, i64 [[TYPEDISC0]], i32 0, i64 [[TYPEDISC1]])
339 // CHECK: %[[V8:.*]] = inttoptr i64 %[[V7]] to ptr
340 // CHECK: br
341 
342 // CHECK: %[[V9:.*]] = phi ptr [ null, {{.*}} ], [ %[[V8]], {{.*}} ]
343 // CHECK: %[[V1:.*]] = ptrtoint ptr %[[V9]] to i64
344 // CHECK: %[[V11:.*]] = insertvalue { i64, i64 } %[[V2]], i64 %[[V1]], 0
345 // CHECK: br
346 
347 // CHECK: %[[V12:.*]] = phi { i64, i64 } [ %[[V2]], {{.*}} ], [ %[[V11]], {{.*}} ]
348 // CHECK: store { i64, i64 } %[[V12]], ptr %[[METHOD1_ADDR]], align 8
349 // CHECK: ret void
350 
// Base-to-derived member-pointer conversion of a non-constant value requires
// a runtime @llvm.ptrauth.resign from [[TYPEDISC0]] to [[TYPEDISC1]], guarded
// by the virtual-bit and null checks matched above (null must stay null).
351 void testConversion0(MethodTy0 method0, MethodTy1 method1) {
352   method1 = method0;
353 }
354 
355 // CHECK: define{{.*}} void @_Z15testConversion1M5Base0FvvE(
356 // CHECK: call i64 @llvm.ptrauth.resign(i64 %{{.*}}, i32 0, i64 [[TYPEDISC0]], i32 0, i64 [[TYPEDISC1]])
357 
// reinterpret_cast between member-pointer types also resigns the pointer
// (same [[TYPEDISC0]] -> [[TYPEDISC1]] resign as the implicit conversion).
358 void testConversion1(MethodTy0 method0) {
359   MethodTy1 method1 = reinterpret_cast<MethodTy1>(method0);
360 }
361 
362 // CHECK: define{{.*}} void @_Z15testConversion2M8Derived0FvvE(
363 // CHECK: call i64 @llvm.ptrauth.resign(i64 %{{.*}}, i32 0, i64 [[TYPEDISC1]], i32 0, i64 [[TYPEDISC0]])
364 
// static_cast back to the base-class member-pointer type resigns in the
// opposite direction: [[TYPEDISC1]] -> [[TYPEDISC0]].
365 void testConversion2(MethodTy1 method1) {
366   MethodTy0 method0 = static_cast<MethodTy0>(method1);
367 }
368 
369 // CHECK: define{{.*}} void @_Z15testConversion3M8Derived0FvvE(
370 // CHECK: call i64 @llvm.ptrauth.resign(i64 %{{.*}}, i32 0, i64 [[TYPEDISC1]], i32 0, i64 [[TYPEDISC0]])
371 
// reinterpret_cast to the base-class member-pointer type behaves like the
// static_cast above: resign from [[TYPEDISC1]] to [[TYPEDISC0]].
372 void testConversion3(MethodTy1 method1) {
373   MethodTy0 method0 = reinterpret_cast<MethodTy0>(method1);
374 }
375 
376 // No need to call @llvm.ptrauth.resign if the source member function
377 // pointer is a constant.
378 
379 // CHECK: define{{.*}} void @_Z15testConversion4v(
380 // CHECK: %[[METHOD0:.*]] = alloca { i64, i64 }, align 8
381 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 [[TYPEDISC0]]) to i64), i64 0 }, ptr %[[METHOD0]], align 8
382 // CHECK: ret void
383 
// Constant source operand: the conversion folds, so the member pointer is
// emitted as a relocation signed with the destination discriminator
// ([[TYPEDISC0]]) and no runtime @llvm.ptrauth.resign is generated.
384 void testConversion4() {
385   MethodTy0 method0 = reinterpret_cast<MethodTy0>(&Derived0::virtual1);
386 }
387 
388 // This code used to crash.
389 namespace testNonVirtualThunk {
390   struct R {};
391 
392   struct B0 {
393     virtual void bar();
394   };
395 
396   struct B1 {
397     virtual R foo();
398   };
399 
400   struct D : B0, B1 {
401     virtual R foo();
402   };
403 
404   // NOTE(review): D::foo overrides a method of the non-primary base B1, so
405   // emitting D's vtables involves a this-adjusting (non-virtual) thunk;
406   // regression guard only — no CHECK lines, it just must not crash.
407   D d;
408 }
406 
407 // CHECK: define internal void @_ZN22TestAnonymousNamespace12_GLOBAL__N_11S3fooEv_vfpthunk_(
408 
// Taking a member pointer to a virtual method of an internal-linkage class:
// the generated vfpthunk must itself have internal linkage (checked above).
409 namespace TestAnonymousNamespace {
410 namespace {
411 struct S {
412   virtual void foo(){};
413 };
414 } // namespace
415 
416 void test() {
417   auto t = &S::foo;
418 }
419 } // namespace TestAnonymousNamespace
420 
// Globals with constant member-pointer initializers: the reinterpret_casts
// fold away and each global is emitted as a ptrauth relocation signed with
// the destination type's discriminator (checked at the top of the file).
421 MethodTy1 gmethod0 = reinterpret_cast<MethodTy1>(&Base0::nonvirtual0);
422 MethodTy0 gmethod1 = reinterpret_cast<MethodTy0>(&Derived0::nonvirtual5);
423 MethodTy0 gmethod2 = reinterpret_cast<MethodTy0>(&Derived0::virtual1);
424 
425 // CHECK-LABEL: define{{.*}} void @_Z13testArrayInitv()
426 // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 %p0, ptr align 8 @__const._Z13testArrayInitv.p0, i64 16, i1 false)
427 // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 %p1, ptr align 8 @__const._Z13testArrayInitv.p1, i64 16, i1 false)
428 // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 %c0, ptr align 8 @__const._Z13testArrayInitv.c0, i64 16, i1 false)
429 // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 %c1, ptr align 8 @__const._Z13testArrayInitv.c1, i64 16, i1 false)
430 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base011nonvirtual0Ev, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %{{.*}} align 8
431 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN5Base08virtual1Ev_vfpthunk_, i32 0, i64 [[TYPEDISC1]]) to i64), i64 0 }, ptr %{{.*}}, align 8
432 
433 void initList(std::initializer_list<MethodTy1>);
434 
// Signed member-pointer constants inside aggregate initializers: arrays and
// structs are copied from private signed constants via llvm.memcpy, while the
// initializer_list temporaries get explicit stores (checked above).
435 void testArrayInit() {
436   MethodTy1 p0[] = {&Base0::nonvirtual0};
437   MethodTy1 p1[] = {&Base0::virtual1};
438   Class0 c0{&Base0::nonvirtual0};
439   Class0 c1{&Base0::virtual1};
440   initList({&Base0::nonvirtual0});
441   initList({&Base0::virtual1});
442 }
443 
444 
445 
446 // STACK-PROT: define {{.*}}_vfpthunk{{.*}}[[ATTRS:#[0-9]+]]
447 // STACK-PROT: attributes [[ATTRS]] =
448 // STACK-PROT-NOT: ssp
449 // STACK-PROT-NOT: sspstrong
450 // STACK-PROT-NOT: sspreq
451 // STACK-PROT-NEXT: attributes
452 
453 // CHECK: define{{.*}} void @_Z15testConvertNullv(
454 // CHECK: %[[T:.*]] = alloca { i64, i64 },
455 // CHECK: store { i64, i64 } zeroinitializer, ptr %[[T]],
456 
// Converting a value-initialized (null) member pointer between types: the
// result folds to a plain zeroinitializer store — no runtime resign, since
// resigning null would produce a non-null signed value.
457 void testConvertNull() {
458   VariadicMethodTy0 t = (VariadicMethodTy0)(MethodTy0{});
459 }
460 
// Dropping `noexcept` from a member-function-pointer type is an implicit
// conversion that must not change the ptrauth schema: every initializer below
// signs with the same [[TYPEDISC3]] discriminator, and converting a loaded
// value is a plain copy with no resign.
461 namespace testNoexceptConversion {
462 
463 // CHECK-LABEL: define internal void @__cxx_global_var_init()
464 // CHECK: %[[V0:.*]] = load { i64, i64 }, ptr @_ZN22testNoexceptConversion15mfptr0_noexceptE, align 8
465 // CHECK: store { i64, i64 } %[[V0]], ptr @_ZN22testNoexceptConversion6mfptr4E, align 8
466 
467 // CHECK: define {{.*}}void @_ZN22testNoexceptConversion5test0Ev()
468 // CHECK: %[[P0:.*]] = alloca { i64, i64 }, align 8
469 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN22testNoexceptConversion1S19nonvirtual_noexceptEv, i32 0, i64 [[TYPEDISC3]]) to i64), i64 0 }, ptr %[[P0]], align 8,
470 
471 // CHECK: define {{.*}}void @_ZN22testNoexceptConversion5test1Ev()
472 // CHECK: %[[P0:.*]] = alloca { i64, i64 }, align 8
473 // CHECK: store { i64, i64 } { i64 ptrtoint (ptr ptrauth (ptr @_ZN22testNoexceptConversion1S16virtual_noexceptEv_vfpthunk_, i32 0, i64 [[TYPEDISC3]]) to i64), i64 0 }, ptr %[[P0]], align 8,
474 
475 // CHECK: define {{.*}}void @_ZN22testNoexceptConversion5test2Ev()
476 // CHECK: %[[P0:.*]] = alloca { i64, i64 }, align 8
477 // CHECK: %[[V0:.*]] = load { i64, i64 }, ptr @_ZN22testNoexceptConversion15mfptr0_noexceptE, align 8
478 // CHECK: store { i64, i64 } %[[V0]], ptr %[[P0]], align 8,
479 
480 struct S {
481   void nonvirtual_noexcept() noexcept;
482   virtual void virtual_noexcept() noexcept;
483 };
484 
485 void (S::*mfptr0_noexcept)() noexcept;
486 void (S::*mfptr1)() = &S::nonvirtual_noexcept;
487 void (S::*mfptr2)() = &S::virtual_noexcept;
488 void (S::*mfptr3_noexcept)() noexcept = &S::nonvirtual_noexcept;
489 void (S::*mfptr4)() = mfptr0_noexcept;
490 
491 void test0() {
492   void (S::*p0)() = &S::nonvirtual_noexcept;
493 }
494 
495 void test1() {
496   void (S::*p0)() = &S::virtual_noexcept;
497 }
498 
499 void test2() {
500   void (S::*p0)() = mfptr0_noexcept;
501 }
502 
503 }
504