; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5
; RUN: opt -S -mtriple=amdgcn-amd-amdhsa -passes=infer-address-spaces %s | FileCheck %s

define void @prefetch_shared_to_flat(ptr addrspace(3) %group.ptr) {
; CHECK-LABEL: define void @prefetch_shared_to_flat(
; CHECK-SAME: ptr addrspace(3) [[GROUP_PTR:%.*]]) {
; CHECK-NEXT:    tail call void @llvm.prefetch.p3(ptr addrspace(3) [[GROUP_PTR]], i32 0, i32 0, i32 1)
; CHECK-NEXT:    ret void
;
  %cast = addrspacecast ptr addrspace(3) %group.ptr to ptr
  tail call void @llvm.prefetch.p0(ptr %cast, i32 0, i32 0, i32 1)
  ret void
}

define void @prefetch_global_to_flat(ptr addrspace(1) %global.ptr) {
; CHECK-LABEL: define void @prefetch_global_to_flat(
; CHECK-SAME: ptr addrspace(1) [[GLOBAL_PTR:%.*]]) {
; CHECK-NEXT:    tail call void @llvm.prefetch.p1(ptr addrspace(1) [[GLOBAL_PTR]], i32 0, i32 0, i32 1)
; CHECK-NEXT:    ret void
;
  %cast = addrspacecast ptr addrspace(1) %global.ptr to ptr
  tail call void @llvm.prefetch.p0(ptr addrspace(0) %cast, i32 0, i32 0, i32 1)
  ret void
}

define void @prefetch_constant_to_flat(ptr addrspace(4) %const.ptr) {
; CHECK-LABEL: define void @prefetch_constant_to_flat(
; CHECK-SAME: ptr addrspace(4) [[CONST_PTR:%.*]]) {
; CHECK-NEXT:    tail call void @llvm.prefetch.p4(ptr addrspace(4) [[CONST_PTR]], i32 0, i32 0, i32 1)
; CHECK-NEXT:    ret void
;
  %cast = addrspacecast ptr addrspace(4) %const.ptr to ptr
  tail call void @llvm.prefetch.p0(ptr %cast, i32 0, i32 0, i32 1)
  ret void
}

define void @prefetch_flat_to_shared(ptr %flat.ptr) {
; CHECK-LABEL: define void @prefetch_flat_to_shared(
; CHECK-SAME: ptr [[FLAT_PTR:%.*]]) {
; CHECK-NEXT:    [[CAST:%.*]] = addrspacecast ptr [[FLAT_PTR]] to ptr addrspace(3)
; CHECK-NEXT:    tail call void @llvm.prefetch.p3(ptr addrspace(3) [[CAST]], i32 0, i32 0, i32 1)
; CHECK-NEXT:    ret void
;
  %cast = addrspacecast ptr %flat.ptr to ptr addrspace(3)
  tail call void @llvm.prefetch.p3(ptr addrspace(3) %cast, i32 0, i32 0, i32 1)
  ret void
}

define void @prefetch_flat_to_global(ptr %flat.ptr) {
; CHECK-LABEL: define void @prefetch_flat_to_global(
; CHECK-SAME: ptr [[FLAT_PTR:%.*]]) {
; CHECK-NEXT:    [[CAST:%.*]] = addrspacecast ptr [[FLAT_PTR]] to ptr addrspace(1)
; CHECK-NEXT:    tail call void @llvm.prefetch.p1(ptr addrspace(1) [[CAST]], i32 0, i32 0, i32 1)
; CHECK-NEXT:    ret void
;
  %cast = addrspacecast ptr %flat.ptr to ptr addrspace(1)
  tail call void @llvm.prefetch.p1(ptr addrspace(1) %cast, i32 0, i32 0, i32 1)
  ret void
}