// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 5
// REQUIRES: amdgpu-registered-target
// RUN: %clang_cc1 -triple amdgcn-amd-amdhsa -target-cpu verde -emit-llvm -disable-llvm-optzns -fcuda-is-device -o - %s | FileCheck %s

#define __device__ __attribute__((device))
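
// Checks that __builtin_amdgcn_make_buffer_rsrc lowers to the
// llvm.amdgcn.make.buffer.rsrc.p0 intrinsic, with the base pointer, stride,
// number of records, and flags forwarded as its operands.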

// CHECK-LABEL: define dso_local ptr addrspace(8) @_Z31test_amdgcn_make_buffer_rsrc_p0Pvsii(
// CHECK-SAME: ptr noundef [[P:%.*]], i16 noundef signext [[STRIDE:%.*]], i32 noundef [[NUM:%.*]], i32 noundef [[FLAGS:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[RETVAL:%.*]] = alloca ptr addrspace(8), align 16, addrspace(5)
// CHECK-NEXT:    [[P_ADDR:%.*]] = alloca ptr, align 8, addrspace(5)
// CHECK-NEXT:    [[STRIDE_ADDR:%.*]] = alloca i16, align 2, addrspace(5)
// CHECK-NEXT:    [[NUM_ADDR:%.*]] = alloca i32, align 4, addrspace(5)
// CHECK-NEXT:    [[FLAGS_ADDR:%.*]] = alloca i32, align 4, addrspace(5)
// CHECK-NEXT:    [[RETVAL_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[RETVAL]] to ptr
// CHECK-NEXT:    [[P_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[P_ADDR]] to ptr
// CHECK-NEXT:    [[STRIDE_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[STRIDE_ADDR]] to ptr
// CHECK-NEXT:    [[NUM_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[NUM_ADDR]] to ptr
// CHECK-NEXT:    [[FLAGS_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[FLAGS_ADDR]] to ptr
// CHECK-NEXT:    store ptr [[P]], ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    store i16 [[STRIDE]], ptr [[STRIDE_ADDR_ASCAST]], align 2
// CHECK-NEXT:    store i32 [[NUM]], ptr [[NUM_ADDR_ASCAST]], align 4
// CHECK-NEXT:    store i32 [[FLAGS]], ptr [[FLAGS_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = load i16, ptr [[STRIDE_ADDR_ASCAST]], align 2
// CHECK-NEXT:    [[TMP2:%.*]] = load i32, ptr [[NUM_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP3:%.*]] = load i32, ptr [[FLAGS_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP4:%.*]] = call ptr addrspace(8) @llvm.amdgcn.make.buffer.rsrc.p0(ptr [[TMP0]], i16 [[TMP1]], i32 [[TMP2]], i32 [[TMP3]])
// CHECK-NEXT:    ret ptr addrspace(8) [[TMP4]]
//
__device__ __amdgpu_buffer_rsrc_t test_amdgcn_make_buffer_rsrc_p0(void *p, short stride, int num, int flags) {
  return __builtin_amdgcn_make_buffer_rsrc(p, stride, num, flags);
}
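
// The remaining cases pass the stride, the number of records, or the flags as a
// compile-time constant; the constant is expected to appear directly as an
// immediate operand of the intrinsic call.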

// CHECK-LABEL: define dso_local ptr addrspace(8) @_Z47test_amdgcn_make_buffer_rsrc_p0_stride_constantPvii(
// CHECK-SAME: ptr noundef [[P:%.*]], i32 noundef [[NUM:%.*]], i32 noundef [[FLAGS:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[RETVAL:%.*]] = alloca ptr addrspace(8), align 16, addrspace(5)
// CHECK-NEXT:    [[P_ADDR:%.*]] = alloca ptr, align 8, addrspace(5)
// CHECK-NEXT:    [[NUM_ADDR:%.*]] = alloca i32, align 4, addrspace(5)
// CHECK-NEXT:    [[FLAGS_ADDR:%.*]] = alloca i32, align 4, addrspace(5)
// CHECK-NEXT:    [[RETVAL_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[RETVAL]] to ptr
// CHECK-NEXT:    [[P_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[P_ADDR]] to ptr
// CHECK-NEXT:    [[NUM_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[NUM_ADDR]] to ptr
// CHECK-NEXT:    [[FLAGS_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[FLAGS_ADDR]] to ptr
// CHECK-NEXT:    store ptr [[P]], ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    store i32 [[NUM]], ptr [[NUM_ADDR_ASCAST]], align 4
// CHECK-NEXT:    store i32 [[FLAGS]], ptr [[FLAGS_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP2:%.*]] = load i32, ptr [[FLAGS_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP3:%.*]] = call ptr addrspace(8) @llvm.amdgcn.make.buffer.rsrc.p0(ptr [[TMP0]], i16 4, i32 [[TMP1]], i32 [[TMP2]])
// CHECK-NEXT:    ret ptr addrspace(8) [[TMP3]]
//
__device__ __amdgpu_buffer_rsrc_t test_amdgcn_make_buffer_rsrc_p0_stride_constant(void *p, int num, int flags) {
  return __builtin_amdgcn_make_buffer_rsrc(p, /*stride=*/4, num, flags);
}

// CHECK-LABEL: define dso_local ptr addrspace(8) @_Z44test_amdgcn_make_buffer_rsrc_p0_num_constantPvsi(
// CHECK-SAME: ptr noundef [[P:%.*]], i16 noundef signext [[STRIDE:%.*]], i32 noundef [[FLAGS:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[RETVAL:%.*]] = alloca ptr addrspace(8), align 16, addrspace(5)
// CHECK-NEXT:    [[P_ADDR:%.*]] = alloca ptr, align 8, addrspace(5)
// CHECK-NEXT:    [[STRIDE_ADDR:%.*]] = alloca i16, align 2, addrspace(5)
// CHECK-NEXT:    [[FLAGS_ADDR:%.*]] = alloca i32, align 4, addrspace(5)
// CHECK-NEXT:    [[RETVAL_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[RETVAL]] to ptr
// CHECK-NEXT:    [[P_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[P_ADDR]] to ptr
// CHECK-NEXT:    [[STRIDE_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[STRIDE_ADDR]] to ptr
// CHECK-NEXT:    [[FLAGS_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[FLAGS_ADDR]] to ptr
// CHECK-NEXT:    store ptr [[P]], ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    store i16 [[STRIDE]], ptr [[STRIDE_ADDR_ASCAST]], align 2
// CHECK-NEXT:    store i32 [[FLAGS]], ptr [[FLAGS_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = load i16, ptr [[STRIDE_ADDR_ASCAST]], align 2
// CHECK-NEXT:    [[TMP2:%.*]] = load i32, ptr [[FLAGS_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP3:%.*]] = call ptr addrspace(8) @llvm.amdgcn.make.buffer.rsrc.p0(ptr [[TMP0]], i16 [[TMP1]], i32 1234, i32 [[TMP2]])
// CHECK-NEXT:    ret ptr addrspace(8) [[TMP3]]
//
__device__ __amdgpu_buffer_rsrc_t test_amdgcn_make_buffer_rsrc_p0_num_constant(void *p, short stride, int flags) {
  return __builtin_amdgcn_make_buffer_rsrc(p, stride, /*num=*/1234, flags);
}

// CHECK-LABEL: define dso_local ptr addrspace(8) @_Z46test_amdgcn_make_buffer_rsrc_p0_flags_constantPvsi(
// CHECK-SAME: ptr noundef [[P:%.*]], i16 noundef signext [[STRIDE:%.*]], i32 noundef [[NUM:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[RETVAL:%.*]] = alloca ptr addrspace(8), align 16, addrspace(5)
// CHECK-NEXT:    [[P_ADDR:%.*]] = alloca ptr, align 8, addrspace(5)
// CHECK-NEXT:    [[STRIDE_ADDR:%.*]] = alloca i16, align 2, addrspace(5)
// CHECK-NEXT:    [[NUM_ADDR:%.*]] = alloca i32, align 4, addrspace(5)
// CHECK-NEXT:    [[RETVAL_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[RETVAL]] to ptr
// CHECK-NEXT:    [[P_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[P_ADDR]] to ptr
// CHECK-NEXT:    [[STRIDE_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[STRIDE_ADDR]] to ptr
// CHECK-NEXT:    [[NUM_ADDR_ASCAST:%.*]] = addrspacecast ptr addrspace(5) [[NUM_ADDR]] to ptr
// CHECK-NEXT:    store ptr [[P]], ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    store i16 [[STRIDE]], ptr [[STRIDE_ADDR_ASCAST]], align 2
// CHECK-NEXT:    store i32 [[NUM]], ptr [[NUM_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[P_ADDR_ASCAST]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = load i16, ptr [[STRIDE_ADDR_ASCAST]], align 2
// CHECK-NEXT:    [[TMP2:%.*]] = load i32, ptr [[NUM_ADDR_ASCAST]], align 4
// CHECK-NEXT:    [[TMP3:%.*]] = call ptr addrspace(8) @llvm.amdgcn.make.buffer.rsrc.p0(ptr [[TMP0]], i16 [[TMP1]], i32 [[TMP2]], i32 5678)
// CHECK-NEXT:    ret ptr addrspace(8) [[TMP3]]
//
__device__ __amdgpu_buffer_rsrc_t test_amdgcn_make_buffer_rsrc_p0_flags_constant(void *p, short stride, int num) {
  return __builtin_amdgcn_make_buffer_rsrc(p, stride, num, /*flags=*/5678);
}