Lines Matching refs:vst
7 declare void @llvm.ve.vl.vst.vssl(<256 x double>, i64, ptr, i32)
21 ; CHECK-NEXT: vst %v0, 16, %s3
27 ; CHECK-NEXT: vst %v0, 16, %s3
31 ; CHECK-NEXT: vst %v0, 16, %s3
34 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l0, i64 16, ptr %Q, i32 %evl)
36 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l1, i64 16, ptr %Q, i32 %evl2)
38 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l2, i64 16, ptr %Q, i32 %evl)
52 ; CHECK-NEXT: vst %v0, 16, %s2
54 ; CHECK-NEXT: vst %v0, 16, %s2
56 ; CHECK-NEXT: vst %v0, 16, %s2
59 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l0, i64 16, ptr %Q, i32 %evl)
61 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l1, i64 16, ptr %Q, i32 %evl)
63 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l2, i64 16, ptr %Q, i32 %evl)
81 ; CHECK-NEXT: vst %v0, 16, %s2
88 ; CHECK-NEXT: vst %v0, 16, %s19
90 ; CHECK-NEXT: vst %v0, 16, %s19
96 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l0, i64 16, ptr %Q, i32 %evl)
99 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l1, i64 16, ptr %Q, i32 %evl)
101 tail call void @llvm.ve.vl.vst.vssl(<256 x double> %l2, i64 16, ptr %Q, i32 %evl)
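For reference, the declaration matched at line 7 suggests the vssl form of the VE vector-store intrinsic takes, in order: the vector data, a scalar stride in bytes, the base pointer, and the active vector length, which is consistent with the calls matched above (stride 16, ptr %Q, length %evl). A minimal standalone sketch under that assumption; the names @store_sketch, %val, and %base are illustrative and do not appear in the matched file:

define void @store_sketch(<256 x double> %val, ptr %base, i32 %evl) {
  ; Sketch: store %val to %base with a 16-byte stride, limited to %evl
  ; elements (operand order assumed from the declare matched at line 7).
  tail call void @llvm.ve.vl.vst.vssl(<256 x double> %val, i64 16, ptr %base, i32 %evl)
  ret void
}

declare void @llvm.ve.vl.vst.vssl(<256 x double>, i64, ptr, i32)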