; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S %s -passes=scalarize-masked-mem-intrin -mtriple=x86_64-linux-gnu | FileCheck %s

; Variable mask: each lane becomes a branch on its mask bit, and the store
; pointer is carried through a phi so it only advances past lanes that
; actually stored (compressstore semantics).
define void @scalarize_v2i64(ptr %p, <2 x i1> %mask, <2 x i64> %data) {
; CHECK-LABEL: @scalarize_v2i64(
; CHECK-NEXT:    [[SCALAR_MASK:%.*]] = bitcast <2 x i1> [[MASK:%.*]] to i2
; CHECK-NEXT:    [[TMP1:%.*]] = and i2 [[SCALAR_MASK]], 1
; CHECK-NEXT:    [[TMP2:%.*]] = icmp ne i2 [[TMP1]], 0
; CHECK-NEXT:    br i1 [[TMP2]], label [[COND_STORE:%.*]], label [[ELSE:%.*]]
; CHECK:       cond.store:
; CHECK-NEXT:    [[TMP3:%.*]] = extractelement <2 x i64> [[DATA:%.*]], i64 0
; CHECK-NEXT:    store i64 [[TMP3]], ptr [[P:%.*]], align 1
; CHECK-NEXT:    [[TMP4:%.*]] = getelementptr inbounds i64, ptr [[P]], i32 1
; CHECK-NEXT:    br label [[ELSE]]
; CHECK:       else:
; CHECK-NEXT:    [[PTR_PHI_ELSE:%.*]] = phi ptr [ [[TMP4]], [[COND_STORE]] ], [ [[P]], [[TMP0:%.*]] ]
; CHECK-NEXT:    [[TMP5:%.*]] = and i2 [[SCALAR_MASK]], -2
; CHECK-NEXT:    [[TMP6:%.*]] = icmp ne i2 [[TMP5]], 0
; CHECK-NEXT:    br i1 [[TMP6]], label [[COND_STORE1:%.*]], label [[ELSE2:%.*]]
; CHECK:       cond.store1:
; CHECK-NEXT:    [[TMP7:%.*]] = extractelement <2 x i64> [[DATA]], i64 1
; CHECK-NEXT:    store i64 [[TMP7]], ptr [[PTR_PHI_ELSE]], align 1
; CHECK-NEXT:    br label [[ELSE2]]
; CHECK:       else2:
; CHECK-NEXT:    ret void
;
  call void @llvm.masked.compressstore.v2i64.p0(<2 x i64> %data, ptr %p, <2 x i1> %mask)
  ret void
}

; All-ones constant mask: lowered to unconditional, consecutive scalar stores.
define void @scalarize_v2i64_ones_mask(ptr %p, <2 x i64> %data) {
; CHECK-LABEL: @scalarize_v2i64_ones_mask(
; CHECK-NEXT:    [[ELT0:%.*]] = extractelement <2 x i64> [[DATA:%.*]], i64 0
; CHECK-NEXT:    [[TMP1:%.*]] = getelementptr inbounds i64, ptr [[P:%.*]], i32 0
; CHECK-NEXT:    store i64 [[ELT0]], ptr [[TMP1]], align 1
; CHECK-NEXT:    [[ELT1:%.*]] = extractelement <2 x i64> [[DATA]], i64 1
; CHECK-NEXT:    [[TMP2:%.*]] = getelementptr inbounds i64, ptr [[P]], i32 1
; CHECK-NEXT:    store i64 [[ELT1]], ptr [[TMP2]], align 1
; CHECK-NEXT:    ret void
;
  call void @llvm.masked.compressstore.v2i64.p0(<2 x i64> %data, ptr %p, <2 x i1> <i1 true, i1 true>)
  ret void
}

; All-zeros constant mask: the intrinsic call is removed entirely.
define void @scalarize_v2i64_zero_mask(ptr %p, <2 x i64> %data) {
; CHECK-LABEL: @scalarize_v2i64_zero_mask(
; CHECK-NEXT:    ret void
;
  call void @llvm.masked.compressstore.v2i64.p0(<2 x i64> %data, ptr %p, <2 x i1> <i1 false, i1 false>)
  ret void
}

; Mixed constant mask <false, true>: only the active lane is stored, and it is
; compressed to offset 0.
define void @scalarize_v2i64_const_mask(ptr %p, <2 x i64> %data) {
; CHECK-LABEL: @scalarize_v2i64_const_mask(
; CHECK-NEXT:    [[ELT1:%.*]] = extractelement <2 x i64> [[DATA:%.*]], i64 1
; CHECK-NEXT:    [[TMP1:%.*]] = getelementptr inbounds i64, ptr [[P:%.*]], i32 0
; CHECK-NEXT:    store i64 [[ELT1]], ptr [[TMP1]], align 1
; CHECK-NEXT:    ret void
;
  call void @llvm.masked.compressstore.v2i64.p0(<2 x i64> %data, ptr %p, <2 x i1> <i1 false, i1 true>)
  ret void
}

declare void @llvm.masked.compressstore.v2i64.p0(<2 x i64>, ptr, <2 x i1>)