; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -passes=constraint-elimination -S %s | FileCheck %s

declare void @llvm.assume(i1 noundef)

define i1 @gep_sub_1_sge_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_sub_1_sge_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp sge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp sge ptr [[DST_SUB_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp sge ptr [[DST_SUB_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    [[DST_SUB_4:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -4
; CHECK-NEXT:    [[CMP_SUB_4:%.*]] = icmp sge ptr [[DST_SUB_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp sge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 3
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.3, i64 -1
  %cmp.sub.1 = icmp sge ptr %dst.sub.1, %lower
  %dst.sub.3 = getelementptr inbounds i8, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp sge ptr %dst.sub.3, %lower
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.3
  %dst.sub.4 = getelementptr inbounds i8, ptr %dst.add.3, i64 -4
  %cmp.sub.4 = icmp sge ptr %dst.sub.4, %lower
  %res.2 = xor i1 %res.1, %cmp.sub.4
  ret i1 %res.2
}

define i1 @gep_sub_1_sge_only_inner_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_sub_1_sge_only_inner_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp sge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp sge ptr [[DST_SUB_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp sge ptr [[DST_SUB_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    [[DST_SUB_4:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 -4
; CHECK-NEXT:    [[CMP_SUB_4:%.*]] = icmp sge ptr [[DST_SUB_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp sge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 3
  %dst.sub.1 = getelementptr i8, ptr %dst.add.3, i64 -1
  %cmp.sub.1 = icmp sge ptr %dst.sub.1, %lower
  %dst.sub.3 = getelementptr i8, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp sge ptr %dst.sub.3, %lower
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.3
  %dst.sub.4 = getelementptr i8, ptr %dst.add.3, i64 -4
  %cmp.sub.4 = icmp sge ptr %dst.sub.4, %lower
  %res.2 = xor i1 %res.1, %cmp.sub.4
  ret i1 %res.2
}

define i1 @gep_sub_1_sge_only_outer_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_sub_1_sge_only_outer_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp sge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp sge ptr [[DST_SUB_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp sge ptr [[DST_SUB_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    [[DST_SUB_4:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -4
; CHECK-NEXT:    [[CMP_SUB_4:%.*]] = icmp sge ptr [[DST_SUB_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp sge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr i8, ptr %dst, i64 3
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.3, i64 -1
  %cmp.sub.1 = icmp sge ptr %dst.sub.1, %lower
  %dst.sub.3 = getelementptr inbounds i8, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp sge ptr %dst.sub.3, %lower
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.3
  %dst.sub.4 = getelementptr inbounds i8, ptr %dst.add.3, i64 -4
  %cmp.sub.4 = icmp sge ptr %dst.sub.4, %lower
  %res.2 = xor i1 %res.1, %cmp.sub.4
  ret i1 %res.2
}

define i1 @gep_sub_1_sge_no_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_sub_1_sge_no_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp sge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp sge ptr [[DST_SUB_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp sge ptr [[DST_SUB_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    [[DST_SUB_4:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 -4
; CHECK-NEXT:    [[CMP_SUB_4:%.*]] = icmp sge ptr [[DST_SUB_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp sge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr i8, ptr %dst, i64 3
  %dst.sub.1 = getelementptr i8, ptr %dst.add.3, i64 -1
  %cmp.sub.1 = icmp sge ptr %dst.sub.1, %lower
  %dst.sub.3 = getelementptr i8, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp sge ptr %dst.sub.3, %lower
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.3
  %dst.sub.4 = getelementptr i8, ptr %dst.add.3, i64 -4
  %cmp.sub.4 = icmp sge ptr %dst.sub.4, %lower
  %res.2 = xor i1 %res.1, %cmp.sub.4
  ret i1 %res.2
}

define i1 @gep_sub_1_slt(ptr %dst, ptr %upper) {
; CHECK-LABEL: @gep_sub_1_slt(
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i64 4
; CHECK-NEXT:    [[PRE:%.*]] = icmp slt ptr [[DST_ADD_4]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp slt ptr [[DST_SUB_3]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    ret i1 [[RES_1]]
;
  %dst.add.4 = getelementptr inbounds i8, ptr %dst, i64 4
  %pre = icmp slt ptr %dst.add.4, %upper
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 3
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.3, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  %dst.sub.3 = getelementptr inbounds i8, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp slt ptr %dst.sub.3, %upper
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.3
  ret i1 %res.1
}

define i1 @gep_sub_slt_var_idx(ptr %dst, ptr %upper, i8 %idx) {
; CHECK-LABEL: @gep_sub_slt_var_idx(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[IDX:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[PRE:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER]]
; CHECK-NEXT:    [[DST_SUB_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 -2
; CHECK-NEXT:    [[CMP_SUB_2:%.*]] = icmp slt ptr [[DST_SUB_2]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_2]]
; CHECK-NEXT:    [[DST_SUB_1_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_SUB_1]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1_SUB_1]], [[UPPER]]
; CHECK-NEXT:    [[CMP_SUB_1_SUB_1_EQ:%.*]] = icmp eq ptr [[DST_SUB_1_SUB_1]], [[DST_SUB_2]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_1_SUB_1]]
; CHECK-NEXT:    [[RES_3:%.*]] = xor i1 [[RES_2]], [[CMP_SUB_1_SUB_1_EQ]]
; CHECK-NEXT:    ret i1 [[RES_3]]
;
  %not.zero = icmp ne i8 %idx, 0
  call void @llvm.assume(i1 %not.zero)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %pre = icmp slt ptr %dst.add.idx, %upper
  call void @llvm.assume(i1 %pre)
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.idx, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  %dst.sub.2 = getelementptr inbounds i8, ptr %dst.add.idx, i64 -2
  %cmp.sub.2 = icmp slt ptr %dst.sub.2, %upper
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.2
  %dst.sub.1.sub.1 = getelementptr inbounds i8, ptr %dst.sub.1, i64 -1
  %cmp.sub.1.sub.1 = icmp slt ptr %dst.sub.1.sub.1, %upper
  %cmp.sub.1.sub.1.eq = icmp eq ptr %dst.sub.1.sub.1, %dst.sub.2
  %res.2 = xor i1 %res.1, %cmp.sub.1.sub.1
  %res.3 = xor i1 %res.2, %cmp.sub.1.sub.1.eq
  ret i1 %res.3
}

define i1 @gep_sub_slt_var_idx_sgt_1(ptr %dst, ptr %upper, i8 %idx) {
; CHECK-LABEL: @gep_sub_slt_var_idx_sgt_1(
; CHECK-NEXT:    [[SGT_1:%.*]] = icmp sgt i8 [[IDX:%.*]], 1
; CHECK-NEXT:    call void @llvm.assume(i1 [[SGT_1]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[PRE:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER]]
; CHECK-NEXT:    [[DST_SUB_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 -2
; CHECK-NEXT:    [[CMP_SUB_2:%.*]] = icmp slt ptr [[DST_SUB_2]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_2]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp slt ptr [[DST_SUB_3]], [[UPPER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %sgt.1 = icmp sgt i8 %idx, 1
  call void @llvm.assume(i1 %sgt.1)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %pre = icmp slt ptr %dst.add.idx, %upper
  call void @llvm.assume(i1 %pre)
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.idx, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  %dst.sub.2 = getelementptr inbounds i8, ptr %dst.add.idx, i64 -2
  %cmp.sub.2 = icmp slt ptr %dst.sub.2, %upper
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.2
  %dst.sub.3 = getelementptr inbounds i8, ptr %dst.add.idx, i64 -3
  %cmp.sub.3 = icmp slt ptr %dst.sub.3, %upper
  %res.2 = xor i1 %res.1, %cmp.sub.3
  ret i1 %res.2
}

define i1 @gep_sub_1_slt_var_idx_inbounds(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_sub_1_slt_var_idx_inbounds(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_LEN]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_SUB_1]])
; CHECK-NEXT:    [[CMP_IDX_SLT_LEN:%.*]] = icmp slt i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_SLT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[CMP_IDX:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER]]
; CHECK-NEXT:    ret i1 [[CMP_IDX]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.len, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  call void @llvm.assume(i1 %cmp.sub.1)
  %cmp.idx.slt.len = icmp slt i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.slt.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %cmp.idx = icmp slt ptr %dst.add.idx, %upper
  ret i1 %cmp.idx
}

define i1 @gep_sub_1_slt_var_idx_only_inner_inbounds(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_sub_1_slt_var_idx_only_inner_inbounds(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr i8, ptr [[DST_ADD_LEN]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_SUB_1]])
; CHECK-NEXT:    [[CMP_IDX_SLT_LEN:%.*]] = icmp slt i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_SLT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[CMP_IDX:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER]]
; CHECK-NEXT:    ret i1 [[CMP_IDX]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.sub.1 = getelementptr i8, ptr %dst.add.len, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  call void @llvm.assume(i1 %cmp.sub.1)
  %cmp.idx.slt.len = icmp slt i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.slt.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %cmp.idx = icmp slt ptr %dst.add.idx, %upper
  ret i1 %cmp.idx
}

define i1 @gep_sub_1_slt_var_idx_no_inbounds(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_sub_1_slt_var_idx_no_inbounds(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr i8, ptr [[DST_ADD_LEN]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_SUB_1]])
; CHECK-NEXT:    [[CMP_IDX_SLT_LEN:%.*]] = icmp slt i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_SLT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[CMP_IDX:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER]]
; CHECK-NEXT:    ret i1 [[CMP_IDX]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr i8, ptr %dst, i16 %len.ext
  %dst.sub.1 = getelementptr i8, ptr %dst.add.len, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  call void @llvm.assume(i1 %cmp.sub.1)
  %cmp.idx.slt.len = icmp slt i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.slt.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr i8, ptr %dst, i16 %idx.ext
  %cmp.idx = icmp slt ptr %dst.add.idx, %upper
  ret i1 %cmp.idx
}

define i1 @gep_sub_2_slt_var_idx(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_sub_2_slt_var_idx(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_LEN]], i64 -2
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_SUB_1]])
; CHECK-NEXT:    [[CMP_IDX_SLT_LEN:%.*]] = icmp slt i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_SLT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[CMP_IDX:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER]]
; CHECK-NEXT:    ret i1 [[CMP_IDX]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.len, i64 -2
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  call void @llvm.assume(i1 %cmp.sub.1)
  %cmp.idx.slt.len = icmp slt i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.slt.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %cmp.idx = icmp slt ptr %dst.add.idx, %upper
  ret i1 %cmp.idx
}

define i1 @gep_sub_2_slt_var_idx_inbounds_len_sge_2(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_sub_2_slt_var_idx_inbounds_len_sge_2(
; CHECK-NEXT:    [[SGE_2:%.*]] = icmp sge i8 [[LEN:%.*]], 2
; CHECK-NEXT:    call void @llvm.assume(i1 [[SGE_2]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_LEN]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_SUB_1]])
; CHECK-NEXT:    [[CMP_IDX_SLT_LEN:%.*]] = icmp slt i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_SLT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[CMP_IDX:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER]]
; CHECK-NEXT:    ret i1 [[CMP_IDX]]
;
  %sge.2 = icmp sge i8 %len, 2
  call void @llvm.assume(i1 %sge.2)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.len, i64 -1
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  call void @llvm.assume(i1 %cmp.sub.1)
  %cmp.idx.slt.len = icmp slt i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.slt.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %cmp.idx = icmp slt ptr %dst.add.idx, %upper
  ret i1 %cmp.idx
}

define i1 @gep_sub_slt_var_idx_len_sgt_1(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_sub_slt_var_idx_len_sgt_1(
; CHECK-NEXT:    [[SGT_1:%.*]] = icmp sgt i8 [[LEN:%.*]], 1
; CHECK-NEXT:    call void @llvm.assume(i1 [[SGT_1]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_LEN]], i64 -2
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp slt ptr [[DST_SUB_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_SUB_1]])
; CHECK-NEXT:    [[CMP_IDX_SLT_LEN:%.*]] = icmp slt i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_SLT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[CMP_IDX:%.*]] = icmp slt ptr [[DST_ADD_IDX]], [[UPPER]]
; CHECK-NEXT:    ret i1 [[CMP_IDX]]
;
  %sgt.1 = icmp sgt i8 %len, 1
  call void @llvm.assume(i1 %sgt.1)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.sub.1 = getelementptr inbounds i8, ptr %dst.add.len, i64 -2
  %cmp.sub.1 = icmp slt ptr %dst.sub.1, %upper
  call void @llvm.assume(i1 %cmp.sub.1)
  %cmp.idx.slt.len = icmp slt i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.slt.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %cmp.idx = icmp slt ptr %dst.add.idx, %upper
  ret i1 %cmp.idx
}

define i1 @gep_sub_1_slt_var_idx_lower_bound(ptr %lower, ptr %src, i8 %len) {
; CHECK-LABEL: @gep_sub_1_slt_var_idx_lower_bound(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[SRC_SGE_LOWER:%.*]] = icmp sge ptr [[SRC:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[SRC_SGE_LOWER]])
; CHECK-NEXT:    [[LEN_POS:%.*]] = icmp sge i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[LEN_POS]])
; CHECK-NEXT:    [[GEP_LEN:%.*]] = getelementptr inbounds i8, ptr [[SRC]], i8 [[LEN]]
; CHECK-NEXT:    [[GEP_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[GEP_LEN]], i8 -1
; CHECK-NEXT:    [[RES:%.*]] = icmp slt ptr [[GEP_SUB_1]], [[LOWER]]
; CHECK-NEXT:    ret i1 [[RES]]
;
entry:
  %src.sge.lower = icmp sge ptr %src, %lower
  call void @llvm.assume(i1 %src.sge.lower)

  %len.pos = icmp sge i8 %len, 0
  call void @llvm.assume(i1 %len.pos)

  %gep.len = getelementptr inbounds i8, ptr %src, i8 %len
  %gep.sub.1 = getelementptr inbounds i8, ptr %gep.len, i8 -1
  %res = icmp slt ptr %gep.sub.1, %lower
  ret i1 %res
}

define i1 @gep_sub_1_slt_var_idx_lower_bound_len_ne_0(ptr %lower, ptr %src, i8 %len) {
; CHECK-LABEL: @gep_sub_1_slt_var_idx_lower_bound_len_ne_0(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[LEN_NE_0:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[LEN_NE_0]])
; CHECK-NEXT:    [[SRC_SGE_LOWER:%.*]] = icmp sge ptr [[SRC:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[SRC_SGE_LOWER]])
; CHECK-NEXT:    [[LEN_POS:%.*]] = icmp sge i8 [[LEN]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[LEN_POS]])
; CHECK-NEXT:    [[GEP_LEN:%.*]] = getelementptr inbounds i8, ptr [[SRC]], i8 [[LEN]]
; CHECK-NEXT:    [[GEP_SUB_1:%.*]] = getelementptr inbounds i8, ptr [[GEP_LEN]], i8 -1
; CHECK-NEXT:    [[RES:%.*]] = icmp slt ptr [[GEP_SUB_1]], [[LOWER]]
; CHECK-NEXT:    ret i1 [[RES]]
;
entry:
  %len.ne.0 = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %len.ne.0)

  %src.sge.lower = icmp sge ptr %src, %lower
  call void @llvm.assume(i1 %src.sge.lower)

  %len.pos = icmp sge i8 %len, 0
  call void @llvm.assume(i1 %len.pos)

  %gep.len = getelementptr inbounds i8, ptr %src, i8 %len
  %gep.sub.1 = getelementptr inbounds i8, ptr %gep.len, i8 -1
  %res = icmp slt ptr %gep.sub.1, %lower
  ret i1 %res
}

define i1 @gep_sub_2_slt_var_idx_lower_bound_len_ne_0(ptr %lower, ptr %src, i8 %len) {
; CHECK-LABEL: @gep_sub_2_slt_var_idx_lower_bound_len_ne_0(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[LEN_NE_0:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[LEN_NE_0]])
; CHECK-NEXT:    [[SRC_SGE_LOWER:%.*]] = icmp sge ptr [[SRC:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[SRC_SGE_LOWER]])
; CHECK-NEXT:    [[LEN_POS:%.*]] = icmp sge i8 [[LEN]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[LEN_POS]])
; CHECK-NEXT:    [[GEP_LEN:%.*]] = getelementptr inbounds i8, ptr [[SRC]], i8 [[LEN]]
; CHECK-NEXT:    [[GEP_SUB_2:%.*]] = getelementptr inbounds i8, ptr [[GEP_LEN]], i8 -2
; CHECK-NEXT:    [[RES:%.*]] = icmp slt ptr [[GEP_SUB_2]], [[LOWER]]
; CHECK-NEXT:    ret i1 [[RES]]
;
entry:
  %len.ne.0 = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %len.ne.0)

  %src.sge.lower = icmp sge ptr %src, %lower
  call void @llvm.assume(i1 %src.sge.lower)

  %len.pos = icmp sge i8 %len, 0
  call void @llvm.assume(i1 %len.pos)

  %gep.len = getelementptr inbounds i8, ptr %src, i8 %len
  %gep.sub.2 = getelementptr inbounds i8, ptr %gep.len, i8 -2
  %res = icmp slt ptr %gep.sub.2, %lower
  ret i1 %res
}

define i1 @gep_i16_sub_1_sge_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_i16_sub_1_sge_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp sge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i16, ptr [[DST_ADD_3]], i64 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp sle ptr [[DST_SUB_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_SUB_2:%.*]] = getelementptr inbounds i16, ptr [[DST_ADD_3]], i64 -2
; CHECK-NEXT:    [[CMP_SUB_2:%.*]] = icmp sle ptr [[DST_SUB_2]], [[DST]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_2]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr inbounds i16, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp sle ptr [[DST_SUB_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp sge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 3
  %dst.sub.1 = getelementptr inbounds i16, ptr %dst.add.3, i64 -1
  %cmp.sub.1 = icmp sle ptr %dst.sub.1, %lower
  %dst.sub.2 = getelementptr inbounds i16, ptr %dst.add.3, i64 -2
  %cmp.sub.2 = icmp sle ptr %dst.sub.2, %dst
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.2
  %dst.sub.3 = getelementptr inbounds i16, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp sle ptr %dst.sub.3, %lower
  %res.2 = xor i1 %res.1, %cmp.sub.3
  ret i1 %res.2
}

define i1 @gep_i16_sub_1_sge_inbounds_var_idx(ptr %dst, i64 %off) {
; CHECK-LABEL: @gep_i16_sub_1_sge_inbounds_var_idx(
; CHECK-NEXT:    [[OFF_SGE:%.*]] = icmp sge i64 [[OFF:%.*]], 1
; CHECK-NEXT:    call void @llvm.assume(i1 [[OFF_SGE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i64 [[OFF]]
; CHECK-NEXT:    [[DST_SUB_1:%.*]] = getelementptr inbounds i16, ptr [[DST_ADD_3]], i32 -1
; CHECK-NEXT:    [[CMP_SUB_1:%.*]] = icmp sle ptr [[DST_SUB_1]], [[DST]]
; CHECK-NEXT:    [[DST_SUB_2:%.*]] = getelementptr inbounds i16, ptr [[DST_ADD_3]], i64 -2
; CHECK-NEXT:    [[CMP_SUB_2:%.*]] = icmp sle ptr [[DST_SUB_2]], [[DST]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_SUB_1]], [[CMP_SUB_2]]
; CHECK-NEXT:    [[DST_SUB_3:%.*]] = getelementptr inbounds i16, ptr [[DST_ADD_3]], i64 -3
; CHECK-NEXT:    [[CMP_SUB_3:%.*]] = icmp sle ptr [[DST_SUB_3]], [[DST]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_SUB_3]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %off.sge = icmp sge i64 %off, 1
  call void @llvm.assume(i1 %off.sge)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 %off
  %dst.sub.1 = getelementptr inbounds i16, ptr %dst.add.3, i32 -1
  %cmp.sub.1 = icmp sle ptr %dst.sub.1, %dst
  %dst.sub.2 = getelementptr inbounds i16, ptr %dst.add.3, i64 -2
  %cmp.sub.2 = icmp sle ptr %dst.sub.2, %dst
  %res.1 = xor i1 %cmp.sub.1, %cmp.sub.2
  %dst.sub.3 = getelementptr inbounds i16, ptr %dst.add.3, i64 -3
  %cmp.sub.3 = icmp sle ptr %dst.sub.3, %dst
  %res.2 = xor i1 %res.1, %cmp.sub.3
  ret i1 %res.2
}