; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -passes=instcombine -S | FileCheck %s

; Check that assume is propagated backwards through all
; operations that are `isGuaranteedToTransferExecutionToSuccessor`
; (i.e. instructions that cannot trap, throw, or diverge). InstCombine may
; hoist the knowledge from the trailing llvm.assume past every instruction
; between it and the entry, which is why the CHECK lines below show the
; dead %dummy alloca and the lifetime/invariant markers fully removed.
define i32 @assume_inevitable(ptr %a, ptr %b, ptr %c) {
; CHECK-LABEL: @assume_inevitable(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[M:%.*]] = alloca i64, align 8
; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[A:%.*]], align 4
; CHECK-NEXT:    [[LOADRES:%.*]] = load i32, ptr [[B:%.*]], align 4
; CHECK-NEXT:    [[LOADRES2:%.*]] = call i32 @llvm.annotation.i32.p0(i32 [[LOADRES]], ptr nonnull @.str, ptr nonnull @.str1, i32 2)
; CHECK-NEXT:    store i32 [[LOADRES2]], ptr [[A]], align 4
; CHECK-NEXT:    [[DUMMY_EQ:%.*]] = icmp ugt i32 [[LOADRES]], 42
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[DUMMY_EQ]])
; CHECK-NEXT:    [[M_A:%.*]] = call ptr @llvm.ptr.annotation.p0.p0(ptr nonnull [[M]], ptr nonnull @.str, ptr nonnull @.str1, i32 2, ptr null)
; CHECK-NEXT:    [[OBJSZ:%.*]] = call i64 @llvm.objectsize.i64.p0(ptr [[C:%.*]], i1 false, i1 false, i1 false)
; CHECK-NEXT:    store i64 [[OBJSZ]], ptr [[M_A]], align 4
; CHECK-NEXT:    [[PTRINT:%.*]] = ptrtoint ptr [[A]] to i64
; CHECK-NEXT:    [[MASKEDPTR:%.*]] = and i64 [[PTRINT]], 31
; CHECK-NEXT:    [[MASKCOND:%.*]] = icmp eq i64 [[MASKEDPTR]], 0
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[MASKCOND]])
; CHECK-NEXT:    ret i32 [[TMP0]]
;
entry:
  ; %dummy exists only to feed the lifetime/invariant intrinsics below; it is
  ; otherwise unused, so InstCombine deletes it (absent from the CHECKs above).
  %dummy = alloca i8, align 4
  %m = alloca i64
  %0 = load i32, ptr %a, align 4

  ; START perform a bunch of inevitable operations
  ; Every instruction from here to the final assume is guaranteed to transfer
  ; execution to its successor (no traps/throws), so it must not block the
  ; backward propagation of the assume's information.
  %loadres = load i32, ptr %b
  %loadres2 = call i32 @llvm.annotation.i32(i32 %loadres, ptr @.str, ptr @.str1, i32 2)
  store i32 %loadres2, ptr %a

  ; An unrelated assume in the middle; it must survive (see CHECKs).
  %dummy_eq = icmp ugt i32 %loadres, 42
  tail call void @llvm.assume(i1 %dummy_eq)

  ; Lifetime and invariant markers over the dead %dummy slot — all folded away.
  call void @llvm.lifetime.start.p0(i64 1, ptr %dummy)
  %i = call ptr @llvm.invariant.start.p0(i64 1, ptr %dummy)
  call void @llvm.invariant.end.p0(ptr %i, i64 1, ptr %dummy)
  call void @llvm.lifetime.end.p0(i64 1, ptr %dummy)

  %m_a = call ptr @llvm.ptr.annotation.p0(ptr %m, ptr @.str, ptr @.str1, i32 2, ptr null)
  %objsz = call i64 @llvm.objectsize.i64.p0(ptr %c, i1 false)
  store i64 %objsz, ptr %m_a
  ; END perform a bunch of inevitable operations

  ; AND here's the assume:
  ; the ptrtoint/and/icmp triple encodes "%a is 32-byte aligned".
  %ptrint = ptrtoint ptr %a to i64
  %maskedptr = and i64 %ptrint, 31
  %maskcond = icmp eq i64 %maskedptr, 0
  tail call void @llvm.assume(i1 %maskcond)

  ret i32 %0
}

; Annotation string operands, kept in the llvm.metadata section as usual.
@.str = private unnamed_addr constant [4 x i8] c"sth\00", section "llvm.metadata"
@.str1 = private unnamed_addr constant [4 x i8] c"t.c\00", section "llvm.metadata"

declare i64 @llvm.objectsize.i64.p0(ptr, i1)
declare i32 @llvm.annotation.i32(i32, ptr, ptr, i32)
declare ptr @llvm.ptr.annotation.p0(ptr, ptr, ptr, i32, ptr)

declare void @llvm.lifetime.start.p0(i64, ptr nocapture)
declare void @llvm.lifetime.end.p0(i64, ptr nocapture)

declare ptr @llvm.invariant.start.p0(i64, ptr nocapture)
declare void @llvm.invariant.end.p0(ptr, i64, ptr nocapture)
declare void @llvm.assume(i1)