; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -passes=verify,iroutliner -ir-outlining-no-cost -no-ir-sim-intrinsics < %s | FileCheck %s

; This test ensures that we do not outline vararg instructions or intrinsics, as
; they may cause inconsistencies when outlining.

declare void @llvm.va_start(ptr)
declare void @llvm.va_copy(ptr, ptr)
declare void @llvm.va_end(ptr)

define i32 @func1(i32 %a, double %b, ptr %v, ...) nounwind {
; CHECK-LABEL: @func1(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP_LOC:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A_ADDR:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[B_ADDR:%.*]] = alloca double, align 8
; CHECK-NEXT:    [[AP:%.*]] = alloca ptr, align 4
; CHECK-NEXT:    [[C:%.*]] = alloca i32, align 4
; CHECK-NEXT:    call void @outlined_ir_func_0(i32 [[A:%.*]], ptr [[A_ADDR]], double [[B:%.*]], ptr [[B_ADDR]])
; CHECK-NEXT:    call void @llvm.va_start.p0(ptr [[AP]])
; CHECK-NEXT:    [[TMP0:%.*]] = va_arg ptr [[AP]], i32
; CHECK-NEXT:    call void @llvm.va_copy.p0(ptr [[V:%.*]], ptr [[AP]])
; CHECK-NEXT:    call void @llvm.va_end.p0(ptr [[AP]])
; CHECK-NEXT:    call void @llvm.lifetime.start.p0(i64 -1, ptr [[TMP_LOC]])
; CHECK-NEXT:    call void @outlined_ir_func_1(i32 [[TMP0]], ptr [[C]], ptr [[TMP_LOC]])
; CHECK-NEXT:    [[TMP_RELOAD:%.*]] = load i32, ptr [[TMP_LOC]], align 4
; CHECK-NEXT:    call void @llvm.lifetime.end.p0(i64 -1, ptr [[TMP_LOC]])
; CHECK-NEXT:    ret i32 [[TMP_RELOAD]]
;
entry:
  %a.addr = alloca i32, align 4
  %b.addr = alloca double, align 8
  %ap = alloca ptr, align 4
  %c = alloca i32, align 4
  store i32 %a, ptr %a.addr, align 4
  store double %b, ptr %b.addr, align 8
  call void @llvm.va_start(ptr %ap)
  %0 = va_arg ptr %ap, i32
  call void @llvm.va_copy(ptr %v, ptr %ap)
  call void @llvm.va_end(ptr %ap)
  store i32 %0, ptr %c, align 4
  %tmp = load i32, ptr %c, align 4
  ret i32 %tmp
}

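; func2 below is an identical copy of func1, so the outliner has two matching
; regions to work with. As the CHECK lines show, the stores of %a and %b before
; va_start are extracted into @outlined_ir_func_0 and the store/load through %c
; after va_end into @outlined_ir_func_1, while the va_start, va_arg, va_copy,
; and va_end operations themselves remain in place in both functions.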
define i32 @func2(i32 %a, double %b, ptr %v, ...) nounwind {
; CHECK-LABEL: @func2(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP_LOC:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A_ADDR:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[B_ADDR:%.*]] = alloca double, align 8
; CHECK-NEXT:    [[AP:%.*]] = alloca ptr, align 4
; CHECK-NEXT:    [[C:%.*]] = alloca i32, align 4
; CHECK-NEXT:    call void @outlined_ir_func_0(i32 [[A:%.*]], ptr [[A_ADDR]], double [[B:%.*]], ptr [[B_ADDR]])
; CHECK-NEXT:    call void @llvm.va_start.p0(ptr [[AP]])
; CHECK-NEXT:    [[TMP0:%.*]] = va_arg ptr [[AP]], i32
; CHECK-NEXT:    call void @llvm.va_copy.p0(ptr [[V:%.*]], ptr [[AP]])
; CHECK-NEXT:    call void @llvm.va_end.p0(ptr [[AP]])
; CHECK-NEXT:    call void @llvm.lifetime.start.p0(i64 -1, ptr [[TMP_LOC]])
; CHECK-NEXT:    call void @outlined_ir_func_1(i32 [[TMP0]], ptr [[C]], ptr [[TMP_LOC]])
; CHECK-NEXT:    [[TMP_RELOAD:%.*]] = load i32, ptr [[TMP_LOC]], align 4
; CHECK-NEXT:    call void @llvm.lifetime.end.p0(i64 -1, ptr [[TMP_LOC]])
; CHECK-NEXT:    ret i32 [[TMP_RELOAD]]
;
entry:
  %a.addr = alloca i32, align 4
  %b.addr = alloca double, align 8
  %ap = alloca ptr, align 4
  %c = alloca i32, align 4
  store i32 %a, ptr %a.addr, align 4
  store double %b, ptr %b.addr, align 8
  call void @llvm.va_start(ptr %ap)
  %0 = va_arg ptr %ap, i32
  call void @llvm.va_copy(ptr %v, ptr %ap)
  call void @llvm.va_end(ptr %ap)
  store i32 %0, ptr %c, align 4
  %tmp = load i32, ptr %c, align 4
  ret i32 %tmp
}