; Verify that llc diagnoses inline-asm clobber lists that name reserved
; registers: a warning naming the register, followed by a note, for both the
; 32-bit and 64-bit PowerPC targets.
; RUN: llc < %s -verify-machineinstrs -mtriple=powerpc-unknown-unknown \
; RUN:   -mcpu=pwr7 -O0 2>&1 | FileCheck %s
; RUN: llc < %s -verify-machineinstrs -mtriple=powerpc64-unknown-unknown \
; RUN:   -mcpu=pwr7 -O0 2>&1 | FileCheck %s

; Clobbering r1 (the stack pointer) must be diagnosed.
define void @test_r1_clobber() {
entry:
  call void asm sideeffect "nop", "~{r1}"()
  ret void
}

; CHECK: warning: inline asm clobber list contains reserved registers: R1
; CHECK-NEXT: note: Reserved registers on the clobber list may not be preserved across the asm statement, and clobbering them may lead to undefined behaviour.

; Same diagnostic via the 64-bit register name x1.
define void @test_x1_clobber() {
entry:
  call void asm sideeffect "nop", "~{x1}"()
  ret void
}

; CHECK: warning: inline asm clobber list contains reserved registers: X1
; CHECK-NEXT: note: Reserved registers on the clobber list may not be preserved across the asm statement, and clobbering them may lead to undefined behaviour.

; The r31 clobber in @main below is also expected to be diagnosed.
; NOTE(review): presumably r31 is reserved here because the dynamic alloca in
; @main forces a frame/base pointer at -O0 — confirm against the PPC backend.
; CHECK: warning: inline asm clobber list contains reserved registers: R31
; CHECK-NEXT: note: Reserved registers on the clobber list may not be preserved across the asm statement, and clobbering them may lead to undefined behaviour.

@a = dso_local global i32 100, align 4

; Writes r31/r30 via inline asm, then reads r31 back after a variable-sized
; alloca; only the r31 clobber should trigger the reserved-register warning
; above (r30 draws no diagnostic here).
define dso_local signext i32 @main() {
entry:
  %retval = alloca i32, align 4
  %old = alloca i64, align 8
  store i32 0, ptr %retval, align 4
  call void asm sideeffect "li 31, 1", "~{r31}"()
  call void asm sideeffect "li 30, 1", "~{r30}"()
  %0 = call i64 asm sideeffect "mr $0, 31", "=r"()
  store i64 %0, ptr %old, align 8
  %1 = load i32, ptr @a, align 4
  %conv = sext i32 %1 to i64
  ; Dynamic alloca: requires a frame pointer, keeping r31 reserved at -O0.
  %2 = alloca i8, i64 %conv, align 16
  %3 = load i64, ptr %old, align 8
  %conv1 = trunc i64 %3 to i32
  ret i32 %conv1
}