; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=corei7 | FileCheck %s

; Make sure XMM0 (return register) and R11 are saved before the call
declare preserve_mostcc double @foo_double(i64, i64)
define void @preserve_mostcc1() nounwind {
entry:
;CHECK-LABEL: preserve_mostcc1
;CHECK: movq %r11, [[REG1:%[a-z0-9]+]]
;CHECK: movaps %xmm0, [[REG2:[-0-9]*\(%r[sb]p\)]]
;CHECK: call
;CHECK: movq [[REG1]], %r11
;CHECK: movaps [[REG2]], %xmm0
  %a0 = call i64 asm sideeffect "", "={rax}"() nounwind
  %a1 = call i64 asm sideeffect "", "={rcx}"() nounwind
  %a2 = call i64 asm sideeffect "", "={rdx}"() nounwind
  %a3 = call i64 asm sideeffect "", "={r8}"() nounwind
  %a4 = call i64 asm sideeffect "", "={r9}"() nounwind
  %a5 = call i64 asm sideeffect "", "={r10}"() nounwind
  %a6 = call i64 asm sideeffect "", "={r11}"() nounwind
  %a10 = call <2 x double> asm sideeffect "", "={xmm0}"() nounwind
  %a11 = call <2 x double> asm sideeffect "", "={xmm1}"() nounwind
  %a12 = call <2 x double> asm sideeffect "", "={xmm2}"() nounwind
  %a13 = call <2 x double> asm sideeffect "", "={xmm3}"() nounwind
  %a14 = call <2 x double> asm sideeffect "", "={xmm4}"() nounwind
  %a15 = call <2 x double> asm sideeffect "", "={xmm5}"() nounwind
  %a16 = call <2 x double> asm sideeffect "", "={xmm6}"() nounwind
  %a17 = call <2 x double> asm sideeffect "", "={xmm7}"() nounwind
  %a18 = call <2 x double> asm sideeffect "", "={xmm8}"() nounwind
  %a19 = call <2 x double> asm sideeffect "", "={xmm9}"() nounwind
  %a20 = call <2 x double> asm sideeffect "", "={xmm10}"() nounwind
  %a21 = call <2 x double> asm sideeffect "", "={xmm11}"() nounwind
  %a22 = call <2 x double> asm sideeffect "", "={xmm12}"() nounwind
  %a23 = call <2 x double> asm sideeffect "", "={xmm13}"() nounwind
  %a24 = call <2 x double> asm sideeffect "", "={xmm14}"() nounwind
  %a25 = call <2 x double> asm sideeffect "", "={xmm15}"() nounwind
  call preserve_mostcc double @foo_double(i64 1, i64 2)
  call void asm sideeffect "", "{rax},{rcx},{rdx},{r8},{r9},{r10},{r11},{xmm0},{xmm1},{xmm2},{xmm3},{xmm4},{xmm5},{xmm6},{xmm7},{xmm8},{xmm9},{xmm10},{xmm11},{xmm12},{xmm13},{xmm14},{xmm15}"(i64 %a0, i64 %a1, i64 %a2, i64 %a3, i64 %a4, i64 %a5, i64 %a6, <2 x double> %a10, <2 x double> %a11, <2 x double> %a12, <2 x double> %a13, <2 x double> %a14, <2 x double> %a15, <2 x double> %a16, <2 x double> %a17, <2 x double> %a18, <2 x double> %a19, <2 x double> %a20, <2 x double> %a21, <2 x double> %a22, <2 x double> %a23, <2 x double> %a24, <2 x double> %a25)
  ret void
}