; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+usermsr | FileCheck %s --check-prefixes=X64
; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+usermsr,+egpr | FileCheck %s --check-prefixes=EGPR

define i64 @test_int_x86_urdmsr(i64 %A) nounwind {
; X64-LABEL: test_int_x86_urdmsr:
; X64:       # %bb.0:
; X64-NEXT:    urdmsr %rdi, %rax # encoding: [0xf2,0x0f,0x38,0xf8,0xc7]
; X64-NEXT:    retq # encoding: [0xc3]
;
; EGPR-LABEL: test_int_x86_urdmsr:
; EGPR:       # %bb.0:
; EGPR-NEXT:    urdmsr %rdi, %rax # EVEX TO LEGACY Compression encoding: [0xf2,0x0f,0x38,0xf8,0xc7]
; EGPR-NEXT:    retq # encoding: [0xc3]
  %ret = call i64 @llvm.x86.urdmsr(i64 %A)
  ret i64 %ret
}

define i64 @test_int_x86_urdmsr_const() nounwind {
; X64-LABEL: test_int_x86_urdmsr_const:
; X64:       # %bb.0:
; X64-NEXT:    urdmsr $123, %rax # encoding: [0xc4,0xe7,0x7b,0xf8,0xc0,0x7b,0x00,0x00,0x00]
; X64-NEXT:    retq # encoding: [0xc3]
;
; EGPR-LABEL: test_int_x86_urdmsr_const:
; EGPR:       # %bb.0:
; EGPR-NEXT:    urdmsr $123, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe7,0x7b,0xf8,0xc0,0x7b,0x00,0x00,0x00]
; EGPR-NEXT:    retq # encoding: [0xc3]
  %ret = call i64 @llvm.x86.urdmsr(i64 123)
  ret i64 %ret
}

define i64 @test_int_x86_urdmsr_const_i64() nounwind {
; X64-LABEL: test_int_x86_urdmsr_const_i64:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $8589934591, %rax # encoding: [0x48,0xb8,0xff,0xff,0xff,0xff,0x01,0x00,0x00,0x00]
; X64-NEXT:    # imm = 0x1FFFFFFFF
; X64-NEXT:    urdmsr %rax, %rax # encoding: [0xf2,0x0f,0x38,0xf8,0xc0]
; X64-NEXT:    retq # encoding: [0xc3]
;
; EGPR-LABEL: test_int_x86_urdmsr_const_i64:
; EGPR:       # %bb.0:
; EGPR-NEXT:    movabsq $8589934591, %rax # encoding: [0x48,0xb8,0xff,0xff,0xff,0xff,0x01,0x00,0x00,0x00]
; EGPR-NEXT:    # imm = 0x1FFFFFFFF
; EGPR-NEXT:    urdmsr %rax, %rax # EVEX TO LEGACY Compression encoding: [0xf2,0x0f,0x38,0xf8,0xc0]
; EGPR-NEXT:    retq # encoding: [0xc3]
  %ret = call i64 @llvm.x86.urdmsr(i64 8589934591)
  ret i64 %ret
}

declare i64 @llvm.x86.urdmsr(i64 %A)

define void @test_int_x86_uwrmsr(i64 %A, i64 %B) nounwind {
; X64-LABEL: test_int_x86_uwrmsr:
; X64:       # %bb.0:
; X64-NEXT:    uwrmsr %rsi, %rdi # encoding: [0xf3,0x0f,0x38,0xf8,0xfe]
; X64-NEXT:    retq # encoding: [0xc3]
;
; EGPR-LABEL: test_int_x86_uwrmsr:
; EGPR:       # %bb.0:
; EGPR-NEXT:    uwrmsr %rsi, %rdi # EVEX TO LEGACY Compression encoding: [0xf3,0x0f,0x38,0xf8,0xfe]
; EGPR-NEXT:    retq # encoding: [0xc3]
  call void @llvm.x86.uwrmsr(i64 %A, i64 %B)
  ret void
}

define void @test_int_x86_uwrmsr_const(i64 %A) nounwind {
; X64-LABEL: test_int_x86_uwrmsr_const:
; X64:       # %bb.0:
; X64-NEXT:    uwrmsr %rdi, $123 # encoding: [0xc4,0xe7,0x7a,0xf8,0xc7,0x7b,0x00,0x00,0x00]
; X64-NEXT:    retq # encoding: [0xc3]
;
; EGPR-LABEL: test_int_x86_uwrmsr_const:
; EGPR:       # %bb.0:
; EGPR-NEXT:    uwrmsr %rdi, $123 # EVEX TO VEX Compression encoding: [0xc4,0xe7,0x7a,0xf8,0xc7,0x7b,0x00,0x00,0x00]
; EGPR-NEXT:    retq # encoding: [0xc3]
  call void @llvm.x86.uwrmsr(i64 123, i64 %A)
  ret void
}

define void @test_int_x86_uwrmsr_const_i64(i64 %A) nounwind {
; X64-LABEL: test_int_x86_uwrmsr_const_i64:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $8589934591, %rax # encoding: [0x48,0xb8,0xff,0xff,0xff,0xff,0x01,0x00,0x00,0x00]
; X64-NEXT:    # imm = 0x1FFFFFFFF
; X64-NEXT:    uwrmsr %rdi, %rax # encoding: [0xf3,0x0f,0x38,0xf8,0xc7]
; X64-NEXT:    retq # encoding: [0xc3]
;
; EGPR-LABEL: test_int_x86_uwrmsr_const_i64:
; EGPR:       # %bb.0:
; EGPR-NEXT:    movabsq $8589934591, %rax # encoding: [0x48,0xb8,0xff,0xff,0xff,0xff,0x01,0x00,0x00,0x00]
; EGPR-NEXT:    # imm = 0x1FFFFFFFF
; EGPR-NEXT:    uwrmsr %rdi, %rax # EVEX TO LEGACY Compression encoding: [0xf3,0x0f,0x38,0xf8,0xc7]
; EGPR-NEXT:    retq # encoding: [0xc3]
  call void @llvm.x86.uwrmsr(i64 8589934591, i64 %A)
  ret void
}

declare void @llvm.x86.uwrmsr(i64 %A, i64 %B)
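
; Note (hedged summary, not autogenerated): URDMSR/UWRMSR are the user-mode
; MSR read/write instructions of the USERMSR extension. A minimal C-level
; usage sketch, assuming the _urdmsr/_uwrmsr builtins from <immintrin.h> are
; available in a -musermsr build:
;
;   unsigned long long v = _urdmsr(0x123); // lowers to llvm.x86.urdmsr -> urdmsr
;   _uwrmsr(0x123, v);                     // lowers to llvm.x86.uwrmsr -> uwrmsr
;
; As the tests above show, a constant MSR index that fits in 32 bits selects
; the immediate (VEX-encoded) form (*_const), while a wider constant such as
; 0x1FFFFFFFF is first materialized into a register with movabsq (*_const_i64).
; Under +egpr the EVEX variants are emitted but compressed back to the legacy
; or VEX encoding when no extended registers are used, per the
; "EVEX TO LEGACY/VEX Compression" annotations.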