; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64 -mattr=+ls64 -verify-machineinstrs -o - %s | FileCheck %s
; RUN: llc -mtriple=aarch64_be -mattr=+ls64 -verify-machineinstrs -o - %s | FileCheck %s

define void @test_ld64b(ptr %out, ptr %addr) {
; CHECK-LABEL: test_ld64b:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    ld64b x2, [x1]
; CHECK-NEXT:    stp x8, x9, [x0, #48]
; CHECK-NEXT:    stp x6, x7, [x0, #32]
; CHECK-NEXT:    stp x4, x5, [x0, #16]
; CHECK-NEXT:    stp x2, x3, [x0]
; CHECK-NEXT:    ret
entry:
  %val = tail call { i64, i64, i64, i64, i64, i64, i64, i64 } @llvm.aarch64.ld64b(ptr %addr)
  store { i64, i64, i64, i64, i64, i64, i64, i64 } %val, ptr %out, align 8
  ret void
}

define void @test_st64b(ptr %in, ptr %addr) {
; CHECK-LABEL: test_st64b:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    ldp x8, x9, [x0, #48]
; CHECK-NEXT:    ldp x6, x7, [x0, #32]
; CHECK-NEXT:    ldp x4, x5, [x0, #16]
; CHECK-NEXT:    ldp x2, x3, [x0]
; CHECK-NEXT:    st64b x2, [x1]
; CHECK-NEXT:    ret
entry:
  %val = load { i64, i64, i64, i64, i64, i64, i64, i64 }, ptr %in, align 8
  %v0 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 0
  %v1 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 1
  %v2 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 2
  %v3 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 3
  %v4 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 4
  %v5 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 5
  %v6 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 6
  %v7 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 7
  tail call void @llvm.aarch64.st64b(ptr %addr, i64 %v0, i64 %v1, i64 %v2, i64 %v3, i64 %v4, i64 %v5, i64 %v6, i64 %v7)
  ret void
}

define i64 @test_st64bv(ptr %in, ptr %addr) {
; CHECK-LABEL: test_st64bv:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    ldp x8, x9, [x0, #48]
; CHECK-NEXT:    ldp x6, x7, [x0, #32]
; CHECK-NEXT:    ldp x4, x5, [x0, #16]
; CHECK-NEXT:    ldp x2, x3, [x0]
; CHECK-NEXT:    st64bv x0, x2, [x1]
; CHECK-NEXT:    ret
entry:
  %val = load { i64, i64, i64, i64, i64, i64, i64, i64 }, ptr %in, align 8
  %v0 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 0
  %v1 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 1
  %v2 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 2
  %v3 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 3
  %v4 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 4
  %v5 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 5
  %v6 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 6
  %v7 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 7
  %status = tail call i64 @llvm.aarch64.st64bv(ptr %addr, i64 %v0, i64 %v1, i64 %v2, i64 %v3, i64 %v4, i64 %v5, i64 %v6, i64 %v7)
  ret i64 %status
}

define i64 @test_st64bv0(ptr %in, ptr %addr) {
; CHECK-LABEL: test_st64bv0:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    ldp x8, x9, [x0, #48]
; CHECK-NEXT:    ldp x6, x7, [x0, #32]
; CHECK-NEXT:    ldp x4, x5, [x0, #16]
; CHECK-NEXT:    ldp x2, x3, [x0]
; CHECK-NEXT:    st64bv0 x0, x2, [x1]
; CHECK-NEXT:    ret
entry:
  %val = load { i64, i64, i64, i64, i64, i64, i64, i64 }, ptr %in, align 8
  %v0 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 0
  %v1 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 1
  %v2 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 2
  %v3 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 3
  %v4 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 4
  %v5 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 5
  %v6 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 6
  %v7 = extractvalue { i64, i64, i64, i64, i64, i64, i64, i64 } %val, 7
  %status = tail call i64 @llvm.aarch64.st64bv0(ptr %addr, i64 %v0, i64 %v1, i64 %v2, i64 %v3, i64 %v4, i64 %v5, i64 %v6, i64 %v7)
  ret i64 %status
}

declare { i64, i64, i64, i64, i64, i64, i64, i64 } @llvm.aarch64.ld64b(ptr)
declare void @llvm.aarch64.st64b(ptr, i64, i64, i64, i64, i64, i64, i64, i64)
declare i64 @llvm.aarch64.st64bv(ptr, i64, i64, i64, i64, i64, i64, i64, i64)
declare i64 @llvm.aarch64.st64bv0(ptr, i64, i64, i64, i64, i64, i64, i64, i64)