; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve < %s | FileCheck %s

; LD1B

define <vscale x 16 x i8> @ld1b_lower_bound(ptr %a) {
; CHECK-LABEL: ld1b_lower_bound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    ld1b { z0.b }, p0/z, [x0, #-8, mul vl]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 16 x i8>, ptr %a, i64 -8
  %load = load <vscale x 16 x i8>, ptr %base
  ret <vscale x 16 x i8> %load
}

define <vscale x 16 x i8> @ld1b_inbound(ptr %a) {
; CHECK-LABEL: ld1b_inbound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    ld1b { z0.b }, p0/z, [x0, #2, mul vl]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 16 x i8>, ptr %a, i64 2
  %load = load <vscale x 16 x i8>, ptr %base
  ret <vscale x 16 x i8> %load
}

define <vscale x 16 x i8> @ld1b_upper_bound(ptr %a) {
; CHECK-LABEL: ld1b_upper_bound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    ld1b { z0.b }, p0/z, [x0, #7, mul vl]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 16 x i8>, ptr %a, i64 7
  %load = load <vscale x 16 x i8>, ptr %base
  ret <vscale x 16 x i8> %load
}

define <vscale x 16 x i8> @ld1b_out_of_upper_bound(ptr %a) {
; CHECK-LABEL: ld1b_out_of_upper_bound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    rdvl x8, #8
; CHECK-NEXT:    ld1b { z0.b }, p0/z, [x0, x8]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 16 x i8>, ptr %a, i64 8
  %load = load <vscale x 16 x i8>, ptr %base
  ret <vscale x 16 x i8> %load
}

define <vscale x 16 x i8> @ld1b_out_of_lower_bound(ptr %a) {
; CHECK-LABEL: ld1b_out_of_lower_bound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    rdvl x8, #-9
; CHECK-NEXT:    ld1b { z0.b }, p0/z, [x0, x8]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 16 x i8>, ptr %a, i64 -9
  %load = load <vscale x 16 x i8>, ptr %base
  ret <vscale x 16 x i8> %load
}

; LD1H

define <vscale x 8 x i16> @ld1h_inbound(ptr %a) {
; CHECK-LABEL: ld1h_inbound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    ld1h { z0.h }, p0/z, [x0, #-2, mul vl]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 8 x i16>, ptr %a, i64 -2
  %load = load <vscale x 8 x i16>, ptr %base
  ret <vscale x 8 x i16> %load
}

; LD1W

define <vscale x 4 x i32> @ld1s_inbound(ptr %a) {
; CHECK-LABEL: ld1s_inbound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    ld1w { z0.s }, p0/z, [x0, #4, mul vl]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 4 x i32>, ptr %a, i64 4
  %load = load <vscale x 4 x i32>, ptr %base
  ret <vscale x 4 x i32> %load
}

; LD1D

define <vscale x 2 x i64> @ld1d_inbound(ptr %a) {
; CHECK-LABEL: ld1d_inbound:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    ld1d { z0.d }, p0/z, [x0, #6, mul vl]
; CHECK-NEXT:    ret
  %base = getelementptr <vscale x 2 x i64>, ptr %a, i64 6
  %load = load <vscale x 2 x i64>, ptr %base
  ret <vscale x 2 x i64> %load
}

define void @load_nxv6f16(ptr %a) {
; CHECK-LABEL: load_nxv6f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    ptrue p1.s
; CHECK-NEXT:    ld1h { z0.d }, p0/z, [x0, #2, mul vl]
; CHECK-NEXT:    ld1h { z0.s }, p1/z, [x0]
; CHECK-NEXT:    ret
  %val = load volatile <vscale x 6 x half>, ptr %a
  ret void
}

define void @load_nxv6f32(ptr %a) {
; CHECK-LABEL: load_nxv6f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    ptrue p1.s
; CHECK-NEXT:    ld1w { z0.d }, p0/z, [x0, #2, mul vl]
; CHECK-NEXT:    ld1w { z0.s }, p1/z, [x0]
; CHECK-NEXT:    ret
  %val = load volatile <vscale x 6 x float>, ptr %a
  ret void
}

define void @load_nxv12f16(ptr %a) {
; CHECK-LABEL: load_nxv12f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    ptrue p1.h
; CHECK-NEXT:    ld1h { z0.s }, p0/z, [x0, #2, mul vl]
; CHECK-NEXT:    ld1h { z0.h }, p1/z, [x0]
; CHECK-NEXT:    ret
  %val = load volatile <vscale x 12 x half>, ptr %a
  ret void
}