; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5
; RUN: opt -S -passes=instcombine < %s | FileCheck %s
target triple = "aarch64-unknown-linux-gnu"

define void @test_st1(ptr %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: define void @test_st1(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 16 x i8> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %b, <vscale x 16 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_st1_scatter(<vscale x 2 x i16> %data_trunc, ptr %base, <vscale x 2 x i64> %b) {
; CHECK-LABEL: define void @test_st1_scatter(
; CHECK-SAME: <vscale x 2 x i16> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %data_trunc,
                                                  <vscale x 2 x i1> zeroinitializer,
                                                  ptr %base,
                                                  <vscale x 2 x i64> %b)
  ret void
}

define void @test_st1_scatter_index(<vscale x 2 x i32> %data_trunc, ptr %base, <vscale x 2 x i64> %offsets) {
; CHECK-LABEL: define void @test_st1_scatter_index(
; CHECK-SAME: <vscale x 2 x i32> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 2 x i64> [[OFFSETS:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    call void @llvm.aarch64.sve.st1.scatter.index.nxv2i32(<vscale x 2 x i32> [[DATA_TRUNC]], <vscale x 2 x i1> zeroinitializer, ptr [[BASE]], <vscale x 2 x i64> [[OFFSETS]])
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1.scatter.index.nxv2i32(<vscale x 2 x i32> %data_trunc,
                                                        <vscale x 2 x i1> zeroinitializer,
                                                        ptr %base,
                                                        <vscale x 2 x i64> %offsets)
  ret void
}

define void @test_st1_scatter_scalar_offset(<vscale x 4 x i8> %data_trunc, <vscale x 4 x i32> %base) {
; CHECK-LABEL: define void @test_st1_scatter_scalar_offset(
; CHECK-SAME: <vscale x 4 x i8> [[DATA_TRUNC:%.*]], <vscale x 4 x i32> [[BASE:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %data_trunc,
                                                                       <vscale x 4 x i1> zeroinitializer,
                                                                       <vscale x 4 x i32> %base,
                                                                       i64 16)
  ret void
}

define void @test_st1_scatter_sxtw(<vscale x 4 x i8> %data_trunc, ptr %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: define void @test_st1_scatter_sxtw(
; CHECK-SAME: <vscale x 4 x i8> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 4 x i32> [[OFFSETS:%.*]]) {
; CHECK-NEXT:    ret void
;
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8> %data_trunc,
                                                      <vscale x 4 x i1> zeroinitializer,
                                                      ptr %base,
                                                      <vscale x 4 x i32> %offsets)
  ret void
}

define void @test_st1_scatter_sxtw_index(<vscale x 4 x i16> %data_trunc, ptr %base, <vscale x 4 x i32> %indices) {
; CHECK-LABEL: define void @test_st1_scatter_sxtw_index(
; CHECK-SAME: <vscale x 4 x i16> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 4 x i32> [[INDICES:%.*]]) {
; CHECK-NEXT:    ret void
;
  call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i16(<vscale x 4 x i16> %data_trunc,
                                                             <vscale x 4 x i1> zeroinitializer,
                                                             ptr %base,
                                                             <vscale x 4 x i32> %indices)
  ret void
}

define void @test_st1_scatter_uxtw(<vscale x 4 x i8> %data_trunc, ptr %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: define void @test_st1_scatter_uxtw(
; CHECK-SAME: <vscale x 4 x i8> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 4 x i32> [[OFFSETS:%.*]]) {
; CHECK-NEXT:    ret void
;
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %data_trunc,
                                                      <vscale x 4 x i1> zeroinitializer,
                                                      ptr %base,
                                                      <vscale x 4 x i32> %offsets)
  ret void
}

define void @test_st1_scatter_uxtw_index(<vscale x 4 x i16> %data_trunc, ptr %base, <vscale x 4 x i32> %indices) {
; CHECK-LABEL: define void @test_st1_scatter_uxtw_index(
; CHECK-SAME: <vscale x 4 x i16> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 4 x i32> [[INDICES:%.*]]) {
; CHECK-NEXT:    ret void
;
  call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i16(<vscale x 4 x i16> %data_trunc,
                                                             <vscale x 4 x i1> zeroinitializer,
                                                             ptr %base,
                                                             <vscale x 4 x i32> %indices)
  ret void
}

define void @test_st1dq(<vscale x 2 x i64> %zt, ptr %gep1) {
; CHECK-LABEL: define void @test_st1dq(
; CHECK-SAME: <vscale x 2 x i64> [[ZT:%.*]], ptr [[GEP1:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1dq.nxv2i64(<vscale x 2 x i64> %zt, <vscale x 1 x i1> zeroinitializer, ptr %gep1)
  ret void
}

define void @test_st1q_scatter_index(<vscale x 8 x i16> %data, <vscale x 1 x i1> %pg, ptr %base, <vscale x 2 x i64> %idx) {
; CHECK-LABEL: define void @test_st1q_scatter_index(
; CHECK-SAME: <vscale x 8 x i16> [[DATA:%.*]], <vscale x 1 x i1> [[PG:%.*]], ptr [[BASE:%.*]], <vscale x 2 x i64> [[IDX:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1q.scatter.index.nxv8i16(<vscale x 8 x i16> %data, <vscale x 1 x i1> zeroinitializer, ptr %base, <vscale x 2 x i64> %idx)
  ret void
}

define void @test_st1q_scatter_scalar_offset(<vscale x 2 x i64> %data, <vscale x 2 x i64> %base) {
; CHECK-LABEL: define void @test_st1q_scatter_scalar_offset(
; CHECK-SAME: <vscale x 2 x i64> [[DATA:%.*]], <vscale x 2 x i64> [[BASE:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1q.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 1 x i1> zeroinitializer, <vscale x 2 x i64> %base, i64 0)
  ret void
}

define void @test_st1q_scatter_vector_offset(<vscale x 8 x i16> %data, ptr %base, <vscale x 2 x i64> %off) {
; CHECK-LABEL: define void @test_st1q_scatter_vector_offset(
; CHECK-SAME: <vscale x 8 x i16> [[DATA:%.*]], ptr [[BASE:%.*]], <vscale x 2 x i64> [[OFF:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1q.scatter.vector.offset.nxv8i16(<vscale x 8 x i16> %data, <vscale x 1 x i1> zeroinitializer, ptr %base, <vscale x 2 x i64> %off)
  ret void
}

define void @test_st1wq(ptr %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: define void @test_st1wq(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 4 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.st1wq.nxv4i32(<vscale x 4 x i32> %b, <vscale x 1 x i1> zeroinitializer, ptr %a)
  ret void
}


define void @test_st2(ptr %a, <vscale x 8 x i32> %b) {
; CHECK-LABEL: define void @test_st2(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 8 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv8i32(<vscale x 8 x i32> %b, i64 0)
  %1 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv8i32(<vscale x 8 x i32> %b, i64 4)
  tail call void @llvm.aarch64.sve.st2.nxv4i32(<vscale x 4 x i32> %0, <vscale x 4 x i32> %1, <vscale x 4 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_st2q(ptr %a, <vscale x 8 x i32> %b) {
; CHECK-LABEL: define void @test_st2q(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 8 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv8i32(<vscale x 8 x i32> %b, i64 0)
  %1 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv8i32(<vscale x 8 x i32> %b, i64 4)
  tail call void @llvm.aarch64.sve.st2q.nxv4i32(<vscale x 4 x i32> %0, <vscale x 4 x i32> %1, <vscale x 4 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_st3(ptr %a, <vscale x 12 x i32> %b) {
; CHECK-LABEL: define void @test_st3(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 12 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv12i32(<vscale x 12 x i32> %b, i64 0)
  %1 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv12i32(<vscale x 12 x i32> %b, i64 4)
  %2 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv12i32(<vscale x 12 x i32> %b, i64 8)
  tail call void @llvm.aarch64.sve.st3.nxv4i32(<vscale x 4 x i32> %0, <vscale x 4 x i32> %1, <vscale x 4 x i32> %2, <vscale x 4 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_st3q(ptr %a, <vscale x 12 x i32> %b) {
; CHECK-LABEL: define void @test_st3q(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 12 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv12i32(<vscale x 12 x i32> %b, i64 0)
  %1 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv12i32(<vscale x 12 x i32> %b, i64 4)
  %2 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv12i32(<vscale x 12 x i32> %b, i64 8)
  tail call void @llvm.aarch64.sve.st3q.nxv4i32(<vscale x 4 x i32> %0, <vscale x 4 x i32> %1, <vscale x 4 x i32> %2, <vscale x 4 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_st4(ptr %a, <vscale x 16 x i32> %b) {
; CHECK-LABEL: define void @test_st4(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 16 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 0)
  %1 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 4)
  %2 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 8)
  %3 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 12)
  tail call void @llvm.aarch64.sve.st4.nxv4i32(<vscale x 4 x i32> %0, <vscale x 4 x i32> %1, <vscale x 4 x i32> %2, <vscale x 4 x i32> %3, <vscale x 4 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_st4q(ptr %a, <vscale x 16 x i32> %b) {
; CHECK-LABEL: define void @test_st4q(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 16 x i32> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 0)
  %1 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 4)
  %2 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 8)
  %3 = tail call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %b, i64 12)
  tail call void @llvm.aarch64.sve.st4q.nxv4i32(<vscale x 4 x i32> %0, <vscale x 4 x i32> %1, <vscale x 4 x i32> %2, <vscale x 4 x i32> %3, <vscale x 4 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_stnt1(ptr %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: define void @test_stnt1(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 16 x i8> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.stnt1.nxv16i8(<vscale x 16 x i8> %b, <vscale x 16 x i1> zeroinitializer, ptr %a)
  ret void
}

define void @test_stnt1_scatter(<vscale x 2 x i16> %data_trunc, ptr %base, <vscale x 2 x i64> %b) {
; CHECK-LABEL: define void @test_stnt1_scatter(
; CHECK-SAME: <vscale x 2 x i16> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.stnt1.scatter.nxv2i16(<vscale x 2 x i16> %data_trunc,
                                                    <vscale x 2 x i1> zeroinitializer,
                                                    ptr %base,
                                                    <vscale x 2 x i64> %b)
  ret void
}

define void @test_stnt1_scatter_index(<vscale x 2 x i32> %data_trunc, ptr %base, <vscale x 2 x i64> %offsets) {
; CHECK-LABEL: define void @test_stnt1_scatter_index(
; CHECK-SAME: <vscale x 2 x i32> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 2 x i64> [[OFFSETS:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.stnt1.scatter.index.nxv2i32(<vscale x 2 x i32> %data_trunc,
                                                          <vscale x 2 x i1> zeroinitializer,
                                                          ptr %base,
                                                          <vscale x 2 x i64> %offsets)
  ret void
}

define void @test_stnt1_scatter_scalar_offset(<vscale x 4 x i8> %data_trunc, <vscale x 4 x i32> %base) {
; CHECK-LABEL: define void @test_stnt1_scatter_scalar_offset(
; CHECK-SAME: <vscale x 4 x i8> [[DATA_TRUNC:%.*]], <vscale x 4 x i32> [[BASE:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret void
;
entry:
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %data_trunc,
                                                                         <vscale x 4 x i1> zeroinitializer,
                                                                         <vscale x 4 x i32> %base,
                                                                         i64 16)
  ret void
}

define void @test_stnt1_scatter_uxtw(<vscale x 4 x i8> %data_trunc, ptr %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: define void @test_stnt1_scatter_uxtw(
; CHECK-SAME: <vscale x 4 x i8> [[DATA_TRUNC:%.*]], ptr [[BASE:%.*]], <vscale x 4 x i32> [[OFFSETS:%.*]]) {
; CHECK-NEXT:    ret void
;
  call void @llvm.aarch64.sve.stnt1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %data_trunc,
                                                        <vscale x 4 x i1> zeroinitializer,
                                                        ptr %base,
                                                        <vscale x 4 x i32> %offsets)
  ret void
}