// Source: llvm/unittests/Target/ARM/MachineInstrTest.cpp
// (xref revision c697dd9ffdb14f08766c738eb28c377ee964937f)
#include "ARMBaseInstrInfo.h"
#include "ARMSubtarget.h"
#include "ARMTargetMachine.h"
#include "llvm/Support/TargetRegistry.h"
#include "llvm/Support/TargetSelect.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"

#include "gtest/gtest.h"

using namespace llvm;

// Checks that the ARMII::DoubleWidthResult TSFlags bit emitted by tablegen
// matches a hand-maintained list of the MVE instructions that produce a
// result with lanes twice the width of their inputs (VMULL, VQDMULL, VMOVL
// and VSHLL variants).
TEST(MachineInstructionDoubleWidthResult, IsCorrect) {
  using namespace ARM;

  // Returns true iff Opcode is expected to carry DoubleWidthResult.
  auto DoubleWidthResult = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VMULLBp16:
    case MVE_VMULLBp8:
    case MVE_VMULLBs16:
    case MVE_VMULLBs32:
    case MVE_VMULLBs8:
    case MVE_VMULLBu16:
    case MVE_VMULLBu32:
    case MVE_VMULLBu8:
    case MVE_VMULLTp16:
    case MVE_VMULLTp8:
    case MVE_VMULLTs16:
    case MVE_VMULLTs32:
    case MVE_VMULLTs8:
    case MVE_VMULLTu16:
    case MVE_VMULLTu32:
    case MVE_VMULLTu8:
    case MVE_VQDMULL_qr_s16bh:
    case MVE_VQDMULL_qr_s16th:
    case MVE_VQDMULL_qr_s32bh:
    case MVE_VQDMULL_qr_s32th:
    case MVE_VQDMULLs16bh:
    case MVE_VQDMULLs16th:
    case MVE_VQDMULLs32bh:
    case MVE_VQDMULLs32th:
    case MVE_VMOVLs16bh:
    case MVE_VMOVLs16th:
    case MVE_VMOVLs8bh:
    case MVE_VMOVLs8th:
    case MVE_VMOVLu16bh:
    case MVE_VMOVLu16th:
    case MVE_VMOVLu8bh:
    case MVE_VMOVLu8th:
    case MVE_VSHLL_imms16bh:
    case MVE_VSHLL_imms16th:
    case MVE_VSHLL_imms8bh:
    case MVE_VSHLL_imms8th:
    case MVE_VSHLL_immu16bh:
    case MVE_VSHLL_immu16th:
    case MVE_VSHLL_immu8bh:
    case MVE_VSHLL_immu8th:
    case MVE_VSHLL_lws16bh:
    case MVE_VSHLL_lws16th:
    case MVE_VSHLL_lws8bh:
    case MVE_VSHLL_lws8th:
    case MVE_VSHLL_lwu16bh:
    case MVE_VSHLL_lwu16th:
    case MVE_VSHLL_lwu8bh:
    case MVE_VSHLL_lwu8th:
      return true;
    }
    return false;
  };

  // Bring up just enough of the ARM backend to build a target machine.
  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-arm-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    // The ARM target may not be compiled in; bail out silently (the test
    // then passes vacuously).
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  // Walk every ARM opcode; only MVE-domain instructions can carry the flag.
  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;

    // The tablegen flag must agree with the expected-opcode list above.
    bool Valid = (Flags & ARMII::DoubleWidthResult) != 0;
    ASSERT_EQ(DoubleWidthResult(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}
109 
// Checks that the ARMII::HorizontalReduction TSFlags bit emitted by tablegen
// matches a hand-maintained list of the MVE instructions that reduce across
// vector lanes into a scalar (VABAV, VADDV/VADDLV, VMAXV/VMINV families and
// the VMLADAV/VMLALDAV/VMLSDAV/VMLSLDAV/VRML*DAVH multiply-accumulate
// reductions).
TEST(MachineInstructionHorizontalReduction, IsCorrect) {
  using namespace ARM;

  // Returns true iff Opcode is expected to carry HorizontalReduction.
  auto HorizontalReduction = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VABAVs16:
    case MVE_VABAVs32:
    case MVE_VABAVs8:
    case MVE_VABAVu16:
    case MVE_VABAVu32:
    case MVE_VABAVu8:
    case MVE_VADDLVs32acc:
    case MVE_VADDLVs32no_acc:
    case MVE_VADDLVu32acc:
    case MVE_VADDLVu32no_acc:
    case MVE_VADDVs16acc:
    case MVE_VADDVs16no_acc:
    case MVE_VADDVs32acc:
    case MVE_VADDVs32no_acc:
    case MVE_VADDVs8acc:
    case MVE_VADDVs8no_acc:
    case MVE_VADDVu16acc:
    case MVE_VADDVu16no_acc:
    case MVE_VADDVu32acc:
    case MVE_VADDVu32no_acc:
    case MVE_VADDVu8acc:
    case MVE_VADDVu8no_acc:
    case MVE_VMAXAVs16:
    case MVE_VMAXAVs32:
    case MVE_VMAXAVs8:
    case MVE_VMAXNMAVf16:
    case MVE_VMAXNMAVf32:
    case MVE_VMAXNMVf16:
    case MVE_VMAXNMVf32:
    case MVE_VMAXVs16:
    case MVE_VMAXVs32:
    case MVE_VMAXVs8:
    case MVE_VMAXVu16:
    case MVE_VMAXVu32:
    case MVE_VMAXVu8:
    case MVE_VMINAVs16:
    case MVE_VMINAVs32:
    case MVE_VMINAVs8:
    case MVE_VMINNMAVf16:
    case MVE_VMINNMAVf32:
    case MVE_VMINNMVf16:
    case MVE_VMINNMVf32:
    case MVE_VMINVs16:
    case MVE_VMINVs32:
    case MVE_VMINVs8:
    case MVE_VMINVu16:
    case MVE_VMINVu32:
    case MVE_VMINVu8:
    case MVE_VMLADAVas16:
    case MVE_VMLADAVas32:
    case MVE_VMLADAVas8:
    case MVE_VMLADAVau16:
    case MVE_VMLADAVau32:
    case MVE_VMLADAVau8:
    case MVE_VMLADAVaxs16:
    case MVE_VMLADAVaxs32:
    case MVE_VMLADAVaxs8:
    case MVE_VMLADAVs16:
    case MVE_VMLADAVs32:
    case MVE_VMLADAVs8:
    case MVE_VMLADAVu16:
    case MVE_VMLADAVu32:
    case MVE_VMLADAVu8:
    case MVE_VMLADAVxs16:
    case MVE_VMLADAVxs32:
    case MVE_VMLADAVxs8:
    case MVE_VMLALDAVas16:
    case MVE_VMLALDAVas32:
    case MVE_VMLALDAVau16:
    case MVE_VMLALDAVau32:
    case MVE_VMLALDAVaxs16:
    case MVE_VMLALDAVaxs32:
    case MVE_VMLALDAVs16:
    case MVE_VMLALDAVs32:
    case MVE_VMLALDAVu16:
    case MVE_VMLALDAVu32:
    case MVE_VMLALDAVxs16:
    case MVE_VMLALDAVxs32:
    case MVE_VMLSDAVas16:
    case MVE_VMLSDAVas32:
    case MVE_VMLSDAVas8:
    case MVE_VMLSDAVaxs16:
    case MVE_VMLSDAVaxs32:
    case MVE_VMLSDAVaxs8:
    case MVE_VMLSDAVs16:
    case MVE_VMLSDAVs32:
    case MVE_VMLSDAVs8:
    case MVE_VMLSDAVxs16:
    case MVE_VMLSDAVxs32:
    case MVE_VMLSDAVxs8:
    case MVE_VMLSLDAVas16:
    case MVE_VMLSLDAVas32:
    case MVE_VMLSLDAVaxs16:
    case MVE_VMLSLDAVaxs32:
    case MVE_VMLSLDAVs16:
    case MVE_VMLSLDAVs32:
    case MVE_VMLSLDAVxs16:
    case MVE_VMLSLDAVxs32:
    case MVE_VRMLALDAVHas32:
    case MVE_VRMLALDAVHau32:
    case MVE_VRMLALDAVHaxs32:
    case MVE_VRMLALDAVHs32:
    case MVE_VRMLALDAVHu32:
    case MVE_VRMLALDAVHxs32:
    case MVE_VRMLSLDAVHas32:
    case MVE_VRMLSLDAVHaxs32:
    case MVE_VRMLSLDAVHs32:
    case MVE_VRMLSLDAVHxs32:
      return true;
    }
    return false;
  };

  // Bring up just enough of the ARM backend to build a target machine.
  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-arm-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    // The ARM target may not be compiled in; bail out silently (the test
    // then passes vacuously).
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  // Walk every ARM opcode; only MVE-domain instructions can carry the flag.
  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;
    // The tablegen flag must agree with the expected-opcode list above.
    bool Valid = (Flags & ARMII::HorizontalReduction) != 0;
    ASSERT_EQ(HorizontalReduction(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}
265 
// Checks that the ARMII::RetainsPreviousHalfElement TSFlags bit emitted by
// tablegen matches a hand-maintained list of the MVE narrowing instructions
// that write only the top or bottom half of each destination element while
// leaving the other half unchanged (VMOVN/VQMOVN/VQMOVUN, the narrowing
// shift-right family, and the f16<->f32 converts).
TEST(MachineInstructionRetainsPreviousHalfElement, IsCorrect) {
  using namespace ARM;

  // Returns true iff Opcode is expected to carry RetainsPreviousHalfElement.
  auto RetainsPreviousHalfElement = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VMOVNi16bh:
    case MVE_VMOVNi16th:
    case MVE_VMOVNi32bh:
    case MVE_VMOVNi32th:
    case MVE_VQMOVNs16bh:
    case MVE_VQMOVNs16th:
    case MVE_VQMOVNs32bh:
    case MVE_VQMOVNs32th:
    case MVE_VQMOVNu16bh:
    case MVE_VQMOVNu16th:
    case MVE_VQMOVNu32bh:
    case MVE_VQMOVNu32th:
    case MVE_VQMOVUNs16bh:
    case MVE_VQMOVUNs16th:
    case MVE_VQMOVUNs32bh:
    case MVE_VQMOVUNs32th:
    case MVE_VQRSHRNbhs16:
    case MVE_VQRSHRNbhs32:
    case MVE_VQRSHRNbhu16:
    case MVE_VQRSHRNbhu32:
    case MVE_VQRSHRNths16:
    case MVE_VQRSHRNths32:
    case MVE_VQRSHRNthu16:
    case MVE_VQRSHRNthu32:
    case MVE_VQRSHRUNs16bh:
    case MVE_VQRSHRUNs16th:
    case MVE_VQRSHRUNs32bh:
    case MVE_VQRSHRUNs32th:
    case MVE_VQSHRNbhs16:
    case MVE_VQSHRNbhs32:
    case MVE_VQSHRNbhu16:
    case MVE_VQSHRNbhu32:
    case MVE_VQSHRNths16:
    case MVE_VQSHRNths32:
    case MVE_VQSHRNthu16:
    case MVE_VQSHRNthu32:
    case MVE_VQSHRUNs16bh:
    case MVE_VQSHRUNs16th:
    case MVE_VQSHRUNs32bh:
    case MVE_VQSHRUNs32th:
    case MVE_VRSHRNi16bh:
    case MVE_VRSHRNi16th:
    case MVE_VRSHRNi32bh:
    case MVE_VRSHRNi32th:
    case MVE_VSHRNi16bh:
    case MVE_VSHRNi16th:
    case MVE_VSHRNi32bh:
    case MVE_VSHRNi32th:
    case MVE_VCVTf16f32bh:
    case MVE_VCVTf16f32th:
    case MVE_VCVTf32f16bh:
    case MVE_VCVTf32f16th:
      return true;
    }
    return false;
  };

  // Bring up just enough of the ARM backend to build a target machine.
  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-arm-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    // The ARM target may not be compiled in; bail out silently (the test
    // then passes vacuously).
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  // Walk every ARM opcode; only MVE-domain instructions can carry the flag.
  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;

    // The tablegen flag must agree with the expected-opcode list above.
    bool Valid = (Flags & ARMII::RetainsPreviousHalfElement) != 0;
    ASSERT_EQ(RetainsPreviousHalfElement(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}
// Test for instructions that aren't immediately obviously valid within a
// tail-predicated loop. This should be marked up in their tablegen
// descriptions. Currently we, conservatively, disallow:
// - cross-beat carries.
// - complex operations.
// - horizontal operations.
// - byte swapping.
// - interleaved memory instructions.
// TODO: Add to this list once we can handle them safely.
375 TEST(MachineInstrValidTailPredication, IsCorrect) {
376 
377   using namespace ARM;
378 
379   auto IsValidTPOpcode = [](unsigned Opcode) {
380     switch (Opcode) {
381     default:
382       return false;
383     case MVE_ASRLi:
384     case MVE_ASRLr:
385     case MVE_LSRL:
386     case MVE_SQRSHR:
387     case MVE_SQSHL:
388     case MVE_SRSHR:
389     case MVE_UQRSHL:
390     case MVE_UQSHL:
391     case MVE_URSHR:
392     case MVE_VABDf16:
393     case MVE_VABDf32:
394     case MVE_VABDs16:
395     case MVE_VABDs32:
396     case MVE_VABDs8:
397     case MVE_VABDu16:
398     case MVE_VABDu32:
399     case MVE_VABDu8:
400     case MVE_VABSf16:
401     case MVE_VABSf32:
402     case MVE_VABSs16:
403     case MVE_VABSs32:
404     case MVE_VABSs8:
405     case MVE_VADD_qr_f16:
406     case MVE_VADD_qr_f32:
407     case MVE_VADD_qr_i16:
408     case MVE_VADD_qr_i32:
409     case MVE_VADD_qr_i8:
410     case MVE_VADDVs16acc:
411     case MVE_VADDVs16no_acc:
412     case MVE_VADDVs32acc:
413     case MVE_VADDVs32no_acc:
414     case MVE_VADDVs8acc:
415     case MVE_VADDVs8no_acc:
416     case MVE_VADDVu16acc:
417     case MVE_VADDVu16no_acc:
418     case MVE_VADDVu32acc:
419     case MVE_VADDVu32no_acc:
420     case MVE_VADDVu8acc:
421     case MVE_VADDVu8no_acc:
422     case MVE_VADDf16:
423     case MVE_VADDf32:
424     case MVE_VADDi16:
425     case MVE_VADDi32:
426     case MVE_VADDi8:
427     case MVE_VAND:
428     case MVE_VBIC:
429     case MVE_VBICimmi16:
430     case MVE_VBICimmi32:
431     case MVE_VBRSR16:
432     case MVE_VBRSR32:
433     case MVE_VBRSR8:
434     case MVE_VCLSs16:
435     case MVE_VCLSs32:
436     case MVE_VCLSs8:
437     case MVE_VCLZs16:
438     case MVE_VCLZs32:
439     case MVE_VCLZs8:
440     case MVE_VCMPf16:
441     case MVE_VCMPf16r:
442     case MVE_VCMPf32:
443     case MVE_VCMPf32r:
444     case MVE_VCMPi16:
445     case MVE_VCMPi16r:
446     case MVE_VCMPi32:
447     case MVE_VCMPi32r:
448     case MVE_VCMPi8:
449     case MVE_VCMPi8r:
450     case MVE_VCMPs16:
451     case MVE_VCMPs16r:
452     case MVE_VCMPs32:
453     case MVE_VCMPs32r:
454     case MVE_VCMPs8:
455     case MVE_VCMPs8r:
456     case MVE_VCMPu16:
457     case MVE_VCMPu16r:
458     case MVE_VCMPu32:
459     case MVE_VCMPu32r:
460     case MVE_VCMPu8:
461     case MVE_VCMPu8r:
462     case MVE_VCTP16:
463     case MVE_VCTP32:
464     case MVE_VCTP64:
465     case MVE_VCTP8:
466     case MVE_VCVTf16s16_fix:
467     case MVE_VCVTf16s16n:
468     case MVE_VCVTf16u16_fix:
469     case MVE_VCVTf16u16n:
470     case MVE_VCVTf32s32_fix:
471     case MVE_VCVTf32s32n:
472     case MVE_VCVTf32u32_fix:
473     case MVE_VCVTf32u32n:
474     case MVE_VCVTs16f16_fix:
475     case MVE_VCVTs16f16a:
476     case MVE_VCVTs16f16m:
477     case MVE_VCVTs16f16n:
478     case MVE_VCVTs16f16p:
479     case MVE_VCVTs16f16z:
480     case MVE_VCVTs32f32_fix:
481     case MVE_VCVTs32f32a:
482     case MVE_VCVTs32f32m:
483     case MVE_VCVTs32f32n:
484     case MVE_VCVTs32f32p:
485     case MVE_VCVTs32f32z:
486     case MVE_VCVTu16f16_fix:
487     case MVE_VCVTu16f16a:
488     case MVE_VCVTu16f16m:
489     case MVE_VCVTu16f16n:
490     case MVE_VCVTu16f16p:
491     case MVE_VCVTu16f16z:
492     case MVE_VCVTu32f32_fix:
493     case MVE_VCVTu32f32a:
494     case MVE_VCVTu32f32m:
495     case MVE_VCVTu32f32n:
496     case MVE_VCVTu32f32p:
497     case MVE_VCVTu32f32z:
498     case MVE_VDDUPu16:
499     case MVE_VDDUPu32:
500     case MVE_VDDUPu8:
501     case MVE_VDUP16:
502     case MVE_VDUP32:
503     case MVE_VDUP8:
504     case MVE_VDWDUPu16:
505     case MVE_VDWDUPu32:
506     case MVE_VDWDUPu8:
507     case MVE_VEOR:
508     case MVE_VFMA_qr_Sf16:
509     case MVE_VFMA_qr_Sf32:
510     case MVE_VFMA_qr_f16:
511     case MVE_VFMA_qr_f32:
512     case MVE_VFMAf16:
513     case MVE_VFMAf32:
514     case MVE_VFMSf16:
515     case MVE_VFMSf32:
516     case MVE_VMAXAs16:
517     case MVE_VMAXAs32:
518     case MVE_VMAXAs8:
519     case MVE_VMAXs16:
520     case MVE_VMAXs32:
521     case MVE_VMAXs8:
522     case MVE_VMAXu16:
523     case MVE_VMAXu32:
524     case MVE_VMAXu8:
525     case MVE_VMINAs16:
526     case MVE_VMINAs32:
527     case MVE_VMINAs8:
528     case MVE_VMINs16:
529     case MVE_VMINs32:
530     case MVE_VMINs8:
531     case MVE_VMINu16:
532     case MVE_VMINu32:
533     case MVE_VMINu8:
534     case MVE_VMLAS_qr_s16:
535     case MVE_VMLAS_qr_s32:
536     case MVE_VMLAS_qr_s8:
537     case MVE_VMLAS_qr_u16:
538     case MVE_VMLAS_qr_u32:
539     case MVE_VMLAS_qr_u8:
540     case MVE_VMLA_qr_s16:
541     case MVE_VMLA_qr_s32:
542     case MVE_VMLA_qr_s8:
543     case MVE_VMLA_qr_u16:
544     case MVE_VMLA_qr_u32:
545     case MVE_VMLA_qr_u8:
546     case MVE_VHADD_qr_s16:
547     case MVE_VHADD_qr_s32:
548     case MVE_VHADD_qr_s8:
549     case MVE_VHADD_qr_u16:
550     case MVE_VHADD_qr_u32:
551     case MVE_VHADD_qr_u8:
552     case MVE_VHADDs16:
553     case MVE_VHADDs32:
554     case MVE_VHADDs8:
555     case MVE_VHADDu16:
556     case MVE_VHADDu32:
557     case MVE_VHADDu8:
558     case MVE_VHSUB_qr_s16:
559     case MVE_VHSUB_qr_s32:
560     case MVE_VHSUB_qr_s8:
561     case MVE_VHSUB_qr_u16:
562     case MVE_VHSUB_qr_u32:
563     case MVE_VHSUB_qr_u8:
564     case MVE_VHSUBs16:
565     case MVE_VHSUBs32:
566     case MVE_VHSUBs8:
567     case MVE_VHSUBu16:
568     case MVE_VHSUBu32:
569     case MVE_VHSUBu8:
570     case MVE_VIDUPu16:
571     case MVE_VIDUPu32:
572     case MVE_VIDUPu8:
573     case MVE_VIWDUPu16:
574     case MVE_VIWDUPu32:
575     case MVE_VIWDUPu8:
576     case MVE_VLDRBS16:
577     case MVE_VLDRBS16_post:
578     case MVE_VLDRBS16_pre:
579     case MVE_VLDRBS16_rq:
580     case MVE_VLDRBS32:
581     case MVE_VLDRBS32_post:
582     case MVE_VLDRBS32_pre:
583     case MVE_VLDRBS32_rq:
584     case MVE_VLDRBU16:
585     case MVE_VLDRBU16_post:
586     case MVE_VLDRBU16_pre:
587     case MVE_VLDRBU16_rq:
588     case MVE_VLDRBU32:
589     case MVE_VLDRBU32_post:
590     case MVE_VLDRBU32_pre:
591     case MVE_VLDRBU32_rq:
592     case MVE_VLDRBU8:
593     case MVE_VLDRBU8_post:
594     case MVE_VLDRBU8_pre:
595     case MVE_VLDRBU8_rq:
596     case MVE_VLDRDU64_qi:
597     case MVE_VLDRDU64_qi_pre:
598     case MVE_VLDRDU64_rq:
599     case MVE_VLDRDU64_rq_u:
600     case MVE_VLDRHS32:
601     case MVE_VLDRHS32_post:
602     case MVE_VLDRHS32_pre:
603     case MVE_VLDRHS32_rq:
604     case MVE_VLDRHS32_rq_u:
605     case MVE_VLDRHU16:
606     case MVE_VLDRHU16_post:
607     case MVE_VLDRHU16_pre:
608     case MVE_VLDRHU16_rq:
609     case MVE_VLDRHU16_rq_u:
610     case MVE_VLDRHU32:
611     case MVE_VLDRHU32_post:
612     case MVE_VLDRHU32_pre:
613     case MVE_VLDRHU32_rq:
614     case MVE_VLDRHU32_rq_u:
615     case MVE_VLDRWU32:
616     case MVE_VLDRWU32_post:
617     case MVE_VLDRWU32_pre:
618     case MVE_VLDRWU32_qi:
619     case MVE_VLDRWU32_qi_pre:
620     case MVE_VLDRWU32_rq:
621     case MVE_VLDRWU32_rq_u:
622     case MVE_VMOVimmf32:
623     case MVE_VMOVimmi16:
624     case MVE_VMOVimmi32:
625     case MVE_VMOVimmi64:
626     case MVE_VMOVimmi8:
627     case MVE_VMOVNi16bh:
628     case MVE_VMOVNi16th:
629     case MVE_VMOVNi32bh:
630     case MVE_VMOVNi32th:
631     case MVE_VMULLBp16:
632     case MVE_VMULLBp8:
633     case MVE_VMULLBs16:
634     case MVE_VMULLBs32:
635     case MVE_VMULLBs8:
636     case MVE_VMULLBu16:
637     case MVE_VMULLBu32:
638     case MVE_VMULLBu8:
639     case MVE_VMULLTp16:
640     case MVE_VMULLTp8:
641     case MVE_VMULLTs16:
642     case MVE_VMULLTs32:
643     case MVE_VMULLTs8:
644     case MVE_VMULLTu16:
645     case MVE_VMULLTu32:
646     case MVE_VMULLTu8:
647     case MVE_VMUL_qr_f16:
648     case MVE_VMUL_qr_f32:
649     case MVE_VMUL_qr_i16:
650     case MVE_VMUL_qr_i32:
651     case MVE_VMUL_qr_i8:
652     case MVE_VMULf16:
653     case MVE_VMULf32:
654     case MVE_VMULi16:
655     case MVE_VMULi8:
656     case MVE_VMULi32:
657     case MVE_VMVN:
658     case MVE_VMVNimmi16:
659     case MVE_VMVNimmi32:
660     case MVE_VNEGf16:
661     case MVE_VNEGf32:
662     case MVE_VNEGs16:
663     case MVE_VNEGs32:
664     case MVE_VNEGs8:
665     case MVE_VORN:
666     case MVE_VORR:
667     case MVE_VORRimmi16:
668     case MVE_VORRimmi32:
669     case MVE_VPST:
670     case MVE_VQABSs16:
671     case MVE_VQABSs32:
672     case MVE_VQABSs8:
673     case MVE_VQADD_qr_s16:
674     case MVE_VQADD_qr_s32:
675     case MVE_VQADD_qr_s8:
676     case MVE_VQADD_qr_u16:
677     case MVE_VQADD_qr_u32:
678     case MVE_VQADD_qr_u8:
679     case MVE_VQADDs16:
680     case MVE_VQADDs32:
681     case MVE_VQADDs8:
682     case MVE_VQADDu16:
683     case MVE_VQADDu32:
684     case MVE_VQADDu8:
685     case MVE_VQDMULL_qr_s16bh:
686     case MVE_VQDMULL_qr_s16th:
687     case MVE_VQDMULL_qr_s32bh:
688     case MVE_VQDMULL_qr_s32th:
689     case MVE_VQDMULLs16bh:
690     case MVE_VQDMULLs16th:
691     case MVE_VQDMULLs32bh:
692     case MVE_VQDMULLs32th:
693     case MVE_VQNEGs16:
694     case MVE_VQNEGs32:
695     case MVE_VQNEGs8:
696     case MVE_VQMOVNs16bh:
697     case MVE_VQMOVNs16th:
698     case MVE_VQMOVNs32bh:
699     case MVE_VQMOVNs32th:
700     case MVE_VQMOVNu16bh:
701     case MVE_VQMOVNu16th:
702     case MVE_VQMOVNu32bh:
703     case MVE_VQMOVNu32th:
704     case MVE_VQMOVUNs16bh:
705     case MVE_VQMOVUNs16th:
706     case MVE_VQMOVUNs32bh:
707     case MVE_VQMOVUNs32th:
708     case MVE_VQRSHL_by_vecs16:
709     case MVE_VQRSHL_by_vecs32:
710     case MVE_VQRSHL_by_vecs8:
711     case MVE_VQRSHL_by_vecu16:
712     case MVE_VQRSHL_by_vecu32:
713     case MVE_VQRSHL_by_vecu8:
714     case MVE_VQRSHL_qrs16:
715     case MVE_VQRSHL_qrs32:
716     case MVE_VQRSHL_qrs8:
717     case MVE_VQRSHL_qru16:
718     case MVE_VQRSHL_qru8:
719     case MVE_VQRSHL_qru32:
720     case MVE_VQSHLU_imms16:
721     case MVE_VQSHLU_imms32:
722     case MVE_VQSHLU_imms8:
723     case MVE_VQSHLimms16:
724     case MVE_VQSHLimms32:
725     case MVE_VQSHLimms8:
726     case MVE_VQSHLimmu16:
727     case MVE_VQSHLimmu32:
728     case MVE_VQSHLimmu8:
729     case MVE_VQSHL_by_vecs16:
730     case MVE_VQSHL_by_vecs32:
731     case MVE_VQSHL_by_vecs8:
732     case MVE_VQSHL_by_vecu16:
733     case MVE_VQSHL_by_vecu32:
734     case MVE_VQSHL_by_vecu8:
735     case MVE_VQSHL_qrs16:
736     case MVE_VQSHL_qrs32:
737     case MVE_VQSHL_qrs8:
738     case MVE_VQSHL_qru16:
739     case MVE_VQSHL_qru32:
740     case MVE_VQSHL_qru8:
741     case MVE_VQRSHRNbhs16:
742     case MVE_VQRSHRNbhs32:
743     case MVE_VQRSHRNbhu16:
744     case MVE_VQRSHRNbhu32:
745     case MVE_VQRSHRNths16:
746     case MVE_VQRSHRNths32:
747     case MVE_VQRSHRNthu16:
748     case MVE_VQRSHRNthu32:
749     case MVE_VQRSHRUNs16bh:
750     case MVE_VQRSHRUNs16th:
751     case MVE_VQRSHRUNs32bh:
752     case MVE_VQRSHRUNs32th:
753     case MVE_VQSHRNbhs16:
754     case MVE_VQSHRNbhs32:
755     case MVE_VQSHRNbhu16:
756     case MVE_VQSHRNbhu32:
757     case MVE_VQSHRNths16:
758     case MVE_VQSHRNths32:
759     case MVE_VQSHRNthu16:
760     case MVE_VQSHRNthu32:
761     case MVE_VQSHRUNs16bh:
762     case MVE_VQSHRUNs16th:
763     case MVE_VQSHRUNs32bh:
764     case MVE_VQSHRUNs32th:
765     case MVE_VQSUB_qr_s16:
766     case MVE_VQSUB_qr_s32:
767     case MVE_VQSUB_qr_s8:
768     case MVE_VQSUB_qr_u16:
769     case MVE_VQSUB_qr_u32:
770     case MVE_VQSUB_qr_u8:
771     case MVE_VQSUBs16:
772     case MVE_VQSUBs32:
773     case MVE_VQSUBs8:
774     case MVE_VQSUBu16:
775     case MVE_VQSUBu32:
776     case MVE_VQSUBu8:
777     case MVE_VRHADDs16:
778     case MVE_VRHADDs32:
779     case MVE_VRHADDs8:
780     case MVE_VRHADDu16:
781     case MVE_VRHADDu32:
782     case MVE_VRHADDu8:
783     case MVE_VRINTf16A:
784     case MVE_VRINTf16M:
785     case MVE_VRINTf16N:
786     case MVE_VRINTf16P:
787     case MVE_VRINTf16X:
788     case MVE_VRINTf16Z:
789     case MVE_VRINTf32A:
790     case MVE_VRINTf32M:
791     case MVE_VRINTf32N:
792     case MVE_VRINTf32P:
793     case MVE_VRINTf32X:
794     case MVE_VRINTf32Z:
795     case MVE_VRSHL_by_vecs16:
796     case MVE_VRSHL_by_vecs32:
797     case MVE_VRSHL_by_vecs8:
798     case MVE_VRSHL_by_vecu16:
799     case MVE_VRSHL_by_vecu32:
800     case MVE_VRSHL_by_vecu8:
801     case MVE_VRSHL_qrs16:
802     case MVE_VRSHL_qrs32:
803     case MVE_VRSHL_qrs8:
804     case MVE_VRSHL_qru16:
805     case MVE_VRSHL_qru32:
806     case MVE_VRSHL_qru8:
807     case MVE_VRSHR_imms16:
808     case MVE_VRSHR_imms32:
809     case MVE_VRSHR_imms8:
810     case MVE_VRSHR_immu16:
811     case MVE_VRSHR_immu32:
812     case MVE_VRSHR_immu8:
813     case MVE_VRSHRNi16bh:
814     case MVE_VRSHRNi16th:
815     case MVE_VRSHRNi32bh:
816     case MVE_VRSHRNi32th:
817     case MVE_VSHL_by_vecs16:
818     case MVE_VSHL_by_vecs32:
819     case MVE_VSHL_by_vecs8:
820     case MVE_VSHL_by_vecu16:
821     case MVE_VSHL_by_vecu32:
822     case MVE_VSHL_by_vecu8:
823     case MVE_VSHL_immi16:
824     case MVE_VSHL_immi32:
825     case MVE_VSHL_immi8:
826     case MVE_VSHL_qrs16:
827     case MVE_VSHL_qrs32:
828     case MVE_VSHL_qrs8:
829     case MVE_VSHL_qru16:
830     case MVE_VSHL_qru32:
831     case MVE_VSHL_qru8:
832     case MVE_VSHR_imms16:
833     case MVE_VSHR_imms32:
834     case MVE_VSHR_imms8:
835     case MVE_VSHR_immu16:
836     case MVE_VSHR_immu32:
837     case MVE_VSHR_immu8:
838     case MVE_VSHRNi16bh:
839     case MVE_VSHRNi16th:
840     case MVE_VSHRNi32bh:
841     case MVE_VSHRNi32th:
842     case MVE_VSLIimm16:
843     case MVE_VSLIimm32:
844     case MVE_VSLIimm8:
845     case MVE_VSRIimm16:
846     case MVE_VSRIimm32:
847     case MVE_VSRIimm8:
848     case MVE_VSTRB16:
849     case MVE_VSTRB16_post:
850     case MVE_VSTRB16_pre:
851     case MVE_VSTRB16_rq:
852     case MVE_VSTRB32:
853     case MVE_VSTRB32_post:
854     case MVE_VSTRB32_pre:
855     case MVE_VSTRB32_rq:
856     case MVE_VSTRB8_rq:
857     case MVE_VSTRBU8:
858     case MVE_VSTRBU8_post:
859     case MVE_VSTRBU8_pre:
860     case MVE_VSTRD64_qi:
861     case MVE_VSTRD64_qi_pre:
862     case MVE_VSTRD64_rq:
863     case MVE_VSTRD64_rq_u:
864     case MVE_VSTRH16_rq:
865     case MVE_VSTRH16_rq_u:
866     case MVE_VSTRH32:
867     case MVE_VSTRH32_post:
868     case MVE_VSTRH32_pre:
869     case MVE_VSTRH32_rq:
870     case MVE_VSTRH32_rq_u:
871     case MVE_VSTRHU16:
872     case MVE_VSTRHU16_post:
873     case MVE_VSTRHU16_pre:
874     case MVE_VSTRW32_qi:
875     case MVE_VSTRW32_qi_pre:
876     case MVE_VSTRW32_rq:
877     case MVE_VSTRW32_rq_u:
878     case MVE_VSTRWU32:
879     case MVE_VSTRWU32_post:
880     case MVE_VSTRWU32_pre:
881     case MVE_VSUB_qr_f16:
882     case MVE_VSUB_qr_f32:
883     case MVE_VSUB_qr_i16:
884     case MVE_VSUB_qr_i32:
885     case MVE_VSUB_qr_i8:
886     case MVE_VSUBf16:
887     case MVE_VSUBf32:
888     case MVE_VSUBi16:
889     case MVE_VSUBi32:
890     case MVE_VSUBi8:
891       return true;
892     }
893   };
894 
895   LLVMInitializeARMTargetInfo();
896   LLVMInitializeARMTarget();
897   LLVMInitializeARMTargetMC();
898 
899   auto TT(Triple::normalize("thumbv8.1m.main-arm-none-eabi"));
900   std::string Error;
901   const Target *T = TargetRegistry::lookupTarget(TT, Error);
902   if (!T) {
903     dbgs() << Error;
904     return;
905   }
906 
907   TargetOptions Options;
908   auto TM = std::unique_ptr<LLVMTargetMachine>(
909     static_cast<LLVMTargetMachine*>(
910       T->createTargetMachine(TT, "generic", "", Options, None, None,
911                              CodeGenOpt::Default)));
912   ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
913                   std::string(TM->getTargetFeatureString()),
914                   *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
915   const ARMBaseInstrInfo *TII = ST.getInstrInfo();
916   auto MII = TM->getMCInstrInfo();
917 
918   for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
919     const MCInstrDesc &Desc = TII->get(i);
920 
921     for (auto &Op : Desc.operands()) {
922       // Only check instructions that access the MQPR regs.
923       if ((Op.OperandType & MCOI::OPERAND_REGISTER) == 0 ||
924           Op.RegClass != ARM::MQPRRegClassID)
925         continue;
926 
927       uint64_t Flags = MII->get(i).TSFlags;
928       bool Valid = (Flags & ARMII::ValidForTailPredication) != 0;
929       ASSERT_EQ(IsValidTPOpcode(i), Valid)
930                 << MII->getName(i)
931                 << ": mismatched expectation for tail-predicated safety\n";
932       break;
933     }
934   }
935 }
936 
937 TEST(MachineInstr, HasSideEffects) {
938   using namespace ARM;
939   std::set<unsigned> UnpredictableOpcodes = {
940       MVE_VCTP8,     MVE_VCTP16,    MVE_VCTP32,    MVE_VCTP64,    MVE_VPST,
941       MVE_VPTv16i8,  MVE_VPTv8i16,  MVE_VPTv4i32,  MVE_VPTv16i8r, MVE_VPTv8i16r,
942       MVE_VPTv4i32r, MVE_VPTv16s8,  MVE_VPTv8s16,  MVE_VPTv4s32,  MVE_VPTv16s8r,
943       MVE_VPTv8s16r, MVE_VPTv4s32r, MVE_VPTv16u8,  MVE_VPTv8u16,  MVE_VPTv4u32,
944       MVE_VPTv16u8r, MVE_VPTv8u16r, MVE_VPTv4u32r, MVE_VPTv8f16,  MVE_VPTv4f32,
945       MVE_VPTv8f16r, MVE_VPTv4f32r, MVE_VADC,      MVE_VADCI,     MVE_VSBC,
946       MVE_VSBCI,     MVE_VSHLC,
947   };
948 
949   LLVMInitializeARMTargetInfo();
950   LLVMInitializeARMTarget();
951   LLVMInitializeARMTargetMC();
952 
953   auto TT(Triple::normalize("thumbv8.1m.main-arm-none-eabi"));
954   std::string Error;
955   const Target *T = TargetRegistry::lookupTarget(TT, Error);
956   if (!T) {
957     dbgs() << Error;
958     return;
959   }
960 
961   TargetOptions Options;
962   auto TM = std::unique_ptr<LLVMTargetMachine>(
963       static_cast<LLVMTargetMachine *>(T->createTargetMachine(
964           TT, "generic", "", Options, None, None, CodeGenOpt::Default)));
965   ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
966                   std::string(TM->getTargetFeatureString()),
967                   *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
968   const ARMBaseInstrInfo *TII = ST.getInstrInfo();
969   auto MII = TM->getMCInstrInfo();
970 
971   for (unsigned Op = 0; Op < ARM::INSTRUCTION_LIST_END; ++Op) {
972     const MCInstrDesc &Desc = TII->get(Op);
973     if ((Desc.TSFlags & ARMII::DomainMask) != ARMII::DomainMVE)
974       continue;
975     if (UnpredictableOpcodes.count(Op))
976       continue;
977 
978     ASSERT_FALSE(Desc.hasUnmodeledSideEffects())
979         << MII->getName(Op) << " has unexpected side effects";
980   }
981 }
982