// llvm/unittests/Target/ARM/MachineInstrTest.cpp
#include "ARMBaseInstrInfo.h"
#include "ARMSubtarget.h"
#include "ARMTargetMachine.h"
#include "llvm/Support/TargetRegistry.h"
#include "llvm/Support/TargetSelect.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"

#include "gtest/gtest.h"

#include <memory>
#include <set>

using namespace llvm;

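// Check that the ARMII::DoubleWidthResult TSFlag is set for exactly the MVE
// instructions whose result elements are twice as wide as their source
// elements (the VMULL, VQDMULL, VMOVL and VSHLL families).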
TEST(MachineInstructionDoubleWidthResult, IsCorrect) {
  using namespace ARM;

  auto DoubleWidthResult = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VMULLBp16:
    case MVE_VMULLBp8:
    case MVE_VMULLBs16:
    case MVE_VMULLBs32:
    case MVE_VMULLBs8:
    case MVE_VMULLBu16:
    case MVE_VMULLBu32:
    case MVE_VMULLBu8:
    case MVE_VMULLTp16:
    case MVE_VMULLTp8:
    case MVE_VMULLTs16:
    case MVE_VMULLTs32:
    case MVE_VMULLTs8:
    case MVE_VMULLTu16:
    case MVE_VMULLTu32:
    case MVE_VMULLTu8:
    case MVE_VQDMULL_qr_s16bh:
    case MVE_VQDMULL_qr_s16th:
    case MVE_VQDMULL_qr_s32bh:
    case MVE_VQDMULL_qr_s32th:
    case MVE_VQDMULLs16bh:
    case MVE_VQDMULLs16th:
    case MVE_VQDMULLs32bh:
    case MVE_VQDMULLs32th:
    case MVE_VMOVLs16bh:
    case MVE_VMOVLs16th:
    case MVE_VMOVLs8bh:
    case MVE_VMOVLs8th:
    case MVE_VMOVLu16bh:
    case MVE_VMOVLu16th:
    case MVE_VMOVLu8bh:
    case MVE_VMOVLu8th:
    case MVE_VSHLL_imms16bh:
    case MVE_VSHLL_imms16th:
    case MVE_VSHLL_imms8bh:
    case MVE_VSHLL_imms8th:
    case MVE_VSHLL_immu16bh:
    case MVE_VSHLL_immu16th:
    case MVE_VSHLL_immu8bh:
    case MVE_VSHLL_immu8th:
    case MVE_VSHLL_lws16bh:
    case MVE_VSHLL_lws16th:
    case MVE_VSHLL_lws8bh:
    case MVE_VSHLL_lws8th:
    case MVE_VSHLL_lwu16bh:
    case MVE_VSHLL_lwu16th:
    case MVE_VSHLL_lwu8bh:
    case MVE_VSHLL_lwu8th:
      return true;
    }
    return false;
  };

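  // Create a bare target machine and subtarget for thumbv8.1m.main so the MC
  // instruction descriptions (and their TSFlags) can be queried.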
  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

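  // Walk every opcode, skip anything outside the MVE domain, and check that
  // the flag matches the expected opcode list above.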
  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;

    bool Valid = (Flags & ARMII::DoubleWidthResult) != 0;
    ASSERT_EQ(DoubleWidthResult(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

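// Check that the ARMII::HorizontalReduction TSFlag is set for exactly the MVE
// instructions that reduce across the vector lanes into a scalar result
// (VABAV, VADDV/VADDLV, VMAXV/VMINV and the VMLADAV/VMLALDAV families).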
TEST(MachineInstructionHorizontalReduction, IsCorrect) {
  using namespace ARM;

  auto HorizontalReduction = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VABAVs16:
    case MVE_VABAVs32:
    case MVE_VABAVs8:
    case MVE_VABAVu16:
    case MVE_VABAVu32:
    case MVE_VABAVu8:
    case MVE_VADDLVs32acc:
    case MVE_VADDLVs32no_acc:
    case MVE_VADDLVu32acc:
    case MVE_VADDLVu32no_acc:
    case MVE_VADDVs16acc:
    case MVE_VADDVs16no_acc:
    case MVE_VADDVs32acc:
    case MVE_VADDVs32no_acc:
    case MVE_VADDVs8acc:
    case MVE_VADDVs8no_acc:
    case MVE_VADDVu16acc:
    case MVE_VADDVu16no_acc:
    case MVE_VADDVu32acc:
    case MVE_VADDVu32no_acc:
    case MVE_VADDVu8acc:
    case MVE_VADDVu8no_acc:
    case MVE_VMAXAVs16:
    case MVE_VMAXAVs32:
    case MVE_VMAXAVs8:
    case MVE_VMAXNMAVf16:
    case MVE_VMAXNMAVf32:
    case MVE_VMAXNMVf16:
    case MVE_VMAXNMVf32:
    case MVE_VMAXVs16:
    case MVE_VMAXVs32:
    case MVE_VMAXVs8:
    case MVE_VMAXVu16:
    case MVE_VMAXVu32:
    case MVE_VMAXVu8:
    case MVE_VMINAVs16:
    case MVE_VMINAVs32:
    case MVE_VMINAVs8:
    case MVE_VMINNMAVf16:
    case MVE_VMINNMAVf32:
    case MVE_VMINNMVf16:
    case MVE_VMINNMVf32:
    case MVE_VMINVs16:
    case MVE_VMINVs32:
    case MVE_VMINVs8:
    case MVE_VMINVu16:
    case MVE_VMINVu32:
    case MVE_VMINVu8:
    case MVE_VMLADAVas16:
    case MVE_VMLADAVas32:
    case MVE_VMLADAVas8:
    case MVE_VMLADAVau16:
    case MVE_VMLADAVau32:
    case MVE_VMLADAVau8:
    case MVE_VMLADAVaxs16:
    case MVE_VMLADAVaxs32:
    case MVE_VMLADAVaxs8:
    case MVE_VMLADAVs16:
    case MVE_VMLADAVs32:
    case MVE_VMLADAVs8:
    case MVE_VMLADAVu16:
    case MVE_VMLADAVu32:
    case MVE_VMLADAVu8:
    case MVE_VMLADAVxs16:
    case MVE_VMLADAVxs32:
    case MVE_VMLADAVxs8:
    case MVE_VMLALDAVas16:
    case MVE_VMLALDAVas32:
    case MVE_VMLALDAVau16:
    case MVE_VMLALDAVau32:
    case MVE_VMLALDAVaxs16:
    case MVE_VMLALDAVaxs32:
    case MVE_VMLALDAVs16:
    case MVE_VMLALDAVs32:
    case MVE_VMLALDAVu16:
    case MVE_VMLALDAVu32:
    case MVE_VMLALDAVxs16:
    case MVE_VMLALDAVxs32:
    case MVE_VMLSDAVas16:
    case MVE_VMLSDAVas32:
    case MVE_VMLSDAVas8:
    case MVE_VMLSDAVaxs16:
    case MVE_VMLSDAVaxs32:
    case MVE_VMLSDAVaxs8:
    case MVE_VMLSDAVs16:
    case MVE_VMLSDAVs32:
    case MVE_VMLSDAVs8:
    case MVE_VMLSDAVxs16:
    case MVE_VMLSDAVxs32:
    case MVE_VMLSDAVxs8:
    case MVE_VMLSLDAVas16:
    case MVE_VMLSLDAVas32:
    case MVE_VMLSLDAVaxs16:
    case MVE_VMLSLDAVaxs32:
    case MVE_VMLSLDAVs16:
    case MVE_VMLSLDAVs32:
    case MVE_VMLSLDAVxs16:
    case MVE_VMLSLDAVxs32:
    case MVE_VRMLALDAVHas32:
    case MVE_VRMLALDAVHau32:
    case MVE_VRMLALDAVHaxs32:
    case MVE_VRMLALDAVHs32:
    case MVE_VRMLALDAVHu32:
    case MVE_VRMLALDAVHxs32:
    case MVE_VRMLSLDAVHas32:
    case MVE_VRMLSLDAVHaxs32:
    case MVE_VRMLSLDAVHs32:
    case MVE_VRMLSLDAVHxs32:
      return true;
    }
    return false;
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;
    bool Valid = (Flags & ARMII::HorizontalReduction) != 0;
    ASSERT_EQ(HorizontalReduction(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

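// Check that the ARMII::RetainsPreviousHalfElement TSFlag is set for exactly
// the MVE narrowing instructions that write only the top or bottom half of
// each destination element and leave the other half unchanged (VMOVN, VQMOVN,
// the narrowing shifts and the f16/f32 converts).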
TEST(MachineInstructionRetainsPreviousHalfElement, IsCorrect) {
  using namespace ARM;

  auto RetainsPreviousHalfElement = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VMOVNi16bh:
    case MVE_VMOVNi16th:
    case MVE_VMOVNi32bh:
    case MVE_VMOVNi32th:
    case MVE_VQMOVNs16bh:
    case MVE_VQMOVNs16th:
    case MVE_VQMOVNs32bh:
    case MVE_VQMOVNs32th:
    case MVE_VQMOVNu16bh:
    case MVE_VQMOVNu16th:
    case MVE_VQMOVNu32bh:
    case MVE_VQMOVNu32th:
    case MVE_VQMOVUNs16bh:
    case MVE_VQMOVUNs16th:
    case MVE_VQMOVUNs32bh:
    case MVE_VQMOVUNs32th:
    case MVE_VQRSHRNbhs16:
    case MVE_VQRSHRNbhs32:
    case MVE_VQRSHRNbhu16:
    case MVE_VQRSHRNbhu32:
    case MVE_VQRSHRNths16:
    case MVE_VQRSHRNths32:
    case MVE_VQRSHRNthu16:
    case MVE_VQRSHRNthu32:
    case MVE_VQRSHRUNs16bh:
    case MVE_VQRSHRUNs16th:
    case MVE_VQRSHRUNs32bh:
    case MVE_VQRSHRUNs32th:
    case MVE_VQSHRNbhs16:
    case MVE_VQSHRNbhs32:
    case MVE_VQSHRNbhu16:
    case MVE_VQSHRNbhu32:
    case MVE_VQSHRNths16:
    case MVE_VQSHRNths32:
    case MVE_VQSHRNthu16:
    case MVE_VQSHRNthu32:
    case MVE_VQSHRUNs16bh:
    case MVE_VQSHRUNs16th:
    case MVE_VQSHRUNs32bh:
    case MVE_VQSHRUNs32th:
    case MVE_VRSHRNi16bh:
    case MVE_VRSHRNi16th:
    case MVE_VRSHRNi32bh:
    case MVE_VRSHRNi32th:
    case MVE_VSHRNi16bh:
    case MVE_VSHRNi16th:
    case MVE_VSHRNi32bh:
    case MVE_VSHRNi32th:
    case MVE_VCVTf16f32bh:
    case MVE_VCVTf16f32th:
    case MVE_VCVTf32f16bh:
    case MVE_VCVTf32f16th:
      return true;
    }
    return false;
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;

    bool Valid = (Flags & ARMII::RetainsPreviousHalfElement) != 0;
    ASSERT_EQ(RetainsPreviousHalfElement(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

// Test for instructions that aren't obviously valid within a tail-predicated
// loop. Such instructions should be marked up in their tablegen descriptions.
// Currently we conservatively disallow:
// - cross-beat carries.
// - complex operations.
// - horizontal operations with exchange.
// - byte swapping.
// - interleaved memory instructions.
// TODO: Add to this list once we can handle them safely.
TEST(MachineInstrValidTailPredication, IsCorrect) {

  using namespace ARM;

  auto IsValidTPOpcode = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      return false;
    case MVE_ASRLi:
    case MVE_ASRLr:
    case MVE_LSRL:
    case MVE_SQRSHR:
    case MVE_SQSHL:
    case MVE_SRSHR:
    case MVE_UQRSHL:
    case MVE_UQSHL:
    case MVE_URSHR:
    case MVE_VABDf16:
    case MVE_VABDf32:
    case MVE_VABDs16:
    case MVE_VABDs32:
    case MVE_VABDs8:
    case MVE_VABDu16:
    case MVE_VABDu32:
    case MVE_VABDu8:
    case MVE_VABSf16:
    case MVE_VABSf32:
    case MVE_VABSs16:
    case MVE_VABSs32:
    case MVE_VABSs8:
    case MVE_VADD_qr_f16:
    case MVE_VADD_qr_f32:
    case MVE_VADD_qr_i16:
    case MVE_VADD_qr_i32:
    case MVE_VADD_qr_i8:
    case MVE_VADDVs16acc:
    case MVE_VADDVs16no_acc:
    case MVE_VADDVs32acc:
    case MVE_VADDVs32no_acc:
    case MVE_VADDVs8acc:
    case MVE_VADDVs8no_acc:
    case MVE_VADDVu16acc:
    case MVE_VADDVu16no_acc:
    case MVE_VADDVu32acc:
    case MVE_VADDVu32no_acc:
    case MVE_VADDVu8acc:
    case MVE_VADDVu8no_acc:
    case MVE_VADDf16:
    case MVE_VADDf32:
    case MVE_VADDi16:
    case MVE_VADDi32:
    case MVE_VADDi8:
    case MVE_VAND:
    case MVE_VBIC:
    case MVE_VBICimmi16:
    case MVE_VBICimmi32:
    case MVE_VBRSR16:
    case MVE_VBRSR32:
    case MVE_VBRSR8:
    case MVE_VCLSs16:
    case MVE_VCLSs32:
    case MVE_VCLSs8:
    case MVE_VCLZs16:
    case MVE_VCLZs32:
    case MVE_VCLZs8:
    case MVE_VCMPf16:
    case MVE_VCMPf16r:
    case MVE_VCMPf32:
    case MVE_VCMPf32r:
    case MVE_VCMPi16:
    case MVE_VCMPi16r:
    case MVE_VCMPi32:
    case MVE_VCMPi32r:
    case MVE_VCMPi8:
    case MVE_VCMPi8r:
    case MVE_VCMPs16:
    case MVE_VCMPs16r:
    case MVE_VCMPs32:
    case MVE_VCMPs32r:
    case MVE_VCMPs8:
    case MVE_VCMPs8r:
    case MVE_VCMPu16:
    case MVE_VCMPu16r:
    case MVE_VCMPu32:
    case MVE_VCMPu32r:
    case MVE_VCMPu8:
    case MVE_VCMPu8r:
    case MVE_VCTP16:
    case MVE_VCTP32:
    case MVE_VCTP64:
    case MVE_VCTP8:
    case MVE_VCVTf16s16_fix:
    case MVE_VCVTf16s16n:
    case MVE_VCVTf16u16_fix:
    case MVE_VCVTf16u16n:
    case MVE_VCVTf32s32_fix:
    case MVE_VCVTf32s32n:
    case MVE_VCVTf32u32_fix:
    case MVE_VCVTf32u32n:
    case MVE_VCVTs16f16_fix:
    case MVE_VCVTs16f16a:
    case MVE_VCVTs16f16m:
    case MVE_VCVTs16f16n:
    case MVE_VCVTs16f16p:
    case MVE_VCVTs16f16z:
    case MVE_VCVTs32f32_fix:
    case MVE_VCVTs32f32a:
    case MVE_VCVTs32f32m:
    case MVE_VCVTs32f32n:
    case MVE_VCVTs32f32p:
    case MVE_VCVTs32f32z:
    case MVE_VCVTu16f16_fix:
    case MVE_VCVTu16f16a:
    case MVE_VCVTu16f16m:
    case MVE_VCVTu16f16n:
    case MVE_VCVTu16f16p:
    case MVE_VCVTu16f16z:
    case MVE_VCVTu32f32_fix:
    case MVE_VCVTu32f32a:
    case MVE_VCVTu32f32m:
    case MVE_VCVTu32f32n:
    case MVE_VCVTu32f32p:
    case MVE_VCVTu32f32z:
    case MVE_VDDUPu16:
    case MVE_VDDUPu32:
    case MVE_VDDUPu8:
    case MVE_VDUP16:
    case MVE_VDUP32:
    case MVE_VDUP8:
    case MVE_VDWDUPu16:
    case MVE_VDWDUPu32:
    case MVE_VDWDUPu8:
    case MVE_VEOR:
    case MVE_VFMA_qr_Sf16:
    case MVE_VFMA_qr_Sf32:
    case MVE_VFMA_qr_f16:
    case MVE_VFMA_qr_f32:
    case MVE_VFMAf16:
    case MVE_VFMAf32:
    case MVE_VFMSf16:
    case MVE_VFMSf32:
    case MVE_VMAXAs16:
    case MVE_VMAXAs32:
    case MVE_VMAXAs8:
    case MVE_VMAXs16:
    case MVE_VMAXs32:
    case MVE_VMAXs8:
    case MVE_VMAXu16:
    case MVE_VMAXu32:
    case MVE_VMAXu8:
    case MVE_VMINAs16:
    case MVE_VMINAs32:
    case MVE_VMINAs8:
    case MVE_VMINs16:
    case MVE_VMINs32:
    case MVE_VMINs8:
    case MVE_VMINu16:
    case MVE_VMINu32:
    case MVE_VMINu8:
    case MVE_VMLADAVas16:
    case MVE_VMLADAVas32:
    case MVE_VMLADAVas8:
    case MVE_VMLADAVau16:
    case MVE_VMLADAVau32:
    case MVE_VMLADAVau8:
    case MVE_VMLADAVs16:
    case MVE_VMLADAVs32:
    case MVE_VMLADAVs8:
    case MVE_VMLADAVu16:
    case MVE_VMLADAVu32:
    case MVE_VMLADAVu8:
    case MVE_VMLALDAVs16:
    case MVE_VMLALDAVs32:
    case MVE_VMLALDAVu16:
    case MVE_VMLALDAVu32:
    case MVE_VMLALDAVas16:
    case MVE_VMLALDAVas32:
    case MVE_VMLALDAVau16:
    case MVE_VMLALDAVau32:
    case MVE_VMLSDAVas16:
    case MVE_VMLSDAVas32:
    case MVE_VMLSDAVas8:
    case MVE_VMLSDAVs16:
    case MVE_VMLSDAVs32:
    case MVE_VMLSDAVs8:
    case MVE_VMLSLDAVas16:
    case MVE_VMLSLDAVas32:
    case MVE_VMLSLDAVs16:
    case MVE_VMLSLDAVs32:
    case MVE_VRMLALDAVHas32:
    case MVE_VRMLALDAVHau32:
    case MVE_VRMLALDAVHs32:
    case MVE_VRMLALDAVHu32:
    case MVE_VRMLSLDAVHas32:
    case MVE_VRMLSLDAVHs32:
    case MVE_VMLAS_qr_s16:
    case MVE_VMLAS_qr_s32:
    case MVE_VMLAS_qr_s8:
    case MVE_VMLAS_qr_u16:
    case MVE_VMLAS_qr_u32:
    case MVE_VMLAS_qr_u8:
    case MVE_VMLA_qr_s16:
    case MVE_VMLA_qr_s32:
    case MVE_VMLA_qr_s8:
    case MVE_VMLA_qr_u16:
    case MVE_VMLA_qr_u32:
    case MVE_VMLA_qr_u8:
    case MVE_VHADD_qr_s16:
    case MVE_VHADD_qr_s32:
    case MVE_VHADD_qr_s8:
    case MVE_VHADD_qr_u16:
    case MVE_VHADD_qr_u32:
    case MVE_VHADD_qr_u8:
    case MVE_VHADDs16:
    case MVE_VHADDs32:
    case MVE_VHADDs8:
    case MVE_VHADDu16:
    case MVE_VHADDu32:
    case MVE_VHADDu8:
    case MVE_VHSUB_qr_s16:
    case MVE_VHSUB_qr_s32:
    case MVE_VHSUB_qr_s8:
    case MVE_VHSUB_qr_u16:
    case MVE_VHSUB_qr_u32:
    case MVE_VHSUB_qr_u8:
    case MVE_VHSUBs16:
    case MVE_VHSUBs32:
    case MVE_VHSUBs8:
    case MVE_VHSUBu16:
    case MVE_VHSUBu32:
    case MVE_VHSUBu8:
    case MVE_VIDUPu16:
    case MVE_VIDUPu32:
    case MVE_VIDUPu8:
    case MVE_VIWDUPu16:
    case MVE_VIWDUPu32:
    case MVE_VIWDUPu8:
    case MVE_VLD20_8:
    case MVE_VLD21_8:
    case MVE_VLD20_16:
    case MVE_VLD21_16:
    case MVE_VLD20_32:
    case MVE_VLD21_32:
    case MVE_VLD20_8_wb:
    case MVE_VLD21_8_wb:
    case MVE_VLD20_16_wb:
    case MVE_VLD21_16_wb:
    case MVE_VLD20_32_wb:
    case MVE_VLD21_32_wb:
    case MVE_VLD40_8:
    case MVE_VLD41_8:
    case MVE_VLD42_8:
    case MVE_VLD43_8:
    case MVE_VLD40_16:
    case MVE_VLD41_16:
    case MVE_VLD42_16:
    case MVE_VLD43_16:
    case MVE_VLD40_32:
    case MVE_VLD41_32:
    case MVE_VLD42_32:
    case MVE_VLD43_32:
    case MVE_VLD40_8_wb:
    case MVE_VLD41_8_wb:
    case MVE_VLD42_8_wb:
    case MVE_VLD43_8_wb:
    case MVE_VLD40_16_wb:
    case MVE_VLD41_16_wb:
    case MVE_VLD42_16_wb:
    case MVE_VLD43_16_wb:
    case MVE_VLD40_32_wb:
    case MVE_VLD41_32_wb:
    case MVE_VLD42_32_wb:
    case MVE_VLD43_32_wb:
    case MVE_VLDRBS16:
    case MVE_VLDRBS16_post:
    case MVE_VLDRBS16_pre:
    case MVE_VLDRBS16_rq:
    case MVE_VLDRBS32:
    case MVE_VLDRBS32_post:
    case MVE_VLDRBS32_pre:
    case MVE_VLDRBS32_rq:
    case MVE_VLDRBU16:
    case MVE_VLDRBU16_post:
    case MVE_VLDRBU16_pre:
    case MVE_VLDRBU16_rq:
    case MVE_VLDRBU32:
    case MVE_VLDRBU32_post:
    case MVE_VLDRBU32_pre:
    case MVE_VLDRBU32_rq:
    case MVE_VLDRBU8:
    case MVE_VLDRBU8_post:
    case MVE_VLDRBU8_pre:
    case MVE_VLDRBU8_rq:
    case MVE_VLDRDU64_qi:
    case MVE_VLDRDU64_qi_pre:
    case MVE_VLDRDU64_rq:
    case MVE_VLDRDU64_rq_u:
    case MVE_VLDRHS32:
    case MVE_VLDRHS32_post:
    case MVE_VLDRHS32_pre:
    case MVE_VLDRHS32_rq:
    case MVE_VLDRHS32_rq_u:
    case MVE_VLDRHU16:
    case MVE_VLDRHU16_post:
    case MVE_VLDRHU16_pre:
    case MVE_VLDRHU16_rq:
    case MVE_VLDRHU16_rq_u:
    case MVE_VLDRHU32:
    case MVE_VLDRHU32_post:
    case MVE_VLDRHU32_pre:
    case MVE_VLDRHU32_rq:
    case MVE_VLDRHU32_rq_u:
    case MVE_VLDRWU32:
    case MVE_VLDRWU32_post:
    case MVE_VLDRWU32_pre:
    case MVE_VLDRWU32_qi:
    case MVE_VLDRWU32_qi_pre:
    case MVE_VLDRWU32_rq:
    case MVE_VLDRWU32_rq_u:
    case MVE_VMOVimmf32:
    case MVE_VMOVimmi16:
    case MVE_VMOVimmi32:
    case MVE_VMOVimmi64:
    case MVE_VMOVimmi8:
    case MVE_VMOVNi16bh:
    case MVE_VMOVNi16th:
    case MVE_VMOVNi32bh:
    case MVE_VMOVNi32th:
    case MVE_VMULLBp16:
    case MVE_VMULLBp8:
    case MVE_VMULLBs16:
    case MVE_VMULLBs32:
    case MVE_VMULLBs8:
    case MVE_VMULLBu16:
    case MVE_VMULLBu32:
    case MVE_VMULLBu8:
    case MVE_VMULLTp16:
    case MVE_VMULLTp8:
    case MVE_VMULLTs16:
    case MVE_VMULLTs32:
    case MVE_VMULLTs8:
    case MVE_VMULLTu16:
    case MVE_VMULLTu32:
    case MVE_VMULLTu8:
    case MVE_VMUL_qr_f16:
    case MVE_VMUL_qr_f32:
    case MVE_VMUL_qr_i16:
    case MVE_VMUL_qr_i32:
    case MVE_VMUL_qr_i8:
    case MVE_VMULf16:
    case MVE_VMULf32:
    case MVE_VMULi16:
    case MVE_VMULi8:
    case MVE_VMULi32:
    case MVE_VMVN:
    case MVE_VMVNimmi16:
    case MVE_VMVNimmi32:
    case MVE_VNEGf16:
    case MVE_VNEGf32:
    case MVE_VNEGs16:
    case MVE_VNEGs32:
    case MVE_VNEGs8:
    case MVE_VORN:
    case MVE_VORR:
    case MVE_VORRimmi16:
    case MVE_VORRimmi32:
    case MVE_VPST:
    case MVE_VQABSs16:
    case MVE_VQABSs32:
    case MVE_VQABSs8:
    case MVE_VQADD_qr_s16:
    case MVE_VQADD_qr_s32:
    case MVE_VQADD_qr_s8:
    case MVE_VQADD_qr_u16:
    case MVE_VQADD_qr_u32:
    case MVE_VQADD_qr_u8:
    case MVE_VQADDs16:
    case MVE_VQADDs32:
    case MVE_VQADDs8:
    case MVE_VQADDu16:
    case MVE_VQADDu32:
    case MVE_VQADDu8:
    case MVE_VQDMULL_qr_s16bh:
    case MVE_VQDMULL_qr_s16th:
    case MVE_VQDMULL_qr_s32bh:
    case MVE_VQDMULL_qr_s32th:
    case MVE_VQDMULLs16bh:
    case MVE_VQDMULLs16th:
    case MVE_VQDMULLs32bh:
    case MVE_VQDMULLs32th:
    case MVE_VQNEGs16:
    case MVE_VQNEGs32:
    case MVE_VQNEGs8:
    case MVE_VQMOVNs16bh:
    case MVE_VQMOVNs16th:
    case MVE_VQMOVNs32bh:
    case MVE_VQMOVNs32th:
    case MVE_VQMOVNu16bh:
    case MVE_VQMOVNu16th:
    case MVE_VQMOVNu32bh:
    case MVE_VQMOVNu32th:
    case MVE_VQMOVUNs16bh:
    case MVE_VQMOVUNs16th:
    case MVE_VQMOVUNs32bh:
    case MVE_VQMOVUNs32th:
    case MVE_VQRSHL_by_vecs16:
    case MVE_VQRSHL_by_vecs32:
    case MVE_VQRSHL_by_vecs8:
    case MVE_VQRSHL_by_vecu16:
    case MVE_VQRSHL_by_vecu32:
    case MVE_VQRSHL_by_vecu8:
    case MVE_VQRSHL_qrs16:
    case MVE_VQRSHL_qrs32:
    case MVE_VQRSHL_qrs8:
    case MVE_VQRSHL_qru16:
    case MVE_VQRSHL_qru8:
    case MVE_VQRSHL_qru32:
    case MVE_VQSHLU_imms16:
    case MVE_VQSHLU_imms32:
    case MVE_VQSHLU_imms8:
    case MVE_VQSHLimms16:
    case MVE_VQSHLimms32:
    case MVE_VQSHLimms8:
    case MVE_VQSHLimmu16:
    case MVE_VQSHLimmu32:
    case MVE_VQSHLimmu8:
    case MVE_VQSHL_by_vecs16:
    case MVE_VQSHL_by_vecs32:
    case MVE_VQSHL_by_vecs8:
    case MVE_VQSHL_by_vecu16:
    case MVE_VQSHL_by_vecu32:
    case MVE_VQSHL_by_vecu8:
    case MVE_VQSHL_qrs16:
    case MVE_VQSHL_qrs32:
    case MVE_VQSHL_qrs8:
    case MVE_VQSHL_qru16:
    case MVE_VQSHL_qru32:
    case MVE_VQSHL_qru8:
    case MVE_VQRSHRNbhs16:
    case MVE_VQRSHRNbhs32:
    case MVE_VQRSHRNbhu16:
    case MVE_VQRSHRNbhu32:
    case MVE_VQRSHRNths16:
    case MVE_VQRSHRNths32:
    case MVE_VQRSHRNthu16:
    case MVE_VQRSHRNthu32:
    case MVE_VQRSHRUNs16bh:
    case MVE_VQRSHRUNs16th:
    case MVE_VQRSHRUNs32bh:
    case MVE_VQRSHRUNs32th:
    case MVE_VQSHRNbhs16:
    case MVE_VQSHRNbhs32:
    case MVE_VQSHRNbhu16:
    case MVE_VQSHRNbhu32:
    case MVE_VQSHRNths16:
    case MVE_VQSHRNths32:
    case MVE_VQSHRNthu16:
    case MVE_VQSHRNthu32:
    case MVE_VQSHRUNs16bh:
    case MVE_VQSHRUNs16th:
    case MVE_VQSHRUNs32bh:
    case MVE_VQSHRUNs32th:
    case MVE_VQSUB_qr_s16:
    case MVE_VQSUB_qr_s32:
    case MVE_VQSUB_qr_s8:
    case MVE_VQSUB_qr_u16:
    case MVE_VQSUB_qr_u32:
    case MVE_VQSUB_qr_u8:
    case MVE_VQSUBs16:
    case MVE_VQSUBs32:
    case MVE_VQSUBs8:
    case MVE_VQSUBu16:
    case MVE_VQSUBu32:
    case MVE_VQSUBu8:
    case MVE_VRHADDs16:
    case MVE_VRHADDs32:
    case MVE_VRHADDs8:
    case MVE_VRHADDu16:
    case MVE_VRHADDu32:
    case MVE_VRHADDu8:
    case MVE_VRINTf16A:
    case MVE_VRINTf16M:
    case MVE_VRINTf16N:
    case MVE_VRINTf16P:
    case MVE_VRINTf16X:
    case MVE_VRINTf16Z:
    case MVE_VRINTf32A:
    case MVE_VRINTf32M:
    case MVE_VRINTf32N:
    case MVE_VRINTf32P:
    case MVE_VRINTf32X:
    case MVE_VRINTf32Z:
    case MVE_VRSHL_by_vecs16:
    case MVE_VRSHL_by_vecs32:
    case MVE_VRSHL_by_vecs8:
    case MVE_VRSHL_by_vecu16:
    case MVE_VRSHL_by_vecu32:
    case MVE_VRSHL_by_vecu8:
    case MVE_VRSHL_qrs16:
    case MVE_VRSHL_qrs32:
    case MVE_VRSHL_qrs8:
    case MVE_VRSHL_qru16:
    case MVE_VRSHL_qru32:
    case MVE_VRSHL_qru8:
    case MVE_VRSHR_imms16:
    case MVE_VRSHR_imms32:
    case MVE_VRSHR_imms8:
    case MVE_VRSHR_immu16:
    case MVE_VRSHR_immu32:
    case MVE_VRSHR_immu8:
    case MVE_VRSHRNi16bh:
    case MVE_VRSHRNi16th:
    case MVE_VRSHRNi32bh:
    case MVE_VRSHRNi32th:
    case MVE_VSHL_by_vecs16:
    case MVE_VSHL_by_vecs32:
    case MVE_VSHL_by_vecs8:
    case MVE_VSHL_by_vecu16:
    case MVE_VSHL_by_vecu32:
    case MVE_VSHL_by_vecu8:
    case MVE_VSHL_immi16:
    case MVE_VSHL_immi32:
    case MVE_VSHL_immi8:
    case MVE_VSHL_qrs16:
    case MVE_VSHL_qrs32:
    case MVE_VSHL_qrs8:
    case MVE_VSHL_qru16:
    case MVE_VSHL_qru32:
    case MVE_VSHL_qru8:
    case MVE_VSHR_imms16:
    case MVE_VSHR_imms32:
    case MVE_VSHR_imms8:
    case MVE_VSHR_immu16:
    case MVE_VSHR_immu32:
    case MVE_VSHR_immu8:
    case MVE_VSHRNi16bh:
    case MVE_VSHRNi16th:
    case MVE_VSHRNi32bh:
    case MVE_VSHRNi32th:
    case MVE_VSLIimm16:
    case MVE_VSLIimm32:
    case MVE_VSLIimm8:
    case MVE_VSRIimm16:
    case MVE_VSRIimm32:
    case MVE_VSRIimm8:
    case MVE_VSTRB16:
    case MVE_VSTRB16_post:
    case MVE_VSTRB16_pre:
    case MVE_VSTRB16_rq:
    case MVE_VSTRB32:
    case MVE_VSTRB32_post:
    case MVE_VSTRB32_pre:
    case MVE_VSTRB32_rq:
    case MVE_VSTRB8_rq:
    case MVE_VSTRBU8:
    case MVE_VSTRBU8_post:
    case MVE_VSTRBU8_pre:
    case MVE_VSTRD64_qi:
    case MVE_VSTRD64_qi_pre:
    case MVE_VSTRD64_rq:
    case MVE_VSTRD64_rq_u:
    case MVE_VSTRH16_rq:
    case MVE_VSTRH16_rq_u:
    case MVE_VSTRH32:
    case MVE_VSTRH32_post:
    case MVE_VSTRH32_pre:
    case MVE_VSTRH32_rq:
    case MVE_VSTRH32_rq_u:
    case MVE_VSTRHU16:
    case MVE_VSTRHU16_post:
    case MVE_VSTRHU16_pre:
    case MVE_VSTRW32_qi:
    case MVE_VSTRW32_qi_pre:
    case MVE_VSTRW32_rq:
    case MVE_VSTRW32_rq_u:
    case MVE_VSTRWU32:
    case MVE_VSTRWU32_post:
    case MVE_VSTRWU32_pre:
    case MVE_VSUB_qr_f16:
    case MVE_VSUB_qr_f32:
    case MVE_VSUB_qr_i16:
    case MVE_VSUB_qr_i32:
    case MVE_VSUB_qr_i8:
    case MVE_VSUBf16:
    case MVE_VSUBf32:
    case MVE_VSUBi16:
    case MVE_VSUBi32:
    case MVE_VSUBi8:
      return true;
    }
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    for (auto &Op : Desc.operands()) {
      // Only check instructions that access the MQPR regs.
      if ((Op.OperandType & MCOI::OPERAND_REGISTER) == 0 ||
          (Op.RegClass != ARM::MQPRRegClassID &&
           Op.RegClass != ARM::QQPRRegClassID &&
           Op.RegClass != ARM::QQQQPRRegClassID))
        continue;

      uint64_t Flags = MII->get(i).TSFlags;
      bool Valid = (Flags & ARMII::ValidForTailPredication) != 0;
      ASSERT_EQ(IsValidTPOpcode(i), Valid)
                << MII->getName(i)
                << ": mismatched expectation for tail-predicated safety\n";
      break;
    }
  }
}

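// Check that no MVE, VFP or NEON instruction is marked as having unmodeled
// side effects unless it appears in the UnpredictableOpcodes list below
// (the VPT/VCTP predication setup instructions, the carry-using
// VADC/VSBC/VSHLC, and the FP system-register and FP-context load, store and
// move instructions).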
TEST(MachineInstr, HasSideEffects) {
  using namespace ARM;
  std::set<unsigned> UnpredictableOpcodes = {
      // MVE Instructions
      MVE_VCTP8,
      MVE_VCTP16,
      MVE_VCTP32,
      MVE_VCTP64,
      MVE_VPST,
      MVE_VPTv16i8,
      MVE_VPTv8i16,
      MVE_VPTv4i32,
      MVE_VPTv16i8r,
      MVE_VPTv8i16r,
      MVE_VPTv4i32r,
      MVE_VPTv16s8,
      MVE_VPTv8s16,
      MVE_VPTv4s32,
      MVE_VPTv16s8r,
      MVE_VPTv8s16r,
      MVE_VPTv4s32r,
      MVE_VPTv16u8,
      MVE_VPTv8u16,
      MVE_VPTv4u32,
      MVE_VPTv16u8r,
      MVE_VPTv8u16r,
      MVE_VPTv4u32r,
      MVE_VPTv8f16,
      MVE_VPTv4f32,
      MVE_VPTv8f16r,
      MVE_VPTv4f32r,
      MVE_VADC,
      MVE_VADCI,
      MVE_VSBC,
      MVE_VSBCI,
      MVE_VSHLC,
      // FP Instructions
      FLDMXIA,
      FLDMXDB_UPD,
      FLDMXIA_UPD,
      FSTMXDB_UPD,
      FSTMXIA,
      FSTMXIA_UPD,
      VLDR_FPCXTNS_off,
      VLDR_FPCXTNS_post,
      VLDR_FPCXTNS_pre,
      VLDR_FPCXTS_off,
      VLDR_FPCXTS_post,
      VLDR_FPCXTS_pre,
      VLDR_FPSCR_NZCVQC_off,
      VLDR_FPSCR_NZCVQC_post,
      VLDR_FPSCR_NZCVQC_pre,
      VLDR_FPSCR_off,
      VLDR_FPSCR_post,
      VLDR_FPSCR_pre,
      VLDR_P0_off,
      VLDR_P0_post,
      VLDR_P0_pre,
      VLDR_VPR_off,
      VLDR_VPR_post,
      VLDR_VPR_pre,
      VLLDM,
      VLSTM,
      VMRS,
      VMRS_FPCXTNS,
      VMRS_FPCXTS,
      VMRS_FPEXC,
      VMRS_FPINST,
      VMRS_FPINST2,
      VMRS_FPSCR_NZCVQC,
      VMRS_FPSID,
      VMRS_MVFR0,
      VMRS_MVFR1,
      VMRS_MVFR2,
      VMRS_P0,
      VMRS_VPR,
      VMSR,
      VMSR_FPCXTNS,
      VMSR_FPCXTS,
      VMSR_FPEXC,
      VMSR_FPINST,
      VMSR_FPINST2,
      VMSR_FPSCR_NZCVQC,
      VMSR_FPSID,
      VMSR_P0,
      VMSR_VPR,
      VSCCLRMD,
      VSCCLRMS,
      VSTR_FPCXTNS_off,
      VSTR_FPCXTNS_post,
      VSTR_FPCXTNS_pre,
      VSTR_FPCXTS_off,
      VSTR_FPCXTS_post,
      VSTR_FPCXTS_pre,
      VSTR_FPSCR_NZCVQC_off,
      VSTR_FPSCR_NZCVQC_post,
      VSTR_FPSCR_NZCVQC_pre,
      VSTR_FPSCR_off,
      VSTR_FPSCR_post,
      VSTR_FPSCR_pre,
      VSTR_P0_off,
      VSTR_P0_post,
      VSTR_P0_pre,
      VSTR_VPR_off,
      VSTR_VPR_post,
      VSTR_VPR_pre,
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
      static_cast<LLVMTargetMachine *>(T->createTargetMachine(
          TT, "generic", "", Options, None, None, CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned Op = 0; Op < ARM::INSTRUCTION_LIST_END; ++Op) {
    const MCInstrDesc &Desc = TII->get(Op);
    if ((Desc.TSFlags &
         (ARMII::DomainMVE | ARMII::DomainVFP | ARMII::DomainNEONA8)) == 0)
      continue;
    if (UnpredictableOpcodes.count(Op))
      continue;

    ASSERT_FALSE(Desc.hasUnmodeledSideEffects())
        << MII->getName(Op) << " has unexpected side effects";
  }
}
1154