# xref: /llvm-project/llvm/test/CodeGen/Thumb2/LowOverheadLoops/wls-search-pred.mir (revision 60442f0d442723a487528bdd8b48b24657a025e8)
# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=thumbv8.1m.main -mattr=+mve %s -run-pass=arm-mve-vpt-opts --verify-machineinstrs -o - | FileCheck %s

--- |
  target datalayout = "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64"
  target triple = "thumbv8.1m.main-none-unknown-eabihf"

  define void @test_memset_preheader(ptr %x, ptr %y, i32 %n) {
  entry:
    %cmp6 = icmp ne i32 %n, 0
    %0 = call { i32, i1 } @llvm.test.start.loop.iterations.i32(i32 %n)
    %1 = extractvalue { i32, i1 } %0, 1
    %2 = extractvalue { i32, i1 } %0, 0
    br i1 %1, label %prehead, label %for.cond.cleanup

  prehead:                                          ; preds = %entry
    call void @llvm.memset.p0.i32(ptr align 1 %x, i8 0, i32 %n, i1 false)
    br label %for.body

  for.body:                                         ; preds = %for.body, %prehead
    %x.addr.08 = phi ptr [ %add.ptr, %for.body ], [ %x, %prehead ]
    %y.addr.07 = phi ptr [ %add.ptr1, %for.body ], [ %y, %prehead ]
    %3 = phi i32 [ %2, %prehead ], [ %4, %for.body ]
    %add.ptr = getelementptr inbounds i8, ptr %x.addr.08, i32 1
    %add.ptr1 = getelementptr inbounds i8, ptr %y.addr.07, i32 1
    %l = load i8, ptr %x.addr.08, align 1
    store i8 %l, ptr %y.addr.07, align 1
    %4 = call i32 @llvm.loop.decrement.reg.i32(i32 %3, i32 1)
    %5 = icmp ne i32 %4, 0
    br i1 %5, label %for.body, label %for.cond.cleanup

  for.cond.cleanup:                                 ; preds = %for.body, %entry
    ret void
  }

  declare void @llvm.memset.p0.i32(ptr nocapture writeonly, i8, i32, i1 immarg)
  declare { i32, i1 } @llvm.test.start.loop.iterations.i32(i32)
  declare i32 @llvm.loop.decrement.reg.i32(i32, i32)

...
---
name:            test_memset_preheader
tracksRegLiveness: true
liveins:
  - { reg: '$r0', virtual-reg: '%7' }
  - { reg: '$r1', virtual-reg: '%8' }
  - { reg: '$r2', virtual-reg: '%9' }
body:             |
  ; CHECK-LABEL: name: test_memset_preheader
  ; CHECK: bb.0.entry:
  ; CHECK:   successors: %bb.1(0x40000000), %bb.5(0x40000000)
  ; CHECK:   liveins: $r0, $r1, $r2
  ; CHECK:   [[COPY:%[0-9]+]]:rgpr = COPY $r2
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr = COPY $r1
  ; CHECK:   [[COPY2:%[0-9]+]]:rgpr = COPY $r0
  ; CHECK:   t2CMPri [[COPY]], 0, 14 /* CC::al */, $noreg, implicit-def $cpsr
  ; CHECK:   t2Bcc %bb.5, 0 /* CC::eq */, $cpsr
  ; CHECK:   t2B %bb.1, 14 /* CC::al */, $noreg
  ; CHECK: bb.1.prehead:
  ; CHECK:   successors: %bb.3(0x40000000), %bb.2(0x40000000)
  ; CHECK:   [[DEF:%[0-9]+]]:mqpr = IMPLICIT_DEF
  ; CHECK:   [[MVE_VMOVimmi32_:%[0-9]+]]:mqpr = MVE_VMOVimmi32 0, 0, $noreg, $noreg, [[DEF]]
  ; CHECK:   [[t2ADDri:%[0-9]+]]:rgpr = t2ADDri [[COPY]], 15, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   [[t2BICri:%[0-9]+]]:rgpr = t2BICri killed [[t2ADDri]], 16, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   [[t2LSRri:%[0-9]+]]:gprlr = t2LSRri killed [[t2BICri]], 4, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   [[t2WhileLoopStartTP:%[0-9]+]]:gprlr = t2WhileLoopStartTP killed [[t2LSRri]], [[COPY]], %bb.3, implicit-def $cpsr
  ; CHECK: bb.2:
  ; CHECK:   successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; CHECK:   [[PHI:%[0-9]+]]:rgpr = PHI [[COPY2]], %bb.1, %11, %bb.2
  ; CHECK:   [[PHI1:%[0-9]+]]:gprlr = PHI [[t2WhileLoopStartTP]], %bb.1, %13, %bb.2
  ; CHECK:   [[PHI2:%[0-9]+]]:rgpr = PHI [[COPY]], %bb.1, %15, %bb.2
  ; CHECK:   [[MVE_VCTP8_:%[0-9]+]]:vccr = MVE_VCTP8 [[PHI2]], 0, $noreg, $noreg
  ; CHECK:   [[t2SUBri:%[0-9]+]]:rgpr = t2SUBri [[PHI2]], 16, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   [[MVE_VSTRBU8_post:%[0-9]+]]:rgpr = MVE_VSTRBU8_post [[MVE_VMOVimmi32_]], [[PHI]], 16, 1, [[MVE_VCTP8_]], [[PHI1]]
  ; CHECK:   [[t2LoopEndDec:%[0-9]+]]:gprlr = t2LoopEndDec [[PHI1]], %bb.2, implicit-def $cpsr
  ; CHECK:   t2B %bb.3, 14 /* CC::al */, $noreg
  ; CHECK: bb.3.prehead:
  ; CHECK:   successors: %bb.4(0x80000000)
  ; CHECK:   [[t2DoLoopStart:%[0-9]+]]:gprlr = t2DoLoopStart [[COPY]]
  ; CHECK:   t2B %bb.4, 14 /* CC::al */, $noreg
  ; CHECK: bb.4.for.body:
  ; CHECK:   successors: %bb.4(0x7c000000), %bb.5(0x04000000)
  ; CHECK:   [[PHI3:%[0-9]+]]:gpr = PHI [[COPY2]], %bb.3, %19, %bb.4
  ; CHECK:   [[PHI4:%[0-9]+]]:gpr = PHI [[COPY1]], %bb.3, %21, %bb.4
  ; CHECK:   [[PHI5:%[0-9]+]]:gprlr = PHI [[t2DoLoopStart]], %bb.3, %26, %bb.4
  ; CHECK:   [[t2LDRB_POST:%[0-9]+]]:rgpr, [[t2LDRB_POST1:%[0-9]+]]:gpr = t2LDRB_POST [[PHI3]], 1, 14 /* CC::al */, $noreg :: (load (s8) from %ir.x.addr.08)
  ; CHECK:   early-clobber %25:gprnopc = t2STRB_POST killed [[t2LDRB_POST]], [[PHI4]], 1, 14 /* CC::al */, $noreg :: (store (s8) into %ir.y.addr.07)
  ; CHECK:   [[COPY3:%[0-9]+]]:gpr = COPY %25
  ; CHECK:   [[t2LoopEndDec1:%[0-9]+]]:gprlr = t2LoopEndDec [[PHI5]], %bb.4, implicit-def $cpsr
  ; CHECK:   t2B %bb.5, 14 /* CC::al */, $noreg
  ; CHECK: bb.5.for.cond.cleanup:
  ; CHECK:   tBX_RET 14 /* CC::al */, $noreg
  bb.0.entry:
    successors: %bb.1(0x40000000), %bb.3(0x40000000)
    liveins: $r0, $r1, $r2

    %9:rgpr = COPY $r2
    %8:gpr = COPY $r1
    %7:rgpr = COPY $r0
    %10:gprlr = t2WhileLoopSetup %9
    t2WhileLoopStart %10, %bb.3, implicit-def dead $cpsr
    t2B %bb.1, 14 /* CC::al */, $noreg

  bb.1.prehead:
    successors: %bb.5(0x40000000), %bb.4(0x40000000)

    %12:mqpr = IMPLICIT_DEF
    %11:mqpr = MVE_VMOVimmi32 0, 0, $noreg, $noreg, %12
    %17:rgpr = t2ADDri %9, 15, 14 /* CC::al */, $noreg, $noreg
    %18:rgpr = t2BICri killed %17, 16, 14 /* CC::al */, $noreg, $noreg
    %19:gprlr = t2LSRri killed %18, 4, 14 /* CC::al */, $noreg, $noreg
    %20:gprlr = t2WhileLoopSetup killed %19
    t2WhileLoopStart %20, %bb.5, implicit-def $cpsr

  bb.4:
    successors: %bb.4(0x40000000), %bb.5(0x40000000)

    %21:rgpr = PHI %7, %bb.1, %22, %bb.4
    %23:gprlr = PHI %20, %bb.1, %24, %bb.4
    %25:rgpr = PHI %9, %bb.1, %26, %bb.4
    %27:vccr = MVE_VCTP8 %25, 0, $noreg, $noreg
    %26:rgpr = t2SUBri %25, 16, 14 /* CC::al */, $noreg, $noreg
    %22:rgpr = MVE_VSTRBU8_post %11, %21, 16, 1, %27, $noreg
    %24:gprlr = t2LoopDec %23, 1
    t2LoopEnd %24, %bb.4, implicit-def $cpsr
    t2B %bb.5, 14 /* CC::al */, $noreg

  bb.5.prehead:
    successors: %bb.2(0x80000000)

    %0:gpr = COPY %10
    t2B %bb.2, 14 /* CC::al */, $noreg

  bb.2.for.body:
    successors: %bb.2(0x7c000000), %bb.3(0x04000000)

    %1:gpr = PHI %7, %bb.5, %4, %bb.2
    %2:gpr = PHI %8, %bb.5, %5, %bb.2
    %3:gprlr = PHI %0, %bb.5, %6, %bb.2
    %13:rgpr, %4:gpr = t2LDRB_POST %1, 1, 14 /* CC::al */, $noreg :: (load (s8) from %ir.x.addr.08)
    early-clobber %14:gprnopc = t2STRB_POST killed %13, %2, 1, 14 /* CC::al */, $noreg :: (store (s8) into %ir.y.addr.07)
    %15:gprlr = t2LoopDec %3, 1
    %5:gpr = COPY %14
    %6:gpr = COPY %15
    t2LoopEnd %15, %bb.2, implicit-def dead $cpsr
    t2B %bb.3, 14 /* CC::al */, $noreg

  bb.3.for.cond.cleanup:
    tBX_RET 14 /* CC::al */, $noreg

...