xref: /llvm-project/llvm/test/CodeGen/AMDGPU/block-should-not-be-in-alive-blocks.mir (revision e7900e695e7dfb36be8651d914a31f42a5d6c634)
1# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1031 -verify-machineinstrs -start-after=unreachable-mbb-elimination -stop-after=phi-node-elimination -o - %s | FileCheck %s
3
4# FIXME: Should be able to just use run-pass, but need to keep
5# LiveVariables live after for the verifier. Also -start-before
6# doesn't work here for some reason.
7
8# LiveVariables needs to remove %bb.3 from the live blocks for %1
9# after the phi is introduced, but was previously missed due to
10# encountering the flow block first in the depth first search.
11
12---
# Single machine function; the RUN line runs the pipeline from
# unreachable-mbb-elimination through phi-node-elimination over it.
13name:            live_variable_update
14tracksRegLiveness: true
15body:             |
  ; CHECK-LABEL: name: live_variable_update
  ; CHECK: bb.0:
  ; CHECK-NEXT:   successors: %bb.2(0x40000000), %bb.5(0x40000000)
  ; CHECK-NEXT:   liveins: $vgpr0, $sgpr4_sgpr5
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[COPY:%[0-9]+]]:sgpr_64 = COPY killed $sgpr4_sgpr5
  ; CHECK-NEXT:   [[COPY1:%[0-9]+]]:vgpr_32 = COPY killed $vgpr0
  ; CHECK-NEXT:   [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_NE_U32_e64 0, [[COPY1]], implicit $exec
  ; CHECK-NEXT:   [[COPY2:%[0-9]+]]:vgpr_32 = COPY killed [[COPY1]]
  ; CHECK-NEXT:   [[COPY3:%[0-9]+]]:sreg_32 = COPY $exec_lo, implicit-def $exec_lo
  ; CHECK-NEXT:   [[S_AND_B32_:%[0-9]+]]:sreg_32 = S_AND_B32 [[COPY3]], killed [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; CHECK-NEXT:   [[S_XOR_B32_:%[0-9]+]]:sreg_32 = S_XOR_B32 [[S_AND_B32_]], [[COPY3]], implicit-def dead $scc
  ; CHECK-NEXT:   $exec_lo = S_MOV_B32_term killed [[S_AND_B32_]]
  ; CHECK-NEXT:   S_CBRANCH_EXECZ %bb.5, implicit $exec
  ; CHECK-NEXT:   S_BRANCH %bb.2
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.1:
  ; CHECK-NEXT:   successors: %bb.7(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM killed [[COPY]], 0, 0 :: (dereferenceable invariant load (s64), align 16, addrspace 4)
  ; CHECK-NEXT:   [[V_ADD_CO_U32_e64_:%[0-9]+]]:vgpr_32, [[V_ADD_CO_U32_e64_1:%[0-9]+]]:sreg_32_xm0_xexec = V_ADD_CO_U32_e64 [[S_LOAD_DWORDX2_IMM]].sub0, killed %15, 0, implicit $exec
  ; CHECK-NEXT:   [[V_ADDC_U32_e64_:%[0-9]+]]:vgpr_32, dead [[V_ADDC_U32_e64_1:%[0-9]+]]:sreg_32_xm0_xexec = V_ADDC_U32_e64 0, killed [[S_LOAD_DWORDX2_IMM]].sub1, killed [[V_ADD_CO_U32_e64_1]], 0, implicit $exec
  ; CHECK-NEXT:   [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE killed [[V_ADD_CO_U32_e64_]], %subreg.sub0, killed [[V_ADDC_U32_e64_]], %subreg.sub1
  ; CHECK-NEXT:   [[GLOBAL_LOAD_UBYTE:%[0-9]+]]:vgpr_32 = GLOBAL_LOAD_UBYTE killed [[REG_SEQUENCE]], 0, 0, implicit $exec :: (load (s8), addrspace 1)
  ; CHECK-NEXT:   [[V_MOV_B:%[0-9]+]]:vreg_64 = V_MOV_B64_PSEUDO 0, implicit $exec
  ; CHECK-NEXT:   GLOBAL_STORE_BYTE killed [[V_MOV_B]], killed [[GLOBAL_LOAD_UBYTE]], 0, 0, implicit $exec :: (store (s8), addrspace 1)
  ; CHECK-NEXT:   S_BRANCH %bb.7
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.2:
  ; CHECK-NEXT:   successors: %bb.4(0x40000000), %bb.3(0x40000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   S_CBRANCH_SCC0 %bb.4, implicit undef $scc
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.3:
  ; CHECK-NEXT:   successors: %bb.6(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   S_BRANCH %bb.6
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.4:
  ; CHECK-NEXT:   successors: %bb.6(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[V_MOV_B1:%[0-9]+]]:vreg_64 = V_MOV_B64_PSEUDO 0, implicit $exec
  ; CHECK-NEXT:   dead [[GLOBAL_LOAD_UBYTE1:%[0-9]+]]:vgpr_32 = GLOBAL_LOAD_UBYTE killed [[V_MOV_B1]], 0, 0, implicit $exec :: (load (s8), addrspace 1)
  ; CHECK-NEXT:   S_BRANCH %bb.6
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.5:
  ; CHECK-NEXT:   successors: %bb.1(0x40000000), %bb.7(0x40000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[S_OR_SAVEEXEC_B32_:%[0-9]+]]:sreg_32 = S_OR_SAVEEXEC_B32 killed [[S_XOR_B32_]], implicit-def $exec, implicit-def $scc, implicit $exec
  ; CHECK-NEXT:   [[COPY4:%[0-9]+]]:vgpr_32 = COPY killed [[COPY2]]
  ; CHECK-NEXT:   [[S_AND_B32_1:%[0-9]+]]:sreg_32 = S_AND_B32 $exec_lo, [[S_OR_SAVEEXEC_B32_]], implicit-def $scc
  ; CHECK-NEXT:   $exec_lo = S_XOR_B32_term $exec_lo, [[S_AND_B32_1]], implicit-def $scc
  ; CHECK-NEXT:   S_CBRANCH_EXECZ %bb.7, implicit $exec
  ; CHECK-NEXT:   S_BRANCH %bb.1
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.6:
  ; CHECK-NEXT:   successors: %bb.5(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[COPY2:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
  ; CHECK-NEXT:   S_BRANCH %bb.5
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.7:
  ; CHECK-NEXT:   $exec_lo = S_OR_B32 $exec_lo, killed [[S_AND_B32_1]], implicit-def $scc
  ; CHECK-NEXT:   S_ENDPGM 0

  ; Input MIR below still contains the structured control-flow pseudos
  ; (SI_IF / SI_ELSE / SI_END_CF); the CHECK lines above were autogenerated
  ; and show the state after they are lowered to exec-mask manipulation.

  ; bb.0: defines %1 (copy of $vgpr0) and opens an SI_IF region on
  ; V_CMP_NE_U32(%1, 0); "then" path is bb.2, else target is bb.5.
80  bb.0:
81    successors: %bb.2(0x40000000), %bb.5(0x40000000)
82    liveins: $vgpr0, $sgpr4_sgpr5

83    %0:sgpr_64 = COPY $sgpr4_sgpr5
84    %1:vgpr_32 = COPY $vgpr0
85    %2:sreg_32 = V_CMP_NE_U32_e64 0, %1, implicit $exec
86    %3:sreg_32 = SI_IF killed %2, %bb.5, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
87    S_BRANCH %bb.2

  ; bb.1: uses %1 (the V_ADD_CO_U32 operand) — the use that keeps %1 live
  ; across the region; reached from bb.5 via SI_ELSE.
88  bb.1:
89    successors: %bb.7(0x80000000)

90    %4:sreg_64_xexec = S_LOAD_DWORDX2_IMM %0, 0, 0 :: (dereferenceable invariant load (s64), align 16, addrspace 4)
91    %5:vgpr_32, %6:sreg_32_xm0_xexec = V_ADD_CO_U32_e64 %4.sub0, %1, 0, implicit $exec
92    %7:vgpr_32, dead %8:sreg_32_xm0_xexec = V_ADDC_U32_e64 0, %4.sub1, killed %6, 0, implicit $exec
93    %9:vreg_64 = REG_SEQUENCE %5, %subreg.sub0, %7, %subreg.sub1
94    %10:vgpr_32 = GLOBAL_LOAD_UBYTE killed %9, 0, 0, implicit $exec :: (load (s8), addrspace 1)
95    %11:vreg_64 = V_MOV_B64_PSEUDO 0, implicit $exec
96    GLOBAL_STORE_BYTE killed %11, killed %10, 0, 0, implicit $exec :: (store (s8), addrspace 1)
97    S_BRANCH %bb.7

102  bb.2:
103    successors: %bb.4(0x40000000), %bb.3(0x40000000)

105    S_CBRANCH_SCC0 %bb.4, implicit undef $scc

  ; bb.3: the empty flow block this test is about — per the note at the top
  ; of the file, LiveVariables must remove it from %1's live blocks once the
  ; PHI is introduced, and previously missed it because the depth-first
  ; search hit the flow block first.
107  bb.3:
108    successors: %bb.6(0x80000000)

110    S_BRANCH %bb.6

112  bb.4:
113    successors: %bb.6(0x80000000)

115    %12:vreg_64 = V_MOV_B64_PSEUDO 0, implicit $exec
116    %13:vgpr_32 = GLOBAL_LOAD_UBYTE killed %12, 0, 0, implicit $exec :: (load (s8), addrspace 1)
117    S_BRANCH %bb.6

  ; bb.5: SI_ELSE flow block — continues to bb.1 on the else path,
  ; otherwise falls through to the join at bb.7.
119  bb.5:
120    successors: %bb.1(0x40000000), %bb.7(0x40000000)

122    %14:sreg_32 = SI_ELSE %3, %bb.7, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
123    S_BRANCH %bb.1

125  bb.6:
126    successors: %bb.5(0x80000000)

128    S_BRANCH %bb.5

  ; bb.7: join block; SI_END_CF closes the SI_IF/SI_ELSE region.
130  bb.7:
131    SI_END_CF %14, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
132    S_ENDPGM 0

134...
135