# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64 -O0 -run-pass=legalizer -global-isel-abort=1 %s -o - | FileCheck %s
---
# An unmerge that feeds from another unmerge of a G_CONCAT_VECTORS tree should
# be folded so the stores read the source G_BUILD_VECTOR values directly.
# This test reads the low 6 elements of the 24-element concat; the remaining
# unmerge results are dead.
name:            combine_unmerge_from_unmerge_of_concat_tree
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
  liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK-LABEL: name: combine_unmerge_from_unmerge_of_concat_tree
    ; CHECK: liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $d0
    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $d1
    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $d2
    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $d3
    ; CHECK: [[COPY7:%[0-9]+]]:_(s64) = COPY $d4
    ; CHECK: [[COPY8:%[0-9]+]]:_(s64) = COPY $d5
    ; CHECK: %v2s64_val:_(<2 x s64>) = G_BUILD_VECTOR [[COPY5]](s64), [[COPY6]](s64)
    ; CHECK: %v2s64_val2:_(<2 x s64>) = G_BUILD_VECTOR [[COPY6]](s64), [[COPY8]](s64)
    ; CHECK: G_STORE %v2s64_val(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: RET_ReallyLR
  %0:_(s64)  = COPY $x0
  %1:_(s64)  = COPY $x1
  %2:_(p0)  = COPY $x2
  %3:_(s64) = COPY $d0
  %4:_(s64) = COPY $d1
  %5:_(s64) = COPY $d2
  %6:_(s64) = COPY $d3
  %7:_(s64) = COPY $d4
  %8:_(s64) = COPY $d5
  %v2s64_val = G_BUILD_VECTOR %5:_(s64), %6:_(s64)
  %v2s64_val2 = G_BUILD_VECTOR %6:_(s64), %8:_(s64)
  %v4s64_val1:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val:_(<2 x s64>), %v2s64_val2:_(<2 x s64>)
  %v4s64_val2:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val2:_(<2 x s64>), %v2s64_val:_(<2 x s64>)
  %v8s64_undef:_(<8 x s64>) = G_IMPLICIT_DEF
  %concat1:_(<8 x s64>) = G_CONCAT_VECTORS %v4s64_val1:_(<4 x s64>), %v4s64_val2:_(<4 x s64>)
  %bigconcat:_(<24 x s64>) = G_CONCAT_VECTORS %concat1:_(<8 x s64>), %v8s64_undef:_(<8 x s64>), %v8s64_undef:_(<8 x s64>)

  %unmerge1:_(<6 x s64>), %deaddef1:_(<6 x s64>), %deaddef2:_(<6 x s64>), %deaddef3:_(<6 x s64>) = G_UNMERGE_VALUES %bigconcat:_(<24 x s64>)
  %val1:_(<2 x s64>), %val2:_(<2 x s64>), %val3:_(<2 x s64>) = G_UNMERGE_VALUES %unmerge1:_(<6 x s64>)

  G_STORE %val1:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  G_STORE %val2:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  G_STORE %val3:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  RET_ReallyLR

...

---
# Same fold as combine_unmerge_from_unmerge_of_concat_tree, but the values of
# interest are the high 6 elements of the 24-element concat (elements 2-7 of
# %concat1); the unmerge defs covering the low bits are dead.
name:            combine_unmerge_from_unmerge_of_concat_tree_high_bits
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
  liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK-LABEL: name: combine_unmerge_from_unmerge_of_concat_tree_high_bits
    ; CHECK: liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $d0
    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $d1
    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $d2
    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $d3
    ; CHECK: [[COPY7:%[0-9]+]]:_(s64) = COPY $d4
    ; CHECK: [[COPY8:%[0-9]+]]:_(s64) = COPY $d5
    ; CHECK: %v2s64_val:_(<2 x s64>) = G_BUILD_VECTOR [[COPY5]](s64), [[COPY6]](s64)
    ; CHECK: %v2s64_val2:_(<2 x s64>) = G_BUILD_VECTOR [[COPY6]](s64), [[COPY8]](s64)
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: RET_ReallyLR
  %0:_(s64)  = COPY $x0
  %1:_(s64)  = COPY $x1
  %2:_(p0)  = COPY $x2
  %3:_(s64) = COPY $d0
  %4:_(s64) = COPY $d1
  %5:_(s64) = COPY $d2
  %6:_(s64) = COPY $d3
  %7:_(s64) = COPY $d4
  %8:_(s64) = COPY $d5
  %v2s64_val = G_BUILD_VECTOR %5:_(s64), %6:_(s64)
  %v2s64_val2 = G_BUILD_VECTOR %6:_(s64), %8:_(s64)
  %v4s64_val1:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val:_(<2 x s64>), %v2s64_val2:_(<2 x s64>)
  %v4s64_val2:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val2:_(<2 x s64>), %v2s64_val:_(<2 x s64>)
  %v8s64_undef:_(<8 x s64>) = G_IMPLICIT_DEF
  %concat1:_(<8 x s64>) = G_CONCAT_VECTORS %v4s64_val1:_(<4 x s64>), %v4s64_val2:_(<4 x s64>)
  %bigconcat:_(<24 x s64>) = G_CONCAT_VECTORS %v8s64_undef:_(<8 x s64>), %v8s64_undef:_(<8 x s64>), %concat1:_(<8 x s64>)

  %deaddef1:_(<6 x s64>), %deaddef2:_(<6 x s64>), %deaddef3:_(<6 x s64>), %unmerge1:_(<6 x s64>) = G_UNMERGE_VALUES %bigconcat:_(<24 x s64>)
  %val1:_(<2 x s64>), %val2:_(<2 x s64>), %val3:_(<2 x s64>) = G_UNMERGE_VALUES %unmerge1:_(<6 x s64>)

  G_STORE %val1:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  G_STORE %val2:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  G_STORE %val3:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  RET_ReallyLR

...
---
# Unmerge of a G_INSERT that writes the low 256 bits of an undef container:
# the low unmerge results should fold straight to the inserted value's pieces.
name:            combine_unmerge_from_insert_into_low
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
  liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK-LABEL: name: combine_unmerge_from_insert_into_low
    ; CHECK: liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $d0
    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $d1
    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $d2
    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $d3
    ; CHECK: [[COPY7:%[0-9]+]]:_(s64) = COPY $d4
    ; CHECK: [[COPY8:%[0-9]+]]:_(s64) = COPY $d5
    ; CHECK: %v2s64_val:_(<2 x s64>) = G_BUILD_VECTOR [[COPY5]](s64), [[COPY6]](s64)
    ; CHECK: %v2s64_val2:_(<2 x s64>) = G_BUILD_VECTOR [[COPY6]](s64), [[COPY8]](s64)
    ; CHECK: G_STORE %v2s64_val(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: RET_ReallyLR
  %0:_(s64)  = COPY $x0
  %1:_(s64)  = COPY $x1
  %2:_(p0)  = COPY $x2
  %3:_(s64) = COPY $d0
  %4:_(s64) = COPY $d1
  %5:_(s64) = COPY $d2
  %6:_(s64) = COPY $d3
  %7:_(s64) = COPY $d4
  %8:_(s64) = COPY $d5
  %v2s64_val = G_BUILD_VECTOR %5:_(s64), %6:_(s64)
  %v2s64_val2 = G_BUILD_VECTOR %6:_(s64), %8:_(s64)
  %v4s64_val1:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val:_(<2 x s64>), %v2s64_val2:_(<2 x s64>)
  %v8s64_undef:_(<8 x s64>) = G_IMPLICIT_DEF
  %insert:_(<8 x s64>) = G_INSERT %v8s64_undef:_(<8 x s64>), %v4s64_val1:_(<4 x s64>), 0
  %val1:_(<2 x s64>), %val2:_(<2 x s64>), %val3:_(<2 x s64>), %val4:_(<2 x s64>) = G_UNMERGE_VALUES %insert:_(<8 x s64>)

  ; val1 should be <%5, %6>
  G_STORE %val1:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  ; val2 should be <%6, %8>
  G_STORE %val2:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  RET_ReallyLR

...
---
# Unmerge of a G_INSERT that writes at bit offset 256 (elements 4-7) of an
# undef container: the high unmerge results should fold to the inserted value.
name:            combine_unmerge_from_insert_into_high
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
  liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK-LABEL: name: combine_unmerge_from_insert_into_high
    ; CHECK: liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $d0
    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $d1
    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $d2
    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $d3
    ; CHECK: [[COPY7:%[0-9]+]]:_(s64) = COPY $d4
    ; CHECK: [[COPY8:%[0-9]+]]:_(s64) = COPY $d5
    ; CHECK: %v2s64_val:_(<2 x s64>) = G_BUILD_VECTOR [[COPY5]](s64), [[COPY6]](s64)
    ; CHECK: %v2s64_val2:_(<2 x s64>) = G_BUILD_VECTOR [[COPY6]](s64), [[COPY8]](s64)
    ; CHECK: G_STORE %v2s64_val(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: RET_ReallyLR
  %0:_(s64)  = COPY $x0
  %1:_(s64)  = COPY $x1
  %2:_(p0)  = COPY $x2
  %3:_(s64) = COPY $d0
  %4:_(s64) = COPY $d1
  %5:_(s64) = COPY $d2
  %6:_(s64) = COPY $d3
  %7:_(s64) = COPY $d4
  %8:_(s64) = COPY $d5
  %v2s64_val = G_BUILD_VECTOR %5:_(s64), %6:_(s64)
  %v2s64_val2 = G_BUILD_VECTOR %6:_(s64), %8:_(s64)
  %v4s64_val1:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val:_(<2 x s64>), %v2s64_val2:_(<2 x s64>)
  %v8s64_undef:_(<8 x s64>) = G_IMPLICIT_DEF
  %insert:_(<8 x s64>) = G_INSERT %v8s64_undef:_(<8 x s64>), %v4s64_val1:_(<4 x s64>), 256
  %val1:_(<2 x s64>), %val2:_(<2 x s64>), %val3:_(<2 x s64>), %val4:_(<2 x s64>) = G_UNMERGE_VALUES %insert:_(<8 x s64>)

  ; val3 should be <%5, %6>
  G_STORE %val3:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  ; val4 should be <%6, %8>
  G_STORE %val4:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  RET_ReallyLR

...
---
# The G_INSERT overwrites only the low half of its container with undef, so the
# values of interest remain in the container's high half; the artifact combiner
# must look through the insert into the container operand to find them.
name:            combine_unmerge_from_insert_look_into_container
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
  liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK-LABEL: name: combine_unmerge_from_insert_look_into_container
    ; CHECK: liveins: $x0, $x1, $x2, $d0, $d1, $d2, $d3, $d4, $d5, $d6, $d7, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $d0
    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $d1
    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $d2
    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $d3
    ; CHECK: [[COPY7:%[0-9]+]]:_(s64) = COPY $d4
    ; CHECK: [[COPY8:%[0-9]+]]:_(s64) = COPY $d5
    ; CHECK: %v2s64_val:_(<2 x s64>) = G_BUILD_VECTOR [[COPY5]](s64), [[COPY6]](s64)
    ; CHECK: %v2s64_val2:_(<2 x s64>) = G_BUILD_VECTOR [[COPY6]](s64), [[COPY8]](s64)
    ; CHECK: G_STORE %v2s64_val(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: G_STORE %v2s64_val2(<2 x s64>), [[COPY2]](p0) :: (store (<2 x s64>))
    ; CHECK: RET_ReallyLR
  %0:_(s64)  = COPY $x0
  %1:_(s64)  = COPY $x1
  %2:_(p0)  = COPY $x2
  %3:_(s64) = COPY $d0
  %4:_(s64) = COPY $d1
  %5:_(s64) = COPY $d2
  %6:_(s64) = COPY $d3
  %7:_(s64) = COPY $d4
  %8:_(s64) = COPY $d5
  %v2s64_val = G_BUILD_VECTOR %5:_(s64), %6:_(s64)
  %v2s64_val2 = G_BUILD_VECTOR %6:_(s64), %8:_(s64)
  %v4s64_undef:_(<4 x s64>) = G_IMPLICIT_DEF
  %v4s64_val1:_(<4 x s64>) = G_CONCAT_VECTORS %v2s64_val:_(<2 x s64>), %v2s64_val2:_(<2 x s64>)
  %v8s64_val1:_(<8 x s64>) = G_CONCAT_VECTORS %v4s64_undef:_(<4 x s64>), %v4s64_val1:_(<4 x s64>)
  %insert:_(<8 x s64>) = G_INSERT %v8s64_val1:_(<8 x s64>), %v4s64_undef:_(<4 x s64>), 0
  ; The values we're interested in are in bits 256-512 of the insert container.
  %val1:_(<2 x s64>), %val2:_(<2 x s64>), %val3:_(<2 x s64>), %val4:_(<2 x s64>) = G_UNMERGE_VALUES %insert:_(<8 x s64>)

  ; val3 should be <%5, %6>
  G_STORE %val3:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  ; val4 should be <%6, %8>
  G_STORE %val4:_(<2 x s64>), %2:_(p0) :: (store (<2 x s64>))
  RET_ReallyLR

...
