# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s

--- |
  define i32 @test_store_release_i64(i32 %a, ptr %addr) {
    ret i32 %a
  }

  define i32 @test_store_release_i32(i32 %a, ptr %addr) {
    ret i32 %a
  }

  define void @test_store_release_i8(i32, i8 %val, ptr %addr) { ret void }
  define void @test_store_release_i16(i32, i16 %val, ptr %addr) { ret void }
...
---
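# The s64 value operand is already register-sized, so the intrinsic should
# select directly to STLXRX with no extends in between.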
name:            test_store_release_i64
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $x1, $x2

    ; CHECK-LABEL: name: test_store_release_i64
    ; CHECK: liveins: $w0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr64 = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %2:gpr32 = STLXRX [[COPY]], [[COPY1]] :: (volatile store (s64) into %ir.addr)
    ; CHECK: $w0 = COPY %2
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s64) = COPY $x1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %1(s64), %2(p0) :: (volatile store (s64) into %ir.addr)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
---
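# The 32-bit value reaches the intrinsic zero-extended to s64; selection
# should look through the G_ZEXT and feed the original 32-bit register to
# STLXRW.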
name:            test_store_release_i32
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x2
    ; CHECK-LABEL: name: test_store_release_i32
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %3:gpr32 = STLXRW [[COPY]], [[COPY1]] :: (volatile store (s32) into %ir.addr)
    ; CHECK: $w0 = COPY %3
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s64) = G_ZEXT %1(s32)
    %4:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %3(s64), %2(p0) :: (volatile store (s32) into %ir.addr)
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0

...
---
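# Legalization represents the i8 value as an s64 masked with 255. Selection
# should narrow it back to a 32-bit register (IMPLICIT_DEF + INSERT_SUBREG +
# a copy of sub_32) and emit STLXRB.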
name:            test_store_release_i8
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i8
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRB [[COPY2]], [[COPY1]] :: (volatile store (s8) into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 255
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store (s8) into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...
---
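# Same shape as the i8 case, with a 65535 mask: the s64 operand is narrowed
# to a 32-bit register for STLXRH.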
name:            test_store_release_i16
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i16
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRH [[COPY2]], [[COPY1]] :: (volatile store (s16) into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 65535
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store (s16) into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...