; xref: /llvm-project/llvm/test/CodeGen/AMDGPU/icmp64.ll (revision 9e9907f1cfa424366fba58d9520f9305b537cec9)
; RUN: llc -mtriple=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SI %s
; RUN: llc -mtriple=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,VI %s

; i64 equality, sign-extended to i32 and stored. Tonga (gfx8) has a scalar
; 64-bit compare (s_cmp_eq_u64); SI does not, so it selects the VALU form.
; GCN-LABEL: {{^}}test_i64_eq:
; VI: s_cmp_eq_u64
; SI: v_cmp_eq_u64
define amdgpu_kernel void @test_i64_eq(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp eq i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; i64 inequality. Tonga's scalar form is s_cmp_lg_u64 ("lg" = less/greater,
; i.e. not-equal); SI falls back to the VALU compare.
; GCN-LABEL: {{^}}test_i64_ne:
; VI: s_cmp_lg_u64
; SI: v_cmp_ne_u64
define amdgpu_kernel void @test_i64_ne(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp ne i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Signed i64 less-than. No scalar relational 64-bit compare exists on either
; target, so both select the VALU instruction.
; GCN-LABEL: {{^}}test_i64_slt:
; GCN: v_cmp_lt_i64
define amdgpu_kernel void @test_i64_slt(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp slt i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Unsigned i64 less-than; both targets use the unsigned VALU compare.
; GCN-LABEL: {{^}}test_i64_ult:
; GCN: v_cmp_lt_u64
define amdgpu_kernel void @test_i64_ult(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp ult i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Signed i64 less-or-equal; selected as the signed VALU compare on all targets.
; GCN-LABEL: {{^}}test_i64_sle:
; GCN: v_cmp_le_i64
define amdgpu_kernel void @test_i64_sle(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp sle i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Unsigned i64 less-or-equal; selected as the unsigned VALU compare on all targets.
; GCN-LABEL: {{^}}test_i64_ule:
; GCN: v_cmp_le_u64
define amdgpu_kernel void @test_i64_ule(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp ule i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Signed i64 greater-than; selected as the signed VALU compare on all targets.
; GCN-LABEL: {{^}}test_i64_sgt:
; GCN: v_cmp_gt_i64
define amdgpu_kernel void @test_i64_sgt(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp sgt i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Unsigned i64 greater-than; selected as the unsigned VALU compare on all targets.
; GCN-LABEL: {{^}}test_i64_ugt:
; GCN: v_cmp_gt_u64
define amdgpu_kernel void @test_i64_ugt(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp ugt i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Signed i64 greater-or-equal; selected as the signed VALU compare on all targets.
; GCN-LABEL: {{^}}test_i64_sge:
; GCN: v_cmp_ge_i64
define amdgpu_kernel void @test_i64_sge(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp sge i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}

; Unsigned i64 greater-or-equal; selected as the unsigned VALU compare on all targets.
; GCN-LABEL: {{^}}test_i64_uge:
; GCN: v_cmp_ge_u64
define amdgpu_kernel void @test_i64_uge(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind {
  %cmp = icmp uge i64 %a, %b
  %result = sext i1 %cmp to i32
  store i32 %result, ptr addrspace(1) %out, align 4
  ret void
}
