; xref: /llvm-project/llvm/test/Transforms/CodeGenPrepare/X86/cttz-ctlz.ll (revision f67388232384682fb442d6e5501d9259c41fd714)
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -passes="require<profile-summary>,function(codegenprepare)" < %s | FileCheck %s --check-prefix=SLOW
; RUN: opt -S -passes="require<profile-summary>,function(codegenprepare)" -mattr=+bmi < %s | FileCheck %s --check-prefix=FAST_TZ
; RUN: opt -S -passes="require<profile-summary>,function(codegenprepare)" -mattr=+lzcnt < %s | FileCheck %s --check-prefix=FAST_LZ

; RUN: opt -S -enable-debugify -passes="require<profile-summary>,function(codegenprepare)" < %s | FileCheck %s --check-prefix=DEBUGINFO
; RUN: opt -S -enable-debugify -passes="require<profile-summary>,function(codegenprepare)" --try-experimental-debuginfo-iterators < %s | FileCheck %s --check-prefix=DEBUGINFO

; Target/datalayout for the module: tests are specific to x86-64 since the
; SLOW/FAST_TZ/FAST_LZ RUN lines toggle the BMI (tzcnt) and LZCNT features.
target triple = "x86_64-unknown-unknown"
target datalayout = "e-n32:64"

; If the intrinsic is cheap, nothing should change.
; If the intrinsic is expensive, check if the input is zero to avoid the call.
; This is undoing speculation that may have been created by SimplifyCFG + InstCombine.

; Count trailing zeros of an i64 in its own entry block (no surrounding
; speculation). CodeGenPrepare should leave the call untouched on every
; configuration here; the CHECK lines are autogenerated — do not hand-edit them.
define i64 @cttz(i64 %A) {
; SLOW-LABEL: @cttz(
; SLOW-NEXT:  entry:
; SLOW-NEXT:    [[Z:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false)
; SLOW-NEXT:    ret i64 [[Z]]
;
; FAST_TZ-LABEL: @cttz(
; FAST_TZ-NEXT:  entry:
; FAST_TZ-NEXT:    [[Z:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false)
; FAST_TZ-NEXT:    ret i64 [[Z]]
;
; FAST_LZ-LABEL: @cttz(
; FAST_LZ-NEXT:  entry:
; FAST_LZ-NEXT:    [[Z:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false)
; FAST_LZ-NEXT:    ret i64 [[Z]]
;
; DEBUGINFO-LABEL: @cttz(
; DEBUGINFO-NEXT:  entry:
; DEBUGINFO-NEXT:    [[Z:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false), !dbg [[DBG11:![0-9]+]]
; DEBUGINFO-NEXT:      #dbg_value(i64 [[Z]], [[META9:![0-9]+]], !DIExpression(), [[DBG11]])
; DEBUGINFO-NEXT:    ret i64 [[Z]], !dbg [[DBG12:![0-9]+]]
;
entry:
  %z = call i64 @llvm.cttz.i64(i64 %A, i1 false)
  ret i64 %z
}
; Count leading zeros of an i64, mirroring @cttz above. Expected to be left
; as-is by CodeGenPrepare in all configurations; CHECK lines are autogenerated
; by update_test_checks.py — regenerate rather than hand-edit.
define i64 @ctlz(i64 %A) {
; SLOW-LABEL: @ctlz(
; SLOW-NEXT:  entry:
; SLOW-NEXT:    [[Z:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false)
; SLOW-NEXT:    ret i64 [[Z]]
;
; FAST_TZ-LABEL: @ctlz(
; FAST_TZ-NEXT:  entry:
; FAST_TZ-NEXT:    [[Z:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false)
; FAST_TZ-NEXT:    ret i64 [[Z]]
;
; FAST_LZ-LABEL: @ctlz(
; FAST_LZ-NEXT:  entry:
; FAST_LZ-NEXT:    [[Z:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false)
; FAST_LZ-NEXT:    ret i64 [[Z]]
;
; DEBUGINFO-LABEL: @ctlz(
; DEBUGINFO-NEXT:  entry:
; DEBUGINFO-NEXT:    [[Z:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false), !dbg [[DBG16:![0-9]+]]
; DEBUGINFO-NEXT:      #dbg_value(i64 [[Z]], [[META15:![0-9]+]], !DIExpression(), [[DBG16]])
; DEBUGINFO-NEXT:    ret i64 [[Z]], !dbg [[DBG17:![0-9]+]]
;
entry:
  %z = call i64 @llvm.ctlz.i64(i64 %A, i1 false)
  ret i64 %z
}
; Intrinsic declarations used by the tests above (second i1 operand:
; is_zero_poison flag; both call sites pass false).
declare i64 @llvm.cttz.i64(i64, i1)
declare i64 @llvm.ctlz.i64(i64, i1)
