From 6e2055b7da0c1f04858d27227630c944ee6fca26 Mon Sep 17 00:00:00 2001 From: Craig Topper Date: Mon, 1 May 2017 06:33:17 +0000 Subject: [PATCH] [X86] Add tests for opportunities to improve known bits for CTTZ and CTLZ. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@301791 91177308-0d34-0410-b5e6-96231b3b80d8 --- test/CodeGen/X86/clz.ll | 93 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) diff --git a/test/CodeGen/X86/clz.ll b/test/CodeGen/X86/clz.ll index cffc6732728..e9f59944adc 100644 --- a/test/CodeGen/X86/clz.ll +++ b/test/CodeGen/X86/clz.ll @@ -778,3 +778,96 @@ define i32 @ctlz_bsr_zero_test(i32 %n) { %bsr = xor i32 %ctlz, 31 ret i32 %bsr } + +define i8 @cttz_i8_knownbits(i8 %x) { +; X32-LABEL: cttz_i8_knownbits: +; X32: # BB#0: +; X32-NEXT: movb {{[0-9]+}}(%esp), %al +; X32-NEXT: orb $2, %al +; X32-NEXT: movzbl %al, %eax +; X32-NEXT: bsfl %eax, %eax +; X32-NEXT: andb $1, %al +; X32-NEXT: # kill: %AL %AL %EAX +; X32-NEXT: retl +; +; X64-LABEL: cttz_i8_knownbits: +; X64: # BB#0: +; X64-NEXT: orb $2, %dil +; X64-NEXT: movzbl %dil, %eax +; X64-NEXT: bsfl %eax, %eax +; X64-NEXT: andb $1, %al +; X64-NEXT: # kill: %AL %AL %EAX +; X64-NEXT: retq +; +; X32-CLZ-LABEL: cttz_i8_knownbits: +; X32-CLZ: # BB#0: +; X32-CLZ-NEXT: movb {{[0-9]+}}(%esp), %al +; X32-CLZ-NEXT: orb $2, %al +; X32-CLZ-NEXT: movzbl %al, %eax +; X32-CLZ-NEXT: tzcntl %eax, %eax +; X32-CLZ-NEXT: andb $1, %al +; X32-CLZ-NEXT: # kill: %AL %AL %EAX +; X32-CLZ-NEXT: retl +; +; X64-CLZ-LABEL: cttz_i8_knownbits: +; X64-CLZ: # BB#0: +; X64-CLZ-NEXT: orb $2, %dil +; X64-CLZ-NEXT: movzbl %dil, %eax +; X64-CLZ-NEXT: tzcntl %eax, %eax +; X64-CLZ-NEXT: andb $1, %al +; X64-CLZ-NEXT: # kill: %AL %AL %EAX +; X64-CLZ-NEXT: retq + %x2 = or i8 %x, 2 + %tmp = call i8 @llvm.cttz.i8(i8 %x2, i1 true ) + %tmp2 = and i8 %tmp, 1 + ret i8 %tmp2 +} + +define i8 @ctlz_i8_knownbits(i8 %x) { +; X32-LABEL: ctlz_i8_knownbits: +; X32: # BB#0: +; X32-NEXT: movb {{[0-9]+}}(%esp), %al +; X32-NEXT: orb $64, %al +; X32-NEXT: movzbl %al, %eax +; X32-NEXT: bsrl %eax, %eax +; X32-NEXT: notl %eax +; X32-NEXT: andb $1, %al +; X32-NEXT: # kill: %AL %AL %EAX +; X32-NEXT: retl +; +; X64-LABEL: ctlz_i8_knownbits: +; X64: # BB#0: +; X64-NEXT: orb $64, %dil +; X64-NEXT: movzbl %dil, %eax +; X64-NEXT: bsrl %eax, %eax +; X64-NEXT: notl %eax +; X64-NEXT: andb $1, %al +; X64-NEXT: # kill: %AL %AL %EAX +; X64-NEXT: retq +; +; X32-CLZ-LABEL: ctlz_i8_knownbits: +; X32-CLZ: # BB#0: +; X32-CLZ-NEXT: movb {{[0-9]+}}(%esp), %al +; X32-CLZ-NEXT: orb $64, %al +; X32-CLZ-NEXT: movzbl %al, %eax +; X32-CLZ-NEXT: lzcntl %eax, %eax +; X32-CLZ-NEXT: addl $-24, %eax +; X32-CLZ-NEXT: andb $1, %al +; X32-CLZ-NEXT: # kill: %AL %AL %EAX +; X32-CLZ-NEXT: retl +; +; X64-CLZ-LABEL: ctlz_i8_knownbits: +; X64-CLZ: # BB#0: +; X64-CLZ-NEXT: orb $64, %dil +; X64-CLZ-NEXT: movzbl %dil, %eax +; X64-CLZ-NEXT: lzcntl %eax, %eax +; X64-CLZ-NEXT: addl $-24, %eax +; X64-CLZ-NEXT: andb $1, %al +; X64-CLZ-NEXT: # kill: %AL %AL %EAX +; X64-CLZ-NEXT: retq + + %x2 = or i8 %x, 64 + %tmp = call i8 @llvm.ctlz.i8(i8 %x2, i1 true ) + %tmp2 = and i8 %tmp, 1 + ret i8 %tmp2 +} -- 2.50.1