return true;
break;
default: {
- if (Op.getOpcode() >= ISD::BUILTIN_OP_END)
+ if (Op.getOpcode() >= ISD::BUILTIN_OP_END) {
if (SimplifyDemandedVectorEltsForTargetNode(Op, DemandedElts, KnownUndef,
KnownZero, TLO, Depth))
return true;
+ } else {
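+ // No target-specific handling for this node, so fall back to
+ // SimplifyDemandedBits, demanding every bit of each demanded element.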
+ KnownBits Known;
+ APInt DemandedBits = APInt::getAllOnesValue(EltSizeInBits);
+ if (SimplifyDemandedBits(Op, DemandedBits, DemandedEltMask, Known, TLO,
+ Depth, AssumeSingleUse))
+ return true;
+ }
break;
}
}
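; The umax with <65535,-1,-1,262143> forces lanes 1 and 2 to all-ones, the
; shuffle reads only those two lanes, and ashr of all-ones is still all-ones,
; so the whole sequence folds to a constant all-ones vector (vpcmpeqd).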
define <4 x i32> @knownbits_umax_shuffle_ashr(<4 x i32> %a0) {
; X32-LABEL: knownbits_umax_shuffle_ashr:
; X32: # %bb.0:
-; X32-NEXT: vpmaxud {{\.LCPI.*}}, %xmm0, %xmm0
-; X32-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
+; X32-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; X32-NEXT: retl
;
; X64-LABEL: knownbits_umax_shuffle_ashr:
; X64: # %bb.0:
-; X64-NEXT: vpmaxud {{.*}}(%rip), %xmm0, %xmm0
-; X64-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
+; X64-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; X64-NEXT: retq
%1 = call <4 x i32> @llvm.x86.sse41.pmaxud(<4 x i32> %a0, <4 x i32> <i32 65535, i32 -1, i32 -1, i32 262143>)
%2 = shufflevector <4 x i32> %1, <4 x i32> undef, <4 x i32> <i32 1, i32 1, i32 2, i32 2>
%3 = ashr <4 x i32> %2, <i32 31, i32 31, i32 31, i32 31>
ret <4 x i32> %3
}
declare <4 x i32> @llvm.x86.sse41.pmaxud(<4 x i32>, <4 x i32>) nounwind readnone
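; In the checks below, multiplies by 8199 that previously materialized the
; constant a second time for the scalar lane (movl $8199 + vmovd) can now
; reuse the existing [8199,8199,8199,8199] splat register for the scalar
; vpmulld as well, in each of the AVX1/AVX2 configurations.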
; X86-AVX1-NEXT: vpinsrd $3, {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 4-byte Folded Reload
; X86-AVX1-NEXT: vmovd {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 4-byte Folded Reload
; X86-AVX1-NEXT: # xmm2 = mem[0],zero,zero,zero
-; X86-AVX1-NEXT: movl $8199, %eax # imm = 0x2007
-; X86-AVX1-NEXT: vmovd %eax, %xmm3
-; X86-AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [8199,8199,8199,8199]
-; X86-AVX1-NEXT: vpmulld %xmm4, %xmm0, %xmm0
-; X86-AVX1-NEXT: vpmulld %xmm4, %xmm1, %xmm1
+; X86-AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [8199,8199,8199,8199]
+; X86-AVX1-NEXT: vpmulld %xmm3, %xmm0, %xmm0
+; X86-AVX1-NEXT: vpmulld %xmm3, %xmm1, %xmm1
; X86-AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; X86-AVX1-NEXT: vpmulld %xmm3, %xmm2, %xmm1
; X86-AVX1-NEXT: vmovd %xmm1, (%eax)
; X86-AVX2-NEXT: vmovd %edx, %xmm0
; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} ymm2 = [8199,8199,8199,8199,8199,8199,8199,8199]
; X86-AVX2-NEXT: vpmulld %ymm2, %ymm1, %ymm1
-; X86-AVX2-NEXT: movl $8199, %eax # imm = 0x2007
-; X86-AVX2-NEXT: vmovd %eax, %xmm2
; X86-AVX2-NEXT: vpmulld %xmm2, %xmm0, %xmm0
; X86-AVX2-NEXT: vmovd %xmm0, (%eax)
; X86-AVX2-NEXT: vmovdqa %ymm1, (%eax)
; X64-AVX1-NEXT: vpinsrd $1, %r11d, %xmm2, %xmm2
; X64-AVX1-NEXT: vpinsrd $2, %r10d, %xmm2, %xmm2
; X64-AVX1-NEXT: vpinsrd $3, %r9d, %xmm2, %xmm2
+; X64-AVX1-NEXT: vpmulld %xmm1, %xmm2, %xmm2
+; X64-AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
+; X64-AVX1-NEXT: vmovd %r8d, %xmm2
; X64-AVX1-NEXT: vpmulld %xmm1, %xmm2, %xmm1
-; X64-AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
-; X64-AVX1-NEXT: vmovd %r8d, %xmm1
-; X64-AVX1-NEXT: movl $8199, %eax # imm = 0x2007
-; X64-AVX1-NEXT: vmovd %eax, %xmm2
-; X64-AVX1-NEXT: vpmulld %xmm2, %xmm1, %xmm1
; X64-AVX1-NEXT: vmovd %xmm1, (%rax)
; X64-AVX1-NEXT: vmovaps %ymm0, (%rax)
; X64-AVX1-NEXT: popq %rbx
; X64-AVX2-NEXT: vmovd %edx, %xmm0
; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} ymm2 = [8199,8199,8199,8199,8199,8199,8199,8199]
; X64-AVX2-NEXT: vpmulld %ymm2, %ymm1, %ymm1
-; X64-AVX2-NEXT: movl $8199, %eax # imm = 0x2007
-; X64-AVX2-NEXT: vmovd %eax, %xmm2
; X64-AVX2-NEXT: vpmulld %xmm2, %xmm0, %xmm0
; X64-AVX2-NEXT: vmovd %xmm0, (%rax)
; X64-AVX2-NEXT: vmovdqa %ymm1, (%rax)