if (SDValue NewVSel = matchVSelectOpSizesWithSetCC(N))
return NewVSel;
+ // Eliminate this sign extend by doing a negation in the destination type:
+ // sext i32 (0 - (zext i8 X to i32)) to i64 --> 0 - (zext i8 X to i64)
+ if (N0.getOpcode() == ISD::SUB && N0.hasOneUse() &&
+ isNullOrNullSplat(N0.getOperand(0)) &&
+ N0.getOperand(1).getOpcode() == ISD::ZERO_EXTEND &&
+ TLI.isOperationLegalOrCustom(ISD::SUB, VT)) {
+ SDValue Zext = DAG.getZExtOrTrunc(N0.getOperand(1).getOperand(0), DL, VT);
+ return DAG.getNode(ISD::SUB, DL, VT, DAG.getConstant(0, DL, VT), Zext);
+ }
+
return SDValue();
}
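For illustration only (not part of the patch), here is a minimal C++ sketch of the source-level pattern this combine targets; the function name negate_zext is hypothetical. The implicit conversions below should lower to the DAG shape named in the comment above, sext i32 (0 - (zext i8 X to i32)) to i64, which the combine rewrites as a single i64 negation of the widened value:

    // Hypothetical example, assuming a 64-bit target where i64 SUB is legal.
    #include <cstdint>

    int64_t negate_zext(uint8_t x) {
      // x is zero-extended to 32 bits, then negated: 0 - (zext i8 x to i32)
      int32_t narrow_neg = -static_cast<int32_t>(x);
      // Returning through int64_t sign-extends the 32-bit result:
      // sext i32 narrow_neg to i64
      return narrow_neg;
    }

With the combine, the backend can instead zero-extend x straight to 64 bits and negate once in the destination type, avoiding the separate sign-extension step, as the updated test output below shows.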
; SSE-NEXT: cmpltpd %xmm1, %xmm3
; SSE-NEXT: cmpltpd %xmm0, %xmm2
; SSE-NEXT: packssdw %xmm3, %xmm2
-; SSE-NEXT: movmskps %xmm2, %eax
-; SSE-NEXT: xorl %ecx, %ecx
-; SSE-NEXT: cmpl $15, %eax
-; SSE-NEXT: sete %cl
-; SSE-NEXT: negl %ecx
-; SSE-NEXT: movslq %ecx, %rax
+; SSE-NEXT: movmskps %xmm2, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl $15, %ecx
+; SSE-NEXT: sete %al
+; SSE-NEXT: negq %rax
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4f64_legal_sext:
; AVX-NEXT: vcmpltpd %ymm0, %ymm1, %ymm0
; AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
; AVX-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
-; AVX-NEXT: vmovmskps %xmm0, %eax
-; AVX-NEXT: xorl %ecx, %ecx
-; AVX-NEXT: cmpl $15, %eax
-; AVX-NEXT: sete %cl
-; AVX-NEXT: negl %ecx
-; AVX-NEXT: movslq %ecx, %rax
+; AVX-NEXT: vmovmskps %xmm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl $15, %ecx
+; AVX-NEXT: sete %al
+; AVX-NEXT: negq %rax
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; SSE-NEXT: pcmpgtq %xmm3, %xmm1
; SSE-NEXT: pcmpgtq %xmm2, %xmm0
; SSE-NEXT: packssdw %xmm1, %xmm0
-; SSE-NEXT: movmskps %xmm0, %eax
-; SSE-NEXT: xorl %ecx, %ecx
-; SSE-NEXT: cmpl $15, %eax
-; SSE-NEXT: sete %cl
-; SSE-NEXT: negl %ecx
-; SSE-NEXT: movslq %ecx, %rax
+; SSE-NEXT: movmskps %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl $15, %ecx
+; SSE-NEXT: sete %al
+; SSE-NEXT: negq %rax
; SSE-NEXT: retq
;
; AVX1-LABEL: test_v4i64_legal_sext:
; AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackssdw %xmm2, %xmm0, %xmm0
-; AVX1-NEXT: vmovmskps %xmm0, %eax
-; AVX1-NEXT: xorl %ecx, %ecx
-; AVX1-NEXT: cmpl $15, %eax
-; AVX1-NEXT: sete %cl
-; AVX1-NEXT: negl %ecx
-; AVX1-NEXT: movslq %ecx, %rax
+; AVX1-NEXT: vmovmskps %xmm0, %ecx
+; AVX1-NEXT: xorl %eax, %eax
+; AVX1-NEXT: cmpl $15, %ecx
+; AVX1-NEXT: sete %al
+; AVX1-NEXT: negq %rax
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
;
; AVX2-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
-; AVX2-NEXT: vmovmskps %xmm0, %eax
-; AVX2-NEXT: xorl %ecx, %ecx
-; AVX2-NEXT: cmpl $15, %eax
-; AVX2-NEXT: sete %cl
-; AVX2-NEXT: negl %ecx
-; AVX2-NEXT: movslq %ecx, %rax
+; AVX2-NEXT: vmovmskps %xmm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl $15, %ecx
+; AVX2-NEXT: sete %al
+; AVX2-NEXT: negq %rax
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; SSE-NEXT: packssdw %xmm3, %xmm2
; SSE-NEXT: movmskps %xmm2, %eax
; SSE-NEXT: negl %eax
-; SSE-NEXT: sbbl %eax, %eax
-; SSE-NEXT: cltq
+; SSE-NEXT: sbbq %rax, %rax
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4f64_legal_sext:
; AVX-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
; AVX-NEXT: vmovmskps %xmm0, %eax
; AVX-NEXT: negl %eax
-; AVX-NEXT: sbbl %eax, %eax
-; AVX-NEXT: cltq
+; AVX-NEXT: sbbq %rax, %rax
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; SSE-NEXT: packssdw %xmm1, %xmm0
; SSE-NEXT: movmskps %xmm0, %eax
; SSE-NEXT: negl %eax
-; SSE-NEXT: sbbl %eax, %eax
-; SSE-NEXT: cltq
+; SSE-NEXT: sbbq %rax, %rax
; SSE-NEXT: retq
;
; AVX1-LABEL: test_v4i64_legal_sext:
; AVX1-NEXT: vpackssdw %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovmskps %xmm0, %eax
; AVX1-NEXT: negl %eax
-; AVX1-NEXT: sbbl %eax, %eax
-; AVX1-NEXT: cltq
+; AVX1-NEXT: sbbq %rax, %rax
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
;
; AVX2-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovmskps %xmm0, %eax
; AVX2-NEXT: negl %eax
-; AVX2-NEXT: sbbl %eax, %eax
-; AVX2-NEXT: cltq
+; AVX2-NEXT: sbbq %rax, %rax
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX1: # %bb.0:
; AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX1-NEXT: vpsubw %xmm0, %xmm1, %xmm0
-; AVX1-NEXT: vpmovsxwd %xmm0, %xmm1
-; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
-; AVX1-NEXT: vpmovsxwd %xmm0, %xmm0
-; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
+; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
+; AVX1-NEXT: vpsubd %xmm2, %xmm1, %xmm2
+; AVX1-NEXT: vpmovzxwd {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
+; AVX1-NEXT: vpsubd %xmm0, %xmm1, %xmm0
+; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: zext_negate_sext:
; AVX2: # %bb.0:
; AVX2-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
+; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX2-NEXT: vpsubw %xmm0, %xmm1, %xmm0
-; AVX2-NEXT: vpmovsxwd %xmm0, %ymm0
+; AVX2-NEXT: vpsubd %ymm0, %ymm1, %ymm0
; AVX2-NEXT: retq
;
; AVX512-LABEL: zext_negate_sext:
; AVX512: # %bb.0:
; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
+; AVX512-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512-NEXT: vpsubw %xmm0, %xmm1, %xmm0
-; AVX512-NEXT: vpmovsxwd %xmm0, %ymm0
+; AVX512-NEXT: vpsubd %ymm0, %ymm1, %ymm0
; AVX512-NEXT: retq
;
; X32-SSE2-LABEL: zext_negate_sext: