; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: test_cmp_v16f64:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vextractf32x4 $3, %zmm2, %xmm4
-; AVX512F-NEXT: vextractf32x4 $3, %zmm0, %xmm5
-; AVX512F-NEXT: xorl %eax, %eax
-; AVX512F-NEXT: vucomisd %xmm4, %xmm5
-; AVX512F-NEXT: movq $-1, %rcx
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512F-NEXT: vucomisd %xmm4, %xmm5
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm4
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512F-NEXT: vextractf32x4 $2, %zmm2, %xmm5
-; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm6
-; AVX512F-NEXT: vucomisd %xmm5, %xmm6
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
-; AVX512F-NEXT: vucomisd %xmm5, %xmm6
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512F-NEXT: vextractf32x4 $1, %zmm2, %xmm5
-; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm6
-; AVX512F-NEXT: vucomisd %xmm5, %xmm6
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
-; AVX512F-NEXT: vucomisd %xmm5, %xmm6
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
-; AVX512F-NEXT: vucomisd %xmm2, %xmm0
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512F-NEXT: vucomisd %xmm2, %xmm0
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm0
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vextractf32x4 $3, %zmm3, %xmm2
-; AVX512F-NEXT: vextractf32x4 $3, %zmm1, %xmm4
-; AVX512F-NEXT: vucomisd %xmm2, %xmm4
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512F-NEXT: vucomisd %xmm2, %xmm4
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm2
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
-; AVX512F-NEXT: vextractf32x4 $2, %zmm3, %xmm4
-; AVX512F-NEXT: vextractf32x4 $2, %zmm1, %xmm5
-; AVX512F-NEXT: vucomisd %xmm4, %xmm5
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512F-NEXT: vucomisd %xmm4, %xmm5
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm4
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512F-NEXT: vextractf32x4 $1, %zmm3, %xmm4
-; AVX512F-NEXT: vextractf32x4 $1, %zmm1, %xmm5
-; AVX512F-NEXT: vucomisd %xmm4, %xmm5
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512F-NEXT: vucomisd %xmm4, %xmm5
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm4
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512F-NEXT: vucomisd %xmm3, %xmm1
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovaq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0]
-; AVX512F-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512F-NEXT: vucomisd %xmm3, %xmm1
-; AVX512F-NEXT: cmovaq %rcx, %rax
-; AVX512F-NEXT: vmovq %rax, %xmm1
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512DQ-LABEL: test_cmp_v16f64:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm2, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm0, %xmm5
-; AVX512DQ-NEXT: xorl %eax, %eax
-; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
-; AVX512DQ-NEXT: movq $-1, %rcx
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm4
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm2, %xmm5
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm0, %xmm6
-; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm7
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm2, %xmm5
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm0, %xmm6
-; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm7
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
-; AVX512DQ-NEXT: vucomisd %xmm2, %xmm0
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm2, %xmm0
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm0
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm3, %xmm2
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm1, %xmm4
-; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm2
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm3, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm1, %xmm5
-; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm4
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm3, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm1, %xmm5
-; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm4
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512DQ-NEXT: vucomisd %xmm3, %xmm1
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovaq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0]
-; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512DQ-NEXT: vucomisd %xmm3, %xmm1
-; AVX512DQ-NEXT: cmovaq %rcx, %rax
-; AVX512DQ-NEXT: vmovq %rax, %xmm1
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
-;
-; AVX512BW-LABEL: test_cmp_v16f64:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vextractf32x4 $3, %zmm2, %xmm4
-; AVX512BW-NEXT: vextractf32x4 $3, %zmm0, %xmm5
-; AVX512BW-NEXT: xorl %eax, %eax
-; AVX512BW-NEXT: vucomisd %xmm4, %xmm5
-; AVX512BW-NEXT: movq $-1, %rcx
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512BW-NEXT: vucomisd %xmm4, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm4
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512BW-NEXT: vextractf32x4 $2, %zmm2, %xmm5
-; AVX512BW-NEXT: vextractf32x4 $2, %zmm0, %xmm6
-; AVX512BW-NEXT: vucomisd %xmm5, %xmm6
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm7
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
-; AVX512BW-NEXT: vucomisd %xmm5, %xmm6
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm2, %xmm5
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm0, %xmm6
-; AVX512BW-NEXT: vucomisd %xmm5, %xmm6
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm7
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
-; AVX512BW-NEXT: vucomisd %xmm5, %xmm6
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
-; AVX512BW-NEXT: vucomisd %xmm2, %xmm0
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512BW-NEXT: vucomisd %xmm2, %xmm0
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm0
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vextractf32x4 $3, %zmm3, %xmm2
-; AVX512BW-NEXT: vextractf32x4 $3, %zmm1, %xmm4
-; AVX512BW-NEXT: vucomisd %xmm2, %xmm4
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512BW-NEXT: vucomisd %xmm2, %xmm4
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm2
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
-; AVX512BW-NEXT: vextractf32x4 $2, %zmm3, %xmm4
-; AVX512BW-NEXT: vextractf32x4 $2, %zmm1, %xmm5
-; AVX512BW-NEXT: vucomisd %xmm4, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512BW-NEXT: vucomisd %xmm4, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm4
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm3, %xmm4
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm1, %xmm5
-; AVX512BW-NEXT: vucomisd %xmm4, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512BW-NEXT: vucomisd %xmm4, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm4
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
-; AVX512BW-NEXT: vucomisd %xmm3, %xmm1
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovaq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512BW-NEXT: vucomisd %xmm3, %xmm1
-; AVX512BW-NEXT: cmovaq %rcx, %rax
-; AVX512BW-NEXT: vmovq %rax, %xmm1
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
+; AVX512-LABEL: test_cmp_v16f64:
+; AVX512: # BB#0:
+; AVX512-NEXT: vextractf32x4 $3, %zmm2, %xmm4
+; AVX512-NEXT: vextractf32x4 $3, %zmm0, %xmm5
+; AVX512-NEXT: xorl %eax, %eax
+; AVX512-NEXT: vucomisd %xmm4, %xmm5
+; AVX512-NEXT: movq $-1, %rcx
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
+; AVX512-NEXT: vucomisd %xmm4, %xmm5
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm4
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
+; AVX512-NEXT: vextractf32x4 $2, %zmm2, %xmm5
+; AVX512-NEXT: vextractf32x4 $2, %zmm0, %xmm6
+; AVX512-NEXT: vucomisd %xmm5, %xmm6
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm7
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
+; AVX512-NEXT: vucomisd %xmm5, %xmm6
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
+; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
+; AVX512-NEXT: vextractf32x4 $1, %zmm2, %xmm5
+; AVX512-NEXT: vextractf32x4 $1, %zmm0, %xmm6
+; AVX512-NEXT: vucomisd %xmm5, %xmm6
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm7
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0]
+; AVX512-NEXT: vucomisd %xmm5, %xmm6
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
+; AVX512-NEXT: vucomisd %xmm2, %xmm0
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512-NEXT: vucomisd %xmm2, %xmm0
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm0
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0]
+; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
+; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vextractf32x4 $3, %zmm3, %xmm2
+; AVX512-NEXT: vextractf32x4 $3, %zmm1, %xmm4
+; AVX512-NEXT: vucomisd %xmm2, %xmm4
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512-NEXT: vucomisd %xmm2, %xmm4
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm2
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
+; AVX512-NEXT: vextractf32x4 $2, %zmm3, %xmm4
+; AVX512-NEXT: vextractf32x4 $2, %zmm1, %xmm5
+; AVX512-NEXT: vucomisd %xmm4, %xmm5
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
+; AVX512-NEXT: vucomisd %xmm4, %xmm5
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm4
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
+; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
+; AVX512-NEXT: vextractf32x4 $1, %zmm3, %xmm4
+; AVX512-NEXT: vextractf32x4 $1, %zmm1, %xmm5
+; AVX512-NEXT: vucomisd %xmm4, %xmm5
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
+; AVX512-NEXT: vucomisd %xmm4, %xmm5
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm4
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
+; AVX512-NEXT: vucomisd %xmm3, %xmm1
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovaq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0]
+; AVX512-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
+; AVX512-NEXT: vucomisd %xmm3, %xmm1
+; AVX512-NEXT: cmovaq %rcx, %rax
+; AVX512-NEXT: vmovq %rax, %xmm1
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
+; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = fcmp ogt <16 x double> %a0, %a1
ret <16 x i1> %1
}
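; NOTE (illustrative commentary, not generated by update_llc_test_checks.py):
; without mask-register lowering the v16f64 ogt compare above is fully
; scalarized: each element pair goes through vucomisd followed by cmova,
; which is the right condition for `fcmp ogt` because an unordered operand
; sets ZF = PF = CF = 1 and therefore fails the "above" (CF = 0 and ZF = 0)
; test. A minimal scalar analogue of the same mapping:
;
;   define i1 @ogt_scalar(double %a, double %b) {
;     %c = fcmp ogt double %a, %b    ; lowers to vucomisd + seta
;     ret i1 %c
;   }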
; AVX512DQ-NEXT: cmoval %ecx, %edx
; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm5, %xmm0
; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm0
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm8, %zmm0, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm8
; AVX512DQ-NEXT: vextractf32x4 $3, %zmm3, %xmm2
; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3]
; AVX512DQ-NEXT: cmoval %ecx, %eax
; AVX512DQ-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm2, %zmm0, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0
; AVX512DQ-NEXT: retq
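; NOTE (illustrative): in the AVX512DQ block above the <32 x i1> result is
; assembled from two 16-element halves; each half of dword compare results
; is narrowed to bytes with vpmovdb, and vinserti128 joins the two 16-byte
; vectors into the final 32-byte mask.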
; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
; AVX512BW-NEXT: vucomiss %xmm4, %xmm6
; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmoval %ecx, %edx
-; AVX512BW-NEXT: vucomiss %xmm0, %xmm5
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmoval %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm4
-; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm0[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0]
-; AVX512BW-NEXT: vucomiss %xmm6, %xmm7
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmoval %ecx, %edx
-; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4
-; AVX512BW-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3]
-; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3]
-; AVX512BW-NEXT: vucomiss %xmm0, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmoval %ecx, %edx
-; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm0
-; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm3[1,1,3,3]
-; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3]
-; AVX512BW-NEXT: vucomiss %xmm4, %xmm5
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmoval %ecx, %edx
-; AVX512BW-NEXT: vucomiss %xmm3, %xmm1
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmoval %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm4
-; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm3[1,0]
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm1[1,0]
-; AVX512BW-NEXT: vucomiss %xmm5, %xmm6
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmoval %ecx, %edx
-; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4
-; AVX512BW-NEXT: vpermilps {{.*#+}} xmm3 = xmm3[3,1,2,3]
-; AVX512BW-NEXT: vpermilps {{.*#+}} xmm1 = xmm1[3,1,2,3]
-; AVX512BW-NEXT: vucomiss %xmm3, %xmm1
-; AVX512BW-NEXT: cmoval %ecx, %eax
-; AVX512BW-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1
-; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
-; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
-; AVX512BW-NEXT: retq
- %1 = fcmp ogt <32 x float> %a0, %a1
- ret <32 x i1> %1
-}
-
-define <16 x i1> @test_cmp_v16i64(<16 x i64> %a0, <16 x i64> %a1) nounwind {
-; SSE2-LABEL: test_cmp_v16i64:
-; SSE2: # BB#0:
-; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
-; SSE2-NEXT: pxor %xmm8, %xmm7
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm9
-; SSE2-NEXT: pxor %xmm8, %xmm9
-; SSE2-NEXT: movdqa %xmm7, %xmm10
-; SSE2-NEXT: pcmpgtd %xmm9, %xmm10
-; SSE2-NEXT: pshufd {{.*#+}} xmm11 = xmm10[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm7, %xmm9
-; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm9[1,1,3,3]
-; SSE2-NEXT: pand %xmm11, %xmm7
-; SSE2-NEXT: pshufd {{.*#+}} xmm9 = xmm10[1,1,3,3]
-; SSE2-NEXT: por %xmm7, %xmm9
-; SSE2-NEXT: pxor %xmm8, %xmm6
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm7
-; SSE2-NEXT: pxor %xmm8, %xmm7
-; SSE2-NEXT: movdqa %xmm6, %xmm10
-; SSE2-NEXT: pcmpgtd %xmm7, %xmm10
-; SSE2-NEXT: pshufd {{.*#+}} xmm11 = xmm10[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm6, %xmm7
-; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,1,3,3]
-; SSE2-NEXT: pand %xmm11, %xmm7
-; SSE2-NEXT: pshufd {{.*#+}} xmm10 = xmm10[1,1,3,3]
-; SSE2-NEXT: por %xmm7, %xmm10
-; SSE2-NEXT: packsswb %xmm9, %xmm10
-; SSE2-NEXT: pxor %xmm8, %xmm5
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm7
-; SSE2-NEXT: pxor %xmm8, %xmm7
-; SSE2-NEXT: movdqa %xmm5, %xmm6
-; SSE2-NEXT: pcmpgtd %xmm7, %xmm6
-; SSE2-NEXT: pshufd {{.*#+}} xmm9 = xmm6[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm5, %xmm7
-; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
-; SSE2-NEXT: pand %xmm9, %xmm5
-; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
-; SSE2-NEXT: por %xmm5, %xmm6
-; SSE2-NEXT: pxor %xmm8, %xmm4
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm5
-; SSE2-NEXT: pxor %xmm8, %xmm5
-; SSE2-NEXT: movdqa %xmm4, %xmm7
-; SSE2-NEXT: pcmpgtd %xmm5, %xmm7
-; SSE2-NEXT: pshufd {{.*#+}} xmm9 = xmm7[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm4, %xmm5
-; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
-; SSE2-NEXT: pand %xmm9, %xmm5
-; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm7[1,1,3,3]
-; SSE2-NEXT: por %xmm5, %xmm4
-; SSE2-NEXT: packsswb %xmm6, %xmm4
-; SSE2-NEXT: packsswb %xmm10, %xmm4
-; SSE2-NEXT: pxor %xmm8, %xmm3
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm5
-; SSE2-NEXT: pxor %xmm8, %xmm5
-; SSE2-NEXT: movdqa %xmm3, %xmm6
-; SSE2-NEXT: pcmpgtd %xmm5, %xmm6
-; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm3, %xmm5
-; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm5[1,1,3,3]
-; SSE2-NEXT: pand %xmm7, %xmm3
-; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,3,3]
-; SSE2-NEXT: por %xmm3, %xmm5
-; SSE2-NEXT: pxor %xmm8, %xmm2
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm3
-; SSE2-NEXT: pxor %xmm8, %xmm3
-; SSE2-NEXT: movdqa %xmm2, %xmm6
-; SSE2-NEXT: pcmpgtd %xmm3, %xmm6
-; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm2, %xmm3
-; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
-; SSE2-NEXT: pand %xmm7, %xmm3
-; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm6[1,1,3,3]
-; SSE2-NEXT: por %xmm3, %xmm2
-; SSE2-NEXT: packsswb %xmm5, %xmm2
-; SSE2-NEXT: pxor %xmm8, %xmm1
-; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm3
-; SSE2-NEXT: pxor %xmm8, %xmm3
-; SSE2-NEXT: movdqa %xmm1, %xmm5
-; SSE2-NEXT: pcmpgtd %xmm3, %xmm5
-; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm1, %xmm3
-; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
-; SSE2-NEXT: pand %xmm6, %xmm1
-; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm5[1,1,3,3]
-; SSE2-NEXT: por %xmm1, %xmm3
-; SSE2-NEXT: pxor %xmm8, %xmm0
-; SSE2-NEXT: pxor {{[0-9]+}}(%rsp), %xmm8
-; SSE2-NEXT: movdqa %xmm0, %xmm1
-; SSE2-NEXT: pcmpgtd %xmm8, %xmm1
-; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm0, %xmm8
-; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm8[1,1,3,3]
-; SSE2-NEXT: pand %xmm5, %xmm6
-; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
-; SSE2-NEXT: por %xmm6, %xmm0
-; SSE2-NEXT: packsswb %xmm3, %xmm0
-; SSE2-NEXT: packsswb %xmm2, %xmm0
-; SSE2-NEXT: packsswb %xmm4, %xmm0
-; SSE2-NEXT: retq
-;
-; SSE42-LABEL: test_cmp_v16i64:
-; SSE42: # BB#0:
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm7
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm6
-; SSE42-NEXT: packsswb %xmm7, %xmm6
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm5
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm4
-; SSE42-NEXT: packsswb %xmm5, %xmm4
-; SSE42-NEXT: packsswb %xmm6, %xmm4
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm3
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm2
-; SSE42-NEXT: packsswb %xmm3, %xmm2
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm1
-; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm0
-; SSE42-NEXT: packsswb %xmm1, %xmm0
-; SSE42-NEXT: packsswb %xmm2, %xmm0
-; SSE42-NEXT: packsswb %xmm4, %xmm0
-; SSE42-NEXT: retq
-;
-; AVX1-LABEL: test_cmp_v16i64:
-; AVX1: # BB#0:
-; AVX1-NEXT: vextractf128 $1, %ymm7, %xmm8
-; AVX1-NEXT: vextractf128 $1, %ymm3, %xmm9
-; AVX1-NEXT: vpcmpgtq %xmm8, %xmm9, %xmm8
-; AVX1-NEXT: vpcmpgtq %xmm7, %xmm3, %xmm3
-; AVX1-NEXT: vpacksswb %xmm8, %xmm3, %xmm8
-; AVX1-NEXT: vextractf128 $1, %ymm6, %xmm7
-; AVX1-NEXT: vextractf128 $1, %ymm2, %xmm3
-; AVX1-NEXT: vpcmpgtq %xmm7, %xmm3, %xmm3
-; AVX1-NEXT: vpcmpgtq %xmm6, %xmm2, %xmm2
-; AVX1-NEXT: vpacksswb %xmm3, %xmm2, %xmm2
-; AVX1-NEXT: vpacksswb %xmm8, %xmm2, %xmm2
-; AVX1-NEXT: vextractf128 $1, %ymm5, %xmm3
-; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm6
-; AVX1-NEXT: vpcmpgtq %xmm3, %xmm6, %xmm3
-; AVX1-NEXT: vpcmpgtq %xmm5, %xmm1, %xmm1
-; AVX1-NEXT: vpacksswb %xmm3, %xmm1, %xmm1
-; AVX1-NEXT: vextractf128 $1, %ymm4, %xmm3
-; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm5
-; AVX1-NEXT: vpcmpgtq %xmm3, %xmm5, %xmm3
-; AVX1-NEXT: vpcmpgtq %xmm4, %xmm0, %xmm0
-; AVX1-NEXT: vpacksswb %xmm3, %xmm0, %xmm0
-; AVX1-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
-; AVX1-NEXT: vpacksswb %xmm2, %xmm0, %xmm0
-; AVX1-NEXT: vzeroupper
-; AVX1-NEXT: retq
-;
-; AVX2-LABEL: test_cmp_v16i64:
-; AVX2: # BB#0:
-; AVX2-NEXT: vpcmpgtq %ymm7, %ymm3, %ymm3
-; AVX2-NEXT: vpcmpgtq %ymm6, %ymm2, %ymm2
-; AVX2-NEXT: vpacksswb %ymm3, %ymm2, %ymm2
-; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,1,3]
-; AVX2-NEXT: vpcmpgtq %ymm5, %ymm1, %ymm1
-; AVX2-NEXT: vpcmpgtq %ymm4, %ymm0, %ymm0
-; AVX2-NEXT: vpacksswb %ymm1, %ymm0, %ymm0
-; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
-; AVX2-NEXT: vpacksswb %ymm2, %ymm0, %ymm0
-; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
-; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX2-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
-; AVX2-NEXT: vzeroupper
-; AVX2-NEXT: retq
-;
-; AVX512F-LABEL: test_cmp_v16i64:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vextracti32x4 $3, %zmm2, %xmm4
-; AVX512F-NEXT: vpextrq $1, %xmm4, %rcx
-; AVX512F-NEXT: vextracti32x4 $3, %zmm0, %xmm5
-; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512F-NEXT: xorl %eax, %eax
-; AVX512F-NEXT: cmpq %rcx, %rdx
-; AVX512F-NEXT: movq $-1, %rcx
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vmovq %xmm4, %rdx
-; AVX512F-NEXT: vmovq %xmm5, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm4
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512F-NEXT: vextracti32x4 $2, %zmm2, %xmm5
-; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512F-NEXT: vextracti32x4 $2, %zmm0, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vmovq %xmm5, %rdx
-; AVX512F-NEXT: vmovq %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512F-NEXT: vextracti32x4 $1, %zmm2, %xmm5
-; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vmovq %xmm5, %rdx
-; AVX512F-NEXT: vmovq %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512F-NEXT: vpextrq $1, %xmm0, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vmovq %xmm2, %rdx
-; AVX512F-NEXT: vmovq %xmm0, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm0
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vextracti32x4 $3, %zmm3, %xmm2
-; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512F-NEXT: vextracti32x4 $3, %zmm1, %xmm4
-; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vmovq %xmm2, %rdx
-; AVX512F-NEXT: vmovq %xmm4, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm2
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
-; AVX512F-NEXT: vextracti32x4 $2, %zmm3, %xmm4
-; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512F-NEXT: vextracti32x4 $2, %zmm1, %xmm5
-; AVX512F-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vmovq %xmm4, %rdx
-; AVX512F-NEXT: vmovq %xmm5, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm4
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512F-NEXT: vextracti32x4 $1, %zmm3, %xmm4
-; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm1, %xmm5
-; AVX512F-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vmovq %xmm4, %rdx
-; AVX512F-NEXT: vmovq %xmm5, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm4
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512F-NEXT: vpextrq $1, %xmm3, %rdx
-; AVX512F-NEXT: vpextrq $1, %xmm1, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmovgq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm5
-; AVX512F-NEXT: vmovq %xmm3, %rdx
-; AVX512F-NEXT: vmovq %xmm1, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: cmovgq %rcx, %rax
-; AVX512F-NEXT: vmovq %rax, %xmm1
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512DQ-LABEL: test_cmp_v16i64:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm2, %xmm4
-; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rcx
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm0, %xmm5
-; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512DQ-NEXT: xorl %eax, %eax
-; AVX512DQ-NEXT: cmpq %rcx, %rdx
-; AVX512DQ-NEXT: movq $-1, %rcx
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vmovq %xmm4, %rdx
-; AVX512DQ-NEXT: vmovq %xmm5, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm4
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm2, %xmm5
-; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm0, %xmm6
-; AVX512DQ-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm7
-; AVX512DQ-NEXT: vmovq %xmm5, %rdx
-; AVX512DQ-NEXT: vmovq %xmm6, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm2, %xmm5
-; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm0, %xmm6
-; AVX512DQ-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm7
-; AVX512DQ-NEXT: vmovq %xmm5, %rdx
-; AVX512DQ-NEXT: vmovq %xmm6, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vmovq %xmm2, %rdx
-; AVX512DQ-NEXT: vmovq %xmm0, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm0
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm3, %xmm2
-; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm1, %xmm4
-; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vmovq %xmm2, %rdx
-; AVX512DQ-NEXT: vmovq %xmm4, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm2
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm3, %xmm4
-; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm1, %xmm5
-; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vmovq %xmm4, %rdx
-; AVX512DQ-NEXT: vmovq %xmm5, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm4
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm3, %xmm4
-; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm1, %xmm5
-; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm6
-; AVX512DQ-NEXT: vmovq %xmm4, %rdx
-; AVX512DQ-NEXT: vmovq %xmm5, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm4
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512DQ-NEXT: vpextrq $1, %xmm3, %rdx
-; AVX512DQ-NEXT: vpextrq $1, %xmm1, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: movl $0, %edx
-; AVX512DQ-NEXT: cmovgq %rcx, %rdx
-; AVX512DQ-NEXT: vmovq %rdx, %xmm5
-; AVX512DQ-NEXT: vmovq %xmm3, %rdx
-; AVX512DQ-NEXT: vmovq %xmm1, %rsi
-; AVX512DQ-NEXT: cmpq %rdx, %rsi
-; AVX512DQ-NEXT: cmovgq %rcx, %rax
-; AVX512DQ-NEXT: vmovq %rax, %xmm1
-; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
-; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
-;
-; AVX512BW-LABEL: test_cmp_v16i64:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm2, %xmm4
-; AVX512BW-NEXT: vpextrq $1, %xmm4, %rcx
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm5
-; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512BW-NEXT: xorl %eax, %eax
-; AVX512BW-NEXT: cmpq %rcx, %rdx
-; AVX512BW-NEXT: movq $-1, %rcx
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vmovq %xmm4, %rdx
-; AVX512BW-NEXT: vmovq %xmm5, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm4
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm2, %xmm5
-; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm6
-; AVX512BW-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm7
-; AVX512BW-NEXT: vmovq %xmm5, %rdx
-; AVX512BW-NEXT: vmovq %xmm6, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm5
-; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm6
-; AVX512BW-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm7
-; AVX512BW-NEXT: vmovq %xmm5, %rdx
-; AVX512BW-NEXT: vmovq %xmm6, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512BW-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512BW-NEXT: vpextrq $1, %xmm0, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vmovq %xmm2, %rdx
-; AVX512BW-NEXT: vmovq %xmm0, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm0
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm3, %xmm2
-; AVX512BW-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm1, %xmm4
-; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vmovq %xmm2, %rdx
-; AVX512BW-NEXT: vmovq %xmm4, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm2
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, %xmm4
-; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, %xmm5
-; AVX512BW-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vmovq %xmm4, %rdx
-; AVX512BW-NEXT: vmovq %xmm5, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: cmoval %ecx, %edx
+; AVX512BW-NEXT: vucomiss %xmm0, %xmm5
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmoval %ecx, %esi
+; AVX512BW-NEXT: vmovd %esi, %xmm4
+; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4
+; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm0[1,0]
+; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0]
+; AVX512BW-NEXT: vucomiss %xmm6, %xmm7
; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm4
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4
-; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm5
-; AVX512BW-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: cmoval %ecx, %edx
+; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4
+; AVX512BW-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3]
+; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3]
+; AVX512BW-NEXT: vucomiss %xmm0, %xmm5
; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm6
-; AVX512BW-NEXT: vmovq %xmm4, %rdx
-; AVX512BW-NEXT: vmovq %xmm5, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: cmoval %ecx, %edx
+; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm0
+; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm3[1,1,3,3]
+; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3]
+; AVX512BW-NEXT: vucomiss %xmm4, %xmm5
; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm4
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512BW-NEXT: vpextrq $1, %xmm3, %rdx
-; AVX512BW-NEXT: vpextrq $1, %xmm1, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: cmoval %ecx, %edx
+; AVX512BW-NEXT: vucomiss %xmm3, %xmm1
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmoval %ecx, %esi
+; AVX512BW-NEXT: vmovd %esi, %xmm4
+; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4
+; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm3[1,0]
+; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm1[1,0]
+; AVX512BW-NEXT: vucomiss %xmm5, %xmm6
; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgq %rcx, %rdx
-; AVX512BW-NEXT: vmovq %rdx, %xmm5
-; AVX512BW-NEXT: vmovq %xmm3, %rdx
-; AVX512BW-NEXT: vmovq %xmm1, %rsi
-; AVX512BW-NEXT: cmpq %rdx, %rsi
-; AVX512BW-NEXT: cmovgq %rcx, %rax
-; AVX512BW-NEXT: vmovq %rax, %xmm1
-; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
-; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
+; AVX512BW-NEXT: cmoval %ecx, %edx
+; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4
+; AVX512BW-NEXT: vpermilps {{.*#+}} xmm3 = xmm3[3,1,2,3]
+; AVX512BW-NEXT: vpermilps {{.*#+}} xmm1 = xmm1[3,1,2,3]
+; AVX512BW-NEXT: vucomiss %xmm3, %xmm1
+; AVX512BW-NEXT: cmoval %ecx, %eax
+; AVX512BW-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1
+; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
+; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
; AVX512BW-NEXT: retq
+ %1 = fcmp ogt <32 x float> %a0, %a1
+ ret <32 x i1> %1
+}
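; NOTE (illustrative): with BWI the 32 compare results are packed by two
; truncations: vpmovdw narrows the 16 x i32 dword masks to words, and
; vpmovwb narrows the combined 32 x i16 to the final 32 x i8 mask. A
; minimal analogue of the last step (hypothetical helper name):
;
;   define <32 x i8> @trunc_words(<32 x i16> %w) {
;     %b = trunc <32 x i16> %w to <32 x i8>    ; vpmovwb on AVX512BW
;     ret <32 x i8> %b
;   }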
+
+define <16 x i1> @test_cmp_v16i64(<16 x i64> %a0, <16 x i64> %a1) nounwind {
+; SSE2-LABEL: test_cmp_v16i64:
+; SSE2: # BB#0:
+; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
+; SSE2-NEXT: pxor %xmm8, %xmm7
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm9
+; SSE2-NEXT: pxor %xmm8, %xmm9
+; SSE2-NEXT: movdqa %xmm7, %xmm10
+; SSE2-NEXT: pcmpgtd %xmm9, %xmm10
+; SSE2-NEXT: pshufd {{.*#+}} xmm11 = xmm10[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm7, %xmm9
+; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm9[1,1,3,3]
+; SSE2-NEXT: pand %xmm11, %xmm7
+; SSE2-NEXT: pshufd {{.*#+}} xmm9 = xmm10[1,1,3,3]
+; SSE2-NEXT: por %xmm7, %xmm9
+; SSE2-NEXT: pxor %xmm8, %xmm6
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm7
+; SSE2-NEXT: pxor %xmm8, %xmm7
+; SSE2-NEXT: movdqa %xmm6, %xmm10
+; SSE2-NEXT: pcmpgtd %xmm7, %xmm10
+; SSE2-NEXT: pshufd {{.*#+}} xmm11 = xmm10[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm6, %xmm7
+; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,1,3,3]
+; SSE2-NEXT: pand %xmm11, %xmm7
+; SSE2-NEXT: pshufd {{.*#+}} xmm10 = xmm10[1,1,3,3]
+; SSE2-NEXT: por %xmm7, %xmm10
+; SSE2-NEXT: packsswb %xmm9, %xmm10
+; SSE2-NEXT: pxor %xmm8, %xmm5
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm7
+; SSE2-NEXT: pxor %xmm8, %xmm7
+; SSE2-NEXT: movdqa %xmm5, %xmm6
+; SSE2-NEXT: pcmpgtd %xmm7, %xmm6
+; SSE2-NEXT: pshufd {{.*#+}} xmm9 = xmm6[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm5, %xmm7
+; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
+; SSE2-NEXT: pand %xmm9, %xmm5
+; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
+; SSE2-NEXT: por %xmm5, %xmm6
+; SSE2-NEXT: pxor %xmm8, %xmm4
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm5
+; SSE2-NEXT: pxor %xmm8, %xmm5
+; SSE2-NEXT: movdqa %xmm4, %xmm7
+; SSE2-NEXT: pcmpgtd %xmm5, %xmm7
+; SSE2-NEXT: pshufd {{.*#+}} xmm9 = xmm7[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm4, %xmm5
+; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
+; SSE2-NEXT: pand %xmm9, %xmm5
+; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm7[1,1,3,3]
+; SSE2-NEXT: por %xmm5, %xmm4
+; SSE2-NEXT: packsswb %xmm6, %xmm4
+; SSE2-NEXT: packsswb %xmm10, %xmm4
+; SSE2-NEXT: pxor %xmm8, %xmm3
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm5
+; SSE2-NEXT: pxor %xmm8, %xmm5
+; SSE2-NEXT: movdqa %xmm3, %xmm6
+; SSE2-NEXT: pcmpgtd %xmm5, %xmm6
+; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm3, %xmm5
+; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm5[1,1,3,3]
+; SSE2-NEXT: pand %xmm7, %xmm3
+; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,3,3]
+; SSE2-NEXT: por %xmm3, %xmm5
+; SSE2-NEXT: pxor %xmm8, %xmm2
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm3
+; SSE2-NEXT: pxor %xmm8, %xmm3
+; SSE2-NEXT: movdqa %xmm2, %xmm6
+; SSE2-NEXT: pcmpgtd %xmm3, %xmm6
+; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm2, %xmm3
+; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
+; SSE2-NEXT: pand %xmm7, %xmm3
+; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm6[1,1,3,3]
+; SSE2-NEXT: por %xmm3, %xmm2
+; SSE2-NEXT: packsswb %xmm5, %xmm2
+; SSE2-NEXT: pxor %xmm8, %xmm1
+; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm3
+; SSE2-NEXT: pxor %xmm8, %xmm3
+; SSE2-NEXT: movdqa %xmm1, %xmm5
+; SSE2-NEXT: pcmpgtd %xmm3, %xmm5
+; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm1, %xmm3
+; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
+; SSE2-NEXT: pand %xmm6, %xmm1
+; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm5[1,1,3,3]
+; SSE2-NEXT: por %xmm1, %xmm3
+; SSE2-NEXT: pxor %xmm8, %xmm0
+; SSE2-NEXT: pxor {{[0-9]+}}(%rsp), %xmm8
+; SSE2-NEXT: movdqa %xmm0, %xmm1
+; SSE2-NEXT: pcmpgtd %xmm8, %xmm1
+; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,2,2]
+; SSE2-NEXT: pcmpeqd %xmm0, %xmm8
+; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm8[1,1,3,3]
+; SSE2-NEXT: pand %xmm5, %xmm6
+; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
+; SSE2-NEXT: por %xmm6, %xmm0
+; SSE2-NEXT: packsswb %xmm3, %xmm0
+; SSE2-NEXT: packsswb %xmm2, %xmm0
+; SSE2-NEXT: packsswb %xmm4, %xmm0
+; SSE2-NEXT: retq
+;
+; SSE42-LABEL: test_cmp_v16i64:
+; SSE42: # BB#0:
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm7
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm6
+; SSE42-NEXT: packsswb %xmm7, %xmm6
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm5
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm4
+; SSE42-NEXT: packsswb %xmm5, %xmm4
+; SSE42-NEXT: packsswb %xmm6, %xmm4
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm3
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm2
+; SSE42-NEXT: packsswb %xmm3, %xmm2
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm1
+; SSE42-NEXT: pcmpgtq {{[0-9]+}}(%rsp), %xmm0
+; SSE42-NEXT: packsswb %xmm1, %xmm0
+; SSE42-NEXT: packsswb %xmm2, %xmm0
+; SSE42-NEXT: packsswb %xmm4, %xmm0
+; SSE42-NEXT: retq
+;
+; AVX1-LABEL: test_cmp_v16i64:
+; AVX1: # BB#0:
+; AVX1-NEXT: vextractf128 $1, %ymm7, %xmm8
+; AVX1-NEXT: vextractf128 $1, %ymm3, %xmm9
+; AVX1-NEXT: vpcmpgtq %xmm8, %xmm9, %xmm8
+; AVX1-NEXT: vpcmpgtq %xmm7, %xmm3, %xmm3
+; AVX1-NEXT: vpacksswb %xmm8, %xmm3, %xmm8
+; AVX1-NEXT: vextractf128 $1, %ymm6, %xmm7
+; AVX1-NEXT: vextractf128 $1, %ymm2, %xmm3
+; AVX1-NEXT: vpcmpgtq %xmm7, %xmm3, %xmm3
+; AVX1-NEXT: vpcmpgtq %xmm6, %xmm2, %xmm2
+; AVX1-NEXT: vpacksswb %xmm3, %xmm2, %xmm2
+; AVX1-NEXT: vpacksswb %xmm8, %xmm2, %xmm2
+; AVX1-NEXT: vextractf128 $1, %ymm5, %xmm3
+; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm6
+; AVX1-NEXT: vpcmpgtq %xmm3, %xmm6, %xmm3
+; AVX1-NEXT: vpcmpgtq %xmm5, %xmm1, %xmm1
+; AVX1-NEXT: vpacksswb %xmm3, %xmm1, %xmm1
+; AVX1-NEXT: vextractf128 $1, %ymm4, %xmm3
+; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm5
+; AVX1-NEXT: vpcmpgtq %xmm3, %xmm5, %xmm3
+; AVX1-NEXT: vpcmpgtq %xmm4, %xmm0, %xmm0
+; AVX1-NEXT: vpacksswb %xmm3, %xmm0, %xmm0
+; AVX1-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
+; AVX1-NEXT: vpacksswb %xmm2, %xmm0, %xmm0
+; AVX1-NEXT: vzeroupper
+; AVX1-NEXT: retq
+;
+; AVX2-LABEL: test_cmp_v16i64:
+; AVX2: # BB#0:
+; AVX2-NEXT: vpcmpgtq %ymm7, %ymm3, %ymm3
+; AVX2-NEXT: vpcmpgtq %ymm6, %ymm2, %ymm2
+; AVX2-NEXT: vpacksswb %ymm3, %ymm2, %ymm2
+; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,1,3]
+; AVX2-NEXT: vpcmpgtq %ymm5, %ymm1, %ymm1
+; AVX2-NEXT: vpcmpgtq %ymm4, %ymm0, %ymm0
+; AVX2-NEXT: vpacksswb %ymm1, %ymm0, %ymm0
+; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
+; AVX2-NEXT: vpacksswb %ymm2, %ymm0, %ymm0
+; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
+; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
+; AVX2-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
+; AVX2-NEXT: vzeroupper
+; AVX2-NEXT: retq
+;
+; AVX512-LABEL: test_cmp_v16i64:
+; AVX512: # BB#0:
+; AVX512-NEXT: vextracti32x4 $3, %zmm2, %xmm4
+; AVX512-NEXT: vpextrq $1, %xmm4, %rcx
+; AVX512-NEXT: vextracti32x4 $3, %zmm0, %xmm5
+; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
+; AVX512-NEXT: xorl %eax, %eax
+; AVX512-NEXT: cmpq %rcx, %rdx
+; AVX512-NEXT: movq $-1, %rcx
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vmovq %xmm4, %rdx
+; AVX512-NEXT: vmovq %xmm5, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm4
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
+; AVX512-NEXT: vextracti32x4 $2, %zmm2, %xmm5
+; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
+; AVX512-NEXT: vextracti32x4 $2, %zmm0, %xmm6
+; AVX512-NEXT: vpextrq $1, %xmm6, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm7
+; AVX512-NEXT: vmovq %xmm5, %rdx
+; AVX512-NEXT: vmovq %xmm6, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
+; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
+; AVX512-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
+; AVX512-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512-NEXT: vpextrq $1, %xmm6, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm7
+; AVX512-NEXT: vmovq %xmm5, %rdx
+; AVX512-NEXT: vmovq %xmm6, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
+; AVX512-NEXT: vpextrq $1, %xmm2, %rdx
+; AVX512-NEXT: vpextrq $1, %xmm0, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vmovq %xmm2, %rdx
+; AVX512-NEXT: vmovq %xmm0, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm0
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0]
+; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
+; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vextracti32x4 $3, %zmm3, %xmm2
+; AVX512-NEXT: vpextrq $1, %xmm2, %rdx
+; AVX512-NEXT: vextracti32x4 $3, %zmm1, %xmm4
+; AVX512-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vmovq %xmm2, %rdx
+; AVX512-NEXT: vmovq %xmm4, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm2
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
+; AVX512-NEXT: vextracti32x4 $2, %zmm3, %xmm4
+; AVX512-NEXT: vpextrq $1, %xmm4, %rdx
+; AVX512-NEXT: vextracti32x4 $2, %zmm1, %xmm5
+; AVX512-NEXT: vpextrq $1, %xmm5, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vmovq %xmm4, %rdx
+; AVX512-NEXT: vmovq %xmm5, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm4
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
+; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
+; AVX512-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512-NEXT: vpextrq $1, %xmm4, %rdx
+; AVX512-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512-NEXT: vpextrq $1, %xmm5, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm6
+; AVX512-NEXT: vmovq %xmm4, %rdx
+; AVX512-NEXT: vmovq %xmm5, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm4
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
+; AVX512-NEXT: vpextrq $1, %xmm3, %rdx
+; AVX512-NEXT: vpextrq $1, %xmm1, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: movl $0, %edx
+; AVX512-NEXT: cmovgq %rcx, %rdx
+; AVX512-NEXT: vmovq %rdx, %xmm5
+; AVX512-NEXT: vmovq %xmm3, %rdx
+; AVX512-NEXT: vmovq %xmm1, %rsi
+; AVX512-NEXT: cmpq %rdx, %rsi
+; AVX512-NEXT: cmovgq %rcx, %rax
+; AVX512-NEXT: vmovq %rax, %xmm1
+; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
+; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = icmp sgt <16 x i64> %a0, %a1
ret <16 x i1> %1
}
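; With no legal <16 x i1> compare, each i64 lane above is compared in
; scalar code and the mask lane is rebuilt by hand. One-lane sketch
; (assuming %rcx already holds $-1 from an earlier movq and the two lane
; values sit in %rdx/%rsi):
;   cmpq   %rdx, %rsi       # signed compare of the two lane values
;   movl   $0, %edx         # default lane mask = all-zeros
;   cmovgq %rcx, %rdx       # all-ones on signed-greater-than
;   vmovq  %rdx, %xmm7      # mask back into a vector lane
; Lane pairs are then packed with vpunpcklqdq, rebuilt into a zmm with
; vinserti128/vinserti64x4, and narrowed to bytes via vpmovqd + vpmovdb.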
; AVX512DQ-NEXT: cmovgl %ecx, %edx
; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm6, %xmm0
; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm4, %zmm0, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vextracti32x4 $3, %zmm3, %xmm2
; AVX512DQ-NEXT: vpextrd $1, %xmm2, %edx
; AVX512DQ-NEXT: cmovgl %ecx, %eax
; AVX512DQ-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1
; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm2, %zmm1, %zmm1
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovdb %zmm1, %xmm1
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512DQ-NEXT: retq
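; vinserti32x8 is an AVX512DQ instruction, while vinserti64x4 is base
; AVX512F; without a mask operand both write the same 32 bytes, so the
; checks now expect the AVX512F spelling.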
;
; AVX512DQ-LABEL: test_cmp_v32f64:
; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm4, %xmm8
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm0, %xmm9
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm4, %xmm8
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm0, %xmm9
; AVX512DQ-NEXT: xorl %eax, %eax
; AVX512DQ-NEXT: vucomisd %xmm8, %xmm9
; AVX512DQ-NEXT: movq $-1, %rcx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm8
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm10[0],xmm8[0]
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm4, %xmm9
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm0, %xmm10
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm4, %xmm9
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm0, %xmm10
; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm9
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm4, %xmm9
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm0, %xmm10
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm4, %xmm9
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm0, %xmm10
; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm8
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm5, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm1, %xmm0
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm5, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm1, %xmm0
; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm0[0]
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm5, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm1, %xmm0
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm5, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm1, %xmm0
; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm5, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm1, %xmm0
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm5, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm1, %xmm0
; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm0, %zmm8, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm8
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm6, %xmm1
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm2, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm6, %xmm1
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm2, %xmm4
; AVX512DQ-NEXT: vucomisd %xmm1, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm1
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm6, %xmm4
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm2, %xmm5
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm6, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm2, %xmm5
; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm4
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm6, %xmm1
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm2, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm6, %xmm1
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm2, %xmm4
; AVX512DQ-NEXT: vucomisd %xmm1, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm7, %xmm1
-; AVX512DQ-NEXT: vextractf64x2 $3, %zmm3, %xmm2
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm7, %xmm1
+; AVX512DQ-NEXT: vextractf32x4 $3, %zmm3, %xmm2
; AVX512DQ-NEXT: vucomisd %xmm1, %xmm2
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm1
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0]
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm7, %xmm2
-; AVX512DQ-NEXT: vextractf64x2 $2, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm7, %xmm2
+; AVX512DQ-NEXT: vextractf32x4 $2, %zmm3, %xmm4
; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm2
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm7, %xmm2
-; AVX512DQ-NEXT: vextractf64x2 $1, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm7, %xmm2
+; AVX512DQ-NEXT: vextractf32x4 $1, %zmm3, %xmm4
; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0
; AVX512DQ-NEXT: retq
;
; AVX512DQ-LABEL: test_cmp_v32i64:
; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm4, %xmm8
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm4, %xmm8
; AVX512DQ-NEXT: vpextrq $1, %xmm8, %rcx
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm0, %xmm9
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm0, %xmm9
; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx
; AVX512DQ-NEXT: xorl %eax, %eax
; AVX512DQ-NEXT: cmpq %rcx, %rdx
; AVX512DQ-NEXT: cmovgq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm8
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm8[0],xmm10[0]
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm4, %xmm9
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm4, %xmm9
; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm0, %xmm10
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, %xmm10
; AVX512DQ-NEXT: vpextrq $1, %xmm10, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm9
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm4, %xmm9
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm4, %xmm9
; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm0, %xmm10
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm0, %xmm10
; AVX512DQ-NEXT: vpextrq $1, %xmm10, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm8
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm5, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm5, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm1, %xmm0
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm1, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovgq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm0[0],xmm9[0]
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm5, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm5, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm1, %xmm0
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm1, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm5, %xmm0
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm5, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm1, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm1, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm0, %zmm8, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm8
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm6, %xmm1
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm6, %xmm1
; AVX512DQ-NEXT: vpextrq $1, %xmm1, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm2, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm2, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovgq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm1
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm6, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm6, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm2, %xmm5
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm2, %xmm5
; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm4
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm6, %xmm0
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm6, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm2, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm2, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm1
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm7, %xmm0
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm7, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $3, %zmm3, %xmm2
+; AVX512DQ-NEXT: vextracti32x4 $3, %zmm3, %xmm2
; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovgq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm7, %xmm2
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm7, %xmm2
; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $2, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $2, %zmm3, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm2
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm7, %xmm0
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm7, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti64x2 $1, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextracti32x4 $1, %zmm3, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm0, %zmm1, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0
; AVX512DQ-NEXT: retq
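; Likewise for the 128-bit extracts: the vextracti64x2/vextractf64x2
; forms require AVX512DQ and differ from the 32x4 forms only in masking
; granularity, so the unmasked extracts are now checked as
; vextracti32x4/vextractf32x4, which plain AVX512F provides.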
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_add_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpaddq %zmm3, %zmm1, %zmm1
-; AVX512F-NEXT: vpaddq %zmm2, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_add_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpaddq %zmm3, %zmm1, %zmm1
-; AVX512BW-NEXT: vpaddq %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_add_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpaddq %zmm3, %zmm1, %zmm1
-; AVX512DQ-NEXT: vpaddq %zmm2, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_add_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpaddq %zmm3, %zmm1, %zmm1
+; AVX512-NEXT: vpaddq %zmm2, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = add <16 x i64> %a0, %a1
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
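; There is no single <16 x i64> -> <16 x i8> truncation, so it is staged
; through <16 x i32>. Equivalent IR for the narrowing (a sketch; %lo/%hi
; stand for the two <8 x i64> halves):
;   %lo32 = trunc <8 x i64> %lo to <8 x i32>                 ; vpmovqd
;   %hi32 = trunc <8 x i64> %hi to <8 x i32>                 ; vpmovqd
;   %cat = shufflevector <8 x i32> %lo32, <8 x i32> %hi32,
;          <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6,
;                      i32 7, i32 8, i32 9, i32 10, i32 11, i32 12,
;                      i32 13, i32 14, i32 15>               ; vinserti64x4
;   %res = trunc <16 x i32> %cat to <16 x i8>                ; vpmovdb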
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_add_const_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vpaddb {{.*}}(%rip), %xmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_add_const_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vpaddb {{.*}}(%rip), %xmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_add_const_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vpaddb {{.*}}(%rip), %xmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_add_const_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vpaddb {{.*}}(%rip), %xmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = add <16 x i64> %a0, <i64 0, i64 1, i64 2, i64 3, i64 4, i64 5, i64 6, i64 7, i64 8, i64 9, i64 10, i64 11, i64 12, i64 13, i64 14, i64 15>
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
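; Truncation distributes over two's-complement add, so
; trunc(x + C) == trunc(x) + trunc(C): the <16 x i64> constant is
; pre-truncated to bytes, and a single 128-bit vpaddb after the narrowing
; replaces two 512-bit vpaddq ops before it.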
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_sub_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpsubq %zmm3, %zmm1, %zmm1
-; AVX512F-NEXT: vpsubq %zmm2, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_sub_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpsubq %zmm3, %zmm1, %zmm1
-; AVX512BW-NEXT: vpsubq %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_sub_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpsubq %zmm3, %zmm1, %zmm1
-; AVX512DQ-NEXT: vpsubq %zmm2, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_sub_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpsubq %zmm3, %zmm1, %zmm1
+; AVX512-NEXT: vpsubq %zmm2, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = sub <16 x i64> %a0, %a1
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_sub_const_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpsubq {{.*}}(%rip), %zmm1, %zmm1
-; AVX512F-NEXT: vpsubq {{.*}}(%rip), %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_sub_const_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpsubq {{.*}}(%rip), %zmm1, %zmm1
-; AVX512BW-NEXT: vpsubq {{.*}}(%rip), %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_sub_const_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpsubq {{.*}}(%rip), %zmm1, %zmm1
-; AVX512DQ-NEXT: vpsubq {{.*}}(%rip), %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_sub_const_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpsubq {{.*}}(%rip), %zmm1, %zmm1
+; AVX512-NEXT: vpsubq {{.*}}(%rip), %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = sub <16 x i64> %a0, <i64 0, i64 1, i64 2, i64 3, i64 4, i64 5, i64 6, i64 7, i64 8, i64 9, i64 10, i64 11, i64 12, i64 13, i64 14, i64 15>
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
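; Note the asymmetry with trunc_add_const: the same identity holds for
; sub, yet the two vpsubq ops above still run at 64 bits before the
; narrowing; presumably the binop-past-trunc combine only matches the
; add form at this point.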
; AVX512DQ-NEXT: vpmullq %zmm2, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
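; AVX512DQ does have a native 64-bit multiply (vpmullq), so the
; variable-operand multiply stays at full width on DQ targets, and only
; the vinserti32x8 -> vinserti64x4 fix applies to its narrowing sequence.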
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_mul_const_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmulld {{.*}}(%rip), %ymm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vpmulld {{.*}}(%rip), %ymm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_mul_const_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmulld {{.*}}(%rip), %ymm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vpmulld {{.*}}(%rip), %ymm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_mul_const_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmulld {{.*}}(%rip), %ymm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vpmulld {{.*}}(%rip), %ymm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_mul_const_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmulld {{.*}}(%rip), %ymm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vpmulld {{.*}}(%rip), %ymm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = mul <16 x i64> %a0, <i64 0, i64 1, i64 2, i64 3, i64 4, i64 5, i64 6, i64 7, i64 8, i64 9, i64 10, i64 11, i64 12, i64 13, i64 14, i64 15>
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
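; Multiplies commute with trunc as well, but x86 has no packed 8-bit
; multiply, so the op is done at 32 bits: vpmulld on each vpmovqd half,
; then the usual vinserti64x4 + vpmovdb to reach <16 x i8>.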
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_and_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpandq %zmm3, %zmm1, %zmm1
-; AVX512F-NEXT: vpandq %zmm2, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_and_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpandq %zmm3, %zmm1, %zmm1
-; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_and_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpandq %zmm3, %zmm1, %zmm1
-; AVX512DQ-NEXT: vpandq %zmm2, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_and_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpandq %zmm3, %zmm1, %zmm1
+; AVX512-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = and <16 x i64> %a0, %a1
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
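; Bitwise ops commute with truncation bit-for-bit, so and, xor and or all
; lower identically: one 512-bit logic op per half, then the shared
; vpmovqd / vinserti64x4 / vpmovdb narrowing. The xor and or variants
; below have exactly this shape.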
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_and_const_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_and_const_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_and_const_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_and_const_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = and <16 x i64> %a0, <i64 0, i64 1, i64 2, i64 3, i64 4, i64 5, i64 6, i64 7, i64 8, i64 9, i64 10, i64 11, i64 12, i64 13, i64 14, i64 15>
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
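; For the constant-operand bitwise forms the constant is truncated at
; compile time and applied as one 128-bit memory op (vpand here, vpxor /
; vpor below) after the narrowing, rather than as two 512-bit ops before
; it.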
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_xor_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpxorq %zmm3, %zmm1, %zmm1
-; AVX512F-NEXT: vpxorq %zmm2, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_xor_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpxorq %zmm3, %zmm1, %zmm1
-; AVX512BW-NEXT: vpxorq %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_xor_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpxorq %zmm3, %zmm1, %zmm1
-; AVX512DQ-NEXT: vpxorq %zmm2, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_xor_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpxorq %zmm3, %zmm1, %zmm1
+; AVX512-NEXT: vpxorq %zmm2, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = xor <16 x i64> %a0, %a1
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_xor_const_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vpxor {{.*}}(%rip), %xmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_xor_const_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vpxor {{.*}}(%rip), %xmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_xor_const_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vpxor {{.*}}(%rip), %xmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_xor_const_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vpxor {{.*}}(%rip), %xmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = xor <16 x i64> %a0, <i64 0, i64 1, i64 2, i64 3, i64 4, i64 5, i64 6, i64 7, i64 8, i64 9, i64 10, i64 11, i64 12, i64 13, i64 14, i64 15>
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_or_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vporq %zmm3, %zmm1, %zmm1
-; AVX512F-NEXT: vporq %zmm2, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_or_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vporq %zmm3, %zmm1, %zmm1
-; AVX512BW-NEXT: vporq %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_or_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vporq %zmm3, %zmm1, %zmm1
-; AVX512DQ-NEXT: vporq %zmm2, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_or_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vporq %zmm3, %zmm1, %zmm1
+; AVX512-NEXT: vporq %zmm2, %zmm0, %zmm0
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = or <16 x i64> %a0, %a1
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: trunc_or_const_v16i64_v16i8:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vpor {{.*}}(%rip), %xmm0, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512BW-LABEL: trunc_or_const_v16i64_v16i8:
-; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512BW-NEXT: vpor {{.*}}(%rip), %xmm0, %xmm0
-; AVX512BW-NEXT: vzeroupper
-; AVX512BW-NEXT: retq
-;
-; AVX512DQ-LABEL: trunc_or_const_v16i64_v16i8:
-; AVX512DQ: # BB#0:
-; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512DQ-NEXT: vpor {{.*}}(%rip), %xmm0, %xmm0
-; AVX512DQ-NEXT: vzeroupper
-; AVX512DQ-NEXT: retq
+; AVX512-LABEL: trunc_or_const_v16i64_v16i8:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512-NEXT: vpmovqd %zmm1, %ymm1
+; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vpor {{.*}}(%rip), %xmm0, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = or <16 x i64> %a0, <i64 0, i64 1, i64 2, i64 3, i64 4, i64 5, i64 6, i64 7, i64 8, i64 9, i64 10, i64 11, i64 12, i64 13, i64 14, i64 15>
%2 = trunc <16 x i64> %1 to <16 x i8>
ret <16 x i8> %2