From: Craig Topper Date: Sun, 8 Jan 2017 21:32:26 +0000 (+0000) Subject: [X86] Add avx512bw and avx512dq command lines to the vector compare results test. X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=313712541e02894006d9c8dc4ecd3766a5c2cdbc;p=llvm [X86] Add avx512bw and avx512dq command lines to the vector compare results test. This is preparation for improving a case with avx512dq. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@291401 91177308-0d34-0410-b5e6-96231b3b80d8 --- diff --git a/test/CodeGen/X86/vector-compare-results.ll b/test/CodeGen/X86/vector-compare-results.ll index abe3da75287..17269e15dc5 100644 --- a/test/CodeGen/X86/vector-compare-results.ll +++ b/test/CodeGen/X86/vector-compare-results.ll @@ -4,6 +4,8 @@ ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=AVX --check-prefix=AVX1 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefix=AVX --check-prefix=AVX2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=AVX --check-prefix=AVX512 --check-prefix=AVX512F +; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512dq | FileCheck %s --check-prefix=AVX --check-prefix=AVX512 --check-prefix=AVX512DQ +; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefix=AVX --check-prefix=AVX512 --check-prefix=AVX512BW ; ; 128-bit vector comparisons @@ -308,12 +310,26 @@ define <16 x i1> @test_cmp_v16i16(<16 x i16> %a0, <16 x i16> %a1) nounwind { ; AVX2-NEXT: vzeroupper ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v16i16: -; AVX512: # BB#0: -; AVX512-NEXT: vpcmpgtw %ymm1, %ymm0, %ymm0 -; AVX512-NEXT: vpmovsxwd %ymm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v16i16: +; AVX512F: # BB#0: +; AVX512F-NEXT: vpcmpgtw %ymm1, %ymm0, %ymm0 +; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v16i16: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vpcmpgtw %ymm1, %ymm0, %ymm0 +; AVX512DQ-NEXT: vpmovsxwd %ymm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v16i16: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vpcmpgtw %ymm1, %ymm0, %ymm0 +; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0 +; AVX512BW-NEXT: # kill: %XMM0 %XMM0 %YMM0 +; AVX512BW-NEXT: retq %1 = icmp sgt <16 x i16> %a0, %a1 ret <16 x i1> %1 } @@ -1045,16 +1061,35 @@ define <32 x i1> @test_cmp_v32i16(<32 x i16> %a0, <32 x i16> %a1) nounwind { ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3] ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v32i16: -; AVX512: # BB#0: -; AVX512-NEXT: vpcmpgtw %ymm2, %ymm0, %ymm0 -; AVX512-NEXT: vpmovsxwd %ymm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: vpcmpgtw %ymm3, %ymm1, %ymm1 -; AVX512-NEXT: vpmovsxwd %ymm1, %zmm1 -; AVX512-NEXT: vpmovdb %zmm1, %xmm1 -; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v32i16: +; AVX512F: # BB#0: +; AVX512F-NEXT: vpcmpgtw %ymm2, %ymm0, %ymm0 +; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: vpcmpgtw %ymm3, %ymm1, %ymm1 +; AVX512F-NEXT: vpmovsxwd %ymm1, %zmm1 +; AVX512F-NEXT: vpmovdb %zmm1, %xmm1 +; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v32i16: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vpcmpgtw %ymm2, %ymm0, %ymm0 +; AVX512DQ-NEXT: vpmovsxwd %ymm0, %zmm0 +; 
AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: vpcmpgtw %ymm3, %ymm1, %ymm1 +; AVX512DQ-NEXT: vpmovsxwd %ymm1, %zmm1 +; AVX512DQ-NEXT: vpmovdb %zmm1, %xmm1 +; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v32i16: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vpcmpgtw %zmm1, %zmm0, %k1 +; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 +; AVX512BW-NEXT: vmovdqu16 %zmm0, %zmm0 {%k1} {z} +; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0 +; AVX512BW-NEXT: retq %1 = icmp sgt <32 x i16> %a0, %a1 ret <32 x i1> %1 } @@ -1874,15 +1909,31 @@ define <64 x i1> @test_cmp_v64i8(<64 x i8> %a0, <64 x i8> %a1) nounwind { ; AVX2-NEXT: vzeroupper ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v64i8: -; AVX512: # BB#0: -; AVX512-NEXT: vpcmpgtb %ymm3, %ymm1, %ymm4 -; AVX512-NEXT: vpcmpgtb %ymm2, %ymm0, %ymm0 -; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1 -; AVX512-NEXT: vextracti128 $1, %ymm4, %xmm3 -; AVX512-NEXT: # kill: %XMM0 %XMM0 %YMM0 -; AVX512-NEXT: vmovdqa %xmm4, %xmm2 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v64i8: +; AVX512F: # BB#0: +; AVX512F-NEXT: vpcmpgtb %ymm3, %ymm1, %ymm4 +; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm0, %ymm0 +; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm1 +; AVX512F-NEXT: vextracti128 $1, %ymm4, %xmm3 +; AVX512F-NEXT: # kill: %XMM0 %XMM0 %YMM0 +; AVX512F-NEXT: vmovdqa %xmm4, %xmm2 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v64i8: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vpcmpgtb %ymm3, %ymm1, %ymm4 +; AVX512DQ-NEXT: vpcmpgtb %ymm2, %ymm0, %ymm0 +; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1 +; AVX512DQ-NEXT: vextracti128 $1, %ymm4, %xmm3 +; AVX512DQ-NEXT: # kill: %XMM0 %XMM0 %YMM0 +; AVX512DQ-NEXT: vmovdqa %xmm4, %xmm2 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v64i8: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vpcmpgtb %zmm1, %zmm0, %k0 +; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 +; AVX512BW-NEXT: retq %1 = icmp sgt <64 x i8> %a0, %a1 ret <64 x i1> %1 } @@ -1957,120 +2008,350 @@ define <16 x i1> @test_cmp_v16f64(<16 x double> %a0, <16 x double> %a1) nounwind ; AVX2-NEXT: vzeroupper ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v16f64: -; AVX512: # BB#0: -; AVX512-NEXT: vextractf32x4 $3, %zmm2, %xmm4 -; AVX512-NEXT: vextractf32x4 $3, %zmm0, %xmm5 -; AVX512-NEXT: xorl %eax, %eax -; AVX512-NEXT: vucomisd %xmm4, %xmm5 -; AVX512-NEXT: movq $-1, %rcx -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] -; AVX512-NEXT: vucomisd %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] -; AVX512-NEXT: vextractf32x4 $2, %zmm2, %xmm5 -; AVX512-NEXT: vextractf32x4 $2, %zmm0, %xmm6 -; AVX512-NEXT: vucomisd %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm7 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] -; AVX512-NEXT: vucomisd %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 -; AVX512-NEXT: vextractf32x4 $1, %zmm2, %xmm5 -; AVX512-NEXT: vextractf32x4 $1, %zmm0, %xmm6 -; AVX512-NEXT: vucomisd %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq 
%rdx, %xmm7 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] -; AVX512-NEXT: vucomisd %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] -; AVX512-NEXT: vucomisd %xmm2, %xmm0 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] -; AVX512-NEXT: vucomisd %xmm2, %xmm0 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0] -; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 -; AVX512-NEXT: vpmovqd %zmm0, %ymm0 -; AVX512-NEXT: vextractf32x4 $3, %zmm3, %xmm2 -; AVX512-NEXT: vextractf32x4 $3, %zmm1, %xmm4 -; AVX512-NEXT: vucomisd %xmm2, %xmm4 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] -; AVX512-NEXT: vucomisd %xmm2, %xmm4 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm2 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] -; AVX512-NEXT: vextractf32x4 $2, %zmm3, %xmm4 -; AVX512-NEXT: vextractf32x4 $2, %zmm1, %xmm5 -; AVX512-NEXT: vucomisd %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] -; AVX512-NEXT: vucomisd %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] -; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 -; AVX512-NEXT: vextractf32x4 $1, %zmm3, %xmm4 -; AVX512-NEXT: vextractf32x4 $1, %zmm1, %xmm5 -; AVX512-NEXT: vucomisd %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] -; AVX512-NEXT: vucomisd %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] -; AVX512-NEXT: vucomisd %xmm3, %xmm1 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovaq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] -; AVX512-NEXT: vucomisd %xmm3, %xmm1 -; AVX512-NEXT: cmovaq %rcx, %rax -; AVX512-NEXT: vmovq %rax, %xmm1 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 -; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 -; AVX512-NEXT: vpmovqd %zmm1, %ymm1 -; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v16f64: +; AVX512F: # BB#0: +; AVX512F-NEXT: vextractf32x4 $3, %zmm2, %xmm4 +; AVX512F-NEXT: vextractf32x4 $3, %zmm0, %xmm5 +; AVX512F-NEXT: xorl %eax, %eax +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movq $-1, %rcx +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; 
AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512F-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm6 +; AVX512F-NEXT: vucomisd %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm7 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] +; AVX512F-NEXT: vucomisd %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512F-NEXT: vextractf32x4 $1, %zmm2, %xmm5 +; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm6 +; AVX512F-NEXT: vucomisd %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm7 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] +; AVX512F-NEXT: vucomisd %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] +; AVX512F-NEXT: vucomisd %xmm2, %xmm0 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512F-NEXT: vucomisd %xmm2, %xmm0 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0] +; AVX512F-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512F-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512F-NEXT: vextractf32x4 $3, %zmm1, %xmm4 +; AVX512F-NEXT: vucomisd %xmm2, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vucomisd %xmm2, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512F-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512F-NEXT: vextractf32x4 $2, %zmm1, %xmm5 +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512F-NEXT: vextractf32x4 $1, %zmm3, %xmm4 +; AVX512F-NEXT: vextractf32x4 $1, %zmm1, %xmm5 +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 
+; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512F-NEXT: vucomisd %xmm3, %xmm1 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512F-NEXT: vucomisd %xmm3, %xmm1 +; AVX512F-NEXT: cmovaq %rcx, %rax +; AVX512F-NEXT: vmovq %rax, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512F-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v16f64: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm2, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm0, %xmm5 +; AVX512DQ-NEXT: xorl %eax, %eax +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movq $-1, %rcx +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm2, %xmm5 +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm0, %xmm6 +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm7 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm2, %xmm5 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm0, %xmm6 +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm7 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm3, %xmm2 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm1, %xmm4 +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: 
vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm2 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm3, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm1, %xmm5 +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm3, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm1, %xmm5 +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512DQ-NEXT: vucomisd %xmm3, %xmm1 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512DQ-NEXT: vucomisd %xmm3, %xmm1 +; AVX512DQ-NEXT: cmovaq %rcx, %rax +; AVX512DQ-NEXT: vmovq %rax, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v16f64: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vextractf32x4 $3, %zmm2, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm0, %xmm5 +; AVX512BW-NEXT: xorl %eax, %eax +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movq $-1, %rcx +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm0, %xmm6 +; AVX512BW-NEXT: vucomisd %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm7 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] +; AVX512BW-NEXT: vucomisd %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm2, %xmm5 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm0, %xmm6 +; 
AVX512BW-NEXT: vucomisd %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm7 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm6[1,0] +; AVX512BW-NEXT: vucomisd %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0] +; AVX512BW-NEXT: vucomisd %xmm2, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512BW-NEXT: vucomisd %xmm2, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm1, %xmm4 +; AVX512BW-NEXT: vucomisd %xmm2, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vucomisd %xmm2, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm2 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm1, %xmm5 +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm3, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm1, %xmm5 +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0] +; AVX512BW-NEXT: vucomisd %xmm3, %xmm1 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512BW-NEXT: vucomisd %xmm3, %xmm1 +; AVX512BW-NEXT: cmovaq %rcx, %rax +; AVX512BW-NEXT: vmovq %rax, %xmm1 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512BW-NEXT: retq %1 = fcmp ogt <16 x double> %a0, %a1 ret <16 x i1> %1 } @@ -2416,207 +2697,612 
@@ define <32 x i1> @test_cmp_v32f32(<32 x float> %a0, <32 x float> %a1) nounwind { ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3] ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v32f32: -; AVX512: # BB#0: -; AVX512-NEXT: vextractf32x4 $3, %zmm2, %xmm4 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] -; AVX512-NEXT: vextractf32x4 $3, %zmm0, %xmm6 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] -; AVX512-NEXT: xorl %eax, %eax -; AVX512-NEXT: vucomiss %xmm5, %xmm7 -; AVX512-NEXT: movl $-1, %ecx -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm4, %xmm6 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm5 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm5, %xmm8 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] -; AVX512-NEXT: vucomiss %xmm7, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm8, %xmm5 -; AVX512-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm6 = xmm6[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm4, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm5, %xmm8 -; AVX512-NEXT: vextractf32x4 $2, %zmm2, %xmm5 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] -; AVX512-NEXT: vextractf32x4 $2, %zmm0, %xmm7 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm6, %xmm4 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm5, %xmm7 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm4 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] -; AVX512-NEXT: vucomiss %xmm6, %xmm4 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 -; AVX512-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8 -; AVX512-NEXT: vextractf32x4 $1, %zmm2, %xmm5 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] -; AVX512-NEXT: vextractf32x4 $1, %zmm0, %xmm7 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm6, %xmm4 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm5, %xmm7 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm4 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] -; AVX512-NEXT: vucomiss %xmm6, %xmm4 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 -; AVX512-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm5 = xmm2[1,1,3,3] -; AVX512-NEXT: vmovshdup {{.*#+}} xmm6 = xmm0[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; 
AVX512-NEXT: vucomiss %xmm2, %xmm0 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm5 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm7 = xmm0[1,0] -; AVX512-NEXT: vucomiss %xmm6, %xmm7 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm2, %xmm0 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm5, %xmm0 -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm8 -; AVX512-NEXT: vextractf32x4 $3, %zmm3, %xmm2 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] -; AVX512-NEXT: vextractf32x4 $3, %zmm1, %xmm5 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm4, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm2, %xmm5 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm4 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] -; AVX512-NEXT: vucomiss %xmm6, %xmm7 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm2, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm4, %xmm2 -; AVX512-NEXT: vextractf32x4 $2, %zmm3, %xmm4 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] -; AVX512-NEXT: vextractf32x4 $2, %zmm1, %xmm6 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm5, %xmm7 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm4, %xmm6 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm5 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm6[1,0] -; AVX512-NEXT: vucomiss %xmm7, %xmm0 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm5, %xmm0 -; AVX512-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm5 = xmm6[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0 -; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2 -; AVX512-NEXT: vextractf32x4 $1, %zmm3, %xmm0 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3] -; AVX512-NEXT: vextractf32x4 $1, %zmm1, %xmm5 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm4, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm0, %xmm5 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm4 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm0[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] -; AVX512-NEXT: vucomiss %xmm6, %xmm7 -; AVX512-NEXT: movl 
$0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm0, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm4, %xmm0 -; AVX512-NEXT: vmovshdup {{.*#+}} xmm4 = xmm3[1,1,3,3] -; AVX512-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3] -; AVX512-NEXT: vucomiss %xmm4, %xmm5 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vucomiss %xmm3, %xmm1 -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmoval %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm4 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm3[1,0] -; AVX512-NEXT: vpermilpd {{.*#+}} xmm6 = xmm1[1,0] -; AVX512-NEXT: vucomiss %xmm5, %xmm6 -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmoval %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 -; AVX512-NEXT: vpermilps {{.*#+}} xmm3 = xmm3[3,1,2,3] -; AVX512-NEXT: vpermilps {{.*#+}} xmm1 = xmm1[3,1,2,3] -; AVX512-NEXT: vucomiss %xmm3, %xmm1 -; AVX512-NEXT: cmoval %ecx, %eax -; AVX512-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1 -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v32f32: +; AVX512F: # BB#0: +; AVX512F-NEXT: vextractf32x4 $3, %zmm2, %xmm4 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] +; AVX512F-NEXT: vextractf32x4 $3, %zmm0, %xmm6 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] +; AVX512F-NEXT: xorl %eax, %eax +; AVX512F-NEXT: vucomiss %xmm5, %xmm7 +; AVX512F-NEXT: movl $-1, %ecx +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm4, %xmm6 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm5 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm5, %xmm8 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] +; AVX512F-NEXT: vucomiss %xmm7, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm8, %xmm5 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm6 = xmm6[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm4, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm5, %xmm8 +; AVX512F-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm7 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm6, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm5, %xmm7 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm4 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] +; AVX512F-NEXT: vucomiss %xmm6, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; 
AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8 +; AVX512F-NEXT: vextractf32x4 $1, %zmm2, %xmm5 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm7 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm6, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm5, %xmm7 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm4 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] +; AVX512F-NEXT: vucomiss %xmm6, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm5 = xmm2[1,1,3,3] +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm0[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm2, %xmm0 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm5 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm7 = xmm0[1,0] +; AVX512F-NEXT: vucomiss %xmm6, %xmm7 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm2, %xmm0 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm5, %xmm0 +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm8 +; AVX512F-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] +; AVX512F-NEXT: vextractf32x4 $3, %zmm1, %xmm5 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm4, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm2, %xmm5 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm4 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] +; AVX512F-NEXT: vucomiss %xmm6, %xmm7 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm2, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm4, %xmm2 +; AVX512F-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] +; AVX512F-NEXT: vextractf32x4 $2, %zmm1, %xmm6 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm5, %xmm7 +; AVX512F-NEXT: movl $0, %edx +; 
AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm4, %xmm6 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm5 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm6[1,0] +; AVX512F-NEXT: vucomiss %xmm7, %xmm0 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm5, %xmm0 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm5 = xmm6[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0 +; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2 +; AVX512F-NEXT: vextractf32x4 $1, %zmm3, %xmm0 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3] +; AVX512F-NEXT: vextractf32x4 $1, %zmm1, %xmm5 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm4, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm0, %xmm5 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm4 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm0[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] +; AVX512F-NEXT: vucomiss %xmm6, %xmm7 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm0, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm4, %xmm0 +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm3[1,1,3,3] +; AVX512F-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3] +; AVX512F-NEXT: vucomiss %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vucomiss %xmm3, %xmm1 +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmoval %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm4 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm3[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm6 = xmm1[1,0] +; AVX512F-NEXT: vucomiss %xmm5, %xmm6 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmoval %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512F-NEXT: vpermilps {{.*#+}} xmm3 = xmm3[3,1,2,3] +; AVX512F-NEXT: vpermilps {{.*#+}} xmm1 = xmm1[3,1,2,3] +; AVX512F-NEXT: vucomiss %xmm3, %xmm1 +; AVX512F-NEXT: cmoval %ecx, %eax +; AVX512F-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1 +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v32f32: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vextractf32x4 $3, %zmm2, %xmm4 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] +; AVX512DQ-NEXT: vextractf32x4 $3, %zmm0, %xmm6 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] +; AVX512DQ-NEXT: xorl %eax, %eax +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm7 +; AVX512DQ-NEXT: movl $-1, %ecx +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm6 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm5 +; 
AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm5, %xmm8 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] +; AVX512DQ-NEXT: vucomiss %xmm7, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm8, %xmm5 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm6 = xmm6[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm5, %xmm8 +; AVX512DQ-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512DQ-NEXT: vextractf32x4 $2, %zmm0, %xmm7 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm7 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm4 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8 +; AVX512DQ-NEXT: vextractf32x4 $1, %zmm2, %xmm5 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512DQ-NEXT: vextractf32x4 $1, %zmm0, %xmm7 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm7 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm4 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm5 = xmm2[1,1,3,3] +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm0[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm2, %xmm0 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm5 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm7 = xmm0[1,0] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm7 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] +; 
AVX512DQ-NEXT: vucomiss %xmm2, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm5, %xmm0 +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm0 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm8, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm8 +; AVX512DQ-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] +; AVX512DQ-NEXT: vextractf32x4 $3, %zmm1, %xmm5 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm2, %xmm5 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm4 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm7 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm2, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm4, %xmm2 +; AVX512DQ-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] +; AVX512DQ-NEXT: vextractf32x4 $2, %zmm1, %xmm6 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm7 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm6 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm5 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm6[1,0] +; AVX512DQ-NEXT: vucomiss %xmm7, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm5, %xmm0 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm5 = xmm6[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0 +; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2 +; AVX512DQ-NEXT: vextractf32x4 $1, %zmm3, %xmm0 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3] +; AVX512DQ-NEXT: vextractf32x4 $1, %zmm1, %xmm5 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm0, %xmm5 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm4 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm0[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] +; AVX512DQ-NEXT: vucomiss %xmm6, %xmm7 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm0, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm4, %xmm0 +; AVX512DQ-NEXT: vmovshdup {{.*#+}} 
xmm4 = xmm3[1,1,3,3] +; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3] +; AVX512DQ-NEXT: vucomiss %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vucomiss %xmm3, %xmm1 +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmoval %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm4 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm3[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm6 = xmm1[1,0] +; AVX512DQ-NEXT: vucomiss %xmm5, %xmm6 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmoval %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm3 = xmm3[3,1,2,3] +; AVX512DQ-NEXT: vpermilps {{.*#+}} xmm1 = xmm1[3,1,2,3] +; AVX512DQ-NEXT: vucomiss %xmm3, %xmm1 +; AVX512DQ-NEXT: cmoval %ecx, %eax +; AVX512DQ-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1 +; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm2, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v32f32: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vextractf32x4 $3, %zmm2, %xmm4 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] +; AVX512BW-NEXT: vextractf32x4 $3, %zmm0, %xmm6 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] +; AVX512BW-NEXT: xorl %eax, %eax +; AVX512BW-NEXT: vucomiss %xmm5, %xmm7 +; AVX512BW-NEXT: movl $-1, %ecx +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm4, %xmm6 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm5 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm5, %xmm8 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] +; AVX512BW-NEXT: vucomiss %xmm7, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm8, %xmm5 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm6 = xmm6[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm4, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm5, %xmm8 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm0, %xmm7 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm5, %xmm7 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm4 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm2, %xmm5 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512BW-NEXT: vextractf32x4 $1, %zmm0, %xmm7 
+; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm5, %xmm7 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm4 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm9 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm5[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm7[1,0] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm9, %xmm4 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm6 = xmm7[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm5 = xmm2[1,1,3,3] +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm0[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm2, %xmm0 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm5 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm0[1,0] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm7 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm2, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm5, %xmm0 +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovdw %zmm0, %ymm8 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] +; AVX512BW-NEXT: vextractf32x4 $3, %zmm1, %xmm5 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm4, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm2, %xmm5 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm4 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm2[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm7 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm2 = xmm2[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm2, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm2 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm5 = xmm4[1,1,3,3] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm1, %xmm6 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm7 = xmm6[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm5, %xmm7 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm4, %xmm6 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm5 +; AVX512BW-NEXT: vpinsrd $1, %edx, 
%xmm5, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm6[1,0] +; AVX512BW-NEXT: vucomiss %xmm7, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm5, %xmm0 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm4 = xmm4[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm6[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0 +; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm3, %xmm0 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3] +; AVX512BW-NEXT: vextractf32x4 $1, %zmm1, %xmm5 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm4, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm0, %xmm5 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm4 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm0[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm7 = xmm5[1,0] +; AVX512BW-NEXT: vucomiss %xmm6, %xmm7 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm5 = xmm5[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm0, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm0 +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm3[1,1,3,3] +; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3] +; AVX512BW-NEXT: vucomiss %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vucomiss %xmm3, %xmm1 +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmoval %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm4 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm3[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm6 = xmm1[1,0] +; AVX512BW-NEXT: vucomiss %xmm5, %xmm6 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmoval %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm4, %xmm4 +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm3 = xmm3[3,1,2,3] +; AVX512BW-NEXT: vpermilps {{.*#+}} xmm1 = xmm1[3,1,2,3] +; AVX512BW-NEXT: vucomiss %xmm3, %xmm1 +; AVX512BW-NEXT: cmoval %ecx, %eax +; AVX512BW-NEXT: vpinsrd $3, %eax, %xmm4, %xmm1 +; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0 +; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0 +; AVX512BW-NEXT: retq %1 = fcmp ogt <32 x float> %a0, %a1 ret <32 x i1> %1 } @@ -2785,136 +3471,398 @@ define <16 x i1> @test_cmp_v16i64(<16 x i64> %a0, <16 x i64> %a1) nounwind { ; AVX2-NEXT: vzeroupper ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v16i64: -; AVX512: # BB#0: -; AVX512-NEXT: vextracti32x4 $3, %zmm2, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rcx -; AVX512-NEXT: vextracti32x4 $3, %zmm0, %xmm5 -; AVX512-NEXT: vpextrq $1, %xmm5, %rdx -; AVX512-NEXT: xorl %eax, %eax -; AVX512-NEXT: cmpq %rcx, %rdx -; AVX512-NEXT: movq $-1, %rcx -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm5, %rsi -; AVX512-NEXT: 
cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] -; AVX512-NEXT: vextracti32x4 $2, %zmm2, %xmm5 -; AVX512-NEXT: vpextrq $1, %xmm5, %rdx -; AVX512-NEXT: vextracti32x4 $2, %zmm0, %xmm6 -; AVX512-NEXT: vpextrq $1, %xmm6, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm7 -; AVX512-NEXT: vmovq %xmm5, %rdx -; AVX512-NEXT: vmovq %xmm6, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 -; AVX512-NEXT: vextracti32x4 $1, %zmm2, %xmm5 -; AVX512-NEXT: vpextrq $1, %xmm5, %rdx -; AVX512-NEXT: vextracti32x4 $1, %zmm0, %xmm6 -; AVX512-NEXT: vpextrq $1, %xmm6, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm7 -; AVX512-NEXT: vmovq %xmm5, %rdx -; AVX512-NEXT: vmovq %xmm6, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] -; AVX512-NEXT: vpextrq $1, %xmm2, %rdx -; AVX512-NEXT: vpextrq $1, %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vmovq %xmm2, %rdx -; AVX512-NEXT: vmovq %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0] -; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 -; AVX512-NEXT: vpmovqd %zmm0, %ymm0 -; AVX512-NEXT: vextracti32x4 $3, %zmm3, %xmm2 -; AVX512-NEXT: vpextrq $1, %xmm2, %rdx -; AVX512-NEXT: vextracti32x4 $3, %zmm1, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vmovq %xmm2, %rdx -; AVX512-NEXT: vmovq %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm2 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0] -; AVX512-NEXT: vextracti32x4 $2, %zmm3, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rdx -; AVX512-NEXT: vextracti32x4 $2, %zmm1, %xmm5 -; AVX512-NEXT: vpextrq $1, %xmm5, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm5, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] -; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 -; AVX512-NEXT: vextracti32x4 $1, %zmm3, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rdx -; AVX512-NEXT: vextracti32x4 $1, %zmm1, %xmm5 -; AVX512-NEXT: vpextrq $1, %xmm5, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm6 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm5, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; 
AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] -; AVX512-NEXT: vpextrq $1, %xmm3, %rdx -; AVX512-NEXT: vpextrq $1, %xmm1, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vmovq %xmm3, %rdx -; AVX512-NEXT: vmovq %xmm1, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: cmovgq %rcx, %rax -; AVX512-NEXT: vmovq %rax, %xmm1 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 -; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 -; AVX512-NEXT: vpmovqd %zmm1, %ymm1 -; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v16i64: +; AVX512F: # BB#0: +; AVX512F-NEXT: vextracti32x4 $3, %zmm2, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rcx +; AVX512F-NEXT: vextracti32x4 $3, %zmm0, %xmm5 +; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512F-NEXT: xorl %eax, %eax +; AVX512F-NEXT: cmpq %rcx, %rdx +; AVX512F-NEXT: movq $-1, %rcx +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512F-NEXT: vextracti32x4 $2, %zmm2, %xmm5 +; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512F-NEXT: vextracti32x4 $2, %zmm0, %xmm6 +; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm7 +; AVX512F-NEXT: vmovq %xmm5, %rdx +; AVX512F-NEXT: vmovq %xmm6, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512F-NEXT: vextracti32x4 $1, %zmm2, %xmm5 +; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm6 +; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm7 +; AVX512F-NEXT: vmovq %xmm5, %rdx +; AVX512F-NEXT: vmovq %xmm6, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] +; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512F-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vmovq %xmm2, %rdx +; AVX512F-NEXT: vmovq %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0] +; AVX512F-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512F-NEXT: vextracti32x4 $3, %zmm3, %xmm2 +; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512F-NEXT: vextracti32x4 $3, %zmm1, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi 
+; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vmovq %xmm2, %rdx +; AVX512F-NEXT: vmovq %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0] +; AVX512F-NEXT: vextracti32x4 $2, %zmm3, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512F-NEXT: vextracti32x4 $2, %zmm1, %xmm5 +; AVX512F-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512F-NEXT: vextracti32x4 $1, %zmm3, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512F-NEXT: vextracti32x4 $1, %zmm1, %xmm5 +; AVX512F-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm6 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512F-NEXT: vpextrq $1, %xmm3, %rdx +; AVX512F-NEXT: vpextrq $1, %xmm1, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vmovq %xmm3, %rdx +; AVX512F-NEXT: vmovq %xmm1, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: cmovgq %rcx, %rax +; AVX512F-NEXT: vmovq %rax, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512F-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v16i64: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm2, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rcx +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm0, %xmm5 +; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512DQ-NEXT: xorl %eax, %eax +; AVX512DQ-NEXT: cmpq %rcx, %rdx +; AVX512DQ-NEXT: movq $-1, %rcx +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vmovq %xmm4, %rdx +; AVX512DQ-NEXT: vmovq %xmm5, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm2, %xmm5 +; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm0, %xmm6 +; AVX512DQ-NEXT: vpextrq $1, %xmm6, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm7 +; AVX512DQ-NEXT: vmovq %xmm5, %rdx +; AVX512DQ-NEXT: vmovq %xmm6, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq 
%rdx, %xmm5 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm2, %xmm5 +; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm0, %xmm6 +; AVX512DQ-NEXT: vpextrq $1, %xmm6, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm7 +; AVX512DQ-NEXT: vmovq %xmm5, %rdx +; AVX512DQ-NEXT: vmovq %xmm6, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] +; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vmovq %xmm2, %rdx +; AVX512DQ-NEXT: vmovq %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm3, %xmm2 +; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm1, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vmovq %xmm2, %rdx +; AVX512DQ-NEXT: vmovq %xmm4, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm2 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0] +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm3, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm1, %xmm5 +; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vmovq %xmm4, %rdx +; AVX512DQ-NEXT: vmovq %xmm5, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm3, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm1, %xmm5 +; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm6 +; AVX512DQ-NEXT: vmovq %xmm4, %rdx +; AVX512DQ-NEXT: vmovq %xmm5, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512DQ-NEXT: vpextrq $1, %xmm3, %rdx +; AVX512DQ-NEXT: vpextrq $1, %xmm1, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vmovq %xmm3, %rdx +; AVX512DQ-NEXT: vmovq %xmm1, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: cmovgq %rcx, 
%rax +; AVX512DQ-NEXT: vmovq %rax, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v16i64: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vextracti32x4 $3, %zmm2, %xmm4 +; AVX512BW-NEXT: vpextrq $1, %xmm4, %rcx +; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm5 +; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512BW-NEXT: xorl %eax, %eax +; AVX512BW-NEXT: cmpq %rcx, %rdx +; AVX512BW-NEXT: movq $-1, %rcx +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vmovq %xmm4, %rdx +; AVX512BW-NEXT: vmovq %xmm5, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512BW-NEXT: vextracti32x4 $2, %zmm2, %xmm5 +; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm6 +; AVX512BW-NEXT: vpextrq $1, %xmm6, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm7 +; AVX512BW-NEXT: vmovq %xmm5, %rdx +; AVX512BW-NEXT: vmovq %xmm6, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm5 +; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm6 +; AVX512BW-NEXT: vpextrq $1, %xmm6, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm7 +; AVX512BW-NEXT: vmovq %xmm5, %rdx +; AVX512BW-NEXT: vmovq %xmm6, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0] +; AVX512BW-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512BW-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vmovq %xmm2, %rdx +; AVX512BW-NEXT: vmovq %xmm0, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512BW-NEXT: vextracti32x4 $3, %zmm3, %xmm2 +; AVX512BW-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512BW-NEXT: vextracti32x4 $3, %zmm1, %xmm4 +; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vmovq %xmm2, %rdx +; AVX512BW-NEXT: vmovq %xmm4, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm2 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = 
xmm2[0],xmm5[0] +; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, %xmm4 +; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, %xmm5 +; AVX512BW-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vmovq %xmm4, %rdx +; AVX512BW-NEXT: vmovq %xmm5, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4 +; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm5 +; AVX512BW-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm6 +; AVX512BW-NEXT: vmovq %xmm4, %rdx +; AVX512BW-NEXT: vmovq %xmm5, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0] +; AVX512BW-NEXT: vpextrq $1, %xmm3, %rdx +; AVX512BW-NEXT: vpextrq $1, %xmm1, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vmovq %xmm3, %rdx +; AVX512BW-NEXT: vmovq %xmm1, %rsi +; AVX512BW-NEXT: cmpq %rdx, %rsi +; AVX512BW-NEXT: cmovgq %rcx, %rax +; AVX512BW-NEXT: vmovq %rax, %xmm1 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512BW-NEXT: retq %1 = icmp sgt <16 x i64> %a0, %a1 ret <16 x i1> %1 } @@ -3252,223 +4200,660 @@ define <32 x i1> @test_cmp_v32i32(<32 x i32> %a0, <32 x i32> %a1) nounwind { ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3] ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v32i32: -; AVX512: # BB#0: -; AVX512-NEXT: vextracti32x4 $3, %zmm2, %xmm4 -; AVX512-NEXT: vpextrd $1, %xmm4, %ecx -; AVX512-NEXT: vextracti32x4 $3, %zmm0, %xmm5 -; AVX512-NEXT: vpextrd $1, %xmm5, %edx -; AVX512-NEXT: xorl %eax, %eax -; AVX512-NEXT: cmpl %ecx, %edx -; AVX512-NEXT: movl $-1, %ecx -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm4, %esi -; AVX512-NEXT: vmovd %xmm5, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm6 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $2, %xmm4, %edx -; AVX512-NEXT: vpextrd $2, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $3, %xmm4, %edx -; AVX512-NEXT: vpextrd $3, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 -; AVX512-NEXT: vextracti32x4 $2, %zmm2, %xmm5 -; AVX512-NEXT: vpextrd $1, %xmm5, %edx -; AVX512-NEXT: vextracti32x4 $2, %zmm0, %xmm6 -; AVX512-NEXT: vpextrd $1, %xmm6, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx 
-; AVX512-NEXT: vmovd %xmm5, %esi -; AVX512-NEXT: vmovd %xmm6, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm7 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 -; AVX512-NEXT: vpextrd $2, %xmm5, %edx -; AVX512-NEXT: vpextrd $2, %xmm6, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 -; AVX512-NEXT: vpextrd $3, %xmm5, %edx -; AVX512-NEXT: vpextrd $3, %xmm6, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 -; AVX512-NEXT: vextracti32x4 $1, %zmm2, %xmm5 -; AVX512-NEXT: vpextrd $1, %xmm5, %edx -; AVX512-NEXT: vextracti32x4 $1, %zmm0, %xmm6 -; AVX512-NEXT: vpextrd $1, %xmm6, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm5, %esi -; AVX512-NEXT: vmovd %xmm6, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm7 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 -; AVX512-NEXT: vpextrd $2, %xmm5, %edx -; AVX512-NEXT: vpextrd $2, %xmm6, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 -; AVX512-NEXT: vpextrd $3, %xmm5, %edx -; AVX512-NEXT: vpextrd $3, %xmm6, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 -; AVX512-NEXT: vpextrd $1, %xmm2, %edx -; AVX512-NEXT: vpextrd $1, %xmm0, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm2, %esi -; AVX512-NEXT: vmovd %xmm0, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm6 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $2, %xmm2, %edx -; AVX512-NEXT: vpextrd $2, %xmm0, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $3, %xmm2, %edx -; AVX512-NEXT: vpextrd $3, %xmm0, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm6, %xmm0 -; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: vextracti32x4 $3, %zmm3, %xmm2 -; AVX512-NEXT: vpextrd $1, %xmm2, %edx -; AVX512-NEXT: vextracti32x4 $3, %zmm1, %xmm4 -; AVX512-NEXT: vpextrd $1, %xmm4, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm2, %esi -; AVX512-NEXT: vmovd %xmm4, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm5 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpextrd $2, %xmm2, %edx -; AVX512-NEXT: vpextrd $2, %xmm4, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpextrd $3, %xmm2, %edx -; AVX512-NEXT: vpextrd $3, %xmm4, %esi -; AVX512-NEXT: cmpl 
%edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm5, %xmm2 -; AVX512-NEXT: vextracti32x4 $2, %zmm3, %xmm4 -; AVX512-NEXT: vpextrd $1, %xmm4, %edx -; AVX512-NEXT: vextracti32x4 $2, %zmm1, %xmm5 -; AVX512-NEXT: vpextrd $1, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm4, %esi -; AVX512-NEXT: vmovd %xmm5, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm6 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $2, %xmm4, %edx -; AVX512-NEXT: vpextrd $2, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $3, %xmm4, %edx -; AVX512-NEXT: vpextrd $3, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 -; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 -; AVX512-NEXT: vextracti32x4 $1, %zmm3, %xmm4 -; AVX512-NEXT: vpextrd $1, %xmm4, %edx -; AVX512-NEXT: vextracti32x4 $1, %zmm1, %xmm5 -; AVX512-NEXT: vpextrd $1, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm4, %esi -; AVX512-NEXT: vmovd %xmm5, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm6 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $2, %xmm4, %edx -; AVX512-NEXT: vpextrd $2, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 -; AVX512-NEXT: vpextrd $3, %xmm4, %edx -; AVX512-NEXT: vpextrd $3, %xmm5, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 -; AVX512-NEXT: vpextrd $1, %xmm3, %edx -; AVX512-NEXT: vpextrd $1, %xmm1, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vmovd %xmm3, %esi -; AVX512-NEXT: vmovd %xmm1, %edi -; AVX512-NEXT: cmpl %esi, %edi -; AVX512-NEXT: movl $0, %esi -; AVX512-NEXT: cmovgl %ecx, %esi -; AVX512-NEXT: vmovd %esi, %xmm5 -; AVX512-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpextrd $2, %xmm3, %edx -; AVX512-NEXT: vpextrd $2, %xmm1, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgl %ecx, %edx -; AVX512-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 -; AVX512-NEXT: vpextrd $3, %xmm3, %edx -; AVX512-NEXT: vpextrd $3, %xmm1, %esi -; AVX512-NEXT: cmpl %edx, %esi -; AVX512-NEXT: cmovgl %ecx, %eax -; AVX512-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1 -; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 -; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 -; AVX512-NEXT: vpmovdb %zmm1, %xmm1 -; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v32i32: +; AVX512F: # BB#0: +; AVX512F-NEXT: vextracti32x4 $3, %zmm2, %xmm4 +; AVX512F-NEXT: vpextrd $1, %xmm4, %ecx +; AVX512F-NEXT: vextracti32x4 $3, %zmm0, %xmm5 +; AVX512F-NEXT: vpextrd $1, %xmm5, %edx +; AVX512F-NEXT: xorl %eax, %eax +; AVX512F-NEXT: cmpl %ecx, %edx +; AVX512F-NEXT: movl $-1, %ecx +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; 
AVX512F-NEXT: vmovd %xmm4, %esi +; AVX512F-NEXT: vmovd %xmm5, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm6 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $2, %xmm4, %edx +; AVX512F-NEXT: vpextrd $2, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $3, %xmm4, %edx +; AVX512F-NEXT: vpextrd $3, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512F-NEXT: vextracti32x4 $2, %zmm2, %xmm5 +; AVX512F-NEXT: vpextrd $1, %xmm5, %edx +; AVX512F-NEXT: vextracti32x4 $2, %zmm0, %xmm6 +; AVX512F-NEXT: vpextrd $1, %xmm6, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm5, %esi +; AVX512F-NEXT: vmovd %xmm6, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm7 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 +; AVX512F-NEXT: vpextrd $2, %xmm5, %edx +; AVX512F-NEXT: vpextrd $2, %xmm6, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 +; AVX512F-NEXT: vpextrd $3, %xmm5, %edx +; AVX512F-NEXT: vpextrd $3, %xmm6, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512F-NEXT: vextracti32x4 $1, %zmm2, %xmm5 +; AVX512F-NEXT: vpextrd $1, %xmm5, %edx +; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm6 +; AVX512F-NEXT: vpextrd $1, %xmm6, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm5, %esi +; AVX512F-NEXT: vmovd %xmm6, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm7 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 +; AVX512F-NEXT: vpextrd $2, %xmm5, %edx +; AVX512F-NEXT: vpextrd $2, %xmm6, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 +; AVX512F-NEXT: vpextrd $3, %xmm5, %edx +; AVX512F-NEXT: vpextrd $3, %xmm6, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 +; AVX512F-NEXT: vpextrd $1, %xmm2, %edx +; AVX512F-NEXT: vpextrd $1, %xmm0, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm2, %esi +; AVX512F-NEXT: vmovd %xmm0, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm6 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $2, %xmm2, %edx +; AVX512F-NEXT: vpextrd $2, %xmm0, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $3, %xmm2, %edx +; AVX512F-NEXT: vpextrd $3, %xmm0, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; 
AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm6, %xmm0 +; AVX512F-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: vextracti32x4 $3, %zmm3, %xmm2 +; AVX512F-NEXT: vpextrd $1, %xmm2, %edx +; AVX512F-NEXT: vextracti32x4 $3, %zmm1, %xmm4 +; AVX512F-NEXT: vpextrd $1, %xmm4, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm2, %esi +; AVX512F-NEXT: vmovd %xmm4, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm5 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpextrd $2, %xmm2, %edx +; AVX512F-NEXT: vpextrd $2, %xmm4, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpextrd $3, %xmm2, %edx +; AVX512F-NEXT: vpextrd $3, %xmm4, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm5, %xmm2 +; AVX512F-NEXT: vextracti32x4 $2, %zmm3, %xmm4 +; AVX512F-NEXT: vpextrd $1, %xmm4, %edx +; AVX512F-NEXT: vextracti32x4 $2, %zmm1, %xmm5 +; AVX512F-NEXT: vpextrd $1, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm4, %esi +; AVX512F-NEXT: vmovd %xmm5, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm6 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $2, %xmm4, %edx +; AVX512F-NEXT: vpextrd $2, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $3, %xmm4, %edx +; AVX512F-NEXT: vpextrd $3, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512F-NEXT: vextracti32x4 $1, %zmm3, %xmm4 +; AVX512F-NEXT: vpextrd $1, %xmm4, %edx +; AVX512F-NEXT: vextracti32x4 $1, %zmm1, %xmm5 +; AVX512F-NEXT: vpextrd $1, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm4, %esi +; AVX512F-NEXT: vmovd %xmm5, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi +; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm6 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $2, %xmm4, %edx +; AVX512F-NEXT: vpextrd $2, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512F-NEXT: vpextrd $3, %xmm4, %edx +; AVX512F-NEXT: vpextrd $3, %xmm5, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512F-NEXT: vpextrd $1, %xmm3, %edx +; AVX512F-NEXT: vpextrd $1, %xmm1, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vmovd %xmm3, %esi +; AVX512F-NEXT: vmovd %xmm1, %edi +; AVX512F-NEXT: cmpl %esi, %edi +; AVX512F-NEXT: movl $0, %esi 
+; AVX512F-NEXT: cmovgl %ecx, %esi +; AVX512F-NEXT: vmovd %esi, %xmm5 +; AVX512F-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpextrd $2, %xmm3, %edx +; AVX512F-NEXT: vpextrd $2, %xmm1, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgl %ecx, %edx +; AVX512F-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512F-NEXT: vpextrd $3, %xmm3, %edx +; AVX512F-NEXT: vpextrd $3, %xmm1, %esi +; AVX512F-NEXT: cmpl %edx, %esi +; AVX512F-NEXT: cmovgl %ecx, %eax +; AVX512F-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1 +; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512F-NEXT: vpmovdb %zmm1, %xmm1 +; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v32i32: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vextracti32x4 $3, %zmm2, %xmm4 +; AVX512DQ-NEXT: vpextrd $1, %xmm4, %ecx +; AVX512DQ-NEXT: vextracti32x4 $3, %zmm0, %xmm5 +; AVX512DQ-NEXT: vpextrd $1, %xmm5, %edx +; AVX512DQ-NEXT: xorl %eax, %eax +; AVX512DQ-NEXT: cmpl %ecx, %edx +; AVX512DQ-NEXT: movl $-1, %ecx +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm4, %esi +; AVX512DQ-NEXT: vmovd %xmm5, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm6 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $2, %xmm4, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $3, %xmm4, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512DQ-NEXT: vextracti32x4 $2, %zmm2, %xmm5 +; AVX512DQ-NEXT: vpextrd $1, %xmm5, %edx +; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, %xmm6 +; AVX512DQ-NEXT: vpextrd $1, %xmm6, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm5, %esi +; AVX512DQ-NEXT: vmovd %xmm6, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm7 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 +; AVX512DQ-NEXT: vpextrd $2, %xmm5, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm6, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 +; AVX512DQ-NEXT: vpextrd $3, %xmm5, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm6, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512DQ-NEXT: vextracti32x4 $1, %zmm2, %xmm5 +; AVX512DQ-NEXT: vpextrd $1, %xmm5, %edx +; AVX512DQ-NEXT: vextracti32x4 $1, %zmm0, %xmm6 +; AVX512DQ-NEXT: vpextrd $1, %xmm6, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm5, %esi +; AVX512DQ-NEXT: vmovd %xmm6, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm7 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 +; AVX512DQ-NEXT: vpextrd $2, %xmm5, 
%edx +; AVX512DQ-NEXT: vpextrd $2, %xmm6, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 +; AVX512DQ-NEXT: vpextrd $3, %xmm5, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm6, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 +; AVX512DQ-NEXT: vpextrd $1, %xmm2, %edx +; AVX512DQ-NEXT: vpextrd $1, %xmm0, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm2, %esi +; AVX512DQ-NEXT: vmovd %xmm0, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm6 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $2, %xmm2, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm0, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $3, %xmm2, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm0, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm6, %xmm0 +; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm4, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: vextracti32x4 $3, %zmm3, %xmm2 +; AVX512DQ-NEXT: vpextrd $1, %xmm2, %edx +; AVX512DQ-NEXT: vextracti32x4 $3, %zmm1, %xmm4 +; AVX512DQ-NEXT: vpextrd $1, %xmm4, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm2, %esi +; AVX512DQ-NEXT: vmovd %xmm4, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm5 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpextrd $2, %xmm2, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm4, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpextrd $3, %xmm2, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm4, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm5, %xmm2 +; AVX512DQ-NEXT: vextracti32x4 $2, %zmm3, %xmm4 +; AVX512DQ-NEXT: vpextrd $1, %xmm4, %edx +; AVX512DQ-NEXT: vextracti32x4 $2, %zmm1, %xmm5 +; AVX512DQ-NEXT: vpextrd $1, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm4, %esi +; AVX512DQ-NEXT: vmovd %xmm5, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm6 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $2, %xmm4, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $3, %xmm4, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512DQ-NEXT: vinserti128 
$1, %xmm2, %ymm4, %ymm2 +; AVX512DQ-NEXT: vextracti32x4 $1, %zmm3, %xmm4 +; AVX512DQ-NEXT: vpextrd $1, %xmm4, %edx +; AVX512DQ-NEXT: vextracti32x4 $1, %zmm1, %xmm5 +; AVX512DQ-NEXT: vpextrd $1, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm4, %esi +; AVX512DQ-NEXT: vmovd %xmm5, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm6 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $2, %xmm4, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512DQ-NEXT: vpextrd $3, %xmm4, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm5, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512DQ-NEXT: vpextrd $1, %xmm3, %edx +; AVX512DQ-NEXT: vpextrd $1, %xmm1, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vmovd %xmm3, %esi +; AVX512DQ-NEXT: vmovd %xmm1, %edi +; AVX512DQ-NEXT: cmpl %esi, %edi +; AVX512DQ-NEXT: movl $0, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %esi +; AVX512DQ-NEXT: vmovd %esi, %xmm5 +; AVX512DQ-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpextrd $2, %xmm3, %edx +; AVX512DQ-NEXT: vpextrd $2, %xmm1, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgl %ecx, %edx +; AVX512DQ-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512DQ-NEXT: vpextrd $3, %xmm3, %edx +; AVX512DQ-NEXT: vpextrd $3, %xmm1, %esi +; AVX512DQ-NEXT: cmpl %edx, %esi +; AVX512DQ-NEXT: cmovgl %ecx, %eax +; AVX512DQ-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1 +; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm2, %zmm1, %zmm1 +; AVX512DQ-NEXT: vpmovdb %zmm1, %xmm1 +; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v32i32: +; AVX512BW: # BB#0: +; AVX512BW-NEXT: vextracti32x4 $3, %zmm2, %xmm4 +; AVX512BW-NEXT: vpextrd $1, %xmm4, %ecx +; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm5 +; AVX512BW-NEXT: vpextrd $1, %xmm5, %edx +; AVX512BW-NEXT: xorl %eax, %eax +; AVX512BW-NEXT: cmpl %ecx, %edx +; AVX512BW-NEXT: movl $-1, %ecx +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm4, %esi +; AVX512BW-NEXT: vmovd %xmm5, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm6 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $2, %xmm4, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $3, %xmm4, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512BW-NEXT: vextracti32x4 $2, %zmm2, %xmm5 +; AVX512BW-NEXT: vpextrd $1, %xmm5, %edx +; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm6 +; AVX512BW-NEXT: vpextrd $1, %xmm6, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: 
cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm5, %esi +; AVX512BW-NEXT: vmovd %xmm6, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm7 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 +; AVX512BW-NEXT: vpextrd $2, %xmm5, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm6, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 +; AVX512BW-NEXT: vpextrd $3, %xmm5, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm6, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4 +; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm5 +; AVX512BW-NEXT: vpextrd $1, %xmm5, %edx +; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm6 +; AVX512BW-NEXT: vpextrd $1, %xmm6, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm5, %esi +; AVX512BW-NEXT: vmovd %xmm6, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm7 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm7, %xmm7 +; AVX512BW-NEXT: vpextrd $2, %xmm5, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm6, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm7, %xmm7 +; AVX512BW-NEXT: vpextrd $3, %xmm5, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm6, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5 +; AVX512BW-NEXT: vpextrd $1, %xmm2, %edx +; AVX512BW-NEXT: vpextrd $1, %xmm0, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm2, %esi +; AVX512BW-NEXT: vmovd %xmm0, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm6 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $2, %xmm2, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm0, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $3, %xmm2, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm0, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm6, %xmm0 +; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0 +; AVX512BW-NEXT: vextracti32x4 $3, %zmm3, %xmm2 +; AVX512BW-NEXT: vpextrd $1, %xmm2, %edx +; AVX512BW-NEXT: vextracti32x4 $3, %zmm1, %xmm4 +; AVX512BW-NEXT: vpextrd $1, %xmm4, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm2, %esi +; AVX512BW-NEXT: vmovd %xmm4, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm5 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512BW-NEXT: vpextrd $2, %xmm2, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm4, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; 
AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512BW-NEXT: vpextrd $3, %xmm2, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm4, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm5, %xmm2 +; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, %xmm4 +; AVX512BW-NEXT: vpextrd $1, %xmm4, %edx +; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, %xmm5 +; AVX512BW-NEXT: vpextrd $1, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm4, %esi +; AVX512BW-NEXT: vmovd %xmm5, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm6 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $2, %xmm4, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $3, %xmm4, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2 +; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4 +; AVX512BW-NEXT: vpextrd $1, %xmm4, %edx +; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm5 +; AVX512BW-NEXT: vpextrd $1, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm4, %esi +; AVX512BW-NEXT: vmovd %xmm5, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm6 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $2, %xmm4, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm6, %xmm6 +; AVX512BW-NEXT: vpextrd $3, %xmm4, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm5, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4 +; AVX512BW-NEXT: vpextrd $1, %xmm3, %edx +; AVX512BW-NEXT: vpextrd $1, %xmm1, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vmovd %xmm3, %esi +; AVX512BW-NEXT: vmovd %xmm1, %edi +; AVX512BW-NEXT: cmpl %esi, %edi +; AVX512BW-NEXT: movl $0, %esi +; AVX512BW-NEXT: cmovgl %ecx, %esi +; AVX512BW-NEXT: vmovd %esi, %xmm5 +; AVX512BW-NEXT: vpinsrd $1, %edx, %xmm5, %xmm5 +; AVX512BW-NEXT: vpextrd $2, %xmm3, %edx +; AVX512BW-NEXT: vpextrd $2, %xmm1, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovgl %ecx, %edx +; AVX512BW-NEXT: vpinsrd $2, %edx, %xmm5, %xmm5 +; AVX512BW-NEXT: vpextrd $3, %xmm3, %edx +; AVX512BW-NEXT: vpextrd $3, %xmm1, %esi +; AVX512BW-NEXT: cmpl %edx, %esi +; AVX512BW-NEXT: cmovgl %ecx, %eax +; AVX512BW-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1 +; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1 +; AVX512BW-NEXT: vpmovdw %zmm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0 
+; AVX512BW-NEXT: retq %1 = icmp sgt <32 x i32> %a0, %a1 ret <32 x i1> %1 } @@ -4342,291 +5727,987 @@ define <64 x i1> @test_cmp_v64i16(<64 x i16> %a0, <64 x i16> %a1) nounwind { ; AVX2-NEXT: vzeroupper ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v64i16: -; AVX512: # BB#0: -; AVX512-NEXT: vpcmpgtw %ymm7, %ymm3, %ymm3 -; AVX512-NEXT: vpmovsxwd %ymm3, %zmm3 -; AVX512-NEXT: vpslld $31, %zmm3, %zmm3 -; AVX512-NEXT: vptestmd %zmm3, %zmm3, %k0 -; AVX512-NEXT: kshiftlw $14, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: kshiftlw $15, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %ecx -; AVX512-NEXT: vmovd %ecx, %xmm3 -; AVX512-NEXT: vpinsrb $1, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $13, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $2, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $12, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $3, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $11, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $4, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $10, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $5, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $9, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $6, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $8, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $7, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $7, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $8, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $6, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $9, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $5, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $10, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $4, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $11, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $3, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $12, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $2, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $13, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftlw $1, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $14, %eax, %xmm3, %xmm3 -; AVX512-NEXT: kshiftrw $15, %k0, %k0 -; AVX512-NEXT: kmovw %k0, %eax -; AVX512-NEXT: vpinsrb $15, %eax, %xmm3, %xmm3 -; AVX512-NEXT: vpcmpgtw %ymm6, %ymm2, %ymm2 -; AVX512-NEXT: vpmovsxwd %ymm2, %zmm2 -; AVX512-NEXT: vpslld $31, %zmm2, %zmm2 -; AVX512-NEXT: vptestmd %zmm2, %zmm2, %k0 -; AVX512-NEXT: kshiftlw $14, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: kshiftlw $15, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %ecx -; AVX512-NEXT: vmovd %ecx, %xmm2 -; AVX512-NEXT: vpinsrb $1, %eax, %xmm2, %xmm2 -; AVX512-NEXT: kshiftlw $13, %k0, %k1 -; AVX512-NEXT: kshiftrw $15, %k1, %k1 -; AVX512-NEXT: kmovw %k1, %eax -; AVX512-NEXT: vpinsrb $2, %eax, %xmm2, %xmm2 -; AVX512-NEXT: kshiftlw 
$12, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $3, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $11, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $4, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $10, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $5, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $9, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $6, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $8, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $7, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $7, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $8, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $6, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $9, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $5, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $10, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $4, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $11, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $3, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $12, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $2, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $13, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftlw $1, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $14, %eax, %xmm2, %xmm2
-; AVX512-NEXT: kshiftrw $15, %k0, %k0
-; AVX512-NEXT: kmovw %k0, %eax
-; AVX512-NEXT: vpinsrb $15, %eax, %xmm2, %xmm2
-; AVX512-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
-; AVX512-NEXT: vpsllw $7, %ymm2, %ymm2
-; AVX512-NEXT: vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512-NEXT: vpand %ymm3, %ymm2, %ymm2
-; AVX512-NEXT: vpxor %ymm6, %ymm6, %ymm6
-; AVX512-NEXT: vpcmpgtb %ymm2, %ymm6, %ymm2
-; AVX512-NEXT: vpcmpgtw %ymm5, %ymm1, %ymm1
-; AVX512-NEXT: vpmovsxwd %ymm1, %zmm1
-; AVX512-NEXT: vpslld $31, %zmm1, %zmm1
-; AVX512-NEXT: vptestmd %zmm1, %zmm1, %k0
-; AVX512-NEXT: kshiftlw $14, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: kshiftlw $15, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %ecx
-; AVX512-NEXT: vmovd %ecx, %xmm1
-; AVX512-NEXT: vpinsrb $1, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $13, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $2, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $12, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $3, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $11, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $4, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $10, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $5, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $9, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $6, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $8, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $7, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $7, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $8, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $6, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $9, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $5, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $10, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $4, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $11, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $3, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $12, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $2, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $13, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftlw $1, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $14, %eax, %xmm1, %xmm1
-; AVX512-NEXT: kshiftrw $15, %k0, %k0
-; AVX512-NEXT: kmovw %k0, %eax
-; AVX512-NEXT: vpinsrb $15, %eax, %xmm1, %xmm1
-; AVX512-NEXT: vpcmpgtw %ymm4, %ymm0, %ymm0
-; AVX512-NEXT: vpmovsxwd %ymm0, %zmm0
-; AVX512-NEXT: vpslld $31, %zmm0, %zmm0
-; AVX512-NEXT: vptestmd %zmm0, %zmm0, %k0
-; AVX512-NEXT: kshiftlw $14, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: kshiftlw $15, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %ecx
-; AVX512-NEXT: vmovd %ecx, %xmm0
-; AVX512-NEXT: vpinsrb $1, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $13, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $2, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $12, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $3, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $11, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $4, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $10, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $5, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $9, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $6, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $8, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $7, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $7, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $8, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $6, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $9, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $5, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $10, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $4, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $11, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $3, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $12, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $2, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $13, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftlw $1, %k0, %k1
-; AVX512-NEXT: kshiftrw $15, %k1, %k1
-; AVX512-NEXT: kmovw %k1, %eax
-; AVX512-NEXT: vpinsrb $14, %eax, %xmm0, %xmm0
-; AVX512-NEXT: kshiftrw $15, %k0, %k0
-; AVX512-NEXT: kmovw %k0, %eax
-; AVX512-NEXT: vpinsrb $15, %eax, %xmm0, %xmm0
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512-NEXT: vpsllw $7, %ymm0, %ymm0
-; AVX512-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512-NEXT: vpcmpgtb %ymm0, %ymm6, %ymm0
-; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
-; AVX512-NEXT: # kill: %XMM0 %XMM0 %YMM0
-; AVX512-NEXT: # kill: %XMM2 %XMM2 %YMM2
-; AVX512-NEXT: retq
+; AVX512F-LABEL: test_cmp_v64i16:
+; AVX512F: # BB#0:
+; AVX512F-NEXT: vpcmpgtw %ymm7, %ymm3, %ymm3
+; AVX512F-NEXT: vpmovsxwd %ymm3, %zmm3
+; AVX512F-NEXT: vpslld $31, %zmm3, %zmm3
+; AVX512F-NEXT: vptestmd %zmm3, %zmm3, %k0
+; AVX512F-NEXT: kshiftlw $14, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: kshiftlw $15, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %ecx
+; AVX512F-NEXT: vmovd %ecx, %xmm3
+; AVX512F-NEXT: vpinsrb $1, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $13, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $2, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $12, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $3, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $11, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $4, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $10, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $5, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $9, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $6, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $8, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $7, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $7, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $8, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $6, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $9, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $5, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $10, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $4, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $11, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $3, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $12, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $2, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $13, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftlw $1, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $14, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: kshiftrw $15, %k0, %k0
+; AVX512F-NEXT: kmovw %k0, %eax
+; AVX512F-NEXT: vpinsrb $15, %eax, %xmm3, %xmm3
+; AVX512F-NEXT: vpcmpgtw %ymm6, %ymm2, %ymm2
+; AVX512F-NEXT: vpmovsxwd %ymm2, %zmm2
+; AVX512F-NEXT: vpslld $31, %zmm2, %zmm2
+; AVX512F-NEXT: vptestmd %zmm2, %zmm2, %k0
+; AVX512F-NEXT: kshiftlw $14, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: kshiftlw $15, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %ecx
+; AVX512F-NEXT: vmovd %ecx, %xmm2
+; AVX512F-NEXT: vpinsrb $1, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $13, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $2, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $12, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $3, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $11, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $4, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $10, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $5, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $9, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $6, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $8, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $7, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $7, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $8, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $6, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $9, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $5, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $10, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $4, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $11, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $3, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $12, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $2, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $13, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftlw $1, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $14, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: kshiftrw $15, %k0, %k0
+; AVX512F-NEXT: kmovw %k0, %eax
+; AVX512F-NEXT: vpinsrb $15, %eax, %xmm2, %xmm2
+; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
+; AVX512F-NEXT: vpsllw $7, %ymm2, %ymm2
+; AVX512F-NEXT: vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128]
+; AVX512F-NEXT: vpand %ymm3, %ymm2, %ymm2
+; AVX512F-NEXT: vpxor %ymm6, %ymm6, %ymm6
+; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm6, %ymm2
+; AVX512F-NEXT: vpcmpgtw %ymm5, %ymm1, %ymm1
+; AVX512F-NEXT: vpmovsxwd %ymm1, %zmm1
+; AVX512F-NEXT: vpslld $31, %zmm1, %zmm1
+; AVX512F-NEXT: vptestmd %zmm1, %zmm1, %k0
+; AVX512F-NEXT: kshiftlw $14, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: kshiftlw $15, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %ecx
+; AVX512F-NEXT: vmovd %ecx, %xmm1
+; AVX512F-NEXT: vpinsrb $1, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $13, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $2, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $12, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $3, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $11, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $4, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $10, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $5, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $9, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $6, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $8, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $7, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $7, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $8, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $6, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $9, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $5, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $10, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $4, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $11, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $3, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $12, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $2, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $13, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftlw $1, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $14, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: kshiftrw $15, %k0, %k0
+; AVX512F-NEXT: kmovw %k0, %eax
+; AVX512F-NEXT: vpinsrb $15, %eax, %xmm1, %xmm1
+; AVX512F-NEXT: vpcmpgtw %ymm4, %ymm0, %ymm0
+; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm0
+; AVX512F-NEXT: vpslld $31, %zmm0, %zmm0
+; AVX512F-NEXT: vptestmd %zmm0, %zmm0, %k0
+; AVX512F-NEXT: kshiftlw $14, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: kshiftlw $15, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %ecx
+; AVX512F-NEXT: vmovd %ecx, %xmm0
+; AVX512F-NEXT: vpinsrb $1, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $13, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $2, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $12, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $3, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $11, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $4, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $10, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $5, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $9, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $6, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $8, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $7, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $7, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $8, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $6, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $9, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $5, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $10, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $4, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $11, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $3, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $12, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $2, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $13, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftlw $1, %k0, %k1
+; AVX512F-NEXT: kshiftrw $15, %k1, %k1
+; AVX512F-NEXT: kmovw %k1, %eax
+; AVX512F-NEXT: vpinsrb $14, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: kshiftrw $15, %k0, %k0
+; AVX512F-NEXT: kmovw %k0, %eax
+; AVX512F-NEXT: vpinsrb $15, %eax, %xmm0, %xmm0
+; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512F-NEXT: vpsllw $7, %ymm0, %ymm0
+; AVX512F-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512F-NEXT: vpcmpgtb %ymm0, %ymm6, %ymm0
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm1
+; AVX512F-NEXT: vextracti128 $1, %ymm2, %xmm3
+; AVX512F-NEXT: # kill: %XMM0 %XMM0 %YMM0
+; AVX512F-NEXT: # kill: %XMM2 %XMM2 %YMM2
+; AVX512F-NEXT: retq
+;
+; AVX512DQ-LABEL: test_cmp_v64i16:
+; AVX512DQ: # BB#0:
+; AVX512DQ-NEXT: vpcmpgtw %ymm7, %ymm3, %ymm3
+; AVX512DQ-NEXT: vpmovsxwd %ymm3, %zmm3
+; AVX512DQ-NEXT: vpslld $31, %zmm3, %zmm3
+; AVX512DQ-NEXT: vptestmd %zmm3, %zmm3, %k0
+; AVX512DQ-NEXT: kshiftlw $14, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: kshiftlw $15, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %ecx
+; AVX512DQ-NEXT: vmovd %ecx, %xmm3
+; AVX512DQ-NEXT: vpinsrb $1, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $13, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $2, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $12, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $3, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $11, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $4, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $10, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $5, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $9, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $6, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $8, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $7, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $7, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $8, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $6, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $9, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $5, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $10, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $4, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $11, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $3, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $12, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $2, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $13, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftlw $1, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $14, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: kshiftrw $15, %k0, %k0
+; AVX512DQ-NEXT: kmovw %k0, %eax
+; AVX512DQ-NEXT: vpinsrb $15, %eax, %xmm3, %xmm3
+; AVX512DQ-NEXT: vpcmpgtw %ymm6, %ymm2, %ymm2
+; AVX512DQ-NEXT: vpmovsxwd %ymm2, %zmm2
+; AVX512DQ-NEXT: vpslld $31, %zmm2, %zmm2
+; AVX512DQ-NEXT: vptestmd %zmm2, %zmm2, %k0
+; AVX512DQ-NEXT: kshiftlw $14, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: kshiftlw $15, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %ecx
+; AVX512DQ-NEXT: vmovd %ecx, %xmm2
+; AVX512DQ-NEXT: vpinsrb $1, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $13, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $2, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $12, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $3, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $11, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $4, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $10, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $5, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $9, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $6, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $8, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $7, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $7, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $8, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $6, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $9, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $5, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $10, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $4, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $11, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $3, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $12, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $2, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $13, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftlw $1, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $14, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: kshiftrw $15, %k0, %k0
+; AVX512DQ-NEXT: kmovw %k0, %eax
+; AVX512DQ-NEXT: vpinsrb $15, %eax, %xmm2, %xmm2
+; AVX512DQ-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
+; AVX512DQ-NEXT: vpsllw $7, %ymm2, %ymm2
+; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128]
+; AVX512DQ-NEXT: vpand %ymm3, %ymm2, %ymm2
+; AVX512DQ-NEXT: vpxor %ymm6, %ymm6, %ymm6
+; AVX512DQ-NEXT: vpcmpgtb %ymm2, %ymm6, %ymm2
+; AVX512DQ-NEXT: vpcmpgtw %ymm5, %ymm1, %ymm1
+; AVX512DQ-NEXT: vpmovsxwd %ymm1, %zmm1
+; AVX512DQ-NEXT: vpslld $31, %zmm1, %zmm1
+; AVX512DQ-NEXT: vptestmd %zmm1, %zmm1, %k0
+; AVX512DQ-NEXT: kshiftlw $14, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: kshiftlw $15, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %ecx
+; AVX512DQ-NEXT: vmovd %ecx, %xmm1
+; AVX512DQ-NEXT: vpinsrb $1, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $13, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $2, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $12, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $3, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $11, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $4, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $10, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $5, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $9, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $6, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $8, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $7, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $7, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $8, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $6, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $9, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $5, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $10, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $4, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $11, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $3, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $12, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $2, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $13, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftlw $1, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $14, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: kshiftrw $15, %k0, %k0
+; AVX512DQ-NEXT: kmovw %k0, %eax
+; AVX512DQ-NEXT: vpinsrb $15, %eax, %xmm1, %xmm1
+; AVX512DQ-NEXT: vpcmpgtw %ymm4, %ymm0, %ymm0
+; AVX512DQ-NEXT: vpmovsxwd %ymm0, %zmm0
+; AVX512DQ-NEXT: vpslld $31, %zmm0, %zmm0
+; AVX512DQ-NEXT: vptestmd %zmm0, %zmm0, %k0
+; AVX512DQ-NEXT: kshiftlw $14, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: kshiftlw $15, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %ecx
+; AVX512DQ-NEXT: vmovd %ecx, %xmm0
+; AVX512DQ-NEXT: vpinsrb $1, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $13, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $2, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $12, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $3, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $11, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $4, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $10, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $5, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $9, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $6, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $8, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $7, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $7, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $8, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $6, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $9, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $5, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $10, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $4, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $11, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $3, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $12, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $2, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $13, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftlw $1, %k0, %k1
+; AVX512DQ-NEXT: kshiftrw $15, %k1, %k1
+; AVX512DQ-NEXT: kmovw %k1, %eax
+; AVX512DQ-NEXT: vpinsrb $14, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: kshiftrw $15, %k0, %k0
+; AVX512DQ-NEXT: kmovw %k0, %eax
+; AVX512DQ-NEXT: vpinsrb $15, %eax, %xmm0, %xmm0
+; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512DQ-NEXT: vpsllw $7, %ymm0, %ymm0
+; AVX512DQ-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512DQ-NEXT: vpcmpgtb %ymm0, %ymm6, %ymm0
+; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
+; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
+; AVX512DQ-NEXT: # kill: %XMM0 %XMM0 %YMM0
+; AVX512DQ-NEXT: # kill: %XMM2 %XMM2 %YMM2
+; AVX512DQ-NEXT: retq
+;
+; AVX512BW-LABEL: test_cmp_v64i16:
+; AVX512BW: # BB#0:
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm2, %xmm4
+; AVX512BW-NEXT: vpextrw $1, %xmm4, %ecx
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm5
+; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
+; AVX512BW-NEXT: xorl %eax, %eax
+; AVX512BW-NEXT: cmpw %cx, %dx
+; AVX512BW-NEXT: movw $-1, %cx
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm4, %esi
+; AVX512BW-NEXT: vmovd %xmm5, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm6
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $2, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $3, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $4, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $5, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $6, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $7, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm2, %xmm5
+; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm6
+; AVX512BW-NEXT: vpextrw $1, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm5, %esi
+; AVX512BW-NEXT: vmovd %xmm6, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm7
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $2, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $3, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $4, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $5, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $6, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $7, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm7, %xmm5
+; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512BW-NEXT: vpextrw $1, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm5, %esi
+; AVX512BW-NEXT: vmovd %xmm6, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm7
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $2, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $3, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $4, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $5, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $6, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm7, %xmm7
+; AVX512BW-NEXT: vpextrw $7, %xmm5, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm6, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm7, %xmm5
+; AVX512BW-NEXT: vpextrw $1, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $1, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm2, %esi
+; AVX512BW-NEXT: vmovd %xmm0, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm6
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $2, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $3, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $4, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $5, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $6, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $7, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm0, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm0
+; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
+; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm3, %xmm2
+; AVX512BW-NEXT: vpextrw $1, %xmm2, %edx
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm1, %xmm4
+; AVX512BW-NEXT: vpextrw $1, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm2, %esi
+; AVX512BW-NEXT: vmovd %xmm4, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm5
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $2, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $3, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $4, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $5, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $6, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $7, %xmm2, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm4, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm5, %xmm2
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, %xmm4
+; AVX512BW-NEXT: vpextrw $1, %xmm4, %edx
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, %xmm5
+; AVX512BW-NEXT: vpextrw $1, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm4, %esi
+; AVX512BW-NEXT: vmovd %xmm5, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm6
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $2, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $3, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $4, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $5, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $6, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $7, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
+; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512BW-NEXT: vpextrw $1, %xmm4, %edx
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512BW-NEXT: vpextrw $1, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm4, %esi
+; AVX512BW-NEXT: vmovd %xmm5, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm6
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $2, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $3, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $4, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $5, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $6, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
+; AVX512BW-NEXT: vpextrw $7, %xmm4, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm5, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
+; AVX512BW-NEXT: vpextrw $1, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $1, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vmovd %xmm3, %esi
+; AVX512BW-NEXT: vmovd %xmm1, %edi
+; AVX512BW-NEXT: cmpw %si, %di
+; AVX512BW-NEXT: movl $0, %esi
+; AVX512BW-NEXT: cmovgw %cx, %si
+; AVX512BW-NEXT: vmovd %esi, %xmm5
+; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $2, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $2, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $3, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $3, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $4, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $4, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $5, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $5, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $6, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $6, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgw %cx, %dx
+; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm5, %xmm5
+; AVX512BW-NEXT: vpextrw $7, %xmm3, %edx
+; AVX512BW-NEXT: vpextrw $7, %xmm1, %esi
+; AVX512BW-NEXT: cmpw %dx, %si
+; AVX512BW-NEXT: cmovgw %cx, %ax
+; AVX512BW-NEXT: vpinsrw $7, %eax, %xmm5, %xmm1
+; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
+; AVX512BW-NEXT: vpmovwb %zmm1, %ymm1
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512BW-NEXT: retq
 %1 = icmp sgt <64 x i16> %a0, %a1
 ret <64 x i1> %1
 }
@@ -6240,50 +8321,103 @@ define <128 x i1> @test_cmp_v128i8(<128 x i8> %a0, <128 x i8> %a1) nounwind {
 ; AVX2-NEXT: vzeroupper
 ; AVX2-NEXT: retq
 ;
-; AVX512-LABEL: test_cmp_v128i8:
-; AVX512: # BB#0:
-; AVX512-NEXT: vpcmpgtb %ymm4, %ymm0, %ymm0
-; AVX512-NEXT: vpcmpgtb %ymm5, %ymm1, %ymm1
-; AVX512-NEXT: vpcmpgtb %ymm6, %ymm2, %ymm2
-; AVX512-NEXT: vpcmpgtb %ymm7, %ymm3, %ymm3
-; AVX512-NEXT: vextracti128 $1, %ymm3, %xmm4
-; AVX512-NEXT: vpmovsxbd %xmm4, %zmm4
-; AVX512-NEXT: vpslld $31, %zmm4, %zmm4
-; AVX512-NEXT: vptestmd %zmm4, %zmm4, %k0
-; AVX512-NEXT: kmovw %k0, 14(%rdi)
-; AVX512-NEXT: vpmovsxbd %xmm3, %zmm3
-; AVX512-NEXT: vpslld $31, %zmm3, %zmm3
-; AVX512-NEXT: vptestmd %zmm3, %zmm3, %k0
-; AVX512-NEXT: kmovw %k0, 12(%rdi)
-; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
-; AVX512-NEXT: vpmovsxbd %xmm3, %zmm3
-; AVX512-NEXT: vpslld $31, %zmm3, %zmm3
-; AVX512-NEXT: vptestmd %zmm3, %zmm3, %k0
-; AVX512-NEXT: kmovw %k0, 10(%rdi)
-; AVX512-NEXT: vpmovsxbd %xmm2, %zmm2
-; AVX512-NEXT: vpslld $31, %zmm2, %zmm2
-; AVX512-NEXT: vptestmd %zmm2, %zmm2, %k0
-; AVX512-NEXT: kmovw %k0, 8(%rdi)
-; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm2
-; AVX512-NEXT: vpmovsxbd %xmm2, %zmm2
-; AVX512-NEXT: vpslld $31, %zmm2, %zmm2
-; AVX512-NEXT: vptestmd %zmm2, %zmm2, %k0
-; AVX512-NEXT: kmovw %k0, 6(%rdi)
-; AVX512-NEXT: vpmovsxbd %xmm1, %zmm1
-; AVX512-NEXT: vpslld $31, %zmm1, %zmm1
-; AVX512-NEXT: vptestmd %zmm1, %zmm1, %k0
-; AVX512-NEXT: kmovw %k0, 4(%rdi)
-; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX512-NEXT: vpmovsxbd %xmm1, %zmm1
-; AVX512-NEXT: vpslld $31, %zmm1, %zmm1
-; AVX512-NEXT: vptestmd %zmm1, %zmm1, %k0
-; AVX512-NEXT: kmovw %k0, 2(%rdi)
-; AVX512-NEXT: vpmovsxbd %xmm0, %zmm0
-; AVX512-NEXT: vpslld $31, %zmm0, %zmm0
-; AVX512-NEXT: vptestmd %zmm0, %zmm0, %k0
-; AVX512-NEXT: kmovw %k0, (%rdi)
-; AVX512-NEXT: movq %rdi, %rax
-; AVX512-NEXT: retq
+; AVX512F-LABEL: test_cmp_v128i8:
+; AVX512F: # BB#0:
+; AVX512F-NEXT: vpcmpgtb %ymm4, %ymm0, %ymm0
+; AVX512F-NEXT: vpcmpgtb %ymm5, %ymm1, %ymm1
+; AVX512F-NEXT: vpcmpgtb %ymm6, %ymm2, %ymm2
+; AVX512F-NEXT: vpcmpgtb %ymm7, %ymm3, %ymm3
+; AVX512F-NEXT: vextracti128 $1, %ymm3, %xmm4
+; AVX512F-NEXT: vpmovsxbd %xmm4, %zmm4
+; AVX512F-NEXT: vpslld $31, %zmm4, %zmm4
+; AVX512F-NEXT: vptestmd %zmm4, %zmm4, %k0
+; AVX512F-NEXT: kmovw %k0, 14(%rdi)
+; AVX512F-NEXT: vpmovsxbd %xmm3, %zmm3
+; AVX512F-NEXT: vpslld $31, %zmm3, %zmm3
+; AVX512F-NEXT: vptestmd %zmm3, %zmm3, %k0
+; AVX512F-NEXT: kmovw %k0, 12(%rdi)
+; AVX512F-NEXT: vextracti128 $1, %ymm2, %xmm3
+; AVX512F-NEXT: vpmovsxbd %xmm3, %zmm3
+; AVX512F-NEXT: vpslld $31, %zmm3, %zmm3
+; AVX512F-NEXT: vptestmd %zmm3, %zmm3, %k0
+; AVX512F-NEXT: kmovw %k0, 10(%rdi)
+; AVX512F-NEXT: vpmovsxbd %xmm2, %zmm2
+; AVX512F-NEXT: vpslld $31, %zmm2, %zmm2
+; AVX512F-NEXT: vptestmd %zmm2, %zmm2, %k0
+; AVX512F-NEXT: kmovw %k0, 8(%rdi)
+; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm2
+; AVX512F-NEXT: vpmovsxbd %xmm2, %zmm2
+; AVX512F-NEXT: vpslld $31, %zmm2, %zmm2
+; AVX512F-NEXT: vptestmd %zmm2, %zmm2, %k0
+; AVX512F-NEXT: kmovw %k0, 6(%rdi)
+; AVX512F-NEXT: vpmovsxbd %xmm1, %zmm1
+; AVX512F-NEXT: vpslld $31, %zmm1, %zmm1
+; AVX512F-NEXT: vptestmd %zmm1, %zmm1, %k0
+; AVX512F-NEXT: kmovw %k0, 4(%rdi)
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm1
+; AVX512F-NEXT: vpmovsxbd %xmm1, %zmm1
+; AVX512F-NEXT: vpslld $31, %zmm1, %zmm1
+; AVX512F-NEXT: vptestmd %zmm1, %zmm1, %k0
+; AVX512F-NEXT: kmovw %k0, 2(%rdi)
+; AVX512F-NEXT: vpmovsxbd %xmm0, %zmm0
+; AVX512F-NEXT: vpslld $31, %zmm0, %zmm0
+; AVX512F-NEXT: vptestmd %zmm0, %zmm0, %k0
+; AVX512F-NEXT: kmovw %k0, (%rdi)
+; AVX512F-NEXT: movq %rdi, %rax
+; AVX512F-NEXT: retq
+;
+; AVX512DQ-LABEL: test_cmp_v128i8:
+; AVX512DQ: # BB#0:
+; AVX512DQ-NEXT: vpcmpgtb %ymm4, %ymm0, %ymm0
+; AVX512DQ-NEXT: vpcmpgtb %ymm5, %ymm1, %ymm1
+; AVX512DQ-NEXT: vpcmpgtb %ymm6, %ymm2, %ymm2
+; AVX512DQ-NEXT: vpcmpgtb %ymm7, %ymm3, %ymm3
+; AVX512DQ-NEXT: vextracti128 $1, %ymm3, %xmm4
+; AVX512DQ-NEXT: vpmovsxbd %xmm4, %zmm4
+; AVX512DQ-NEXT: vpslld $31, %zmm4, %zmm4
+; AVX512DQ-NEXT: vptestmd %zmm4, %zmm4, %k0
+; AVX512DQ-NEXT: kmovw %k0, 14(%rdi)
+; AVX512DQ-NEXT: vpmovsxbd %xmm3, %zmm3
+; AVX512DQ-NEXT: vpslld $31, %zmm3, %zmm3
+; AVX512DQ-NEXT: vptestmd %zmm3, %zmm3, %k0
+; AVX512DQ-NEXT: kmovw %k0, 12(%rdi)
+; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
+; AVX512DQ-NEXT: vpmovsxbd %xmm3, %zmm3
+; AVX512DQ-NEXT: vpslld $31, %zmm3, %zmm3
+; AVX512DQ-NEXT: vptestmd %zmm3, %zmm3, %k0
+; AVX512DQ-NEXT: kmovw %k0, 10(%rdi)
+; AVX512DQ-NEXT: vpmovsxbd %xmm2, %zmm2
+; AVX512DQ-NEXT: vpslld $31, %zmm2, %zmm2
+; AVX512DQ-NEXT: vptestmd %zmm2, %zmm2, %k0
+; AVX512DQ-NEXT: kmovw %k0, 8(%rdi)
+; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm2
+; AVX512DQ-NEXT: vpmovsxbd %xmm2, %zmm2
+; AVX512DQ-NEXT: vpslld $31, %zmm2, %zmm2
+; AVX512DQ-NEXT: vptestmd %zmm2, %zmm2, %k0
+; AVX512DQ-NEXT: kmovw %k0, 6(%rdi)
+; AVX512DQ-NEXT: vpmovsxbd %xmm1, %zmm1
+; AVX512DQ-NEXT: vpslld $31, %zmm1, %zmm1
+; AVX512DQ-NEXT: vptestmd %zmm1, %zmm1, %k0
+; AVX512DQ-NEXT: kmovw %k0, 4(%rdi)
+; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
+; AVX512DQ-NEXT: vpmovsxbd %xmm1, %zmm1
+; AVX512DQ-NEXT: vpslld $31, %zmm1, %zmm1
+; AVX512DQ-NEXT: vptestmd %zmm1, %zmm1, %k0
+; AVX512DQ-NEXT: kmovw %k0, 2(%rdi)
+; AVX512DQ-NEXT: vpmovsxbd %xmm0, %zmm0
+; AVX512DQ-NEXT: vpslld $31, %zmm0, %zmm0
+; AVX512DQ-NEXT: vptestmd %zmm0, %zmm0, %k0
+; AVX512DQ-NEXT: kmovw %k0, (%rdi)
+; AVX512DQ-NEXT: movq %rdi, %rax
+; AVX512DQ-NEXT: retq
+;
+; AVX512BW-LABEL: test_cmp_v128i8:
+; AVX512BW: # BB#0:
+; AVX512BW-NEXT: vpcmpgtb %zmm3, %zmm1, %k0
+; AVX512BW-NEXT: vpcmpgtb %zmm2, %zmm0, %k1
+; AVX512BW-NEXT: vpmovm2b %k1, %zmm0
+; AVX512BW-NEXT: vpmovm2b %k0, %zmm1
+; AVX512BW-NEXT: retq
 %1 = icmp sgt <128 x i8> %a0, %a1
 ret <128 x i1> %1
 }
@@ -6781,231 +8915,684 @@ define <32 x i1> @test_cmp_v32f64(<32 x double> %a0, <32 x double> %a1) nounwind
 ; AVX2-NEXT: popq %rbp
 ; AVX2-NEXT: retq
 ;
-; AVX512-LABEL: test_cmp_v32f64:
-; AVX512: # BB#0:
-; AVX512-NEXT: vextractf32x4 $3, %zmm4, %xmm8
-; AVX512-NEXT: vextractf32x4 $3, %zmm0, %xmm9
-; AVX512-NEXT: xorl %eax, %eax
-; AVX512-NEXT: vucomisd %xmm8, %xmm9
-; AVX512-NEXT: movq $-1, %rcx
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm10
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm8 = xmm8[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0]
-; AVX512-NEXT: vucomisd %xmm8, %xmm9
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm8
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm10[0],xmm8[0]
-; AVX512-NEXT: vextractf32x4 $2, %zmm4, %xmm9
-; AVX512-NEXT: vextractf32x4 $2, %zmm0, %xmm10
-; AVX512-NEXT: vucomisd %xmm9, %xmm10
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm11
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0]
-; AVX512-NEXT: vucomisd %xmm9, %xmm10
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm9
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
-; AVX512-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512-NEXT: vextractf32x4 $1, %zmm4, %xmm9
-; AVX512-NEXT: vextractf32x4 $1, %zmm0, %xmm10
-; AVX512-NEXT: vucomisd %xmm9, %xmm10
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm11
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0]
-; AVX512-NEXT: vucomisd %xmm9, %xmm10
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm9
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm10
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm0
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
-; AVX512-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0
-; AVX512-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
-; AVX512-NEXT: vpmovqd %zmm0, %ymm8
-; AVX512-NEXT: vextractf32x4 $3, %zmm5, %xmm4
-; AVX512-NEXT: vextractf32x4 $3, %zmm1, %xmm0
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm9
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm0
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm0[0]
-; AVX512-NEXT: vextractf32x4 $2, %zmm5, %xmm4
-; AVX512-NEXT: vextractf32x4 $2, %zmm1, %xmm0
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm10
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm0
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
-; AVX512-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512-NEXT: vextractf32x4 $1, %zmm5, %xmm4
-; AVX512-NEXT: vextractf32x4 $1, %zmm1, %xmm0
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm10
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
-; AVX512-NEXT: vucomisd %xmm4, %xmm0
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm0
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
-; AVX512-NEXT: vucomisd %xmm5, %xmm1
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512-NEXT: vucomisd %xmm5, %xmm1
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm1
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0]
-; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
-; AVX512-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0
-; AVX512-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
-; AVX512-NEXT: vpmovdb %zmm0, %xmm8
-; AVX512-NEXT: vextractf32x4 $3, %zmm6, %xmm1
-; AVX512-NEXT: vextractf32x4 $3, %zmm2, %xmm4
-; AVX512-NEXT: vucomisd %xmm1, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vucomisd %xmm1, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm1
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
-; AVX512-NEXT: vextractf32x4 $2, %zmm6, %xmm4
-; AVX512-NEXT: vextractf32x4 $2, %zmm2, %xmm5
-; AVX512-NEXT: vucomisd %xmm4, %xmm5
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm0
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
-; AVX512-NEXT: vucomisd %xmm4, %xmm5
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512-NEXT: vextractf32x4 $1, %zmm6, %xmm1
-; AVX512-NEXT: vextractf32x4 $1, %zmm2, %xmm4
-; AVX512-NEXT: vucomisd %xmm1, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vucomisd %xmm1, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm1
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0]
-; AVX512-NEXT: vucomisd %xmm6, %xmm2
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512-NEXT: vucomisd %xmm5, %xmm2
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm2
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm4[0],xmm2[0]
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512-NEXT: vextractf32x4 $3, %zmm7, %xmm1
-; AVX512-NEXT: vextractf32x4 $3, %zmm3, %xmm2
-; AVX512-NEXT: vucomisd %xmm1, %xmm2
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512-NEXT: vucomisd %xmm1, %xmm2
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm1
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0]
-; AVX512-NEXT: vextractf32x4 $2, %zmm7, %xmm2
-; AVX512-NEXT: vextractf32x4 $2, %zmm3, %xmm4
-; AVX512-NEXT: vucomisd %xmm2, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vucomisd %xmm2, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm2
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512-NEXT: vextractf32x4 $1, %zmm7, %xmm2
-; AVX512-NEXT: vextractf32x4 $1, %zmm3, %xmm4
-; AVX512-NEXT: vucomisd %xmm2, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
-; AVX512-NEXT: vucomisd %xmm2, %xmm4
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm2
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
-; AVX512-NEXT: vucomisd %xmm7, %xmm3
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovaq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm5 = xmm7[1,0]
-; AVX512-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0]
-; AVX512-NEXT: vucomisd %xmm5, %xmm3
-; AVX512-NEXT: cmovaq %rcx, %rax
-; AVX512-NEXT: vmovq %rax, %xmm3
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
-; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
-; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
-; AVX512-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0
-; AVX512-NEXT: retq
+; AVX512F-LABEL: test_cmp_v32f64:
+; AVX512F: # BB#0:
+; AVX512F-NEXT: vextractf32x4 $3, %zmm4, %xmm8
+; AVX512F-NEXT: vextractf32x4 $3, %zmm0, %xmm9
+; AVX512F-NEXT: xorl %eax, %eax
+; AVX512F-NEXT: vucomisd %xmm8, %xmm9
+; AVX512F-NEXT: movq $-1, %rcx
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm10
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm8 = xmm8[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0]
+; AVX512F-NEXT: vucomisd %xmm8, %xmm9
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm8
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm10[0],xmm8[0]
+; AVX512F-NEXT: vextractf32x4 $2, %zmm4, %xmm9
+; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm10
+; AVX512F-NEXT: vucomisd %xmm9, %xmm10
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm11
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0]
+; AVX512F-NEXT: vucomisd %xmm9, %xmm10
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm9
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
+; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
+; AVX512F-NEXT: vextractf32x4 $1, %zmm4, %xmm9
+; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm10
+; AVX512F-NEXT: vucomisd %xmm9, %xmm10
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm11
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0]
+; AVX512F-NEXT: vucomisd %xmm9, %xmm10
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm9
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm10
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm0
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
+; AVX512F-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0
+; AVX512F-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
+; AVX512F-NEXT: vpmovqd %zmm0, %ymm8
+; AVX512F-NEXT: vextractf32x4 $3, %zmm5, %xmm4
+; AVX512F-NEXT: vextractf32x4 $3, %zmm1, %xmm0
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm9
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm0
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm0[0]
+; AVX512F-NEXT: vextractf32x4 $2, %zmm5, %xmm4
+; AVX512F-NEXT: vextractf32x4 $2, %zmm1, %xmm0
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm10
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm0
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
+; AVX512F-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
+; AVX512F-NEXT: vextractf32x4 $1, %zmm5, %xmm4
+; AVX512F-NEXT: vextractf32x4 $1, %zmm1, %xmm0
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm10
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512F-NEXT: vucomisd %xmm4, %xmm0
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm0
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
+; AVX512F-NEXT: vucomisd %xmm5, %xmm1
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm4
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0]
+; AVX512F-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
+; AVX512F-NEXT: vucomisd %xmm5, %xmm1
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+; AVX512F-NEXT: vmovq %rdx, %xmm1
+; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0]
+; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512F-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0
+; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
+; AVX512F-NEXT: vpmovdb %zmm0, %xmm8
+; AVX512F-NEXT: vextractf32x4 $3, %zmm6, %xmm1
+; AVX512F-NEXT: vextractf32x4 $3, %zmm2, %xmm4
+; AVX512F-NEXT: vucomisd %xmm1, %xmm4
+; AVX512F-NEXT: movl $0, %edx
+; AVX512F-NEXT: cmovaq %rcx, %rdx
+;
AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vucomisd %xmm1, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512F-NEXT: vextractf32x4 $2, %zmm6, %xmm4 +; AVX512F-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512F-NEXT: vucomisd %xmm4, %xmm5 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0] +; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512F-NEXT: vextractf32x4 $1, %zmm6, %xmm1 +; AVX512F-NEXT: vextractf32x4 $1, %zmm2, %xmm4 +; AVX512F-NEXT: vucomisd %xmm1, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vucomisd %xmm1, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512F-NEXT: vucomisd %xmm6, %xmm2 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512F-NEXT: vucomisd %xmm5, %xmm2 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm4[0],xmm2[0] +; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512F-NEXT: vextractf32x4 $3, %zmm7, %xmm1 +; AVX512F-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512F-NEXT: vucomisd %xmm1, %xmm2 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512F-NEXT: vucomisd %xmm1, %xmm2 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0] +; AVX512F-NEXT: vextractf32x4 $2, %zmm7, %xmm2 +; AVX512F-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512F-NEXT: vucomisd %xmm2, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512F-NEXT: vucomisd %xmm2, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1 +; AVX512F-NEXT: vextractf32x4 $1, %zmm7, %xmm2 +; AVX512F-NEXT: vextractf32x4 $1, %zmm3, %xmm4 +; AVX512F-NEXT: vucomisd %xmm2, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; 
AVX512F-NEXT: vucomisd %xmm2, %xmm4 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512F-NEXT: vucomisd %xmm7, %xmm3 +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovaq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm5 = xmm7[1,0] +; AVX512F-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] +; AVX512F-NEXT: vucomisd %xmm5, %xmm3 +; AVX512F-NEXT: cmovaq %rcx, %rax +; AVX512F-NEXT: vmovq %rax, %xmm3 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0] +; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2 +; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1 +; AVX512F-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v32f64: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm4, %xmm8 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm0, %xmm9 +; AVX512DQ-NEXT: xorl %eax, %eax +; AVX512DQ-NEXT: vucomisd %xmm8, %xmm9 +; AVX512DQ-NEXT: movq $-1, %rcx +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm8 = xmm8[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0] +; AVX512DQ-NEXT: vucomisd %xmm8, %xmm9 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm8 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm10[0],xmm8[0] +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm4, %xmm9 +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm0, %xmm10 +; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm11 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0] +; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm9 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm4, %xmm9 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm0, %xmm10 +; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm11 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0] +; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm9 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm8 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm5, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm1, %xmm0 +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: 
movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm9 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm0[0] +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm5, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm1, %xmm0 +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm5, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm1, %xmm0 +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0] +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm1 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm1 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm0, %zmm8, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm8 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm6, %xmm1 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm2, %xmm4 +; AVX512DQ-NEXT: vucomisd %xmm1, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vucomisd %xmm1, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm6, %xmm4 +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm2, %xmm5 +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512DQ-NEXT: vucomisd %xmm4, %xmm5 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm6, %xmm1 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm2, %xmm4 +; AVX512DQ-NEXT: 
vucomisd %xmm1, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vucomisd %xmm1, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512DQ-NEXT: vucomisd %xmm6, %xmm2 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm2 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm2 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm4[0],xmm2[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm7, %xmm1 +; AVX512DQ-NEXT: vextractf64x2 $3, %zmm3, %xmm2 +; AVX512DQ-NEXT: vucomisd %xmm1, %xmm2 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512DQ-NEXT: vucomisd %xmm1, %xmm2 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0] +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm7, %xmm2 +; AVX512DQ-NEXT: vextractf64x2 $2, %zmm3, %xmm4 +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm2 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm7, %xmm2 +; AVX512DQ-NEXT: vextractf64x2 $1, %zmm3, %xmm4 +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm2 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512DQ-NEXT: vucomisd %xmm7, %xmm3 +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovaq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm5 = xmm7[1,0] +; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] +; AVX512DQ-NEXT: vucomisd %xmm5, %xmm3 +; AVX512DQ-NEXT: cmovaq %rcx, %rax +; AVX512DQ-NEXT: vmovq %rax, %xmm3 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1 +; AVX512DQ-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 +; AVX512DQ-NEXT: retq +; +; AVX512BW-LABEL: test_cmp_v32f64: +; 
AVX512BW: # BB#0: +; AVX512BW-NEXT: vextractf32x4 $3, %zmm4, %xmm8 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm0, %xmm9 +; AVX512BW-NEXT: xorl %eax, %eax +; AVX512BW-NEXT: vucomisd %xmm8, %xmm9 +; AVX512BW-NEXT: movq $-1, %rcx +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm10 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm8 = xmm8[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0] +; AVX512BW-NEXT: vucomisd %xmm8, %xmm9 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm8 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm10[0],xmm8[0] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm4, %xmm9 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm0, %xmm10 +; AVX512BW-NEXT: vucomisd %xmm9, %xmm10 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm11 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0] +; AVX512BW-NEXT: vucomisd %xmm9, %xmm10 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm9 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm4, %xmm9 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm0, %xmm10 +; AVX512BW-NEXT: vucomisd %xmm9, %xmm10 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm11 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm9 = xmm9[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm10 = xmm10[1,0] +; AVX512BW-NEXT: vucomisd %xmm9, %xmm10 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm9 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm10 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovqd %zmm0, %ymm8 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm5, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm1, %xmm0 +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm9 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm0[0] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm5, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm1, %xmm0 +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm10 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0] +; 
AVX512BW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm5, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm1, %xmm0 +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm10 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm0 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0] +; AVX512BW-NEXT: vucomisd %xmm5, %xmm1 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512BW-NEXT: vucomisd %xmm5, %xmm1 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm1 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0 +; AVX512BW-NEXT: vpmovdw %zmm0, %ymm8 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm6, %xmm1 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm2, %xmm4 +; AVX512BW-NEXT: vucomisd %xmm1, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vucomisd %xmm1, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm1 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm6, %xmm4 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm2, %xmm5 +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm0 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm5[1,0] +; AVX512BW-NEXT: vucomisd %xmm4, %xmm5 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm6, %xmm1 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm2, %xmm4 +; AVX512BW-NEXT: vucomisd %xmm1, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vucomisd %xmm1, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm1 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm5[0],xmm1[0] +; AVX512BW-NEXT: vucomisd %xmm6, %xmm2 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm6[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512BW-NEXT: vucomisd %xmm5, %xmm2 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm2 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm4[0],xmm2[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1 +; 
AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 +; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm7, %xmm1 +; AVX512BW-NEXT: vextractf32x4 $3, %zmm3, %xmm2 +; AVX512BW-NEXT: vucomisd %xmm1, %xmm2 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512BW-NEXT: vucomisd %xmm1, %xmm2 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm1 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm4[0],xmm1[0] +; AVX512BW-NEXT: vextractf32x4 $2, %zmm7, %xmm2 +; AVX512BW-NEXT: vextractf32x4 $2, %zmm3, %xmm4 +; AVX512BW-NEXT: vucomisd %xmm2, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vucomisd %xmm2, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm2 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm7, %xmm2 +; AVX512BW-NEXT: vextractf32x4 $1, %zmm3, %xmm4 +; AVX512BW-NEXT: vucomisd %xmm2, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm5 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm2[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm4 = xmm4[1,0] +; AVX512BW-NEXT: vucomisd %xmm2, %xmm4 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm2 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0] +; AVX512BW-NEXT: vucomisd %xmm7, %xmm3 +; AVX512BW-NEXT: movl $0, %edx +; AVX512BW-NEXT: cmovaq %rcx, %rdx +; AVX512BW-NEXT: vmovq %rdx, %xmm4 +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm5 = xmm7[1,0] +; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm3 = xmm3[1,0] +; AVX512BW-NEXT: vucomisd %xmm5, %xmm3 +; AVX512BW-NEXT: cmovaq %rcx, %rax +; AVX512BW-NEXT: vmovq %rax, %xmm3 +; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0] +; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1 +; AVX512BW-NEXT: vpmovqd %zmm1, %ymm1 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0 +; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0 +; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0 +; AVX512BW-NEXT: retq %1 = fcmp ogt <32 x double> %a0, %a1 ret <32 x i1> %1 } @@ -7639,263 +10226,780 @@ define <32 x i1> @test_cmp_v32i64(<32 x i64> %a0, <32 x i64> %a1) nounwind { ; AVX2-NEXT: popq %rbp ; AVX2-NEXT: retq ; -; AVX512-LABEL: test_cmp_v32i64: -; AVX512: # BB#0: -; AVX512-NEXT: vextracti32x4 $3, %zmm4, %xmm8 -; AVX512-NEXT: vpextrq $1, %xmm8, %rcx -; AVX512-NEXT: vextracti32x4 $3, %zmm0, %xmm9 -; AVX512-NEXT: vpextrq $1, %xmm9, %rdx -; AVX512-NEXT: xorl %eax, %eax -; AVX512-NEXT: cmpq %rcx, %rdx -; AVX512-NEXT: movq $-1, %rcx -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm10 -; AVX512-NEXT: vmovq %xmm8, %rdx -; AVX512-NEXT: vmovq %xmm9, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm8 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm8[0],xmm10[0] -; AVX512-NEXT: vextracti32x4 $2, %zmm4, %xmm9 -; AVX512-NEXT: vpextrq 
$1, %xmm9, %rdx -; AVX512-NEXT: vextracti32x4 $2, %zmm0, %xmm10 -; AVX512-NEXT: vpextrq $1, %xmm10, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm11 -; AVX512-NEXT: vmovq %xmm9, %rdx -; AVX512-NEXT: vmovq %xmm10, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm9 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0] -; AVX512-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8 -; AVX512-NEXT: vextracti32x4 $1, %zmm4, %xmm9 -; AVX512-NEXT: vpextrq $1, %xmm9, %rdx -; AVX512-NEXT: vextracti32x4 $1, %zmm0, %xmm10 -; AVX512-NEXT: vpextrq $1, %xmm10, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm11 -; AVX512-NEXT: vmovq %xmm9, %rdx -; AVX512-NEXT: vmovq %xmm10, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm9 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0] -; AVX512-NEXT: vpextrq $1, %xmm4, %rdx -; AVX512-NEXT: vpextrq $1, %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm10 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] -; AVX512-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 -; AVX512-NEXT: vpmovqd %zmm0, %ymm8 -; AVX512-NEXT: vextracti32x4 $3, %zmm5, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rdx -; AVX512-NEXT: vextracti32x4 $3, %zmm1, %xmm0 -; AVX512-NEXT: vpextrq $1, %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm9 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm0[0],xmm9[0] -; AVX512-NEXT: vextracti32x4 $2, %zmm5, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rdx -; AVX512-NEXT: vextracti32x4 $2, %zmm1, %xmm0 -; AVX512-NEXT: vpextrq $1, %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm10 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm0, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] -; AVX512-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9 -; AVX512-NEXT: vextracti32x4 $1, %zmm5, %xmm0 -; AVX512-NEXT: vpextrq $1, %xmm0, %rdx -; AVX512-NEXT: vextracti32x4 $1, %zmm1, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm10 -; AVX512-NEXT: vmovq %xmm0, %rdx -; AVX512-NEXT: vmovq %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] -; AVX512-NEXT: vpextrq $1, %xmm5, %rdx -; AVX512-NEXT: vpextrq $1, %xmm1, %rsi -; 
AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vmovq %xmm5, %rdx -; AVX512-NEXT: vmovq %xmm1, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm1 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0] -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0 -; AVX512-NEXT: vpmovqd %zmm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm8 -; AVX512-NEXT: vextracti32x4 $3, %zmm6, %xmm1 -; AVX512-NEXT: vpextrq $1, %xmm1, %rdx -; AVX512-NEXT: vextracti32x4 $3, %zmm2, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vmovq %xmm1, %rdx -; AVX512-NEXT: vmovq %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm1 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] -; AVX512-NEXT: vextracti32x4 $2, %zmm6, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rdx -; AVX512-NEXT: vextracti32x4 $2, %zmm2, %xmm5 -; AVX512-NEXT: vpextrq $1, %xmm5, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vmovq %xmm4, %rdx -; AVX512-NEXT: vmovq %xmm5, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0] -; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1 -; AVX512-NEXT: vextracti32x4 $1, %zmm6, %xmm0 -; AVX512-NEXT: vpextrq $1, %xmm0, %rdx -; AVX512-NEXT: vextracti32x4 $1, %zmm2, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vmovq %xmm0, %rdx -; AVX512-NEXT: vmovq %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0] -; AVX512-NEXT: vpextrq $1, %xmm6, %rdx -; AVX512-NEXT: vpextrq $1, %xmm2, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vmovq %xmm6, %rdx -; AVX512-NEXT: vmovq %xmm2, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm2 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0] -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512-NEXT: vpmovqd %zmm0, %ymm1 -; AVX512-NEXT: vextracti32x4 $3, %zmm7, %xmm0 -; AVX512-NEXT: vpextrq $1, %xmm0, %rdx -; AVX512-NEXT: vextracti32x4 $3, %zmm3, %xmm2 -; AVX512-NEXT: vpextrq $1, %xmm2, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vmovq %xmm0, %rdx -; AVX512-NEXT: vmovq %xmm2, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0] -; AVX512-NEXT: 
vextracti32x4 $2, %zmm7, %xmm2 -; AVX512-NEXT: vpextrq $1, %xmm2, %rdx -; AVX512-NEXT: vextracti32x4 $2, %zmm3, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vmovq %xmm2, %rdx -; AVX512-NEXT: vmovq %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm2 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0] -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2 -; AVX512-NEXT: vextracti32x4 $1, %zmm7, %xmm0 -; AVX512-NEXT: vpextrq $1, %xmm0, %rdx -; AVX512-NEXT: vextracti32x4 $1, %zmm3, %xmm4 -; AVX512-NEXT: vpextrq $1, %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm5 -; AVX512-NEXT: vmovq %xmm0, %rdx -; AVX512-NEXT: vmovq %xmm4, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm0 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0] -; AVX512-NEXT: vpextrq $1, %xmm7, %rdx -; AVX512-NEXT: vpextrq $1, %xmm3, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: movl $0, %edx -; AVX512-NEXT: cmovgq %rcx, %rdx -; AVX512-NEXT: vmovq %rdx, %xmm4 -; AVX512-NEXT: vmovq %xmm7, %rdx -; AVX512-NEXT: vmovq %xmm3, %rsi -; AVX512-NEXT: cmpq %rdx, %rsi -; AVX512-NEXT: cmovgq %rcx, %rax -; AVX512-NEXT: vmovq %rax, %xmm3 -; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm4[0] -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 -; AVX512-NEXT: vpmovqd %zmm0, %ymm0 -; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 -; AVX512-NEXT: vpmovdb %zmm0, %xmm0 -; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 -; AVX512-NEXT: retq +; AVX512F-LABEL: test_cmp_v32i64: +; AVX512F: # BB#0: +; AVX512F-NEXT: vextracti32x4 $3, %zmm4, %xmm8 +; AVX512F-NEXT: vpextrq $1, %xmm8, %rcx +; AVX512F-NEXT: vextracti32x4 $3, %zmm0, %xmm9 +; AVX512F-NEXT: vpextrq $1, %xmm9, %rdx +; AVX512F-NEXT: xorl %eax, %eax +; AVX512F-NEXT: cmpq %rcx, %rdx +; AVX512F-NEXT: movq $-1, %rcx +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm10 +; AVX512F-NEXT: vmovq %xmm8, %rdx +; AVX512F-NEXT: vmovq %xmm9, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm8 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm8[0],xmm10[0] +; AVX512F-NEXT: vextracti32x4 $2, %zmm4, %xmm9 +; AVX512F-NEXT: vpextrq $1, %xmm9, %rdx +; AVX512F-NEXT: vextracti32x4 $2, %zmm0, %xmm10 +; AVX512F-NEXT: vpextrq $1, %xmm10, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm11 +; AVX512F-NEXT: vmovq %xmm9, %rdx +; AVX512F-NEXT: vmovq %xmm10, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm9 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0] +; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8 +; AVX512F-NEXT: vextracti32x4 $1, %zmm4, %xmm9 +; AVX512F-NEXT: vpextrq $1, %xmm9, %rdx +; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm10 +; AVX512F-NEXT: vpextrq $1, %xmm10, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm11 +; 
AVX512F-NEXT: vmovq %xmm9, %rdx +; AVX512F-NEXT: vmovq %xmm10, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm9 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0] +; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512F-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm10 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] +; AVX512F-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm8 +; AVX512F-NEXT: vextracti32x4 $3, %zmm5, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512F-NEXT: vextracti32x4 $3, %zmm1, %xmm0 +; AVX512F-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm9 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm0[0],xmm9[0] +; AVX512F-NEXT: vextracti32x4 $2, %zmm5, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512F-NEXT: vextracti32x4 $2, %zmm1, %xmm0 +; AVX512F-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm10 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm0, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] +; AVX512F-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9 +; AVX512F-NEXT: vextracti32x4 $1, %zmm5, %xmm0 +; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx +; AVX512F-NEXT: vextracti32x4 $1, %zmm1, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm10 +; AVX512F-NEXT: vmovq %xmm0, %rdx +; AVX512F-NEXT: vmovq %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] +; AVX512F-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512F-NEXT: vpextrq $1, %xmm1, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vmovq %xmm5, %rdx +; AVX512F-NEXT: vmovq %xmm1, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0] +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm8 +; AVX512F-NEXT: vextracti32x4 $3, %zmm6, %xmm1 +; AVX512F-NEXT: vpextrq $1, %xmm1, %rdx +; AVX512F-NEXT: vextracti32x4 $3, %zmm2, %xmm4 +; AVX512F-NEXT: 
vpextrq $1, %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vmovq %xmm1, %rdx +; AVX512F-NEXT: vmovq %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm1 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] +; AVX512F-NEXT: vextracti32x4 $2, %zmm6, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512F-NEXT: vextracti32x4 $2, %zmm2, %xmm5 +; AVX512F-NEXT: vpextrq $1, %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vmovq %xmm4, %rdx +; AVX512F-NEXT: vmovq %xmm5, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0] +; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1 +; AVX512F-NEXT: vextracti32x4 $1, %zmm6, %xmm0 +; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx +; AVX512F-NEXT: vextracti32x4 $1, %zmm2, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vmovq %xmm0, %rdx +; AVX512F-NEXT: vmovq %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0] +; AVX512F-NEXT: vpextrq $1, %xmm6, %rdx +; AVX512F-NEXT: vpextrq $1, %xmm2, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vmovq %xmm6, %rdx +; AVX512F-NEXT: vmovq %xmm2, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0] +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm1 +; AVX512F-NEXT: vextracti32x4 $3, %zmm7, %xmm0 +; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx +; AVX512F-NEXT: vextracti32x4 $3, %zmm3, %xmm2 +; AVX512F-NEXT: vpextrq $1, %xmm2, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vmovq %xmm0, %rdx +; AVX512F-NEXT: vmovq %xmm2, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0] +; AVX512F-NEXT: vextracti32x4 $2, %zmm7, %xmm2 +; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx +; AVX512F-NEXT: vextracti32x4 $2, %zmm3, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vmovq %xmm2, %rdx +; AVX512F-NEXT: vmovq %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm2 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0] +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2 +; AVX512F-NEXT: vextracti32x4 $1, %zmm7, %xmm0 +; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx +; 
AVX512F-NEXT: vextracti32x4 $1, %zmm3, %xmm4 +; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm5 +; AVX512F-NEXT: vmovq %xmm0, %rdx +; AVX512F-NEXT: vmovq %xmm4, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm0 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0] +; AVX512F-NEXT: vpextrq $1, %xmm7, %rdx +; AVX512F-NEXT: vpextrq $1, %xmm3, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: movl $0, %edx +; AVX512F-NEXT: cmovgq %rcx, %rdx +; AVX512F-NEXT: vmovq %rdx, %xmm4 +; AVX512F-NEXT: vmovq %xmm7, %rdx +; AVX512F-NEXT: vmovq %xmm3, %rsi +; AVX512F-NEXT: cmpq %rdx, %rsi +; AVX512F-NEXT: cmovgq %rcx, %rax +; AVX512F-NEXT: vmovq %rax, %xmm3 +; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm4[0] +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 +; AVX512F-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 +; AVX512F-NEXT: vpmovdb %zmm0, %xmm0 +; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0 +; AVX512F-NEXT: retq +; +; AVX512DQ-LABEL: test_cmp_v32i64: +; AVX512DQ: # BB#0: +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm4, %xmm8 +; AVX512DQ-NEXT: vpextrq $1, %xmm8, %rcx +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm0, %xmm9 +; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx +; AVX512DQ-NEXT: xorl %eax, %eax +; AVX512DQ-NEXT: cmpq %rcx, %rdx +; AVX512DQ-NEXT: movq $-1, %rcx +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vmovq %xmm8, %rdx +; AVX512DQ-NEXT: vmovq %xmm9, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm8 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm8[0],xmm10[0] +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm4, %xmm9 +; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm0, %xmm10 +; AVX512DQ-NEXT: vpextrq $1, %xmm10, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm11 +; AVX512DQ-NEXT: vmovq %xmm9, %rdx +; AVX512DQ-NEXT: vmovq %xmm10, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm9 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8 +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm4, %xmm9 +; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm0, %xmm10 +; AVX512DQ-NEXT: vpextrq $1, %xmm10, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm11 +; AVX512DQ-NEXT: vmovq %xmm9, %rdx +; AVX512DQ-NEXT: vmovq %xmm10, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm9 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0] +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vmovq %xmm4, %rdx +; AVX512DQ-NEXT: vmovq %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; 
AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm8 +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm5, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm1, %xmm0 +; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm9 +; AVX512DQ-NEXT: vmovq %xmm4, %rdx +; AVX512DQ-NEXT: vmovq %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm0[0],xmm9[0] +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm5, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rdx +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm1, %xmm0 +; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vmovq %xmm4, %rdx +; AVX512DQ-NEXT: vmovq %xmm0, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9 +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm5, %xmm0 +; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx +; AVX512DQ-NEXT: vextracti64x2 $1, %zmm1, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm10 +; AVX512DQ-NEXT: vmovq %xmm0, %rdx +; AVX512DQ-NEXT: vmovq %xmm4, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm0 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0] +; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rdx +; AVX512DQ-NEXT: vpextrq $1, %xmm1, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm4 +; AVX512DQ-NEXT: vmovq %xmm5, %rdx +; AVX512DQ-NEXT: vmovq %xmm1, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0] +; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 +; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0 +; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0 +; AVX512DQ-NEXT: vinserti32x8 $1, %ymm0, %zmm8, %zmm0 +; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm8 +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm6, %xmm1 +; AVX512DQ-NEXT: vpextrq $1, %xmm1, %rdx +; AVX512DQ-NEXT: vextracti64x2 $3, %zmm2, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm5 +; AVX512DQ-NEXT: vmovq %xmm1, %rdx +; AVX512DQ-NEXT: vmovq %xmm4, %rsi +; AVX512DQ-NEXT: cmpq %rdx, %rsi +; AVX512DQ-NEXT: movl $0, %edx +; AVX512DQ-NEXT: cmovgq %rcx, %rdx +; AVX512DQ-NEXT: vmovq %rdx, %xmm1 +; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] +; AVX512DQ-NEXT: vextracti64x2 $2, %zmm6, %xmm4 +; AVX512DQ-NEXT: vpextrq $1, 
%xmm4, %rdx
+; AVX512DQ-NEXT: vextracti64x2 $2, %zmm2, %xmm5
+; AVX512DQ-NEXT: vpextrq $1, %xmm5, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm0
+; AVX512DQ-NEXT: vmovq %xmm4, %rdx
+; AVX512DQ-NEXT: vmovq %xmm5, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm4
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0]
+; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
+; AVX512DQ-NEXT: vextracti64x2 $1, %zmm6, %xmm0
+; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512DQ-NEXT: vextracti64x2 $1, %zmm2, %xmm4
+; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm5
+; AVX512DQ-NEXT: vmovq %xmm0, %rdx
+; AVX512DQ-NEXT: vmovq %xmm4, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm0
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
+; AVX512DQ-NEXT: vpextrq $1, %xmm6, %rdx
+; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm4
+; AVX512DQ-NEXT: vmovq %xmm6, %rdx
+; AVX512DQ-NEXT: vmovq %xmm2, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm2
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0]
+; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm1
+; AVX512DQ-NEXT: vextracti64x2 $3, %zmm7, %xmm0
+; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512DQ-NEXT: vextracti64x2 $3, %zmm3, %xmm2
+; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm4
+; AVX512DQ-NEXT: vmovq %xmm0, %rdx
+; AVX512DQ-NEXT: vmovq %xmm2, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm0
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
+; AVX512DQ-NEXT: vextracti64x2 $2, %zmm7, %xmm2
+; AVX512DQ-NEXT: vpextrq $1, %xmm2, %rdx
+; AVX512DQ-NEXT: vextracti64x2 $2, %zmm3, %xmm4
+; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm5
+; AVX512DQ-NEXT: vmovq %xmm2, %rdx
+; AVX512DQ-NEXT: vmovq %xmm4, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm2
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
+; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2
+; AVX512DQ-NEXT: vextracti64x2 $1, %zmm7, %xmm0
+; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512DQ-NEXT: vextracti64x2 $1, %zmm3, %xmm4
+; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm5
+; AVX512DQ-NEXT: vmovq %xmm0, %rdx
+; AVX512DQ-NEXT: vmovq %xmm4, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm0
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
+; AVX512DQ-NEXT: vpextrq $1, %xmm7, %rdx
+; AVX512DQ-NEXT: vpextrq $1, %xmm3, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: movl $0, %edx
+; AVX512DQ-NEXT: cmovgq %rcx, %rdx
+; AVX512DQ-NEXT: vmovq %rdx, %xmm4
+; AVX512DQ-NEXT: vmovq %xmm7, %rdx
+; AVX512DQ-NEXT: vmovq %xmm3, %rsi
+; AVX512DQ-NEXT: cmpq %rdx, %rsi
+; AVX512DQ-NEXT: cmovgq %rcx, %rax
+; AVX512DQ-NEXT: vmovq %rax, %xmm3
+; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm4[0]
+; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
+; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; AVX512DQ-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512DQ-NEXT: vinserti32x8 $1, %ymm0, %zmm1, %zmm0
+; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0
+; AVX512DQ-NEXT: retq
+;
+; AVX512BW-LABEL: test_cmp_v32i64:
+; AVX512BW: # BB#0:
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm4, %xmm8
+; AVX512BW-NEXT: vpextrq $1, %xmm8, %rcx
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm9
+; AVX512BW-NEXT: vpextrq $1, %xmm9, %rdx
+; AVX512BW-NEXT: xorl %eax, %eax
+; AVX512BW-NEXT: cmpq %rcx, %rdx
+; AVX512BW-NEXT: movq $-1, %rcx
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm10
+; AVX512BW-NEXT: vmovq %xmm8, %rdx
+; AVX512BW-NEXT: vmovq %xmm9, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm8
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm8[0],xmm10[0]
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm4, %xmm9
+; AVX512BW-NEXT: vpextrq $1, %xmm9, %rdx
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm10
+; AVX512BW-NEXT: vpextrq $1, %xmm10, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm11
+; AVX512BW-NEXT: vmovq %xmm9, %rdx
+; AVX512BW-NEXT: vmovq %xmm10, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm9
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm4, %xmm9
+; AVX512BW-NEXT: vpextrq $1, %xmm9, %rdx
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm10
+; AVX512BW-NEXT: vpextrq $1, %xmm10, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm11
+; AVX512BW-NEXT: vmovq %xmm9, %rdx
+; AVX512BW-NEXT: vmovq %xmm10, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm9
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0]
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm10
+; AVX512BW-NEXT: vmovq %xmm4, %rdx
+; AVX512BW-NEXT: vmovq %xmm0, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
+; AVX512BW-NEXT: vpmovqd %zmm0, %ymm8
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm5, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm1, %xmm0
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm9
+; AVX512BW-NEXT: vmovq %xmm4, %rdx
+; AVX512BW-NEXT: vmovq %xmm0, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm0[0],xmm9[0]
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm5, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, %xmm0
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm10
+; AVX512BW-NEXT: vmovq %xmm4, %rdx
+; AVX512BW-NEXT: vmovq %xmm0, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm5, %xmm0
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm10
+; AVX512BW-NEXT: vmovq %xmm0, %rdx
+; AVX512BW-NEXT: vmovq %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
+; AVX512BW-NEXT: vpextrq $1, %xmm5, %rdx
+; AVX512BW-NEXT: vpextrq $1, %xmm1, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm4
+; AVX512BW-NEXT: vmovq %xmm5, %rdx
+; AVX512BW-NEXT: vmovq %xmm1, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm1
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm0
+; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
+; AVX512BW-NEXT: vpmovdw %zmm0, %ymm8
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm6, %xmm1
+; AVX512BW-NEXT: vpextrq $1, %xmm1, %rdx
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm2, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm5
+; AVX512BW-NEXT: vmovq %xmm1, %rdx
+; AVX512BW-NEXT: vmovq %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm1
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm6, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rdx
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm2, %xmm5
+; AVX512BW-NEXT: vpextrq $1, %xmm5, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vmovq %xmm4, %rdx
+; AVX512BW-NEXT: vmovq %xmm5, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm4
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm6, %xmm0
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm5
+; AVX512BW-NEXT: vmovq %xmm0, %rdx
+; AVX512BW-NEXT: vmovq %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
+; AVX512BW-NEXT: vpextrq $1, %xmm6, %rdx
+; AVX512BW-NEXT: vpextrq $1, %xmm2, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm4
+; AVX512BW-NEXT: vmovq %xmm6, %rdx
+; AVX512BW-NEXT: vmovq %xmm2, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm2
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512BW-NEXT: vpmovqd %zmm0, %ymm1
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm7, %xmm0
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512BW-NEXT: vextracti32x4 $3, %zmm3, %xmm2
+; AVX512BW-NEXT: vpextrq $1, %xmm2, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm4
+; AVX512BW-NEXT: vmovq %xmm0, %rdx
+; AVX512BW-NEXT: vmovq %xmm2, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm7, %xmm2
+; AVX512BW-NEXT: vpextrq $1, %xmm2, %rdx
+; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm5
+; AVX512BW-NEXT: vmovq %xmm2, %rdx
+; AVX512BW-NEXT: vmovq %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm2
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm7, %xmm0
+; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
+; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm5
+; AVX512BW-NEXT: vmovq %xmm0, %rdx
+; AVX512BW-NEXT: vmovq %xmm4, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm0
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
+; AVX512BW-NEXT: vpextrq $1, %xmm7, %rdx
+; AVX512BW-NEXT: vpextrq $1, %xmm3, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: movl $0, %edx
+; AVX512BW-NEXT: cmovgq %rcx, %rdx
+; AVX512BW-NEXT: vmovq %rdx, %xmm4
+; AVX512BW-NEXT: vmovq %xmm7, %rdx
+; AVX512BW-NEXT: vmovq %xmm3, %rsi
+; AVX512BW-NEXT: cmpq %rdx, %rsi
+; AVX512BW-NEXT: cmovgq %rcx, %rax
+; AVX512BW-NEXT: vmovq %rax, %xmm3
+; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm4[0]
+; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; AVX512BW-NEXT: vpmovqd %zmm0, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0
+; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
+; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
+; AVX512BW-NEXT: retq
 %1 = icmp sgt <32 x i64> %a0, %a1
 ret <32 x i1> %1
 }