def : Pat<(v16i8 (extract_subvector (v64i8 VR512:$src), (iPTR 0))),
(v16i8 (EXTRACT_SUBREG (v64i8 VR512:$src), sub_xmm))>;
+// A 128-bit extract from bits [255:128] of a 512-bit vector should use a
+// smaller extract to enable EVEX->VEX compression.
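+// For example, (extract_subvector (v8i64 VR512:$src), (iPTR 2)) reads bits
+// [255:128]; lowering it as a ymm subregister copy plus a VEX 128-bit
+// extract at index 1 turns
+//   vextracti32x4 $1, %zmm0, %xmm0
+// into
+//   vextracti128 $1, %ymm0, %xmm0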
+let Predicates = [NoVLX] in {
+def : Pat<(v2i64 (extract_subvector (v8i64 VR512:$src), (iPTR 2))),
+ (v2i64 (VEXTRACTI128rr
+ (v4i64 (EXTRACT_SUBREG (v8i64 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v2f64 (extract_subvector (v8f64 VR512:$src), (iPTR 2))),
+ (v2f64 (VEXTRACTF128rr
+ (v4f64 (EXTRACT_SUBREG (v8f64 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v4i32 (extract_subvector (v16i32 VR512:$src), (iPTR 4))),
+ (v4i32 (VEXTRACTI128rr
+ (v8i32 (EXTRACT_SUBREG (v16i32 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v4f32 (extract_subvector (v16f32 VR512:$src), (iPTR 4))),
+ (v4f32 (VEXTRACTF128rr
+ (v8f32 (EXTRACT_SUBREG (v16f32 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v8i16 (extract_subvector (v32i16 VR512:$src), (iPTR 8))),
+ (v8i16 (VEXTRACTI128rr
+ (v16i16 (EXTRACT_SUBREG (v32i16 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v16i8 (extract_subvector (v64i8 VR512:$src), (iPTR 16))),
+ (v16i8 (VEXTRACTI128rr
+ (v32i8 (EXTRACT_SUBREG (v64i8 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+}
+
+// A 128-bit extract from bits [255:128] of a 512-bit vector should use a
+// smaller extract to enable EVEX->VEX compression.
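+// With VLX the same extracts select the 256-bit EVEX forms
+// (VEXTRACTI32x4Z256rr/VEXTRACTF32x4Z256rr) on the ymm subregister; since
+// these no longer read a zmm source, the EVEX->VEX pass should still be
+// able to compress them to vextracti128/vextractf128 when no extended
+// registers or masking are involved.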
+let Predicates = [HasVLX] in {
+def : Pat<(v2i64 (extract_subvector (v8i64 VR512:$src), (iPTR 2))),
+ (v2i64 (VEXTRACTI32x4Z256rr
+ (v4i64 (EXTRACT_SUBREG (v8i64 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v2f64 (extract_subvector (v8f64 VR512:$src), (iPTR 2))),
+ (v2f64 (VEXTRACTF32x4Z256rr
+ (v4f64 (EXTRACT_SUBREG (v8f64 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v4i32 (extract_subvector (v16i32 VR512:$src), (iPTR 4))),
+ (v4i32 (VEXTRACTI32x4Z256rr
+ (v8i32 (EXTRACT_SUBREG (v16i32 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v4f32 (extract_subvector (v16f32 VR512:$src), (iPTR 4))),
+ (v4f32 (VEXTRACTF32x4Z256rr
+ (v8f32 (EXTRACT_SUBREG (v16f32 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v8i16 (extract_subvector (v32i16 VR512:$src), (iPTR 8))),
+ (v8i16 (VEXTRACTI32x4Z256rr
+ (v16i16 (EXTRACT_SUBREG (v32i16 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+def : Pat<(v16i8 (extract_subvector (v64i8 VR512:$src), (iPTR 16))),
+ (v16i8 (VEXTRACTI32x4Z256rr
+ (v32i8 (EXTRACT_SUBREG (v64i8 VR512:$src), sub_ymm)),
+ (iPTR 1)))>;
+}
+
// A 256-bit subvector extract from the first 256-bit vector position
// is a subregister copy that needs no instruction.
def : Pat<(v4i64 (extract_subvector (v8i64 VR512:$src), (iPTR 0))),
; NODQ-NEXT: vcvtsi2sdq %rax, %xmm4, %xmm2
; NODQ-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; NODQ-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
-; NODQ-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; NODQ-NEXT: vextracti128 $1, %ymm0, %xmm2
; NODQ-NEXT: vpextrq $1, %xmm2, %rax
; NODQ-NEXT: vcvtsi2sdq %rax, %xmm4, %xmm3
; NODQ-NEXT: vmovq %xmm2, %rax
; NODQ-NEXT: vcvtusi2sdq %rax, %xmm4, %xmm2
; NODQ-NEXT: vunpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; NODQ-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
-; NODQ-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; NODQ-NEXT: vextracti128 $1, %ymm0, %xmm2
; NODQ-NEXT: vpextrq $1, %xmm2, %rax
; NODQ-NEXT: vcvtusi2sdq %rax, %xmm4, %xmm3
; NODQ-NEXT: vmovq %xmm2, %rax
define <16 x float> @test3(<16 x float> %x) nounwind {
; CHECK-LABEL: test3:
; CHECK: ## BB#0:
-; CHECK-NEXT: vextractf32x4 $1, %zmm0, %xmm1
+; CHECK-NEXT: vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0],xmm1[0],xmm0[2,3]
; CHECK-NEXT: vinsertf32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT: retq
; KNL-LABEL: extract_v8i64:
; KNL: ## BB#0:
; KNL-NEXT: vpextrq $1, %xmm0, %rax
-; KNL-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
; KNL-NEXT: vpextrq $1, %xmm0, (%rdi)
; KNL-NEXT: retq
;
; SKX-LABEL: extract_v8i64:
; SKX: ## BB#0:
; SKX-NEXT: vpextrq $1, %xmm0, %rax
-; SKX-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0
; SKX-NEXT: vpextrq $1, %xmm0, (%rdi)
; SKX-NEXT: vzeroupper
; SKX-NEXT: retq
; KNL-LABEL: extract_v16i32:
; KNL: ## BB#0:
; KNL-NEXT: vpextrd $1, %xmm0, %eax
-; KNL-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
; KNL-NEXT: vpextrd $1, %xmm0, (%rdi)
; KNL-NEXT: retq
;
; SKX-LABEL: extract_v16i32:
; SKX: ## BB#0:
; SKX-NEXT: vpextrd $1, %xmm0, %eax
-; SKX-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0
; SKX-NEXT: vpextrd $1, %xmm0, (%rdi)
; SKX-NEXT: vzeroupper
; SKX-NEXT: retq
; SKX-LABEL: extract_v32i16:
; SKX: ## BB#0:
; SKX-NEXT: vpextrw $1, %xmm0, %eax
-; SKX-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0
; SKX-NEXT: vpextrw $1, %xmm0, (%rdi)
; SKX-NEXT: ## kill: %AX<def> %AX<kill> %EAX<kill>
; SKX-NEXT: vzeroupper
; SKX-LABEL: extract_v64i8:
; SKX: ## BB#0:
; SKX-NEXT: vpextrb $1, %xmm0, %eax
-; SKX-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0
; SKX-NEXT: vpextrb $1, %xmm0, (%rdi)
; SKX-NEXT: ## kill: %AL<def> %AL<kill> %EAX<kill>
; SKX-NEXT: vzeroupper
; CHECK: ## BB#0:
; CHECK-NEXT: vpinsrq $1, (%rsi), %xmm0, %xmm1
; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1
-; CHECK-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT: vpinsrq $1, %rdi, %xmm0, %xmm0
; CHECK-NEXT: vinserti32x4 $1, %xmm0, %zmm1, %zmm0
; CHECK-NEXT: retq
; CHECK: ## BB#0:
; CHECK-NEXT: vpinsrd $1, (%rsi), %xmm0, %xmm1
; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1
-; CHECK-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT: vpinsrd $1, %edi, %xmm0, %xmm0
; CHECK-NEXT: vinserti32x4 $1, %xmm0, %zmm1, %zmm0
; CHECK-NEXT: retq
; SKX: ## BB#0:
; SKX-NEXT: vpinsrw $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1
-; SKX-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0
; SKX-NEXT: vpinsrw $1, %edi, %xmm0, %xmm0
; SKX-NEXT: vinserti32x4 $1, %xmm0, %zmm1, %zmm0
; SKX-NEXT: retq
; CHECK-NEXT: kmovw %k1, %ecx
; CHECK-NEXT: vmovd %ecx, %xmm2
; CHECK-NEXT: vpinsrb $8, %eax, %xmm2, %xmm2
-; CHECK-NEXT: vextractf32x4 $1, %zmm0, %xmm0
+; CHECK-NEXT: vextractf128 $1, %ymm0, %xmm0
; CHECK-NEXT: vpsllq $63, %xmm2, %xmm2
; CHECK-NEXT: vpsraq $63, %zmm2, %zmm2
; CHECK-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm1
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm2
; NoVLX-NEXT: vmovq %xmm2, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm0, %xmm0
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm6
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm6
; NoVLX-NEXT: vmovq %xmm6, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm0, %xmm5
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm1
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm1
; NoVLX-NEXT: vmovq %xmm1, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm3, %xmm3
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm4
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm4
; NoVLX-NEXT: vmovq %xmm4, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm3, %xmm3
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm6, %xmm6
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm7
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm7
; NoVLX-NEXT: vmovq %xmm7, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm6, %xmm6
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm2, %xmm2
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm3
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm3
; NoVLX-NEXT: vmovq %xmm3, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm2, %xmm2
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm2
; NoVLX-NEXT: vmovq %xmm2, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm0, %xmm0
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm6
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm6
; NoVLX-NEXT: vmovq %xmm6, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm0, %xmm5
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm1
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm1
; NoVLX-NEXT: vmovq %xmm1, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm3, %xmm3
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm4
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm4
; NoVLX-NEXT: vmovq %xmm4, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm3, %xmm3
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm6, %xmm6
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm7
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm7
; NoVLX-NEXT: vmovq %xmm7, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm6, %xmm6
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm2, %xmm2
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm3
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm3
; NoVLX-NEXT: vmovq %xmm3, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm2, %xmm2
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm2
; NoVLX-NEXT: vmovq %xmm2, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm4, %xmm4
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm5
; NoVLX-NEXT: vmovq %xmm5, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm4, %xmm4
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm1
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm1
; NoVLX-NEXT: vmovq %xmm1, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm2, %xmm2
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm3
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm3
; NoVLX-NEXT: vmovq %xmm3, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm2, %xmm5
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm3, %xmm3
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm7
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm7
; NoVLX-NEXT: vmovq %xmm7, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm3, %xmm3
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm2, %xmm2
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm3
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm3
; NoVLX-NEXT: vmovq %xmm3, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm2, %xmm2
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm2
; NoVLX-NEXT: vmovq %xmm2, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm0, %xmm0
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm6
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm6
; NoVLX-NEXT: vmovq %xmm6, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm0, %xmm5
; NoVLX-NEXT: .cfi_def_cfa_register %rbp
; NoVLX-NEXT: andq $-32, %rsp
; NoVLX-NEXT: subq $64, %rsp
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm1
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm1
; NoVLX-NEXT: vmovq %xmm1, %rax
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: movq %rax, %rdx
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm3, %xmm3
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm4
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm4
; NoVLX-NEXT: vmovq %xmm4, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm3, %xmm9
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm6, %xmm6
-; NoVLX-NEXT: vextracti32x4 $1, %zmm1, %xmm7
+; NoVLX-NEXT: vextracti128 $1, %ymm1, %xmm7
; NoVLX-NEXT: vmovq %xmm7, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm6, %xmm6
; NoVLX-NEXT: movq %rax, %rcx
; NoVLX-NEXT: shrq $32, %rcx
; NoVLX-NEXT: vpinsrw $6, %ecx, %xmm2, %xmm2
-; NoVLX-NEXT: vextracti32x4 $1, %zmm0, %xmm3
+; NoVLX-NEXT: vextracti128 $1, %ymm0, %xmm3
; NoVLX-NEXT: vmovq %xmm3, %rcx
; NoVLX-NEXT: shrq $48, %rax
; NoVLX-NEXT: vpinsrw $7, %eax, %xmm2, %xmm2
; CHECK-NEXT: .Lcfi0:
; CHECK-NEXT: .cfi_def_cfa_offset 96
; CHECK-NEXT: vmovaps %xmm1, {{[0-9]+}}(%rsp) # 16-byte Spill
-; CHECK-NEXT: vextractf32x4 $1, %zmm3, %xmm1
-; CHECK-NEXT: vextractf32x4 $1, %zmm2, %xmm8
+; CHECK-NEXT: vextractf128 $1, %ymm3, %xmm1
+; CHECK-NEXT: vextractf128 $1, %ymm2, %xmm8
; CHECK-NEXT: vinsertps {{.*#+}} xmm9 = xmm8[0],xmm1[0],xmm8[2,3]
; CHECK-NEXT: vinsertps {{.*#+}} xmm0 = xmm9[0,1],xmm2[1],xmm9[3]
; CHECK-NEXT: vinsertps {{.*#+}} xmm15 = xmm0[0,1,2],xmm3[1]
; AVX512F-NEXT: vmovq %xmm0, %rax
; AVX512F-NEXT: vcvtsi2ssq %rax, %xmm4, %xmm3
; AVX512F-NEXT: vinsertps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[2,3]
-; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512F-NEXT: vmovq %xmm0, %rax
; AVX512F-NEXT: vcvtsi2ssq %rax, %xmm4, %xmm3
; AVX512F-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1],xmm3[0],xmm2[3]
; AVX512VL-NEXT: vmovq %xmm0, %rax
; AVX512VL-NEXT: vcvtsi2ssq %rax, %xmm4, %xmm3
; AVX512VL-NEXT: vinsertps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[2,3]
-; AVX512VL-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; AVX512VL-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512VL-NEXT: vmovq %xmm0, %rax
; AVX512VL-NEXT: vcvtsi2ssq %rax, %xmm4, %xmm3
; AVX512VL-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1],xmm3[0],xmm2[3]
; AVX512F-NEXT: vmovq %xmm0, %rax
; AVX512F-NEXT: vcvtusi2ssq %rax, %xmm4, %xmm3
; AVX512F-NEXT: vinsertps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[2,3]
-; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512F-NEXT: vmovq %xmm0, %rax
; AVX512F-NEXT: vcvtusi2ssq %rax, %xmm4, %xmm3
; AVX512F-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1],xmm3[0],xmm2[3]
; AVX512VL-NEXT: vmovq %xmm0, %rax
; AVX512VL-NEXT: vcvtusi2ssq %rax, %xmm4, %xmm3
; AVX512VL-NEXT: vinsertps {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[2,3]
-; AVX512VL-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; AVX512VL-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512VL-NEXT: vmovq %xmm0, %rax
; AVX512VL-NEXT: vcvtusi2ssq %rax, %xmm4, %xmm3
; AVX512VL-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1],xmm3[0],xmm2[3]
; AVX512-NEXT: vmovq %rdx, %xmm5
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm7[0],xmm5[0]
; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512-NEXT: vextractf32x4 $1, %zmm2, %xmm5
-; AVX512-NEXT: vextractf32x4 $1, %zmm0, %xmm6
+; AVX512-NEXT: vextractf128 $1, %ymm2, %xmm5
+; AVX512-NEXT: vextractf128 $1, %ymm0, %xmm6
; AVX512-NEXT: vucomisd %xmm5, %xmm6
; AVX512-NEXT: movl $0, %edx
; AVX512-NEXT: cmovaq %rcx, %rdx
; AVX512-NEXT: vmovq %rdx, %xmm4
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm6[0],xmm4[0]
; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512-NEXT: vextractf32x4 $1, %zmm3, %xmm4
-; AVX512-NEXT: vextractf32x4 $1, %zmm1, %xmm5
+; AVX512-NEXT: vextractf128 $1, %ymm3, %xmm4
+; AVX512-NEXT: vextractf128 $1, %ymm1, %xmm5
; AVX512-NEXT: vucomisd %xmm4, %xmm5
; AVX512-NEXT: movl $0, %edx
; AVX512-NEXT: cmovaq %rcx, %rdx
; AVX512F-NEXT: cmoval %ecx, %edx
; AVX512F-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4
; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8
-; AVX512F-NEXT: vextractf32x4 $1, %zmm2, %xmm5
+; AVX512F-NEXT: vextractf128 $1, %ymm2, %xmm5
; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
-; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm7
+; AVX512F-NEXT: vextractf128 $1, %ymm0, %xmm7
; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3]
; AVX512F-NEXT: vucomiss %xmm6, %xmm4
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: cmoval %ecx, %edx
; AVX512F-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0
; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
-; AVX512F-NEXT: vextractf32x4 $1, %zmm3, %xmm0
+; AVX512F-NEXT: vextractf128 $1, %ymm3, %xmm0
; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3]
-; AVX512F-NEXT: vextractf32x4 $1, %zmm1, %xmm5
+; AVX512F-NEXT: vextractf128 $1, %ymm1, %xmm5
; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
; AVX512F-NEXT: vucomiss %xmm4, %xmm6
; AVX512F-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmoval %ecx, %edx
; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4
; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm2, %xmm5
+; AVX512DQ-NEXT: vextractf128 $1, %ymm2, %xmm5
; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm0, %xmm7
+; AVX512DQ-NEXT: vextractf128 $1, %ymm0, %xmm7
; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3]
; AVX512DQ-NEXT: vucomiss %xmm6, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmoval %ecx, %edx
; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0
; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm3, %xmm0
+; AVX512DQ-NEXT: vextractf128 $1, %ymm3, %xmm0
; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3]
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm1, %xmm5
+; AVX512DQ-NEXT: vextractf128 $1, %ymm1, %xmm5
; AVX512DQ-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
; AVX512DQ-NEXT: vucomiss %xmm4, %xmm6
; AVX512DQ-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmoval %ecx, %edx
; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm4, %xmm4
; AVX512BW-NEXT: vinserti128 $1, %xmm8, %ymm4, %ymm8
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm2, %xmm5
+; AVX512BW-NEXT: vextractf128 $1, %ymm2, %xmm5
; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm0, %xmm7
+; AVX512BW-NEXT: vextractf128 $1, %ymm0, %xmm7
; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm7[1,1,3,3]
; AVX512BW-NEXT: vucomiss %xmm6, %xmm4
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmoval %ecx, %edx
; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm0, %xmm0
; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm3, %xmm0
+; AVX512BW-NEXT: vextractf128 $1, %ymm3, %xmm0
; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3]
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm1, %xmm5
+; AVX512BW-NEXT: vextractf128 $1, %ymm1, %xmm5
; AVX512BW-NEXT: vmovshdup {{.*#+}} xmm6 = xmm5[1,1,3,3]
; AVX512BW-NEXT: vucomiss %xmm4, %xmm6
; AVX512BW-NEXT: movl $0, %edx
; AVX512-NEXT: vmovq %rdx, %xmm5
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm5
; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm6
; AVX512-NEXT: vpextrq $1, %xmm6, %rsi
; AVX512-NEXT: cmpq %rdx, %rsi
; AVX512-NEXT: movl $0, %edx
; AVX512-NEXT: vmovq %rdx, %xmm4
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm5
; AVX512-NEXT: vpextrq $1, %xmm5, %rsi
; AVX512-NEXT: cmpq %rdx, %rsi
; AVX512-NEXT: movl $0, %edx
; AVX512F-NEXT: cmovgl %ecx, %edx
; AVX512F-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5
; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512F-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512F-NEXT: vextracti128 $1, %ymm2, %xmm5
; AVX512F-NEXT: vpextrd $1, %xmm5, %edx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm6
; AVX512F-NEXT: vpextrd $1, %xmm6, %esi
; AVX512F-NEXT: cmpl %edx, %esi
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: cmovgl %ecx, %edx
; AVX512F-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4
; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512F-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512F-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512F-NEXT: vpextrd $1, %xmm4, %edx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm5
; AVX512F-NEXT: vpextrd $1, %xmm5, %esi
; AVX512F-NEXT: cmpl %edx, %esi
; AVX512F-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovgl %ecx, %edx
; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5
; AVX512DQ-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm5
; AVX512DQ-NEXT: vpextrd $1, %xmm5, %edx
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm6
; AVX512DQ-NEXT: vpextrd $1, %xmm6, %esi
; AVX512DQ-NEXT: cmpl %edx, %esi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovgl %ecx, %edx
; AVX512DQ-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4
; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512DQ-NEXT: vpextrd $1, %xmm4, %edx
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm5
; AVX512DQ-NEXT: vpextrd $1, %xmm5, %esi
; AVX512DQ-NEXT: cmpl %edx, %esi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovgl %ecx, %edx
; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm7, %xmm5
; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512BW-NEXT: vextracti128 $1, %ymm2, %xmm5
; AVX512BW-NEXT: vpextrd $1, %xmm5, %edx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm6
; AVX512BW-NEXT: vpextrd $1, %xmm6, %esi
; AVX512BW-NEXT: cmpl %edx, %esi
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovgl %ecx, %edx
; AVX512BW-NEXT: vpinsrd $3, %edx, %xmm6, %xmm4
; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512BW-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512BW-NEXT: vpextrd $1, %xmm4, %edx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512BW-NEXT: vextracti128 $1, %ymm1, %xmm5
; AVX512BW-NEXT: vpextrd $1, %xmm5, %esi
; AVX512BW-NEXT: cmpl %edx, %esi
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovgw %cx, %dx
; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm7, %xmm5
; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm5
+; AVX512BW-NEXT: vextracti128 $1, %ymm2, %xmm5
; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm6
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm6
; AVX512BW-NEXT: vpextrw $1, %xmm6, %esi
; AVX512BW-NEXT: cmpw %dx, %si
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovgw %cx, %dx
; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512BW-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512BW-NEXT: vpextrw $1, %xmm4, %edx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm5
+; AVX512BW-NEXT: vextracti128 $1, %ymm1, %xmm5
; AVX512BW-NEXT: vpextrw $1, %xmm5, %esi
; AVX512BW-NEXT: cmpw %dx, %si
; AVX512BW-NEXT: movl $0, %edx
; AVX512F-NEXT: vmovq %rdx, %xmm9
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512F-NEXT: vextractf32x4 $1, %zmm4, %xmm9
-; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm10
+; AVX512F-NEXT: vextractf128 $1, %ymm4, %xmm9
+; AVX512F-NEXT: vextractf128 $1, %ymm0, %xmm10
; AVX512F-NEXT: vucomisd %xmm9, %xmm10
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: cmovaq %rcx, %rdx
; AVX512F-NEXT: vmovq %rdx, %xmm0
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
; AVX512F-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512F-NEXT: vextractf32x4 $1, %zmm5, %xmm4
-; AVX512F-NEXT: vextractf32x4 $1, %zmm1, %xmm0
+; AVX512F-NEXT: vextractf128 $1, %ymm5, %xmm4
+; AVX512F-NEXT: vextractf128 $1, %ymm1, %xmm0
; AVX512F-NEXT: vucomisd %xmm4, %xmm0
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: cmovaq %rcx, %rdx
; AVX512F-NEXT: vmovq %rdx, %xmm4
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512F-NEXT: vextractf32x4 $1, %zmm6, %xmm1
-; AVX512F-NEXT: vextractf32x4 $1, %zmm2, %xmm4
+; AVX512F-NEXT: vextractf128 $1, %ymm6, %xmm1
+; AVX512F-NEXT: vextractf128 $1, %ymm2, %xmm4
; AVX512F-NEXT: vucomisd %xmm1, %xmm4
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: cmovaq %rcx, %rdx
; AVX512F-NEXT: vmovq %rdx, %xmm2
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512F-NEXT: vextractf32x4 $1, %zmm7, %xmm2
-; AVX512F-NEXT: vextractf32x4 $1, %zmm3, %xmm4
+; AVX512F-NEXT: vextractf128 $1, %ymm7, %xmm2
+; AVX512F-NEXT: vextractf128 $1, %ymm3, %xmm4
; AVX512F-NEXT: vucomisd %xmm2, %xmm4
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm9
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm4, %xmm9
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm0, %xmm10
+; AVX512DQ-NEXT: vextractf128 $1, %ymm4, %xmm9
+; AVX512DQ-NEXT: vextractf128 $1, %ymm0, %xmm10
; AVX512DQ-NEXT: vucomisd %xmm9, %xmm10
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm5, %xmm4
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm1, %xmm0
+; AVX512DQ-NEXT: vextractf128 $1, %ymm5, %xmm4
+; AVX512DQ-NEXT: vextractf128 $1, %ymm1, %xmm0
; AVX512DQ-NEXT: vucomisd %xmm4, %xmm0
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm4
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm6, %xmm1
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm2, %xmm4
+; AVX512DQ-NEXT: vextractf128 $1, %ymm6, %xmm1
+; AVX512DQ-NEXT: vextractf128 $1, %ymm2, %xmm4
; AVX512DQ-NEXT: vucomisd %xmm1, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512DQ-NEXT: vmovq %rdx, %xmm2
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm7, %xmm2
-; AVX512DQ-NEXT: vextractf32x4 $1, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextractf128 $1, %ymm7, %xmm2
+; AVX512DQ-NEXT: vextractf128 $1, %ymm3, %xmm4
; AVX512DQ-NEXT: vucomisd %xmm2, %xmm4
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: cmovaq %rcx, %rdx
; AVX512BW-NEXT: vmovq %rdx, %xmm9
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm11[0],xmm9[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm4, %xmm9
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm0, %xmm10
+; AVX512BW-NEXT: vextractf128 $1, %ymm4, %xmm9
+; AVX512BW-NEXT: vextractf128 $1, %ymm0, %xmm10
; AVX512BW-NEXT: vucomisd %xmm9, %xmm10
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovaq %rcx, %rdx
; AVX512BW-NEXT: vmovq %rdx, %xmm0
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm10[0],xmm0[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm5, %xmm4
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm1, %xmm0
+; AVX512BW-NEXT: vextractf128 $1, %ymm5, %xmm4
+; AVX512BW-NEXT: vextractf128 $1, %ymm1, %xmm0
; AVX512BW-NEXT: vucomisd %xmm4, %xmm0
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovaq %rcx, %rdx
; AVX512BW-NEXT: vmovq %rdx, %xmm4
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm4[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm6, %xmm1
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm2, %xmm4
+; AVX512BW-NEXT: vextractf128 $1, %ymm6, %xmm1
+; AVX512BW-NEXT: vextractf128 $1, %ymm2, %xmm4
; AVX512BW-NEXT: vucomisd %xmm1, %xmm4
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovaq %rcx, %rdx
; AVX512BW-NEXT: vmovq %rdx, %xmm2
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm5[0],xmm2[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm7, %xmm2
-; AVX512BW-NEXT: vextractf32x4 $1, %zmm3, %xmm4
+; AVX512BW-NEXT: vextractf128 $1, %ymm7, %xmm2
+; AVX512BW-NEXT: vextractf128 $1, %ymm3, %xmm4
; AVX512BW-NEXT: vucomisd %xmm2, %xmm4
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: cmovaq %rcx, %rdx
; AVX512F-NEXT: vmovq %rdx, %xmm9
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0]
; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512F-NEXT: vextracti32x4 $1, %zmm4, %xmm9
+; AVX512F-NEXT: vextracti128 $1, %ymm4, %xmm9
; AVX512F-NEXT: vpextrq $1, %xmm9, %rdx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm10
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm10
; AVX512F-NEXT: vpextrq $1, %xmm10, %rsi
; AVX512F-NEXT: cmpq %rdx, %rsi
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: vmovq %rdx, %xmm0
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
; AVX512F-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512F-NEXT: vextracti32x4 $1, %zmm5, %xmm0
+; AVX512F-NEXT: vextracti128 $1, %ymm5, %xmm0
; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm1, %xmm4
+; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm4
; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512F-NEXT: cmpq %rdx, %rsi
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: vmovq %rdx, %xmm4
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0]
; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
-; AVX512F-NEXT: vextracti32x4 $1, %zmm6, %xmm0
+; AVX512F-NEXT: vextracti128 $1, %ymm6, %xmm0
; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm2, %xmm4
+; AVX512F-NEXT: vextracti128 $1, %ymm2, %xmm4
; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512F-NEXT: cmpq %rdx, %rsi
; AVX512F-NEXT: movl $0, %edx
; AVX512F-NEXT: vmovq %rdx, %xmm2
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2
-; AVX512F-NEXT: vextracti32x4 $1, %zmm7, %xmm0
+; AVX512F-NEXT: vextracti128 $1, %ymm7, %xmm0
; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512F-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512F-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512F-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512F-NEXT: cmpq %rdx, %rsi
; AVX512F-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm9
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm4, %xmm9
+; AVX512DQ-NEXT: vextracti128 $1, %ymm4, %xmm9
; AVX512DQ-NEXT: vpextrq $1, %xmm9, %rdx
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm0, %xmm10
+; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm10
; AVX512DQ-NEXT: vpextrq $1, %xmm10, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm0
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm5, %xmm0
+; AVX512DQ-NEXT: vextracti128 $1, %ymm5, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm1, %xmm4
+; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm4
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm6, %xmm0
+; AVX512DQ-NEXT: vextracti128 $1, %ymm6, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm2, %xmm4
+; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512DQ-NEXT: vmovq %rdx, %xmm2
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm7, %xmm0
+; AVX512DQ-NEXT: vextracti128 $1, %ymm7, %xmm0
; AVX512DQ-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512DQ-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512DQ-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512DQ-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512DQ-NEXT: cmpq %rdx, %rsi
; AVX512DQ-NEXT: movl $0, %edx
; AVX512BW-NEXT: vmovq %rdx, %xmm9
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm11[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm4, %xmm9
+; AVX512BW-NEXT: vextracti128 $1, %ymm4, %xmm9
; AVX512BW-NEXT: vpextrq $1, %xmm9, %rdx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm10
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm10
; AVX512BW-NEXT: vpextrq $1, %xmm10, %rsi
; AVX512BW-NEXT: cmpq %rdx, %rsi
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: vmovq %rdx, %xmm0
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm10[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm5, %xmm0
+; AVX512BW-NEXT: vextracti128 $1, %ymm5, %xmm0
; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm1, %xmm4
+; AVX512BW-NEXT: vextracti128 $1, %ymm1, %xmm4
; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512BW-NEXT: cmpq %rdx, %rsi
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: vmovq %rdx, %xmm4
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm4[0],xmm0[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm6, %xmm0
+; AVX512BW-NEXT: vextracti128 $1, %ymm6, %xmm0
; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm2, %xmm4
+; AVX512BW-NEXT: vextracti128 $1, %ymm2, %xmm4
; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512BW-NEXT: cmpq %rdx, %rsi
; AVX512BW-NEXT: movl $0, %edx
; AVX512BW-NEXT: vmovq %rdx, %xmm2
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm2
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm7, %xmm0
+; AVX512BW-NEXT: vextracti128 $1, %ymm7, %xmm0
; AVX512BW-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm3, %xmm4
+; AVX512BW-NEXT: vextracti128 $1, %ymm3, %xmm4
; AVX512BW-NEXT: vpextrq $1, %xmm4, %rsi
; AVX512BW-NEXT: cmpq %rdx, %rsi
; AVX512BW-NEXT: movl $0, %edx
; AVX-NEXT: vmovq %rdx, %xmm2
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrq $1, %xmm2, %rax
; AVX-NEXT: imulq %rcx
; AVX-NEXT: movq %rdx, %rax
; AVX-NEXT: addl %ecx, %eax
; AVX-NEXT: vpinsrd $3, %eax, %xmm3, %xmm2
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrd $1, %xmm2, %eax
; AVX-NEXT: cltq
; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
; AVX512BW-NEXT: movzbl %al, %eax
; AVX512BW-NEXT: vpinsrb $15, %eax, %xmm3, %xmm2
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX512BW-NEXT: vpextrb $1, %xmm2, %eax
; AVX512BW-NEXT: movsbl %al, %eax
; AVX512BW-NEXT: imull $-109, %eax, %ecx
; AVX-NEXT: vmovq %rcx, %xmm2
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrq $1, %xmm2, %rcx
; AVX-NEXT: movq %rcx, %rax
; AVX-NEXT: imulq %rsi
; AVX-NEXT: subl %edx, %eax
; AVX-NEXT: vpinsrd $3, %eax, %xmm3, %xmm2
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrd $1, %xmm2, %eax
; AVX-NEXT: cltq
; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
; AVX512BW-NEXT: movzbl %cl, %eax
; AVX512BW-NEXT: vpinsrb $15, %eax, %xmm3, %xmm2
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX512BW-NEXT: vpextrb $1, %xmm2, %eax
; AVX512BW-NEXT: movsbl %al, %ecx
; AVX512BW-NEXT: imull $-109, %ecx, %eax
; AVX-NEXT: vmovq %rcx, %xmm2
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrq $1, %xmm2, %rcx
; AVX-NEXT: movq %rcx, %rax
; AVX-NEXT: mulq %rsi
; AVX-NEXT: shrl $2, %eax
; AVX-NEXT: vpinsrd $3, %eax, %xmm3, %xmm2
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrd $1, %xmm2, %eax
; AVX-NEXT: imulq $613566757, %rax, %rcx # imm = 0x24924925
; AVX-NEXT: shrq $32, %rcx
; AVX512BW-NEXT: movzbl %al, %eax
; AVX512BW-NEXT: vpinsrb $15, %eax, %xmm3, %xmm2
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX512BW-NEXT: vpextrb $1, %xmm2, %eax
; AVX512BW-NEXT: imull $37, %eax, %ecx
; AVX512BW-NEXT: shrl $8, %ecx
; AVX-NEXT: vmovq %rcx, %xmm2
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrq $1, %xmm2, %rcx
; AVX-NEXT: movq %rcx, %rax
; AVX-NEXT: mulq %rsi
; AVX-NEXT: subl %ecx, %eax
; AVX-NEXT: vpinsrd $3, %eax, %xmm3, %xmm2
; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX-NEXT: vpextrd $1, %xmm2, %eax
; AVX-NEXT: imulq $613566757, %rax, %rcx # imm = 0x24924925
; AVX-NEXT: shrq $32, %rcx
; AVX512BW-NEXT: movzbl %dl, %eax
; AVX512BW-NEXT: vpinsrb $15, %eax, %xmm3, %xmm2
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512BW-NEXT: vextracti32x4 $1, %zmm0, %xmm2
+; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm2
; AVX512BW-NEXT: vpextrb $1, %xmm2, %edx
; AVX512BW-NEXT: imull $37, %edx, %esi
; AVX512BW-NEXT: shrl $8, %esi
define <16 x i32> @shuffle_v16i32_04_04_04_04_04_04_04_04_04_04_04_04_04_04_04_04(<16 x i32> %a, <16 x i32> %b) {
; ALL-LABEL: shuffle_v16i32_04_04_04_04_04_04_04_04_04_04_04_04_04_04_04_04:
; ALL: # BB#0:
-; ALL-NEXT: vextractf32x4 $1, %zmm0, %xmm0
+; ALL-NEXT: vextractf128 $1, %ymm0, %xmm0
; ALL-NEXT: vbroadcastss %xmm0, %zmm0
; ALL-NEXT: retq
%shuffle = shufflevector <16 x i32> %a, <16 x i32> %b, <16 x i32><i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4>
define <4 x float> @test_v16f32_0_1_3_6 (<16 x float> %v) {
; ALL-LABEL: test_v16f32_0_1_3_6:
; ALL: # BB#0:
-; ALL-NEXT: vextractf32x4 $1, %zmm0, %xmm1
-; ALL-NEXT: vpermilpd {{.*#+}} xmm1 = xmm1[1,0]
-; ALL-NEXT: vpermilps {{.*#+}} xmm0 = xmm0[0,1,3,3]
-; ALL-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[0]
+; ALL-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[0,1,3,3]
+; ALL-NEXT: vextractf128 $1, %ymm0, %xmm0
+; ALL-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; ALL-NEXT: vinsertps {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[0]
; ALL-NEXT: vzeroupper
; ALL-NEXT: retq
%res = shufflevector <16 x float> %v, <16 x float> undef, <4 x i32> <i32 0, i32 1, i32 3, i32 6>
;
; SKX-LABEL: shuffle_v32i16_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08_08:
; SKX: ## BB#0:
-; SKX-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0
; SKX-NEXT: vpbroadcastw %xmm0, %zmm0
; SKX-NEXT: retq
%c = shufflevector <32 x i16> %a, <32 x i16> undef, <32 x i32> <i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8>
define <8 x double> @shuffle_v8f64_22222222(<8 x double> %a, <8 x double> %b) {
; AVX512F-LABEL: shuffle_v8f64_22222222:
; AVX512F: # BB#0:
-; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm0
+; AVX512F-NEXT: vextractf128 $1, %ymm0, %xmm0
; AVX512F-NEXT: vbroadcastsd %xmm0, %zmm0
; AVX512F-NEXT: retq
;
; AVX512F-32-LABEL: shuffle_v8f64_22222222:
; AVX512F-32: # BB#0:
-; AVX512F-32-NEXT: vextractf32x4 $1, %zmm0, %xmm0
+; AVX512F-32-NEXT: vextractf128 $1, %ymm0, %xmm0
; AVX512F-32-NEXT: vbroadcastsd %xmm0, %zmm0
; AVX512F-32-NEXT: retl
%shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2>
; AVX512F-LABEL: test_v8f64_34:
; AVX512F: # BB#0:
; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm1
-; AVX512F-NEXT: vextractf32x4 $1, %zmm0, %xmm0
+; AVX512F-NEXT: vextractf128 $1, %ymm0, %xmm0
; AVX512F-NEXT: vshufpd {{.*#+}} xmm0 = xmm0[1],xmm1[0]
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
; AVX512F-32-LABEL: test_v8f64_34:
; AVX512F-32: # BB#0:
; AVX512F-32-NEXT: vextractf32x4 $2, %zmm0, %xmm1
-; AVX512F-32-NEXT: vextractf32x4 $1, %zmm0, %xmm0
+; AVX512F-32-NEXT: vextractf128 $1, %ymm0, %xmm0
; AVX512F-32-NEXT: vshufpd {{.*#+}} xmm0 = xmm0[1],xmm1[0]
; AVX512F-32-NEXT: vzeroupper
; AVX512F-32-NEXT: retl
; AVX512F: # BB#0:
; AVX512F-NEXT: kmovw %edi, %k1
; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
-; AVX512F-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512F-NEXT: vpbroadcastq %xmm0, %zmm0
; AVX512F-NEXT: vpsllq $63, %zmm0, %zmm0
; AVX512F-NEXT: vptestmq %zmm0, %zmm0, %k1
; VL_BW_DQ: # BB#0:
; VL_BW_DQ-NEXT: kmovd %edi, %k0
; VL_BW_DQ-NEXT: vpmovm2q %k0, %zmm0
-; VL_BW_DQ-NEXT: vextracti32x4 $1, %zmm0, %xmm0
+; VL_BW_DQ-NEXT: vextracti128 $1, %ymm0, %xmm0
; VL_BW_DQ-NEXT: vpbroadcastq %xmm0, %zmm0
; VL_BW_DQ-NEXT: vpmovq2m %zmm0, %k0
; VL_BW_DQ-NEXT: vpmovm2w %k0, %xmm0