; Source: mirror of https://github.com/RPCS3/llvm-mirror.git (synced 2024-11-23)
; Commit 514380352b: "More mechanical splitting of tests so we can add a one use
; check to the isel patterns for forming masked instructions. In a few cases I
; changed immediates of instructions in order to avoid needing to split."
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -disable-peephole -mtriple=i686-unknown-unknown -mattr=+avx512bw --show-mc-encoding | FileCheck %s --check-prefixes=CHECK,X86
; RUN: llc < %s -disable-peephole -mtriple=x86_64-unknown-unknown -mattr=+avx512bw --show-mc-encoding | FileCheck %s --check-prefixes=CHECK,X64
define i32 @test_int_x86_avx512_kadd_d(<32 x i16> %A, <32 x i16> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx512_kadd_d:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmw %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0xfd,0x48,0x26,0xc0]
; CHECK-NEXT:    vptestmw %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0xf5,0x48,0x26,0xc9]
; CHECK-NEXT:    kaddd %k1, %k0, %k0 # encoding: [0xc4,0xe1,0xfd,0x4a,0xc1]
; CHECK-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; CHECK-NEXT:    kortestd %k0, %k0 # encoding: [0xc4,0xe1,0xf9,0x98,0xc0]
; CHECK-NEXT:    sete %al # encoding: [0x0f,0x94,0xc0]
; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
entry:
  %0 = icmp ne <32 x i16> %A, zeroinitializer
  %1 = icmp ne <32 x i16> %B, zeroinitializer
  %2 = call <32 x i1> @llvm.x86.avx512.kadd.d(<32 x i1> %0, <32 x i1> %1)
  %3 = bitcast <32 x i1> %2 to i32
  %4 = icmp eq i32 %3, 0
  %5 = zext i1 %4 to i32
  ret i32 %5
}
declare <32 x i1> @llvm.x86.avx512.kadd.d(<32 x i1>, <32 x i1>)

define i32 @test_int_x86_avx512_kadd_q(<64 x i8> %A, <64 x i8> %B) nounwind {
; X86-LABEL: test_int_x86_avx512_kadd_q:
; X86:       # %bb.0: # %entry
; X86-NEXT:    vptestmb %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0x7d,0x48,0x26,0xc0]
; X86-NEXT:    vptestmb %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0x75,0x48,0x26,0xc9]
; X86-NEXT:    kaddq %k1, %k0, %k0 # encoding: [0xc4,0xe1,0xfc,0x4a,0xc1]
; X86-NEXT:    kshiftrq $32, %k0, %k1 # encoding: [0xc4,0xe3,0xf9,0x31,0xc8,0x20]
; X86-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; X86-NEXT:    kortestd %k1, %k0 # encoding: [0xc4,0xe1,0xf9,0x98,0xc1]
; X86-NEXT:    sete %al # encoding: [0x0f,0x94,0xc0]
; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_kadd_q:
; X64:       # %bb.0: # %entry
; X64-NEXT:    vptestmb %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0x7d,0x48,0x26,0xc0]
; X64-NEXT:    vptestmb %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0x75,0x48,0x26,0xc9]
; X64-NEXT:    kaddq %k1, %k0, %k0 # encoding: [0xc4,0xe1,0xfc,0x4a,0xc1]
; X64-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; X64-NEXT:    kortestq %k0, %k0 # encoding: [0xc4,0xe1,0xf8,0x98,0xc0]
; X64-NEXT:    sete %al # encoding: [0x0f,0x94,0xc0]
; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT:    retq # encoding: [0xc3]
entry:
  %0 = icmp ne <64 x i8> %A, zeroinitializer
  %1 = icmp ne <64 x i8> %B, zeroinitializer
  %2 = call <64 x i1> @llvm.x86.avx512.kadd.q(<64 x i1> %0, <64 x i1> %1)
  %3 = bitcast <64 x i1> %2 to i64
  %4 = icmp eq i64 %3, 0
  %5 = zext i1 %4 to i32
  ret i32 %5
}
declare <64 x i1> @llvm.x86.avx512.kadd.q(<64 x i1>, <64 x i1>)

define i32 @test_x86_avx512_ktestc_d(<32 x i16> %A, <32 x i16> %B) {
; CHECK-LABEL: test_x86_avx512_ktestc_d:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vptestmw %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0xfd,0x48,0x26,0xc0]
; CHECK-NEXT:    vptestmw %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0xf5,0x48,0x26,0xc9]
; CHECK-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; CHECK-NEXT:    ktestd %k1, %k0 # encoding: [0xc4,0xe1,0xf9,0x99,0xc1]
; CHECK-NEXT:    setb %al # encoding: [0x0f,0x92,0xc0]
; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = icmp ne <32 x i16> %A, zeroinitializer
  %2 = icmp ne <32 x i16> %B, zeroinitializer
  %res = call i32 @llvm.x86.avx512.ktestc.d(<32 x i1> %1, <32 x i1> %2) ; <i32> [#uses=1]
  ret i32 %res
}
declare i32 @llvm.x86.avx512.ktestc.d(<32 x i1>, <32 x i1>) nounwind readnone

define i32 @test_x86_avx512_ktestz_d(<32 x i16> %A, <32 x i16> %B) {
; CHECK-LABEL: test_x86_avx512_ktestz_d:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vptestmw %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0xfd,0x48,0x26,0xc0]
; CHECK-NEXT:    vptestmw %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0xf5,0x48,0x26,0xc9]
; CHECK-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; CHECK-NEXT:    ktestd %k1, %k0 # encoding: [0xc4,0xe1,0xf9,0x99,0xc1]
; CHECK-NEXT:    sete %al # encoding: [0x0f,0x94,0xc0]
; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = icmp ne <32 x i16> %A, zeroinitializer
  %2 = icmp ne <32 x i16> %B, zeroinitializer
  %res = call i32 @llvm.x86.avx512.ktestz.d(<32 x i1> %1, <32 x i1> %2) ; <i32> [#uses=1]
  ret i32 %res
}
declare i32 @llvm.x86.avx512.ktestz.d(<32 x i1>, <32 x i1>) nounwind readnone

define i32 @test_x86_avx512_ktestc_q(<64 x i8> %A, <64 x i8> %B) {
; CHECK-LABEL: test_x86_avx512_ktestc_q:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vptestmb %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0x7d,0x48,0x26,0xc0]
; CHECK-NEXT:    vptestmb %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0x75,0x48,0x26,0xc9]
; CHECK-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; CHECK-NEXT:    ktestq %k1, %k0 # encoding: [0xc4,0xe1,0xf8,0x99,0xc1]
; CHECK-NEXT:    setb %al # encoding: [0x0f,0x92,0xc0]
; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = icmp ne <64 x i8> %A, zeroinitializer
  %2 = icmp ne <64 x i8> %B, zeroinitializer
  %res = call i32 @llvm.x86.avx512.ktestc.q(<64 x i1> %1, <64 x i1> %2) ; <i32> [#uses=1]
  ret i32 %res
}
declare i32 @llvm.x86.avx512.ktestc.q(<64 x i1>, <64 x i1>) nounwind readnone

define i32 @test_x86_avx512_ktestz_q(<64 x i8> %A, <64 x i8> %B) {
; CHECK-LABEL: test_x86_avx512_ktestz_q:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vptestmb %zmm0, %zmm0, %k0 # encoding: [0x62,0xf2,0x7d,0x48,0x26,0xc0]
; CHECK-NEXT:    vptestmb %zmm1, %zmm1, %k1 # encoding: [0x62,0xf2,0x75,0x48,0x26,0xc9]
; CHECK-NEXT:    xorl %eax, %eax # encoding: [0x31,0xc0]
; CHECK-NEXT:    ktestq %k1, %k0 # encoding: [0xc4,0xe1,0xf8,0x99,0xc1]
; CHECK-NEXT:    sete %al # encoding: [0x0f,0x94,0xc0]
; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = icmp ne <64 x i8> %A, zeroinitializer
  %2 = icmp ne <64 x i8> %B, zeroinitializer
  %res = call i32 @llvm.x86.avx512.ktestz.q(<64 x i1> %1, <64 x i1> %2) ; <i32> [#uses=1]
  ret i32 %res
}
declare i32 @llvm.x86.avx512.ktestz.q(<64 x i1>, <64 x i1>) nounwind readnone

define <32 x i16> @test_mask_packs_epi32_rr_512(<16 x i32> %a, <16 x i32> %b) {
; CHECK-LABEL: test_mask_packs_epi32_rr_512:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpackssdw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x6b,0xc1]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  ret <32 x i16> %1
}

define <32 x i16> @test_mask_packs_epi32_rrk_512(<16 x i32> %a, <16 x i32> %b, <32 x i16> %passThru, i32 %mask) {
; X86-LABEL: test_mask_packs_epi32_rrk_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpackssdw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x6b,0xd1]
; X86-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rrk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT:    vpackssdw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x6b,0xd1]
; X64-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %passThru
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packs_epi32_rrkz_512(<16 x i32> %a, <16 x i32> %b, i32 %mask) {
; X86-LABEL: test_mask_packs_epi32_rrkz_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpackssdw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x6b,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rrkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT:    vpackssdw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x6b,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packs_epi32_rm_512(<16 x i32> %a, <16 x i32>* %ptr_b) {
; X86-LABEL: test_mask_packs_epi32_rm_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    vpackssdw (%eax), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x6b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rm_512:
; X64:       # %bb.0:
; X64-NEXT:    vpackssdw (%rdi), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x6b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <16 x i32>, <16 x i32>* %ptr_b
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  ret <32 x i16> %1
}

define <32 x i16> @test_mask_packs_epi32_rmk_512(<16 x i32> %a, <16 x i32>* %ptr_b, <32 x i16> %passThru, i32 %mask) {
; X86-LABEL: test_mask_packs_epi32_rmk_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackssdw (%eax), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x6b,0x08]
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rmk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackssdw (%rdi), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x6b,0x0f]
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <16 x i32>, <16 x i32>* %ptr_b
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %passThru
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packs_epi32_rmkz_512(<16 x i32> %a, <16 x i32>* %ptr_b, i32 %mask) {
; X86-LABEL: test_mask_packs_epi32_rmkz_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackssdw (%eax), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x6b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rmkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackssdw (%rdi), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x6b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <16 x i32>, <16 x i32>* %ptr_b
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packs_epi32_rmb_512(<16 x i32> %a, i32* %ptr_b) {
; X86-LABEL: test_mask_packs_epi32_rmb_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    vpackssdw (%eax){1to16}, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x58,0x6b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rmb_512:
; X64:       # %bb.0:
; X64-NEXT:    vpackssdw (%rdi){1to16}, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x58,0x6b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %q = load i32, i32* %ptr_b
  %vecinit.i = insertelement <16 x i32> undef, i32 %q, i32 0
  %b = shufflevector <16 x i32> %vecinit.i, <16 x i32> undef, <16 x i32> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  ret <32 x i16> %1
}

define <32 x i16> @test_mask_packs_epi32_rmbk_512(<16 x i32> %a, i32* %ptr_b, <32 x i16> %passThru, i32 %mask) {
; X86-LABEL: test_mask_packs_epi32_rmbk_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackssdw (%eax){1to16}, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x59,0x6b,0x08]
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rmbk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackssdw (%rdi){1to16}, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x59,0x6b,0x0f]
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %q = load i32, i32* %ptr_b
  %vecinit.i = insertelement <16 x i32> undef, i32 %q, i32 0
  %b = shufflevector <16 x i32> %vecinit.i, <16 x i32> undef, <16 x i32> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %passThru
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packs_epi32_rmbkz_512(<16 x i32> %a, i32* %ptr_b, i32 %mask) {
; X86-LABEL: test_mask_packs_epi32_rmbkz_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackssdw (%eax){1to16}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xd9,0x6b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi32_rmbkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackssdw (%rdi){1to16}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xd9,0x6b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %q = load i32, i32* %ptr_b
  %vecinit.i = insertelement <16 x i32> undef, i32 %q, i32 0
  %b = shufflevector <16 x i32> %vecinit.i, <16 x i32> undef, <16 x i32> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}

declare <32 x i16> @llvm.x86.avx512.packssdw.512(<16 x i32>, <16 x i32>)

define <64 x i8> @test_mask_packs_epi16_rr_512(<32 x i16> %a, <32 x i16> %b) {
; CHECK-LABEL: test_mask_packs_epi16_rr_512:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpacksswb %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x63,0xc1]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16> %a, <32 x i16> %b)
  ret <64 x i8> %1
}

define <64 x i8> @test_mask_packs_epi16_rrk_512(<32 x i16> %a, <32 x i16> %b, <64 x i8> %passThru, i64 %mask) {
; X86-LABEL: test_mask_packs_epi16_rrk_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpacksswb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x63,0xd1]
; X86-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi16_rrk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT:    vpacksswb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x63,0xd1]
; X64-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> %passThru
  ret <64 x i8> %3
}

define <64 x i8> @test_mask_packs_epi16_rrkz_512(<32 x i16> %a, <32 x i16> %b, i64 %mask) {
; X86-LABEL: test_mask_packs_epi16_rrkz_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpacksswb %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x63,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi16_rrkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT:    vpacksswb %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x63,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
  ret <64 x i8> %3
}

define <64 x i8> @test_mask_packs_epi16_rm_512(<32 x i16> %a, <32 x i16>* %ptr_b) {
; X86-LABEL: test_mask_packs_epi16_rm_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    vpacksswb (%eax), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x63,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi16_rm_512:
; X64:       # %bb.0:
; X64-NEXT:    vpacksswb (%rdi), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x63,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <32 x i16>, <32 x i16>* %ptr_b
  %1 = call <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16> %a, <32 x i16> %b)
  ret <64 x i8> %1
}

define <64 x i8> @test_mask_packs_epi16_rmk_512(<32 x i16> %a, <32 x i16>* %ptr_b, <64 x i8> %passThru, i64 %mask) {
; X86-LABEL: test_mask_packs_epi16_rmk_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpacksswb (%eax), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x63,0x08]
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi16_rmk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovq %rsi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xce]
; X64-NEXT:    vpacksswb (%rdi), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x63,0x0f]
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <32 x i16>, <32 x i16>* %ptr_b
  %1 = call <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> %passThru
  ret <64 x i8> %3
}

define <64 x i8> @test_mask_packs_epi16_rmkz_512(<32 x i16> %a, <32 x i16>* %ptr_b, i64 %mask) {
; X86-LABEL: test_mask_packs_epi16_rmkz_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpacksswb (%eax), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x63,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packs_epi16_rmkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovq %rsi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xce]
; X64-NEXT:    vpacksswb (%rdi), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x63,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <32 x i16>, <32 x i16>* %ptr_b
  %1 = call <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
  ret <64 x i8> %3
}

declare <64 x i8> @llvm.x86.avx512.packsswb.512(<32 x i16>, <32 x i16>)

define <32 x i16> @test_mask_packus_epi32_rr_512(<16 x i32> %a, <16 x i32> %b) {
; CHECK-LABEL: test_mask_packus_epi32_rr_512:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpackusdw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x2b,0xc1]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  ret <32 x i16> %1
}

define <32 x i16> @test_mask_packus_epi32_rrk_512(<16 x i32> %a, <16 x i32> %b, <32 x i16> %passThru, i32 %mask) {
; X86-LABEL: test_mask_packus_epi32_rrk_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpackusdw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x2b,0xd1]
; X86-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rrk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT:    vpackusdw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x2b,0xd1]
; X64-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %passThru
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packus_epi32_rrkz_512(<16 x i32> %a, <16 x i32> %b, i32 %mask) {
; X86-LABEL: test_mask_packus_epi32_rrkz_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpackusdw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x2b,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rrkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT:    vpackusdw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x2b,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packus_epi32_rm_512(<16 x i32> %a, <16 x i32>* %ptr_b) {
; X86-LABEL: test_mask_packus_epi32_rm_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    vpackusdw (%eax), %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x2b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rm_512:
; X64:       # %bb.0:
; X64-NEXT:    vpackusdw (%rdi), %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x2b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <16 x i32>, <16 x i32>* %ptr_b
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  ret <32 x i16> %1
}

define <32 x i16> @test_mask_packus_epi32_rmk_512(<16 x i32> %a, <16 x i32>* %ptr_b, <32 x i16> %passThru, i32 %mask) {
; X86-LABEL: test_mask_packus_epi32_rmk_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackusdw (%eax), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x2b,0x08]
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rmk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackusdw (%rdi), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x2b,0x0f]
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <16 x i32>, <16 x i32>* %ptr_b
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %passThru
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packus_epi32_rmkz_512(<16 x i32> %a, <16 x i32>* %ptr_b, i32 %mask) {
; X86-LABEL: test_mask_packus_epi32_rmkz_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackusdw (%eax), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x2b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rmkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackusdw (%rdi), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x2b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %b = load <16 x i32>, <16 x i32>* %ptr_b
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packus_epi32_rmb_512(<16 x i32> %a, i32* %ptr_b) {
; X86-LABEL: test_mask_packus_epi32_rmb_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    vpackusdw (%eax){1to16}, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x58,0x2b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rmb_512:
; X64:       # %bb.0:
; X64-NEXT:    vpackusdw (%rdi){1to16}, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x58,0x2b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %q = load i32, i32* %ptr_b
  %vecinit.i = insertelement <16 x i32> undef, i32 %q, i32 0
  %b = shufflevector <16 x i32> %vecinit.i, <16 x i32> undef, <16 x i32> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  ret <32 x i16> %1
}

define <32 x i16> @test_mask_packus_epi32_rmbk_512(<16 x i32> %a, i32* %ptr_b, <32 x i16> %passThru, i32 %mask) {
; X86-LABEL: test_mask_packus_epi32_rmbk_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackusdw (%eax){1to16}, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0x7d,0x59,0x2b,0x08]
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rmbk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackusdw (%rdi){1to16}, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0x7d,0x59,0x2b,0x0f]
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %q = load i32, i32* %ptr_b
  %vecinit.i = insertelement <16 x i32> undef, i32 %q, i32 0
  %b = shufflevector <16 x i32> %vecinit.i, <16 x i32> undef, <16 x i32> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %passThru
  ret <32 x i16> %3
}

define <32 x i16> @test_mask_packus_epi32_rmbkz_512(<16 x i32> %a, i32* %ptr_b, i32 %mask) {
; X86-LABEL: test_mask_packus_epi32_rmbkz_512:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT:    vpackusdw (%eax){1to16}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xd9,0x2b,0x00]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi32_rmbkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT:    vpackusdw (%rdi){1to16}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xd9,0x2b,0x07]
; X64-NEXT:    retq # encoding: [0xc3]
  %q = load i32, i32* %ptr_b
  %vecinit.i = insertelement <16 x i32> undef, i32 %q, i32 0
  %b = shufflevector <16 x i32> %vecinit.i, <16 x i32> undef, <16 x i32> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32> %a, <16 x i32> %b)
  %2 = bitcast i32 %mask to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}

declare <32 x i16> @llvm.x86.avx512.packusdw.512(<16 x i32>, <16 x i32>)

define <64 x i8> @test_mask_packus_epi16_rr_512(<32 x i16> %a, <32 x i16> %b) {
; CHECK-LABEL: test_mask_packus_epi16_rr_512:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpackuswb %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x67,0xc1]
; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16> %a, <32 x i16> %b)
  ret <64 x i8> %1
}

define <64 x i8> @test_mask_packus_epi16_rrk_512(<32 x i16> %a, <32 x i16> %b, <64 x i8> %passThru, i64 %mask) {
; X86-LABEL: test_mask_packus_epi16_rrk_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpackuswb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x67,0xd1]
; X86-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi16_rrk_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT:    vpackuswb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x67,0xd1]
; X64-NEXT:    vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> %passThru
  ret <64 x i8> %3
}

define <64 x i8> @test_mask_packus_epi16_rrkz_512(<32 x i16> %a, <32 x i16> %b, i64 %mask) {
; X86-LABEL: test_mask_packus_epi16_rrkz_512:
; X86:       # %bb.0:
; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT:    vpackuswb %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x67,0xc1]
; X86-NEXT:    retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi16_rrkz_512:
; X64:       # %bb.0:
; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT:    vpackuswb %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x67,0xc1]
; X64-NEXT:    retq # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
  ret <64 x i8> %3
}

; Unmasked VPACKUSWB with a memory operand: the load of %ptr_b must fold into
; the instruction's source operand (-disable-peephole keeps isel folding only).
define <64 x i8> @test_mask_packus_epi16_rm_512(<32 x i16> %a, <32 x i16>* %ptr_b) {
; X86-LABEL: test_mask_packus_epi16_rm_512:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: vpackuswb (%eax), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x67,0x00]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi16_rm_512:
; X64: # %bb.0:
; X64-NEXT: vpackuswb (%rdi), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x67,0x07]
; X64-NEXT: retq # encoding: [0xc3]
  %b = load <32 x i16>, <32 x i16>* %ptr_b
  %1 = call <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16> %a, <32 x i16> %b)
  ret <64 x i8> %1
}
|
|
|
|
; Merge-masked VPACKUSWB with a folded memory operand: combines load folding
; with {%k1} merge-masking into %passThru.
define <64 x i8> @test_mask_packus_epi16_rmk_512(<32 x i16> %a, <32 x i16>* %ptr_b, <64 x i8> %passThru, i64 %mask) {
; X86-LABEL: test_mask_packus_epi16_rmk_512:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x08]
; X86-NEXT: vpackuswb (%eax), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x67,0x08]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi16_rmk_512:
; X64: # %bb.0:
; X64-NEXT: kmovq %rsi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xce]
; X64-NEXT: vpackuswb (%rdi), %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0x67,0x0f]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %b = load <32 x i16>, <32 x i16>* %ptr_b
  %1 = call <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> %passThru
  ret <64 x i8> %3
}
|
|
|
|
; Zero-masked VPACKUSWB with a folded memory operand: load folding plus
; {%k1} {z} zero-masking.
define <64 x i8> @test_mask_packus_epi16_rmkz_512(<32 x i16> %a, <32 x i16>* %ptr_b, i64 %mask) {
; X86-LABEL: test_mask_packus_epi16_rmkz_512:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x08]
; X86-NEXT: vpackuswb (%eax), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x67,0x00]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_mask_packus_epi16_rmkz_512:
; X64: # %bb.0:
; X64-NEXT: kmovq %rsi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xce]
; X64-NEXT: vpackuswb (%rdi), %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x67,0x07]
; X64-NEXT: retq # encoding: [0xc3]
  %b = load <32 x i16>, <32 x i16>* %ptr_b
  %1 = call <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16> %a, <32 x i16> %b)
  %2 = bitcast i64 %mask to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
  ret <64 x i8> %3
}
|
|
|
|
declare <64 x i8> @llvm.x86.avx512.packuswb.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked two-source word permute. The intrinsic is called with the index
; vector (%x0) as the second argument, so isel commutes to VPERMI2W, which
; keeps the index in the destination register.
define <32 x i16>@test_int_x86_avx512_vpermt2var_hi_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2) {
; CHECK-LABEL: test_int_x86_avx512_vpermt2var_hi_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpermi2w %zmm2, %zmm1, %zmm0 # encoding: [0x62,0xf2,0xf5,0x48,0x75,0xc2]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.vpermi2var.hi.512(<32 x i16> %x1, <32 x i16> %x0, <32 x i16> %x2)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked two-source word permute with the first data operand (%x1) as
; passthru: must select VPERMT2W {%k1} so the preserved lanes come from %x1.
define <32 x i16>@test_int_x86_avx512_mask_vpermt2var_hi_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_vpermt2var_hi_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpermt2w %zmm2, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x7d,0xca]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_vpermt2var_hi_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpermt2w %zmm2, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x7d,0xca]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.vpermi2var.hi.512(<32 x i16> %x1, <32 x i16> %x0, <32 x i16> %x2)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x1
  ret <32 x i16> %3
}
|
|
|
|
; Zero-masked two-source word permute: with no passthru to preserve, isel may
; pick VPERMI2W {%k1} {z}, overwriting the index register.
define <32 x i16>@test_int_x86_avx512_maskz_vpermt2var_hi_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_maskz_vpermt2var_hi_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpermi2w %zmm2, %zmm1, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xf5,0xc9,0x75,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_vpermt2var_hi_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpermi2w %zmm2, %zmm1, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xf5,0xc9,0x75,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.vpermi2var.hi.512(<32 x i16> %x1, <32 x i16> %x0, <32 x i16> %x2)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.vpermi2var.hi.512(<32 x i16>, <32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked two-source word permute with the index (%x1) in the middle operand:
; isel commutes to VPERMT2W, keeping the first data operand in the destination.
define <32 x i16>@test_int_x86_avx512_vpermi2var_hi_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2) {
; CHECK-LABEL: test_int_x86_avx512_vpermi2var_hi_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpermt2w %zmm2, %zmm1, %zmm0 # encoding: [0x62,0xf2,0xf5,0x48,0x7d,0xc2]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.vpermi2var.hi.512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked two-source word permute with the index vector (%x1) as
; passthru: must select VPERMI2W {%k1} so preserved lanes keep the indices.
define <32 x i16>@test_int_x86_avx512_mask_vpermi2var_hi_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_vpermi2var_hi_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpermi2w %zmm2, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x75,0xca]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_vpermi2var_hi_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpermi2w %zmm2, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x75,0xca]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.vpermi2var.hi.512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x1
  ret <32 x i16> %3
}
|
|
|
|
declare <64 x i8> @llvm.x86.avx512.pavg.b.512(<64 x i8>, <64 x i8>)
|
|
|
|
; Unmasked VPAVGB: 512-bit unsigned byte average (rounded).
define <64 x i8> @test_int_x86_avx512_pavg_b_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2) {
; CHECK-LABEL: test_int_x86_avx512_pavg_b_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpavgb %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xe0,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.pavg.b.512(<64 x i8> %x0, <64 x i8> %x1)
  ret <64 x i8> %1
}
|
|
|
|
; Merge-masked VPAVGB: select on the i64 mask must fold into {%k1} with %x2
; as the passthru destination.
define <64 x i8> @test_int_x86_avx512_mask_pavg_b_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pavg_b_512:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpavgb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe0,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pavg_b_512:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vpavgb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe0,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <64 x i8> @llvm.x86.avx512.pavg.b.512(<64 x i8> %x0, <64 x i8> %x1)
  %2 = bitcast i64 %x3 to <64 x i1>
  %3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> %x2
  ret <64 x i8> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.pavg.w.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked VPAVGW: 512-bit unsigned word average (rounded).
define <32 x i16> @test_int_x86_avx512_pavg_w_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2) {
; CHECK-LABEL: test_int_x86_avx512_pavg_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpavgw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xe3,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pavg.w.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked VPAVGW: select on the i32 mask must fold into {%k1} with %x2
; as the passthru destination.
define <32 x i16> @test_int_x86_avx512_mask_pavg_w_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pavg_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpavgw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe3,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pavg_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpavgw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe3,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pavg.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
declare <64 x i8> @llvm.x86.avx512.pshuf.b.512(<64 x i8>, <64 x i8>)
|
|
|
|
; Unmasked VPSHUFB: 512-bit byte shuffle driven by the control bytes in %x1.
define <64 x i8>@test_int_x86_avx512_pshuf_b_512(<64 x i8> %x0, <64 x i8> %x1) {
; CHECK-LABEL: test_int_x86_avx512_pshuf_b_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpshufb %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x00,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <64 x i8> @llvm.x86.avx512.pshuf.b.512(<64 x i8> %x0, <64 x i8> %x1)
  ret <64 x i8> %res
}
|
|
|
|
; Merge-masked VPSHUFB: select on the i64 mask must fold into {%k1} with %x2
; as the passthru destination.
define <64 x i8>@test_int_x86_avx512_pshuf_b_512_mask(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %mask) {
; X86-LABEL: test_int_x86_avx512_pshuf_b_512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpshufb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x00,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_pshuf_b_512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vpshufb %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x00,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <64 x i8> @llvm.x86.avx512.pshuf.b.512(<64 x i8> %x0, <64 x i8> %x1)
  %mask.cast = bitcast i64 %mask to <64 x i1>
  %res2 = select <64 x i1> %mask.cast, <64 x i8> %res, <64 x i8> %x2
  ret <64 x i8> %res2
}
|
|
|
|
; Zero-masked VPSHUFB: select against zeroinitializer must fold into the
; {%k1} {z} form.
define <64 x i8>@test_int_x86_avx512_pshuf_b_512_maskz(<64 x i8> %x0, <64 x i8> %x1, i64 %mask) {
; X86-LABEL: test_int_x86_avx512_pshuf_b_512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpshufb %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x00,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_pshuf_b_512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vpshufb %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x00,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <64 x i8> @llvm.x86.avx512.pshuf.b.512(<64 x i8> %x0, <64 x i8> %x1)
  %mask.cast = bitcast i64 %mask to <64 x i1>
  %res2 = select <64 x i1> %mask.cast, <64 x i8> %res, <64 x i8> zeroinitializer
  ret <64 x i8> %res2
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.pmulhu.w.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked VPMULHUW: high 16 bits of the 32-bit unsigned word products.
define <32 x i16> @test_int_x86_avx512_pmulhu_w_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2) {
; CHECK-LABEL: test_int_x86_avx512_pmulhu_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmulhuw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xe4,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmulhu.w.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked VPMULHUW: select on the i32 mask must fold into {%k1} with %x2
; as the passthru destination.
define <32 x i16> @test_int_x86_avx512_mask_pmulhu_w_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pmulhu_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmulhuw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe4,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmulhu_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmulhuw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe4,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmulhu.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.pmulh.w.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked VPMULHW: high 16 bits of the 32-bit signed word products.
define <32 x i16> @test_int_x86_avx512_pmulh_w_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2) {
; CHECK-LABEL: test_int_x86_avx512_pmulh_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmulhw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xe5,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmulh.w.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked VPMULHW: select on the i32 mask must fold into {%k1} with %x2
; as the passthru destination.
define <32 x i16> @test_int_x86_avx512_mask_pmulh_w_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pmulh_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmulhw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe5,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmulh_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmulhw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe5,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmulh.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.pmul.hr.sw.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked VPMULHRSW: signed word multiply, round and take high bits.
define <32 x i16> @test_int_x86_avx512_pmulhr_sw_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2) {
; CHECK-LABEL: test_int_x86_avx512_pmulhr_sw_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmulhrsw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x0b,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmul.hr.sw.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked VPMULHRSW: select on the i32 mask must fold into {%k1} with
; %x2 as the passthru destination.
define <32 x i16> @test_int_x86_avx512_mask_pmulhr_sw_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pmulhr_sw_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmulhrsw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x0b,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmulhr_sw_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmulhrsw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x0b,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmul.hr.sw.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
; Plain IR trunc <32 x i16> -> <32 x i8> should select unmasked VPMOVWB.
define <32 x i8>@test_int_x86_avx512_pmov_wb_512(<32 x i16> %x0) {
; CHECK-LABEL: test_int_x86_avx512_pmov_wb_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmovwb %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x30,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = trunc <32 x i16> %x0 to <32 x i8>
  ret <32 x i8> %1
}
|
|
|
|
; trunc + select on the i32 mask should fold into merge-masked VPMOVWB {%k1}
; with %x1 as the passthru destination.
define <32 x i8>@test_int_x86_avx512_mask_pmov_wb_512(<32 x i16> %x0, <32 x i8> %x1, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_mask_pmov_wb_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmovwb %zmm0, %ymm1 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x30,0xc1]
; X86-NEXT: vmovdqa %ymm1, %ymm0 # encoding: [0xc5,0xfd,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmov_wb_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmovwb %zmm0, %ymm1 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x30,0xc1]
; X64-NEXT: vmovdqa %ymm1, %ymm0 # encoding: [0xc5,0xfd,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = trunc <32 x i16> %x0 to <32 x i8>
  %2 = bitcast i32 %x2 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %x1
  ret <32 x i8> %3
}
|
|
|
|
; trunc + select against zeroinitializer should fold into zero-masked
; VPMOVWB {%k1} {z}.
define <32 x i8>@test_int_x86_avx512_maskz_pmov_wb_512(<32 x i16> %x0, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_maskz_pmov_wb_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmovwb %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x30,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_pmov_wb_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmovwb %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x30,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = trunc <32 x i16> %x0 to <32 x i8>
  %2 = bitcast i32 %x2 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> zeroinitializer
  ret <32 x i8> %3
}
|
|
|
|
declare void @llvm.x86.avx512.mask.pmov.wb.mem.512(i8* %ptr, <32 x i16>, i32)
|
|
|
|
; Truncating store intrinsic VPMOVWB to memory: first call uses all-ones mask
; (-1) and must emit the unmasked store form; second uses %x2 and must emit
; the {%k1}-masked store form.
define void @test_int_x86_avx512_mask_pmov_wb_mem_512(i8* %ptr, <32 x i16> %x1, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_mask_pmov_wb_mem_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: vpmovwb %zmm0, (%eax) # encoding: [0x62,0xf2,0x7e,0x48,0x30,0x00]
; X86-NEXT: vpmovwb %zmm0, (%eax) {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x30,0x00]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmov_wb_mem_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT: vpmovwb %zmm0, (%rdi) # encoding: [0x62,0xf2,0x7e,0x48,0x30,0x07]
; X64-NEXT: vpmovwb %zmm0, (%rdi) {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x30,0x07]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
  call void @llvm.x86.avx512.mask.pmov.wb.mem.512(i8* %ptr, <32 x i16> %x1, i32 -1)
  call void @llvm.x86.avx512.mask.pmov.wb.mem.512(i8* %ptr, <32 x i16> %x1, i32 %x2)
  ret void
}
|
|
|
|
declare <32 x i8> @llvm.x86.avx512.mask.pmovs.wb.512(<32 x i16>, <32 x i8>, i32)
|
|
|
|
; Signed-saturating narrow: all-ones mask (-1) must select unmasked VPMOVSWB.
define <32 x i8>@test_int_x86_avx512_pmovs_wb_512(<32 x i16> %x0, <32 x i8> %x1) {
; CHECK-LABEL: test_int_x86_avx512_pmovs_wb_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmovswb %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x20,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i8> @llvm.x86.avx512.mask.pmovs.wb.512(<32 x i16> %x0, <32 x i8> %x1, i32 -1)
  ret <32 x i8> %res
}
|
|
|
|
; Merge-masked VPMOVSWB: the intrinsic's passthru operand %x1 becomes the
; {%k1} merge destination.
define <32 x i8>@test_int_x86_avx512_mask_pmovs_wb_512(<32 x i16> %x0, <32 x i8> %x1, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_mask_pmovs_wb_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmovswb %zmm0, %ymm1 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x20,0xc1]
; X86-NEXT: vmovdqa %ymm1, %ymm0 # encoding: [0xc5,0xfd,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmovs_wb_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmovswb %zmm0, %ymm1 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x20,0xc1]
; X64-NEXT: vmovdqa %ymm1, %ymm0 # encoding: [0xc5,0xfd,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i8> @llvm.x86.avx512.mask.pmovs.wb.512(<32 x i16> %x0, <32 x i8> %x1, i32 %x2)
  ret <32 x i8> %res
}
|
|
|
|
; Zero-masked VPMOVSWB: zeroinitializer passthru selects the {%k1} {z} form.
define <32 x i8>@test_int_x86_avx512_maskz_pmovs_wb_512(<32 x i16> %x0, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_maskz_pmovs_wb_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmovswb %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x20,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_pmovs_wb_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmovswb %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x20,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i8> @llvm.x86.avx512.mask.pmovs.wb.512(<32 x i16> %x0, <32 x i8> zeroinitializer, i32 %x2)
  ret <32 x i8> %res
}
|
|
|
|
declare void @llvm.x86.avx512.mask.pmovs.wb.mem.512(i8* %ptr, <32 x i16>, i32)
|
|
|
|
; Signed-saturating truncating store VPMOVSWB: first call (mask -1) must emit
; the unmasked store; second call must emit the {%k1}-masked store.
define void @test_int_x86_avx512_mask_pmovs_wb_mem_512(i8* %ptr, <32 x i16> %x1, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_mask_pmovs_wb_mem_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: vpmovswb %zmm0, (%eax) # encoding: [0x62,0xf2,0x7e,0x48,0x20,0x00]
; X86-NEXT: vpmovswb %zmm0, (%eax) {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x20,0x00]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmovs_wb_mem_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT: vpmovswb %zmm0, (%rdi) # encoding: [0x62,0xf2,0x7e,0x48,0x20,0x07]
; X64-NEXT: vpmovswb %zmm0, (%rdi) {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x20,0x07]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
  call void @llvm.x86.avx512.mask.pmovs.wb.mem.512(i8* %ptr, <32 x i16> %x1, i32 -1)
  call void @llvm.x86.avx512.mask.pmovs.wb.mem.512(i8* %ptr, <32 x i16> %x1, i32 %x2)
  ret void
}
|
|
|
|
declare <32 x i8> @llvm.x86.avx512.mask.pmovus.wb.512(<32 x i16>, <32 x i8>, i32)
|
|
|
|
; Unsigned-saturating narrow: all-ones mask (-1) must select unmasked VPMOVUSWB.
define <32 x i8>@test_int_x86_avx512_pmovus_wb_512(<32 x i16> %x0, <32 x i8> %x1) {
; CHECK-LABEL: test_int_x86_avx512_pmovus_wb_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmovuswb %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x10,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i8> @llvm.x86.avx512.mask.pmovus.wb.512(<32 x i16> %x0, <32 x i8> %x1, i32 -1)
  ret <32 x i8> %res
}
|
|
|
|
; Merge-masked VPMOVUSWB: the intrinsic's passthru operand %x1 becomes the
; {%k1} merge destination.
define <32 x i8>@test_int_x86_avx512_mask_pmovus_wb_512(<32 x i16> %x0, <32 x i8> %x1, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_mask_pmovus_wb_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmovuswb %zmm0, %ymm1 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x10,0xc1]
; X86-NEXT: vmovdqa %ymm1, %ymm0 # encoding: [0xc5,0xfd,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmovus_wb_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmovuswb %zmm0, %ymm1 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x10,0xc1]
; X64-NEXT: vmovdqa %ymm1, %ymm0 # encoding: [0xc5,0xfd,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i8> @llvm.x86.avx512.mask.pmovus.wb.512(<32 x i16> %x0, <32 x i8> %x1, i32 %x2)
  ret <32 x i8> %res
}
|
|
|
|
; Zero-masked VPMOVUSWB: zeroinitializer passthru selects the {%k1} {z} form.
define <32 x i8>@test_int_x86_avx512_maskz_pmovus_wb_512(<32 x i16> %x0, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_maskz_pmovus_wb_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmovuswb %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x10,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_pmovus_wb_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmovuswb %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x10,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i8> @llvm.x86.avx512.mask.pmovus.wb.512(<32 x i16> %x0, <32 x i8> zeroinitializer, i32 %x2)
  ret <32 x i8> %res
}
|
|
|
|
declare void @llvm.x86.avx512.mask.pmovus.wb.mem.512(i8* %ptr, <32 x i16>, i32)
|
|
|
|
; Unsigned-saturating truncating store VPMOVUSWB: first call (mask -1) must
; emit the unmasked store; second call must emit the {%k1}-masked store.
define void @test_int_x86_avx512_mask_pmovus_wb_mem_512(i8* %ptr, <32 x i16> %x1, i32 %x2) {
; X86-LABEL: test_int_x86_avx512_mask_pmovus_wb_mem_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x08]
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: vpmovuswb %zmm0, (%eax) # encoding: [0x62,0xf2,0x7e,0x48,0x10,0x00]
; X86-NEXT: vpmovuswb %zmm0, (%eax) {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x10,0x00]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmovus_wb_mem_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %esi, %k1 # encoding: [0xc5,0xfb,0x92,0xce]
; X64-NEXT: vpmovuswb %zmm0, (%rdi) # encoding: [0x62,0xf2,0x7e,0x48,0x10,0x07]
; X64-NEXT: vpmovuswb %zmm0, (%rdi) {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x10,0x07]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
  call void @llvm.x86.avx512.mask.pmovus.wb.mem.512(i8* %ptr, <32 x i16> %x1, i32 -1)
  call void @llvm.x86.avx512.mask.pmovus.wb.mem.512(i8* %ptr, <32 x i16> %x1, i32 %x2)
  ret void
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.pmaddubs.w.512(<64 x i8>, <64 x i8>)
|
|
|
|
; Unmasked VPMADDUBSW: multiply unsigned/signed byte pairs, add adjacent
; products with signed saturation into words.
define <32 x i16> @test_int_x86_avx512_pmaddubs_w_512(<64 x i8> %x0, <64 x i8> %x1) {
; CHECK-LABEL: test_int_x86_avx512_pmaddubs_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmaddubsw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x04,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmaddubs.w.512(<64 x i8> %x0, <64 x i8> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked VPMADDUBSW: select on the i32 mask must fold into {%k1} with
; %x2 as the passthru destination.
define <32 x i16> @test_int_x86_avx512_mask_pmaddubs_w_512(<64 x i8> %x0, <64 x i8> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pmaddubs_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmaddubsw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x04,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmaddubs_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmaddubsw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0x7d,0x49,0x04,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.pmaddubs.w.512(<64 x i8> %x0, <64 x i8> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
declare <16 x i32> @llvm.x86.avx512.pmaddw.d.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked VPMADDWD: multiply signed word pairs and add adjacent products
; into dwords.
define <16 x i32> @test_int_x86_avx512_pmaddw_d_512(<32 x i16> %x0, <32 x i16> %x1) {
; CHECK-LABEL: test_int_x86_avx512_pmaddw_d_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpmaddwd %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xf5,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <16 x i32> @llvm.x86.avx512.pmaddw.d.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <16 x i32> %1
}
|
|
|
|
; Merge-masked VPMADDWD: select on the i16 mask (16 dword lanes) must fold
; into {%k1} with %x2 as the passthru destination.
define <16 x i32> @test_int_x86_avx512_mask_pmaddw_d_512(<32 x i16> %x0, <32 x i16> %x1, <16 x i32> %x2, i16 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_pmaddw_d_512:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpmaddwd %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xf5,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_pmaddw_d_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpmaddwd %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xf5,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <16 x i32> @llvm.x86.avx512.pmaddw.d.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i16 %x3 to <16 x i1>
  %3 = select <16 x i1> %2, <16 x i32> %1, <16 x i32> %x2
  ret <16 x i32> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.dbpsadbw.512(<64 x i8>, <64 x i8>, i32)
|
|
|
|
; vdbpsadbw in all three masking forms in one test: merge-masked (imm 2),
; zero-masked (imm 3) and unmasked (imm 4), with the three results summed.
; Distinct immediates keep each call a separate single-use node.
define <32 x i16>@test_int_x86_avx512_mask_dbpsadbw_512(<64 x i8> %x0, <64 x i8> %x1, <32 x i16> %x3, i32 %x4) {
; X86-LABEL: test_int_x86_avx512_mask_dbpsadbw_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vdbpsadbw $2, %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf3,0x7d,0x49,0x42,0xd1,0x02]
; X86-NEXT: vdbpsadbw $3, %zmm1, %zmm0, %zmm3 {%k1} {z} # encoding: [0x62,0xf3,0x7d,0xc9,0x42,0xd9,0x03]
; X86-NEXT: vdbpsadbw $4, %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf3,0x7d,0x48,0x42,0xc1,0x04]
; X86-NEXT: vpaddw %zmm0, %zmm3, %zmm0 # encoding: [0x62,0xf1,0x65,0x48,0xfd,0xc0]
; X86-NEXT: vpaddw %zmm0, %zmm2, %zmm0 # encoding: [0x62,0xf1,0x6d,0x48,0xfd,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_dbpsadbw_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vdbpsadbw $2, %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf3,0x7d,0x49,0x42,0xd1,0x02]
; X64-NEXT: vdbpsadbw $3, %zmm1, %zmm0, %zmm3 {%k1} {z} # encoding: [0x62,0xf3,0x7d,0xc9,0x42,0xd9,0x03]
; X64-NEXT: vdbpsadbw $4, %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf3,0x7d,0x48,0x42,0xc1,0x04]
; X64-NEXT: vpaddw %zmm0, %zmm3, %zmm0 # encoding: [0x62,0xf1,0x65,0x48,0xfd,0xc0]
; X64-NEXT: vpaddw %zmm0, %zmm2, %zmm0 # encoding: [0x62,0xf1,0x6d,0x48,0xfd,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.dbpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i32 2)
  %2 = bitcast i32 %x4 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x3
  %4 = call <32 x i16> @llvm.x86.avx512.dbpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i32 3)
  %5 = bitcast i32 %x4 to <32 x i1>
  %6 = select <32 x i1> %5, <32 x i16> %4, <32 x i16> zeroinitializer
  %7 = call <32 x i16> @llvm.x86.avx512.dbpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i32 4)
  %res3 = add <32 x i16> %3, %6
  %res4 = add <32 x i16> %res3, %7
  ret <32 x i16> %res4
}
|
|
|
|
declare <8 x i64> @llvm.x86.avx512.psad.bw.512(<64 x i8>, <64 x i8>)
|
|
|
|
; Two unmasked psad.bw calls (vpsadbw has no masked form here) whose
; results are added together.
define <8 x i64>@test_int_x86_avx512_mask_psadb_w_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2){
; CHECK-LABEL: test_int_x86_avx512_mask_psadb_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsadbw %zmm1, %zmm0, %zmm1 # encoding: [0x62,0xf1,0x7d,0x48,0xf6,0xc9]
; CHECK-NEXT: vpsadbw %zmm2, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xf6,0xc2]
; CHECK-NEXT: vpaddq %zmm0, %zmm1, %zmm0 # encoding: [0x62,0xf1,0xf5,0x48,0xd4,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <8 x i64> @llvm.x86.avx512.psad.bw.512(<64 x i8> %x0, <64 x i8> %x1)
  %res1 = call <8 x i64> @llvm.x86.avx512.psad.bw.512(<64 x i8> %x0, <64 x i8> %x2)
  %res2 = add <8 x i64> %res, %res1
  ret <8 x i64> %res2
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.psrlv.w.512(<32 x i16>, <32 x i16>) nounwind readnone
|
|
|
|
; psrlv.w on all-constant operands under optsize: the shift is not constant
; folded, so a constant-pool load feeds a vpsrlvw (memory operand form).
define <32 x i16> @test_x86_avx512_psrlv_w_512_const() optsize {
; X86-LABEL: test_x86_avx512_psrlv_w_512_const:
; X86: # %bb.0:
; X86-NEXT: vmovdqa64 {{.*#+}} zmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,65535]
; X86-NEXT: # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0x05,A,A,A,A]
; X86-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}, kind: FK_Data_4
; X86-NEXT: vpsrlvw {{\.LCPI.*}}, %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x10,0x05,A,A,A,A]
; X86-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}, kind: FK_Data_4
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_psrlv_w_512_const:
; X64: # %bb.0:
; X64-NEXT: vmovdqa64 {{.*#+}} zmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,65535]
; X64-NEXT: # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0x05,A,A,A,A]
; X64-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}-4, kind: reloc_riprel_4byte
; X64-NEXT: vpsrlvw {{.*}}(%rip), %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x10,0x05,A,A,A,A]
; X64-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}-4, kind: reloc_riprel_4byte
; X64-NEXT: retq # encoding: [0xc3]
  %res1 = call <32 x i16> @llvm.x86.avx512.psrlv.w.512(<32 x i16> <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 -1>, <32 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 -1>)
  ret <32 x i16> %res1
}
|
|
|
|
; Unmasked variable word shift right logical: expects a plain vpsrlvw.
define <32 x i16>@test_int_x86_avx512_psrlv32hi(<32 x i16> %x0, <32 x i16> %x1) {
; CHECK-LABEL: test_int_x86_avx512_psrlv32hi:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x10,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrlv.w.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked vpsrlvw: result selected against passthru %x2 under mask %x3.
define <32 x i16>@test_int_x86_avx512_mask_psrlv32hi(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_psrlv32hi:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsrlvw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x10,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_psrlv32hi:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsrlvw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x10,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrlv.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
; Zero-masked vpsrlvw: result selected against zeroinitializer ({%k1} {z}).
define <32 x i16>@test_int_x86_avx512_maskz_psrlv32hi(<32 x i16> %x0, <32 x i16> %x1, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_maskz_psrlv32hi:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xfd,0xc9,0x10,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_psrlv32hi:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xfd,0xc9,0x10,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrlv.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.psrav.w.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked variable word arithmetic shift right: expects a plain vpsravw.
define <32 x i16>@test_int_x86_avx512_psrav32_hi(<32 x i16> %x0, <32 x i16> %x1) {
; CHECK-LABEL: test_int_x86_avx512_psrav32_hi:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsravw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x11,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrav.w.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked vpsravw: result selected against passthru %x2 under mask %x3.
define <32 x i16>@test_int_x86_avx512_mask_psrav32_hi(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_psrav32_hi:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsravw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x11,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_psrav32_hi:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsravw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x11,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrav.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
; Zero-masked vpsravw: result selected against zeroinitializer ({%k1} {z}).
define <32 x i16>@test_int_x86_avx512_maskz_psrav32_hi(<32 x i16> %x0, <32 x i16> %x1, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_maskz_psrav32_hi:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsravw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xfd,0xc9,0x11,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_psrav32_hi:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsravw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xfd,0xc9,0x11,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrav.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}
|
|
|
|
; psrav.w on all-constant operands: not constant folded, so a constant-pool
; load feeds vpsravw with a memory operand (the mask/passthru args are unused).
define <32 x i16>@test_int_x86_avx512_mask_psrav32_hi_const(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_psrav32_hi_const:
; X86: # %bb.0:
; X86-NEXT: vmovdqa64 {{.*#+}} zmm0 = [2,9,65524,23,65510,37,65496,51,2,9,65524,23,65510,37,65496,51,2,9,65524,23,65510,37,65496,51,2,9,65524,23,65510,37,65496,51]
; X86-NEXT: # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0x05,A,A,A,A]
; X86-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}, kind: FK_Data_4
; X86-NEXT: vpsravw {{\.LCPI.*}}, %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x11,0x05,A,A,A,A]
; X86-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}, kind: FK_Data_4
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_psrav32_hi_const:
; X64: # %bb.0:
; X64-NEXT: vmovdqa64 {{.*#+}} zmm0 = [2,9,65524,23,65510,37,65496,51,2,9,65524,23,65510,37,65496,51,2,9,65524,23,65510,37,65496,51,2,9,65524,23,65510,37,65496,51]
; X64-NEXT: # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0x05,A,A,A,A]
; X64-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}-4, kind: reloc_riprel_4byte
; X64-NEXT: vpsravw {{.*}}(%rip), %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x11,0x05,A,A,A,A]
; X64-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}-4, kind: reloc_riprel_4byte
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psrav.w.512(<32 x i16> <i16 2, i16 9, i16 -12, i16 23, i16 -26, i16 37, i16 -40, i16 51, i16 2, i16 9, i16 -12, i16 23, i16 -26, i16 37, i16 -40, i16 51, i16 2, i16 9, i16 -12, i16 23, i16 -26, i16 37, i16 -40, i16 51, i16 2, i16 9, i16 -12, i16 23, i16 -26, i16 37, i16 -40, i16 51>, <32 x i16> <i16 1, i16 10, i16 35, i16 52, i16 69, i16 9, i16 16, i16 49, i16 1, i16 10, i16 35, i16 52, i16 69, i16 9, i16 16, i16 49, i16 1, i16 10, i16 35, i16 52, i16 69, i16 9, i16 16, i16 49, i16 1, i16 10, i16 35, i16 52, i16 69, i16 9, i16 16, i16 49>)
  ret <32 x i16> %1
}
|
|
|
|
; Unmasked variable word shift left: expects a plain vpsllvw.
define <32 x i16>@test_int_x86_avx512_psllv32hi(<32 x i16> %x0, <32 x i16> %x1) {
; CHECK-LABEL: test_int_x86_avx512_psllv32hi:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsllvw %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x12,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psllv.w.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked vpsllvw: result selected against passthru %x2 under mask %x3.
define <32 x i16>@test_int_x86_avx512_mask_psllv32hi(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_psllv32hi:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsllvw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x12,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_psllv32hi:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsllvw %zmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf2,0xfd,0x49,0x12,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psllv.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
; Zero-masked vpsllvw: result selected against zeroinitializer ({%k1} {z}).
define <32 x i16>@test_int_x86_avx512_maskz_psllv32hi(<32 x i16> %x0, <32 x i16> %x1, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_maskz_psllv32hi:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsllvw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xfd,0xc9,0x12,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_psllv32hi:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsllvw %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xfd,0xc9,0x12,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.psllv.w.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}
|
|
|
|
declare <32 x i16> @llvm.x86.avx512.permvar.hi.512(<32 x i16>, <32 x i16>)
|
|
|
|
; Unmasked word permute: expects a plain vpermw (note reversed operand order:
; %x1 is the index vector).
define <32 x i16>@test_int_x86_avx512_permvar_hi_512(<32 x i16> %x0, <32 x i16> %x1) {
; CHECK-LABEL: test_int_x86_avx512_permvar_hi_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpermw %zmm0, %zmm1, %zmm0 # encoding: [0x62,0xf2,0xf5,0x48,0x8d,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.permvar.hi.512(<32 x i16> %x0, <32 x i16> %x1)
  ret <32 x i16> %1
}
|
|
|
|
; Merge-masked vpermw: result selected against passthru %x2 under mask %x3.
define <32 x i16>@test_int_x86_avx512_mask_permvar_hi_512(<32 x i16> %x0, <32 x i16> %x1, <32 x i16> %x2, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_mask_permvar_hi_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpermw %zmm0, %zmm1, %zmm2 {%k1} # encoding: [0x62,0xf2,0xf5,0x49,0x8d,0xd0]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_mask_permvar_hi_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpermw %zmm0, %zmm1, %zmm2 {%k1} # encoding: [0x62,0xf2,0xf5,0x49,0x8d,0xd0]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.permvar.hi.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %x2
  ret <32 x i16> %3
}
|
|
|
|
; Zero-masked vpermw: result selected against zeroinitializer ({%k1} {z}).
define <32 x i16>@test_int_x86_avx512_maskz_permvar_hi_512(<32 x i16> %x0, <32 x i16> %x1, i32 %x3) {
; X86-LABEL: test_int_x86_avx512_maskz_permvar_hi_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpermw %zmm0, %zmm1, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xf5,0xc9,0x8d,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_int_x86_avx512_maskz_permvar_hi_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpermw %zmm0, %zmm1, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0xf5,0xc9,0x8d,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
  %1 = call <32 x i16> @llvm.x86.avx512.permvar.hi.512(<32 x i16> %x0, <32 x i16> %x1)
  %2 = bitcast i32 %x3 to <32 x i1>
  %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> zeroinitializer
  ret <32 x i16> %3
}
|
|
|
|
; Unmasked word shift left by xmm count: expects a plain vpsllw.
define <32 x i16> @test_x86_avx512_psll_w_512(<32 x i16> %a0, <8 x i16> %a1) {
; CHECK-LABEL: test_x86_avx512_psll_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsllw %xmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xf1,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psll.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
|
|
; Merge-masked vpsllw (xmm shift count): result selected against %passthru.
define <32 x i16> @test_x86_avx512_mask_psll_w_512(<32 x i16> %a0, <8 x i16> %a1, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_mask_psll_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsllw %xmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xf1,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_mask_psll_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsllw %xmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xf1,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psll.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> %passthru
  ret <32 x i16> %res2
}
|
|
; Zero-masked vpsllw (xmm shift count): selected against zeroinitializer.
define <32 x i16> @test_x86_avx512_maskz_psll_w_512(<32 x i16> %a0, <8 x i16> %a1, i32 %mask) {
; X86-LABEL: test_x86_avx512_maskz_psll_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsllw %xmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0xf1,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_maskz_psll_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsllw %xmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0xf1,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psll.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> zeroinitializer
  ret <32 x i16> %res2
}
|
|
declare <32 x i16> @llvm.x86.avx512.psll.w.512(<32 x i16>, <8 x i16>) nounwind readnone
|
|
|
|
|
|
; psllv.w on all-constant operands under optsize: not constant folded, so a
; constant-pool load feeds vpsllvw with a memory operand.
define <32 x i16> @test_x86_avx512_psllv_w_512_const() optsize {
; X86-LABEL: test_x86_avx512_psllv_w_512_const:
; X86: # %bb.0:
; X86-NEXT: vmovdqa64 {{.*#+}} zmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,65535]
; X86-NEXT: # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0x05,A,A,A,A]
; X86-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}, kind: FK_Data_4
; X86-NEXT: vpsllvw {{\.LCPI.*}}, %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x12,0x05,A,A,A,A]
; X86-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}, kind: FK_Data_4
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_psllv_w_512_const:
; X64: # %bb.0:
; X64-NEXT: vmovdqa64 {{.*#+}} zmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,65535]
; X64-NEXT: # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0x05,A,A,A,A]
; X64-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}-4, kind: reloc_riprel_4byte
; X64-NEXT: vpsllvw {{.*}}(%rip), %zmm0, %zmm0 # encoding: [0x62,0xf2,0xfd,0x48,0x12,0x05,A,A,A,A]
; X64-NEXT: # fixup A - offset: 6, value: {{\.LCPI.*}}-4, kind: reloc_riprel_4byte
; X64-NEXT: retq # encoding: [0xc3]
  %res1 = call <32 x i16> @llvm.x86.avx512.psllv.w.512(<32 x i16> <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 -1>, <32 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 -1>)
  ret <32 x i16> %res1
}
|
|
declare <32 x i16> @llvm.x86.avx512.psllv.w.512(<32 x i16>, <32 x i16>) nounwind readnone
|
|
|
|
; Unmasked immediate word shift left: expects vpsllw $7.
define <32 x i16> @test_x86_avx512_pslli_w_512(<32 x i16> %a0) {
; CHECK-LABEL: test_x86_avx512_pslli_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsllw $7, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x71,0xf0,0x07]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.pslli.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
|
|
; Merge-masked immediate vpsllw: result selected against %passthru.
define <32 x i16> @test_x86_avx512_mask_pslli_w_512(<32 x i16> %a0, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_mask_pslli_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsllw $7, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x75,0x49,0x71,0xf0,0x07]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_mask_pslli_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsllw $7, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x75,0x49,0x71,0xf0,0x07]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.pslli.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> %passthru
  ret <32 x i16> %res2
}
|
|
; Zero-masked immediate vpsllw: selected against zeroinitializer.
define <32 x i16> @test_x86_avx512_maskz_pslli_w_512(<32 x i16> %a0, i32 %mask) {
; X86-LABEL: test_x86_avx512_maskz_pslli_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsllw $7, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x71,0xf0,0x07]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_maskz_pslli_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsllw $7, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x71,0xf0,0x07]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.pslli.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> zeroinitializer
  ret <32 x i16> %res2
}
|
|
declare <32 x i16> @llvm.x86.avx512.pslli.w.512(<32 x i16>, i32) nounwind readnone
|
|
|
|
|
|
; Unmasked word arithmetic shift right by xmm count: expects a plain vpsraw.
define <32 x i16> @test_x86_avx512_psra_w_512(<32 x i16> %a0, <8 x i16> %a1) {
; CHECK-LABEL: test_x86_avx512_psra_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsraw %xmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xe1,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psra.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
|
|
; Merge-masked vpsraw (xmm shift count): result selected against %passthru.
define <32 x i16> @test_x86_avx512_mask_psra_w_512(<32 x i16> %a0, <8 x i16> %a1, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_mask_psra_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsraw %xmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe1,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_mask_psra_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsraw %xmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xe1,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psra.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> %passthru
  ret <32 x i16> %res2
}
|
|
; Zero-masked vpsraw (xmm shift count): selected against zeroinitializer.
define <32 x i16> @test_x86_avx512_maskz_psra_w_512(<32 x i16> %a0, <8 x i16> %a1, i32 %mask) {
; X86-LABEL: test_x86_avx512_maskz_psra_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsraw %xmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0xe1,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_maskz_psra_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsraw %xmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0xe1,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psra.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> zeroinitializer
  ret <32 x i16> %res2
}
|
|
declare <32 x i16> @llvm.x86.avx512.psra.w.512(<32 x i16>, <8 x i16>) nounwind readnone
|
|
|
|
|
|
; Unmasked immediate word arithmetic shift right: expects vpsraw $7.
define <32 x i16> @test_x86_avx512_psrai_w_512(<32 x i16> %a0) {
; CHECK-LABEL: test_x86_avx512_psrai_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsraw $7, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x71,0xe0,0x07]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrai.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
|
|
; Merge-masked immediate vpsraw: result selected against %passthru.
define <32 x i16> @test_x86_avx512_mask_psrai_w_512(<32 x i16> %a0, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_mask_psrai_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsraw $7, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x75,0x49,0x71,0xe0,0x07]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_mask_psrai_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsraw $7, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x75,0x49,0x71,0xe0,0x07]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrai.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> %passthru
  ret <32 x i16> %res2
}
|
|
; Zero-masked immediate vpsraw: selected against zeroinitializer. (The
; %passthru argument is unused in this maskz variant.)
define <32 x i16> @test_x86_avx512_maskz_psrai_w_512(<32 x i16> %a0, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_maskz_psrai_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsraw $7, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x71,0xe0,0x07]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_maskz_psrai_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsraw $7, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x71,0xe0,0x07]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrai.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> zeroinitializer
  ret <32 x i16> %res2
}
|
|
declare <32 x i16> @llvm.x86.avx512.psrai.w.512(<32 x i16>, i32) nounwind readnone
|
|
|
|
|
|
; Unmasked word logical shift right by xmm count: expects a plain vpsrlw.
define <32 x i16> @test_x86_avx512_psrl_w_512(<32 x i16> %a0, <8 x i16> %a1) {
; CHECK-LABEL: test_x86_avx512_psrl_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsrlw %xmm1, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xd1,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrl.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
|
|
; Merge-masked vpsrlw (xmm shift count): result selected against %passthru.
define <32 x i16> @test_x86_avx512_mask_psrl_w_512(<32 x i16> %a0, <8 x i16> %a1, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_mask_psrl_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsrlw %xmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xd1,0xd1]
; X86-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_mask_psrl_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsrlw %xmm1, %zmm0, %zmm2 {%k1} # encoding: [0x62,0xf1,0x7d,0x49,0xd1,0xd1]
; X64-NEXT: vmovdqa64 %zmm2, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrl.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> %passthru
  ret <32 x i16> %res2
}
|
|
; Zero-masked vpsrlw (xmm shift count): selected against zeroinitializer.
define <32 x i16> @test_x86_avx512_maskz_psrl_w_512(<32 x i16> %a0, <8 x i16> %a1, i32 %mask) {
; X86-LABEL: test_x86_avx512_maskz_psrl_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsrlw %xmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0xd1,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_maskz_psrl_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsrlw %xmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0xd1,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrl.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> zeroinitializer
  ret <32 x i16> %res2
}
|
|
declare <32 x i16> @llvm.x86.avx512.psrl.w.512(<32 x i16>, <8 x i16>) nounwind readnone
; Unmasked shift with a loaded count: the <8 x i16> load should fold into
; the vpsrlw memory operand rather than be loaded separately.
define <32 x i16> @test_x86_avx512_psrl_w_512_load(<32 x i16> %a0, <8 x i16>* %p) {
; X86-LABEL: test_x86_avx512_psrl_w_512_load:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax # encoding: [0x8b,0x44,0x24,0x04]
; X86-NEXT: vpsrlw (%eax), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xd1,0x00]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_psrl_w_512_load:
; X64: # %bb.0:
; X64-NEXT: vpsrlw (%rdi), %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0xd1,0x07]
; X64-NEXT: retq # encoding: [0xc3]
  %a1 = load <8 x i16>, <8 x i16>* %p
  %res = call <32 x i16> @llvm.x86.avx512.psrl.w.512(<32 x i16> %a0, <8 x i16> %a1) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
; Unmasked immediate shift: should select the $imm form of vpsrlw.
define <32 x i16> @test_x86_avx512_psrli_w_512(<32 x i16> %a0) {
; CHECK-LABEL: test_x86_avx512_psrli_w_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpsrlw $7, %zmm0, %zmm0 # encoding: [0x62,0xf1,0x7d,0x48,0x71,0xd0,0x07]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrli.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  ret <32 x i16> %res
}
; Merging-masked immediate shift: the select into %passthru should become a
; {%k1}-masked vpsrlw $7 writing into the passthru register.
define <32 x i16> @test_x86_avx512_mask_psrli_w_512(<32 x i16> %a0, <32 x i16> %passthru, i32 %mask) {
; X86-LABEL: test_x86_avx512_mask_psrli_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsrlw $7, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x75,0x49,0x71,0xd0,0x07]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_mask_psrli_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsrlw $7, %zmm0, %zmm1 {%k1} # encoding: [0x62,0xf1,0x75,0x49,0x71,0xd0,0x07]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrli.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> %passthru
  ret <32 x i16> %res2
}
; Zero-masked immediate shift: select against zeroinitializer should form
; the {%k1} {z} zero-masking form of vpsrlw $7.
define <32 x i16> @test_x86_avx512_maskz_psrli_w_512(<32 x i16> %a0, i32 %mask) {
; X86-LABEL: test_x86_avx512_maskz_psrli_w_512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vpsrlw $7, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x71,0xd0,0x07]
; X86-NEXT: retl # encoding: [0xc3]
;
; X64-LABEL: test_x86_avx512_maskz_psrli_w_512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vpsrlw $7, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf1,0x7d,0xc9,0x71,0xd0,0x07]
; X64-NEXT: retq # encoding: [0xc3]
  %res = call <32 x i16> @llvm.x86.avx512.psrli.w.512(<32 x i16> %a0, i32 7) ; <<32 x i16>> [#uses=1]
  %mask.cast = bitcast i32 %mask to <32 x i1>
  %res2 = select <32 x i1> %mask.cast, <32 x i16> %res, <32 x i16> zeroinitializer
  ret <32 x i16> %res2
}

declare <32 x i16> @llvm.x86.avx512.psrli.w.512(<32 x i16>, i32) nounwind readnone