mirror of https://github.com/RPCS3/llvm-mirror.git synced 2025-01-31 20:51:52 +01:00

[DAG] SimplifyMultipleUseDemandedBits - remove superfluous bitcasts

If the SimplifyMultipleUseDemandedBits call peeks through BITCASTs back to the original type, then we can remove the BITCASTs entirely.

Differential Revision: https://reviews.llvm.org/D79572
Simon Pilgrim 2020-05-08 19:04:29 +01:00
parent dc5f5241a9
commit 9de716fac1
11 changed files with 486 additions and 523 deletions

View File

@@ -617,9 +617,11 @@ SDValue TargetLowering::SimplifyMultipleUseDemandedBits(
     SDValue Src = peekThroughBitcasts(Op.getOperand(0));
     EVT SrcVT = Src.getValueType();
     EVT DstVT = Op.getValueType();
+    if (SrcVT == DstVT)
+      return Src;
     unsigned NumSrcEltBits = SrcVT.getScalarSizeInBits();
     unsigned NumDstEltBits = DstVT.getScalarSizeInBits();
     if (NumSrcEltBits == NumDstEltBits)
       if (SDValue V = SimplifyMultipleUseDemandedBits(
               Src, DemandedBits, DemandedElts, DAG, Depth + 1))
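
The early-out is small but easy to misread, so here is a minimal self-contained sketch of the idea. This is a toy node model with illustrative names (Node, simplifyBitcast), not LLVM's actual SDValue/SelectionDAG API: peekThroughBitcasts() skips any chain of bitcast wrappers, and if the value underneath already has the type the caller wants, every bitcast in the chain is a no-op and the source can be returned directly.

#include <cassert>
#include <string>

// Toy stand-ins for SDValue/EVT: a node has a value type, one operand,
// and a flag marking it as a bitcast.
struct Node {
  std::string Type;              // e.g. "v2i64" or "v4i32"
  const Node *Operand = nullptr;
  bool IsBitcast = false;
};

// Analogue of llvm::peekThroughBitcasts(): walk through a bitcast chain.
const Node *peekThroughBitcasts(const Node *N) {
  while (N->IsBitcast)
    N = N->Operand;
  return N;
}

// Analogue of the BITCAST case above, with the new early-out.
const Node *simplifyBitcast(const Node *Op) {
  const Node *Src = peekThroughBitcasts(Op->Operand);
  if (Src->Type == Op->Type)
    return Src;   // chain peeks back to the original type: drop it all
  return Op;      // otherwise fall through to the per-element logic
}

int main() {
  Node X{"v2i64"};                    // original value
  Node C1{"v4i32", &X, true};         // bitcast v2i64 -> v4i32
  Node C2{"v2i64", &C1, true};        // bitcast v4i32 -> v2i64
  assert(simplifyBitcast(&C2) == &X); // both bitcasts are removed
}

In the real combiner this means bitcast chains that round-trip back to the original type are dropped rather than recursed through and rebuilt, which is where the instruction-count reductions and register churn in the test diffs below come from.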

View File

@@ -782,9 +782,8 @@ define <3 x i16> @v_fshr_v3i16(<3 x i16> %src0, <3 x i16> %src1, <3 x i16> %src2
; VI-NEXT: v_and_b32_e32 v7, 15, v5
; VI-NEXT: v_lshrrev_b16_e32 v8, v7, v3
; VI-NEXT: v_sub_u16_e32 v7, 16, v7
; VI-NEXT: s_mov_b32 s4, 0xf000f
; VI-NEXT: v_lshlrev_b16_e32 v1, v7, v1
; VI-NEXT: v_and_b32_e32 v5, s4, v5
; VI-NEXT: v_and_b32_e32 v5, 15, v5
; VI-NEXT: v_or_b32_e32 v1, v1, v8
; VI-NEXT: v_cmp_eq_u16_e32 vcc, 0, v5
; VI-NEXT: v_cndmask_b32_e32 v1, v1, v3, vcc
@@ -792,7 +791,7 @@ define <3 x i16> @v_fshr_v3i16(<3 x i16> %src0, <3 x i16> %src1, <3 x i16> %src2
; VI-NEXT: v_lshrrev_b16_e32 v5, v3, v2
; VI-NEXT: v_sub_u16_e32 v3, 16, v3
; VI-NEXT: v_lshlrev_b16_e32 v0, v3, v0
; VI-NEXT: v_and_b32_e32 v3, s4, v4
; VI-NEXT: v_and_b32_e32 v3, 0xf000f, v4
; VI-NEXT: v_or_b32_e32 v0, v0, v5
; VI-NEXT: v_cmp_eq_u16_e32 vcc, 0, v3
; VI-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc

View File

@@ -5,17 +5,16 @@ define void @_Z4loopPxS_iS_i(i64* %d) {
; CHECK-LABEL: _Z4loopPxS_iS_i:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: vldrw.u32 q0, [r0]
; CHECK-NEXT: vmov r2, s2
; CHECK-NEXT: vmov r1, s0
; CHECK-NEXT: sxth r2, r2
; CHECK-NEXT: vmov r1, s2
; CHECK-NEXT: vmov r2, s0
; CHECK-NEXT: rsbs r1, r1, #0
; CHECK-NEXT: rsbs r2, r2, #0
; CHECK-NEXT: sxth r1, r1
; CHECK-NEXT: sxth r2, r2
; CHECK-NEXT: asrs r3, r1, #31
; CHECK-NEXT: asr.w r12, r2, #31
; CHECK-NEXT: strd r1, r3, [r0]
; CHECK-NEXT: strd r2, r12, [r0, #8]
; CHECK-NEXT: asr.w r12, r1, #31
; CHECK-NEXT: asrs r3, r2, #31
; CHECK-NEXT: strd r2, r3, [r0]
; CHECK-NEXT: strd r1, r12, [r0, #8]
; CHECK-NEXT: bx lr
entry:
%wide.load = load <2 x i64>, <2 x i64>* undef, align 8

View File

@@ -6,26 +6,23 @@
define void @vld3_v2i32(<6 x i32> *%src, <2 x i32> *%dst) {
; CHECK-LABEL: vld3_v2i32:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: ldrd r2, r3, [r0, #16]
; CHECK-NEXT: vldrw.u32 q0, [r0]
; CHECK-NEXT: vmov.32 q2[0], r2
; CHECK-NEXT: vmov.f64 d2, d0
; CHECK-NEXT: vmov.32 q2[2], r3
; CHECK-NEXT: vmov.32 r0, q0[2]
; CHECK-NEXT: vmov.f32 s12, s1
; CHECK-NEXT: vmov.f32 s6, s3
; CHECK-NEXT: vmov.f32 s14, s8
; CHECK-NEXT: vmov r2, s12
; CHECK-NEXT: vmov r12, s6
; CHECK-NEXT: ldrd r12, r3, [r0, #16]
; CHECK-NEXT: vmov.32 r0, q0[1]
; CHECK-NEXT: vmov r2, s0
; CHECK-NEXT: vdup.32 q1, r0
; CHECK-NEXT: vmov r0, s14
; CHECK-NEXT: add r0, r12
; CHECK-NEXT: add r0, r3
; CHECK-NEXT: vmov r3, s0
; CHECK-NEXT: vmov r0, s4
; CHECK-NEXT: vmov.f64 d2, d0
; CHECK-NEXT: vmov.f32 s6, s3
; CHECK-NEXT: add r0, r2
; CHECK-NEXT: vmov.32 r2, q0[2]
; CHECK-NEXT: vdup.32 q0, r2
; CHECK-NEXT: vmov r2, s0
; CHECK-NEXT: add r0, r2
; CHECK-NEXT: vmov r2, s6
; CHECK-NEXT: add r2, r12
; CHECK-NEXT: add r2, r3
; CHECK-NEXT: vmov r3, s4
; CHECK-NEXT: add r2, r3
; CHECK-NEXT: strd r2, r0, [r1]
; CHECK-NEXT: strd r0, r2, [r1]
; CHECK-NEXT: bx lr
entry:
%l1 = load <6 x i32>, <6 x i32>* %src, align 4

View File

@@ -50,14 +50,9 @@ define i2 @bitcast_v4i32_to_v2i2(<4 x i32> %a0) nounwind {
; SSE2-SSSE3-NEXT: movmskps %xmm0, %eax
; SSE2-SSSE3-NEXT: movl %eax, %ecx
; SSE2-SSSE3-NEXT: shrb $2, %cl
; SSE2-SSSE3-NEXT: movzbl %cl, %ecx
; SSE2-SSSE3-NEXT: andb $3, %al
; SSE2-SSSE3-NEXT: movzbl %al, %eax
; SSE2-SSSE3-NEXT: movd %eax, %xmm0
; SSE2-SSSE3-NEXT: pinsrw $4, %ecx, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, -{{[0-9]+}}(%rsp)
; SSE2-SSSE3-NEXT: movb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb %cl, %al
; SSE2-SSSE3-NEXT: # kill: def $al killed $al killed $eax
; SSE2-SSSE3-NEXT: retq
;
; AVX12-LABEL: bitcast_v4i32_to_v2i2:
@@ -96,14 +91,9 @@ define i4 @bitcast_v8i16_to_v2i4(<8 x i16> %a0) nounwind {
; SSE2-SSSE3-NEXT: pmovmskb %xmm0, %eax
; SSE2-SSSE3-NEXT: movl %eax, %ecx
; SSE2-SSSE3-NEXT: shrb $4, %cl
; SSE2-SSSE3-NEXT: movzbl %cl, %ecx
; SSE2-SSSE3-NEXT: andb $15, %al
; SSE2-SSSE3-NEXT: movzbl %al, %eax
; SSE2-SSSE3-NEXT: movd %eax, %xmm0
; SSE2-SSSE3-NEXT: pinsrw $4, %ecx, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, -{{[0-9]+}}(%rsp)
; SSE2-SSSE3-NEXT: movb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb %cl, %al
; SSE2-SSSE3-NEXT: # kill: def $al killed $al killed $eax
; SSE2-SSSE3-NEXT: retq
;
; AVX12-LABEL: bitcast_v8i16_to_v2i4:
@@ -183,14 +173,9 @@ define i2 @bitcast_v4i64_to_v2i2(<4 x i64> %a0) nounwind {
; SSE2-SSSE3-NEXT: movmskps %xmm0, %eax
; SSE2-SSSE3-NEXT: movl %eax, %ecx
; SSE2-SSSE3-NEXT: shrb $2, %cl
; SSE2-SSSE3-NEXT: movzbl %cl, %ecx
; SSE2-SSSE3-NEXT: andb $3, %al
; SSE2-SSSE3-NEXT: movzbl %al, %eax
; SSE2-SSSE3-NEXT: movd %eax, %xmm0
; SSE2-SSSE3-NEXT: pinsrw $4, %ecx, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, -{{[0-9]+}}(%rsp)
; SSE2-SSSE3-NEXT: movb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb %cl, %al
; SSE2-SSSE3-NEXT: # kill: def $al killed $al killed $eax
; SSE2-SSSE3-NEXT: retq
;
; AVX12-LABEL: bitcast_v4i64_to_v2i2:
@@ -232,14 +217,9 @@ define i4 @bitcast_v8i32_to_v2i4(<8 x i32> %a0) nounwind {
; SSE2-SSSE3-NEXT: pmovmskb %xmm0, %eax
; SSE2-SSSE3-NEXT: movl %eax, %ecx
; SSE2-SSSE3-NEXT: shrb $4, %cl
; SSE2-SSSE3-NEXT: movzbl %cl, %ecx
; SSE2-SSSE3-NEXT: andb $15, %al
; SSE2-SSSE3-NEXT: movzbl %al, %eax
; SSE2-SSSE3-NEXT: movd %eax, %xmm0
; SSE2-SSSE3-NEXT: pinsrw $4, %ecx, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, -{{[0-9]+}}(%rsp)
; SSE2-SSSE3-NEXT: movb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb %cl, %al
; SSE2-SSSE3-NEXT: # kill: def $al killed $al killed $eax
; SSE2-SSSE3-NEXT: retq
;
; AVX12-LABEL: bitcast_v8i32_to_v2i4:
@@ -395,14 +375,9 @@ define i4 @bitcast_v8i64_to_v2i4(<8 x i64> %a0) nounwind {
; SSE2-SSSE3-NEXT: pmovmskb %xmm0, %eax
; SSE2-SSSE3-NEXT: movl %eax, %ecx
; SSE2-SSSE3-NEXT: shrb $4, %cl
; SSE2-SSSE3-NEXT: movzbl %cl, %ecx
; SSE2-SSSE3-NEXT: andb $15, %al
; SSE2-SSSE3-NEXT: movzbl %al, %eax
; SSE2-SSSE3-NEXT: movd %eax, %xmm0
; SSE2-SSSE3-NEXT: pinsrw $4, %ecx, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, -{{[0-9]+}}(%rsp)
; SSE2-SSSE3-NEXT: movb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb -{{[0-9]+}}(%rsp), %al
; SSE2-SSSE3-NEXT: addb %cl, %al
; SSE2-SSSE3-NEXT: # kill: def $al killed $al killed $eax
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: bitcast_v8i64_to_v2i4:

View File

@@ -577,25 +577,25 @@ define <4 x i32> @vector_i128_i32(<4 x i32> %x, <4 x i32> %y, <4 x i32>* %divdst
; X86-NEXT: movd %xmm1, %esi
; X86-NEXT: cltd
; X86-NEXT: idivl %esi
; X86-NEXT: movd %eax, %xmm4
; X86-NEXT: pshufd {{.*#+}} xmm5 = xmm0[1,1,2,3]
; X86-NEXT: movd %xmm5, %eax
; X86-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,3]
; X86-NEXT: movd %xmm5, %esi
; X86-NEXT: movd %eax, %xmm2
; X86-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,2,3]
; X86-NEXT: movd %xmm4, %eax
; X86-NEXT: pshufd {{.*#+}} xmm4 = xmm1[1,1,2,3]
; X86-NEXT: movd %xmm4, %esi
; X86-NEXT: cltd
; X86-NEXT: idivl %esi
; X86-NEXT: movd %eax, %xmm5
; X86-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
; X86-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm3[0]
; X86-NEXT: movdqa %xmm4, (%ecx)
; X86-NEXT: pmuludq %xmm1, %xmm4
; X86-NEXT: pshufd {{.*#+}} xmm3 = xmm4[0,2,2,3]
; X86-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,0],xmm2[0,0]
; X86-NEXT: movd %eax, %xmm4
; X86-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
; X86-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; X86-NEXT: movdqa %xmm2, (%ecx)
; X86-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
; X86-NEXT: pmuludq %xmm1, %xmm2
; X86-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; X86-NEXT: pmuludq %xmm5, %xmm1
; X86-NEXT: pmuludq %xmm3, %xmm1
; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; X86-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; X86-NEXT: psubd %xmm3, %xmm0
; X86-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X86-NEXT: psubd %xmm2, %xmm0
; X86-NEXT: popl %esi
; X86-NEXT: retl
;
@@ -620,25 +620,25 @@ define <4 x i32> @vector_i128_i32(<4 x i32> %x, <4 x i32> %y, <4 x i32>* %divdst
; X64-NEXT: movd %xmm1, %ecx
; X64-NEXT: cltd
; X64-NEXT: idivl %ecx
; X64-NEXT: movd %eax, %xmm4
; X64-NEXT: pshufd {{.*#+}} xmm5 = xmm0[1,1,2,3]
; X64-NEXT: movd %xmm5, %eax
; X64-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,3]
; X64-NEXT: movd %xmm5, %ecx
; X64-NEXT: movd %eax, %xmm2
; X64-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,2,3]
; X64-NEXT: movd %xmm4, %eax
; X64-NEXT: pshufd {{.*#+}} xmm4 = xmm1[1,1,2,3]
; X64-NEXT: movd %xmm4, %ecx
; X64-NEXT: cltd
; X64-NEXT: idivl %ecx
; X64-NEXT: movd %eax, %xmm5
; X64-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
; X64-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm3[0]
; X64-NEXT: movdqa %xmm4, (%rdi)
; X64-NEXT: pmuludq %xmm1, %xmm4
; X64-NEXT: pshufd {{.*#+}} xmm3 = xmm4[0,2,2,3]
; X64-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,0],xmm2[0,0]
; X64-NEXT: movd %eax, %xmm4
; X64-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
; X64-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; X64-NEXT: movdqa %xmm2, (%rdi)
; X64-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
; X64-NEXT: pmuludq %xmm1, %xmm2
; X64-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; X64-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; X64-NEXT: pmuludq %xmm5, %xmm1
; X64-NEXT: pmuludq %xmm3, %xmm1
; X64-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; X64-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; X64-NEXT: psubd %xmm3, %xmm0
; X64-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X64-NEXT: psubd %xmm2, %xmm0
; X64-NEXT: retq
%div = sdiv <4 x i32> %x, %y
store <4 x i32> %div, <4 x i32>* %divdst, align 16

View File

@@ -577,25 +577,25 @@ define <4 x i32> @vector_i128_i32(<4 x i32> %x, <4 x i32> %y, <4 x i32>* %divdst
; X86-NEXT: movd %xmm1, %esi
; X86-NEXT: xorl %edx, %edx
; X86-NEXT: divl %esi
; X86-NEXT: movd %eax, %xmm4
; X86-NEXT: pshufd {{.*#+}} xmm5 = xmm0[1,1,2,3]
; X86-NEXT: movd %xmm5, %eax
; X86-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,3]
; X86-NEXT: movd %xmm5, %esi
; X86-NEXT: movd %eax, %xmm2
; X86-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,2,3]
; X86-NEXT: movd %xmm4, %eax
; X86-NEXT: pshufd {{.*#+}} xmm4 = xmm1[1,1,2,3]
; X86-NEXT: movd %xmm4, %esi
; X86-NEXT: xorl %edx, %edx
; X86-NEXT: divl %esi
; X86-NEXT: movd %eax, %xmm5
; X86-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
; X86-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm3[0]
; X86-NEXT: movdqa %xmm4, (%ecx)
; X86-NEXT: pmuludq %xmm1, %xmm4
; X86-NEXT: pshufd {{.*#+}} xmm3 = xmm4[0,2,2,3]
; X86-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,0],xmm2[0,0]
; X86-NEXT: movd %eax, %xmm4
; X86-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
; X86-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; X86-NEXT: movdqa %xmm2, (%ecx)
; X86-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
; X86-NEXT: pmuludq %xmm1, %xmm2
; X86-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; X86-NEXT: pmuludq %xmm5, %xmm1
; X86-NEXT: pmuludq %xmm3, %xmm1
; X86-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; X86-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; X86-NEXT: psubd %xmm3, %xmm0
; X86-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X86-NEXT: psubd %xmm2, %xmm0
; X86-NEXT: popl %esi
; X86-NEXT: retl
;
@@ -620,25 +620,25 @@ define <4 x i32> @vector_i128_i32(<4 x i32> %x, <4 x i32> %y, <4 x i32>* %divdst
; X64-NEXT: movd %xmm1, %ecx
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: divl %ecx
; X64-NEXT: movd %eax, %xmm4
; X64-NEXT: pshufd {{.*#+}} xmm5 = xmm0[1,1,2,3]
; X64-NEXT: movd %xmm5, %eax
; X64-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,3]
; X64-NEXT: movd %xmm5, %ecx
; X64-NEXT: movd %eax, %xmm2
; X64-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,2,3]
; X64-NEXT: movd %xmm4, %eax
; X64-NEXT: pshufd {{.*#+}} xmm4 = xmm1[1,1,2,3]
; X64-NEXT: movd %xmm4, %ecx
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: divl %ecx
; X64-NEXT: movd %eax, %xmm5
; X64-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
; X64-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm3[0]
; X64-NEXT: movdqa %xmm4, (%rdi)
; X64-NEXT: pmuludq %xmm1, %xmm4
; X64-NEXT: pshufd {{.*#+}} xmm3 = xmm4[0,2,2,3]
; X64-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,0],xmm2[0,0]
; X64-NEXT: movd %eax, %xmm4
; X64-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
; X64-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; X64-NEXT: movdqa %xmm2, (%rdi)
; X64-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
; X64-NEXT: pmuludq %xmm1, %xmm2
; X64-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; X64-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; X64-NEXT: pmuludq %xmm5, %xmm1
; X64-NEXT: pmuludq %xmm3, %xmm1
; X64-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; X64-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; X64-NEXT: psubd %xmm3, %xmm0
; X64-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X64-NEXT: psubd %xmm2, %xmm0
; X64-NEXT: retq
%div = udiv <4 x i32> %x, %y
store <4 x i32> %div, <4 x i32>* %divdst, align 16

View File

@@ -2076,96 +2076,96 @@ define void @PR34947(<9 x i16>* %p0, <9 x i32>* %p1) nounwind {
; X86-SSE-NEXT: pushl %esi
; X86-SSE-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-SSE-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-SSE-NEXT: movdqa (%eax), %xmm5
; X86-SSE-NEXT: movdqa (%eax), %xmm4
; X86-SSE-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-SSE-NEXT: movdqa (%ecx), %xmm2
; X86-SSE-NEXT: movdqa 16(%ecx), %xmm6
; X86-SSE-NEXT: pxor %xmm1, %xmm1
; X86-SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; X86-SSE-NEXT: movdqa %xmm5, %xmm4
; X86-SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
; X86-SSE-NEXT: movdqa %xmm5, %xmm3
; X86-SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
; X86-SSE-NEXT: movdqa %xmm5, %xmm1
; X86-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-SSE-NEXT: movd %xmm1, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[3,1,2,3]
; X86-SSE-NEXT: movdqa (%ecx), %xmm1
; X86-SSE-NEXT: movdqa 16(%ecx), %xmm5
; X86-SSE-NEXT: pxor %xmm3, %xmm3
; X86-SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
; X86-SSE-NEXT: movdqa %xmm4, %xmm2
; X86-SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; X86-SSE-NEXT: movdqa %xmm4, %xmm6
; X86-SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
; X86-SSE-NEXT: movdqa %xmm4, %xmm3
; X86-SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-SSE-NEXT: movd %xmm3, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm5[3,1,2,3]
; X86-SSE-NEXT: movd %xmm3, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm3
; X86-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm6[2,3,0,1]
; X86-SSE-NEXT: movd %xmm7, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[2,3,0,1]
; X86-SSE-NEXT: movd %xmm7, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm7
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1]
; X86-SSE-NEXT: movd %xmm6, %eax
; X86-SSE-NEXT: movd %xmm5, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm3
; X86-SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,2,3]
; X86-SSE-NEXT: movd %xmm6, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
; X86-SSE-NEXT: movd %xmm5, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm5
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1]
; X86-SSE-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm7[0]
; X86-SSE-NEXT: movdqa %xmm4, %xmm5
; X86-SSE-NEXT: psrld $16, %xmm5
; X86-SSE-NEXT: movd %xmm5, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,3]
; X86-SSE-NEXT: movd %xmm5, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm6
; X86-SSE-NEXT: movd %xmm2, %eax
; X86-SSE-NEXT: movd %xmm1, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm5
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
; X86-SSE-NEXT: psrlq $48, %xmm4
; X86-SSE-NEXT: movd %xmm4, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm1[3,1,2,3]
; X86-SSE-NEXT: movd %xmm4, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm4
; X86-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
; X86-SSE-NEXT: movd %xmm2, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
; X86-SSE-NEXT: movd %xmm1, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm1
; X86-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm3[2,3,0,1]
; X86-SSE-NEXT: movd %xmm7, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm6[2,3,0,1]
; X86-SSE-NEXT: movd %xmm7, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm7
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm1[0],xmm7[1],xmm1[1]
; X86-SSE-NEXT: movd %xmm3, %eax
; X86-SSE-NEXT: movd %xmm6, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
; X86-SSE-NEXT: movd %xmm3, %eax
; X86-SSE-NEXT: movd %edx, %xmm3
; X86-SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,2,3]
; X86-SSE-NEXT: movd %xmm6, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm6
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1]
; X86-SSE-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm7[0]
; X86-SSE-NEXT: movdqa %xmm5, %xmm7
; X86-SSE-NEXT: psrld $16, %xmm7
; X86-SSE-NEXT: movd %xmm7, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm2[1,1,2,3]
; X86-SSE-NEXT: movd %xmm7, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm7
; X86-SSE-NEXT: movd %xmm4, %eax
; X86-SSE-NEXT: movd %xmm2, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: psrlq $48, %xmm5
; X86-SSE-NEXT: movd %xmm5, %eax
; X86-SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[3,1,2,3]
; X86-SSE-NEXT: movd %xmm5, %esi
; X86-SSE-NEXT: movd %edx, %xmm5
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1]
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,3,0,1]
; X86-SSE-NEXT: movd %xmm4, %eax
; X86-SSE-NEXT: movd %edx, %xmm4
; X86-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
; X86-SSE-NEXT: movd %xmm2, %esi
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl %esi
; X86-SSE-NEXT: movd %edx, %xmm2
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
; X86-SSE-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm2[0]
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
; X86-SSE-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm1[0]
; X86-SSE-NEXT: movd %xmm0, %eax
; X86-SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,0],xmm4[0,0]
; X86-SSE-NEXT: movdqa {{.*#+}} xmm0 = [8199,8199,8199,8199]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm7
; X86-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm5
; X86-SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm5[0,2,2,3]
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
; X86-SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,0],xmm1[0,0]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm3
; X86-SSE-NEXT: pmuludq %xmm0, %xmm6
; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[0,2,2,3]
; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[0,2,2,3]
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X86-SSE-NEXT: xorl %edx, %edx
; X86-SSE-NEXT: divl 32(%ecx)
; X86-SSE-NEXT: movdqa %xmm0, (%eax)
; X86-SSE-NEXT: movdqa %xmm4, (%eax)
; X86-SSE-NEXT: movdqa {{.*#+}} xmm0 = [8199,8199,8199,8199]
; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,1,3,3]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm5
; X86-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[0,2,2,3]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm1
; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm3
; X86-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
; X86-SSE-NEXT: pmuludq %xmm0, %xmm1
; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; X86-SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; X86-SSE-NEXT: imull $8199, %edx, %eax # imm = 0x2007
; X86-SSE-NEXT: movl %eax, (%eax)
; X86-SSE-NEXT: movdqa %xmm3, (%eax)
; X86-SSE-NEXT: movdqa %xmm2, (%eax)
; X86-SSE-NEXT: popl %esi
; X86-SSE-NEXT: retl
;
@@ -2318,96 +2318,96 @@ define void @PR34947(<9 x i16>* %p0, <9 x i32>* %p1) nounwind {
;
; X64-SSE-LABEL: PR34947:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: movdqa (%rdi), %xmm5
; X64-SSE-NEXT: movdqa (%rdi), %xmm4
; X64-SSE-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-SSE-NEXT: movdqa (%rsi), %xmm2
; X64-SSE-NEXT: movdqa 16(%rsi), %xmm6
; X64-SSE-NEXT: pxor %xmm1, %xmm1
; X64-SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; X64-SSE-NEXT: movdqa %xmm5, %xmm3
; X64-SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; X64-SSE-NEXT: movdqa %xmm5, %xmm7
; X64-SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
; X64-SSE-NEXT: movdqa %xmm5, %xmm1
; X64-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-SSE-NEXT: movd %xmm1, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[3,1,2,3]
; X64-SSE-NEXT: movd %xmm1, %ecx
; X64-SSE-NEXT: movdqa (%rsi), %xmm1
; X64-SSE-NEXT: movdqa 16(%rsi), %xmm5
; X64-SSE-NEXT: pxor %xmm3, %xmm3
; X64-SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
; X64-SSE-NEXT: movdqa %xmm4, %xmm2
; X64-SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; X64-SSE-NEXT: movdqa %xmm4, %xmm6
; X64-SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
; X64-SSE-NEXT: movdqa %xmm4, %xmm3
; X64-SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-SSE-NEXT: movd %xmm3, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm5[3,1,2,3]
; X64-SSE-NEXT: movd %xmm3, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm8
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[2,3,0,1]
; X64-SSE-NEXT: movd %xmm1, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[2,3,0,1]
; X64-SSE-NEXT: movd %xmm1, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm1
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm8[0],xmm1[1],xmm8[1]
; X64-SSE-NEXT: movd %edx, %xmm3
; X64-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm6[2,3,0,1]
; X64-SSE-NEXT: movd %xmm7, %eax
; X64-SSE-NEXT: movd %xmm6, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm4
; X64-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,1,2,3]
; X64-SSE-NEXT: movd %xmm7, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,2,3]
; X64-SSE-NEXT: movd %xmm6, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm6
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1]
; X64-SSE-NEXT: punpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm1[0]
; X64-SSE-NEXT: movdqa %xmm5, %xmm1
; X64-SSE-NEXT: psrld $16, %xmm1
; X64-SSE-NEXT: movd %xmm1, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,2,3]
; X64-SSE-NEXT: movd %xmm1, %ecx
; X64-SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[2,3,0,1]
; X64-SSE-NEXT: movd %xmm7, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm7
; X64-SSE-NEXT: movd %xmm3, %eax
; X64-SSE-NEXT: movd %xmm2, %ecx
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1]
; X64-SSE-NEXT: movd %xmm6, %eax
; X64-SSE-NEXT: movd %xmm5, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm1
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1]
; X64-SSE-NEXT: psrlq $48, %xmm5
; X64-SSE-NEXT: movd %xmm5, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[3,1,2,3]
; X64-SSE-NEXT: movd %edx, %xmm3
; X64-SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,2,3]
; X64-SSE-NEXT: movd %xmm6, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
; X64-SSE-NEXT: movd %xmm5, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm5
; X64-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,3,0,1]
; X64-SSE-NEXT: movd %xmm3, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
; X64-SSE-NEXT: movd %xmm2, %ecx
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1]
; X64-SSE-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm7[0]
; X64-SSE-NEXT: movdqa %xmm4, %xmm5
; X64-SSE-NEXT: psrld $16, %xmm5
; X64-SSE-NEXT: movd %xmm5, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,3]
; X64-SSE-NEXT: movd %xmm5, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm2
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
; X64-SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm2[0]
; X64-SSE-NEXT: movd %edx, %xmm6
; X64-SSE-NEXT: movd %xmm2, %eax
; X64-SSE-NEXT: movd %xmm1, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm5
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
; X64-SSE-NEXT: psrlq $48, %xmm4
; X64-SSE-NEXT: movd %xmm4, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm1[3,1,2,3]
; X64-SSE-NEXT: movd %xmm4, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm4
; X64-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
; X64-SSE-NEXT: movd %xmm2, %eax
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
; X64-SSE-NEXT: movd %xmm1, %ecx
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl %ecx
; X64-SSE-NEXT: movd %edx, %xmm1
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
; X64-SSE-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm1[0]
; X64-SSE-NEXT: movd %xmm0, %eax
; X64-SSE-NEXT: xorl %edx, %edx
; X64-SSE-NEXT: divl 32(%rsi)
; X64-SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,0],xmm5[0,0]
; X64-SSE-NEXT: movdqa {{.*#+}} xmm0 = [8199,8199,8199,8199]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm7
; X64-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,1,3,3]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm5
; X64-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[0,2,2,3]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm1
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm4
; X64-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
; X64-SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,0],xmm8[0,0]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm6
; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,2,2,3]
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm3
; X64-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
; X64-SSE-NEXT: pmuludq %xmm0, %xmm1
; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; X64-SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; X64-SSE-NEXT: imull $8199, %edx, %eax # imm = 0x2007
; X64-SSE-NEXT: movl %eax, (%rax)
; X64-SSE-NEXT: movdqa %xmm3, (%rax)
; X64-SSE-NEXT: movdqa %xmm2, (%rax)
; X64-SSE-NEXT: movdqa %xmm1, (%rax)
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: PR34947:

View File

@@ -449,145 +449,145 @@ define <6 x i32> @smulo_v6i32(<6 x i32> %a0, <6 x i32> %a1, <6 x i32>* %p2) noun
; SSE2-LABEL: smulo_v6i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movq %rdi, %rax
; SSE2-NEXT: movd %r8d, %xmm8
; SSE2-NEXT: movd %ecx, %xmm0
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1]
; SSE2-NEXT: movd %edx, %xmm3
; SSE2-NEXT: movd %esi, %xmm6
; SSE2-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm3[0],xmm6[1],xmm3[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm0[0]
; SSE2-NEXT: movd {{.*#+}} xmm10 = mem[0],zero,zero,zero
; SSE2-NEXT: movd %r8d, %xmm0
; SSE2-NEXT: movd %ecx, %xmm1
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: movd %edx, %xmm0
; SSE2-NEXT: movd %esi, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE2-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1]
; SSE2-NEXT: movd {{.*#+}} xmm12 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE2-NEXT: movd %r9d, %xmm13
; SSE2-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSE2-NEXT: movdqa %xmm13, %xmm11
; SSE2-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm2[0],xmm11[1],xmm2[1]
; SSE2-NEXT: movd {{.*#+}} xmm9 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm7 = mem[0],zero,zero,zero
; SSE2-NEXT: pmuludq %xmm7, %xmm13
; SSE2-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm5 = mem[0],zero,zero,zero
; SSE2-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm1[0]
; SSE2-NEXT: movd %r9d, %xmm10
; SSE2-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSE2-NEXT: movdqa %xmm10, %xmm9
; SSE2-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm1[0],xmm9[1],xmm1[1]
; SSE2-NEXT: movd {{.*#+}} xmm8 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm6 = mem[0],zero,zero,zero
; SSE2-NEXT: pmuludq %xmm6, %xmm10
; SSE2-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm8[0],xmm6[1],xmm8[1]
; SSE2-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; SSE2-NEXT: pxor %xmm7, %xmm7
; SSE2-NEXT: pxor %xmm4, %xmm4
; SSE2-NEXT: pxor %xmm5, %xmm5
; SSE2-NEXT: pcmpgtd %xmm1, %xmm5
; SSE2-NEXT: pand %xmm6, %xmm5
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: pcmpgtd %xmm6, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddd %xmm5, %xmm0
; SSE2-NEXT: pmuludq %xmm6, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,3,2,3]
; SSE2-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,0],xmm10[0,0]
; SSE2-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,0],xmm8[0,0]
; SSE2-NEXT: pmuludq %xmm12, %xmm3
; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm3[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
; SSE2-NEXT: psubd %xmm0, %xmm5
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE2-NEXT: movdqa %xmm0, (%rcx)
; SSE2-NEXT: psrad $31, %xmm0
; SSE2-NEXT: pcmpeqd %xmm5, %xmm0
; SSE2-NEXT: pcmpeqd %xmm1, %xmm1
; SSE2-NEXT: pxor %xmm1, %xmm0
; SSE2-NEXT: pcmpgtd %xmm5, %xmm4
; SSE2-NEXT: pand %xmm2, %xmm4
; SSE2-NEXT: pxor %xmm3, %xmm3
; SSE2-NEXT: pcmpgtd %xmm7, %xmm3
; SSE2-NEXT: pand %xmm11, %xmm3
; SSE2-NEXT: pcmpgtd %xmm11, %xmm4
; SSE2-NEXT: pand %xmm7, %xmm4
; SSE2-NEXT: paddd %xmm3, %xmm4
; SSE2-NEXT: pmuludq %xmm9, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,3,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm13[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1]
; SSE2-NEXT: psubd %xmm4, %xmm5
; SSE2-NEXT: pcmpgtd %xmm2, %xmm3
; SSE2-NEXT: pand %xmm5, %xmm3
; SSE2-NEXT: paddd %xmm4, %xmm3
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,3,3]
; SSE2-NEXT: pmuludq %xmm5, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,3,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
; SSE2-NEXT: pmuludq %xmm4, %xmm5
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
; SSE2-NEXT: psubd %xmm3, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm13[0,2,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm5[0,2,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
; SSE2-NEXT: movdqa %xmm2, (%rcx)
; SSE2-NEXT: psrad $31, %xmm2
; SSE2-NEXT: pcmpeqd %xmm0, %xmm2
; SSE2-NEXT: pcmpeqd %xmm0, %xmm0
; SSE2-NEXT: pxor %xmm0, %xmm2
; SSE2-NEXT: pxor %xmm3, %xmm3
; SSE2-NEXT: pcmpgtd %xmm6, %xmm3
; SSE2-NEXT: pand %xmm9, %xmm3
; SSE2-NEXT: pcmpgtd %xmm9, %xmm7
; SSE2-NEXT: pand %xmm6, %xmm7
; SSE2-NEXT: paddd %xmm3, %xmm7
; SSE2-NEXT: pmuludq %xmm8, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,3,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm10[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
; SSE2-NEXT: psubd %xmm7, %xmm4
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm10[0,2,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; SSE2-NEXT: movq %xmm3, 16(%rcx)
; SSE2-NEXT: psrad $31, %xmm3
; SSE2-NEXT: pcmpeqd %xmm5, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm3
; SSE2-NEXT: pcmpeqd %xmm4, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm3
; SSE2-NEXT: movq %xmm3, 16(%rdi)
; SSE2-NEXT: movdqa %xmm0, (%rdi)
; SSE2-NEXT: movdqa %xmm2, (%rdi)
; SSE2-NEXT: retq
;
; SSSE3-LABEL: smulo_v6i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movq %rdi, %rax
; SSSE3-NEXT: movd %r8d, %xmm8
; SSSE3-NEXT: movd %ecx, %xmm0
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1]
; SSSE3-NEXT: movd %edx, %xmm3
; SSSE3-NEXT: movd %esi, %xmm6
; SSSE3-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm3[0],xmm6[1],xmm3[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm0[0]
; SSSE3-NEXT: movd {{.*#+}} xmm10 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd %r8d, %xmm0
; SSSE3-NEXT: movd %ecx, %xmm1
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: movd %edx, %xmm0
; SSSE3-NEXT: movd %esi, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSSE3-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1]
; SSSE3-NEXT: movd {{.*#+}} xmm12 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSSE3-NEXT: movd %r9d, %xmm13
; SSSE3-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSSE3-NEXT: movdqa %xmm13, %xmm11
; SSSE3-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm2[0],xmm11[1],xmm2[1]
; SSSE3-NEXT: movd {{.*#+}} xmm9 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm7 = mem[0],zero,zero,zero
; SSSE3-NEXT: pmuludq %xmm7, %xmm13
; SSSE3-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm5 = mem[0],zero,zero,zero
; SSSE3-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm1[0]
; SSSE3-NEXT: movd %r9d, %xmm10
; SSSE3-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSSE3-NEXT: movdqa %xmm10, %xmm9
; SSSE3-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm1[0],xmm9[1],xmm1[1]
; SSSE3-NEXT: movd {{.*#+}} xmm8 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm6 = mem[0],zero,zero,zero
; SSSE3-NEXT: pmuludq %xmm6, %xmm10
; SSSE3-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm8[0],xmm6[1],xmm8[1]
; SSSE3-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; SSSE3-NEXT: pxor %xmm7, %xmm7
; SSSE3-NEXT: pxor %xmm4, %xmm4
; SSSE3-NEXT: pxor %xmm5, %xmm5
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm5
; SSSE3-NEXT: pand %xmm6, %xmm5
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: pcmpgtd %xmm6, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: paddd %xmm5, %xmm0
; SSSE3-NEXT: pmuludq %xmm6, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,3,2,3]
; SSSE3-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,0],xmm10[0,0]
; SSSE3-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,0],xmm8[0,0]
; SSSE3-NEXT: pmuludq %xmm12, %xmm3
; SSSE3-NEXT: pshufd {{.*#+}} xmm6 = xmm3[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
; SSSE3-NEXT: psubd %xmm0, %xmm5
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSSE3-NEXT: movdqa %xmm0, (%rcx)
; SSSE3-NEXT: psrad $31, %xmm0
; SSSE3-NEXT: pcmpeqd %xmm5, %xmm0
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1
; SSSE3-NEXT: pxor %xmm1, %xmm0
; SSSE3-NEXT: pcmpgtd %xmm5, %xmm4
; SSSE3-NEXT: pand %xmm2, %xmm4
; SSSE3-NEXT: pxor %xmm3, %xmm3
; SSSE3-NEXT: pcmpgtd %xmm7, %xmm3
; SSSE3-NEXT: pand %xmm11, %xmm3
; SSSE3-NEXT: pcmpgtd %xmm11, %xmm4
; SSSE3-NEXT: pand %xmm7, %xmm4
; SSSE3-NEXT: paddd %xmm3, %xmm4
; SSSE3-NEXT: pmuludq %xmm9, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,3,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm13[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1]
; SSSE3-NEXT: psubd %xmm4, %xmm5
; SSSE3-NEXT: pcmpgtd %xmm2, %xmm3
; SSSE3-NEXT: pand %xmm5, %xmm3
; SSSE3-NEXT: paddd %xmm4, %xmm3
; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,1,3,3]
; SSSE3-NEXT: pmuludq %xmm5, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,3,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
; SSSE3-NEXT: pmuludq %xmm4, %xmm5
; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
; SSSE3-NEXT: psubd %xmm3, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm13[0,2,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm5[0,2,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
; SSSE3-NEXT: movdqa %xmm2, (%rcx)
; SSSE3-NEXT: psrad $31, %xmm2
; SSSE3-NEXT: pcmpeqd %xmm0, %xmm2
; SSSE3-NEXT: pcmpeqd %xmm0, %xmm0
; SSSE3-NEXT: pxor %xmm0, %xmm2
; SSSE3-NEXT: pxor %xmm3, %xmm3
; SSSE3-NEXT: pcmpgtd %xmm6, %xmm3
; SSSE3-NEXT: pand %xmm9, %xmm3
; SSSE3-NEXT: pcmpgtd %xmm9, %xmm7
; SSSE3-NEXT: pand %xmm6, %xmm7
; SSSE3-NEXT: paddd %xmm3, %xmm7
; SSSE3-NEXT: pmuludq %xmm8, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,3,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm10[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
; SSSE3-NEXT: psubd %xmm7, %xmm4
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm10[0,2,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; SSSE3-NEXT: movq %xmm3, 16(%rcx)
; SSSE3-NEXT: psrad $31, %xmm3
; SSSE3-NEXT: pcmpeqd %xmm5, %xmm3
; SSSE3-NEXT: pxor %xmm1, %xmm3
; SSSE3-NEXT: pcmpeqd %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm3
; SSSE3-NEXT: movq %xmm3, 16(%rdi)
; SSSE3-NEXT: movdqa %xmm0, (%rdi)
; SSSE3-NEXT: movdqa %xmm2, (%rdi)
; SSSE3-NEXT: retq
;
; SSE41-LABEL: smulo_v6i32:

View File

@@ -405,101 +405,101 @@ define <6 x i32> @umulo_v6i32(<6 x i32> %a0, <6 x i32> %a1, <6 x i32>* %p2) noun
; SSE2-LABEL: umulo_v6i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movq %rdi, %rax
; SSE2-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; SSE2-NEXT: movd {{.*#+}} xmm3 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE2-NEXT: movd %r8d, %xmm4
; SSE2-NEXT: movd %ecx, %xmm0
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
; SSE2-NEXT: movd %edx, %xmm5
; SSE2-NEXT: movd %esi, %xmm6
; SSE2-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm0[0]
; SSE2-NEXT: pmuludq %xmm1, %xmm6
; SSE2-NEXT: movd %r9d, %xmm0
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE2-NEXT: movd %r8d, %xmm0
; SSE2-NEXT: movd %ecx, %xmm1
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: movd %edx, %xmm0
; SSE2-NEXT: movd %esi, %xmm3
; SSE2-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; SSE2-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm1[0]
; SSE2-NEXT: movd %r9d, %xmm1
; SSE2-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; SSE2-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSE2-NEXT: pmuludq %xmm0, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,3,2,3]
; SSE2-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,0],xmm2[0,0]
; SSE2-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,0],xmm4[0,0]
; SSE2-NEXT: pmuludq %xmm3, %xmm5
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm5[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; SSE2-NEXT: pxor %xmm2, %xmm2
; SSE2-NEXT: pcmpeqd %xmm2, %xmm0
; SSE2-NEXT: pcmpeqd %xmm3, %xmm3
; SSE2-NEXT: pxor %xmm3, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm6[0,2,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
; SSE2-NEXT: movd {{.*#+}} xmm5 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE2-NEXT: pmuludq %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm3[1,1,3,3]
; SSE2-NEXT: pmuludq %xmm2, %xmm3
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,3,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE2-NEXT: pmuludq %xmm4, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
; SSE2-NEXT: pxor %xmm4, %xmm4
; SSE2-NEXT: pcmpeqd %xmm4, %xmm1
; SSE2-NEXT: pcmpeqd %xmm5, %xmm5
; SSE2-NEXT: pxor %xmm5, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
; SSE2-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSE2-NEXT: movd {{.*#+}} xmm6 = mem[0],zero,zero,zero
; SSE2-NEXT: pmuludq %xmm5, %xmm6
; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,3,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm1[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
; SSE2-NEXT: pcmpeqd %xmm2, %xmm7
; SSE2-NEXT: pxor %xmm3, %xmm7
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1]
; SSE2-NEXT: movq %xmm1, 16(%rcx)
; SSE2-NEXT: movdqa %xmm4, (%rcx)
; SSE2-NEXT: pmuludq %xmm2, %xmm6
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm6[1,3,2,3]
; SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm0[1,3,2,3]
; SSE2-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1]
; SSE2-NEXT: pcmpeqd %xmm4, %xmm7
; SSE2-NEXT: pxor %xmm5, %xmm7
; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
; SSE2-NEXT: movq %xmm0, 16(%rcx)
; SSE2-NEXT: movdqa %xmm3, (%rcx)
; SSE2-NEXT: movq %xmm7, 16(%rdi)
; SSE2-NEXT: movdqa %xmm0, (%rdi)
; SSE2-NEXT: movdqa %xmm1, (%rdi)
; SSE2-NEXT: retq
;
; SSSE3-LABEL: umulo_v6i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movq %rdi, %rax
; SSSE3-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; SSSE3-NEXT: movd {{.*#+}} xmm3 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSSE3-NEXT: movd %r8d, %xmm4
; SSSE3-NEXT: movd %ecx, %xmm0
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
; SSSE3-NEXT: movd %edx, %xmm5
; SSSE3-NEXT: movd %esi, %xmm6
; SSSE3-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm0[0]
; SSSE3-NEXT: pmuludq %xmm1, %xmm6
; SSSE3-NEXT: movd %r9d, %xmm0
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSSE3-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSSE3-NEXT: movd %r8d, %xmm0
; SSSE3-NEXT: movd %ecx, %xmm1
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: movd %edx, %xmm0
; SSSE3-NEXT: movd %esi, %xmm3
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; SSSE3-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm1[0]
; SSSE3-NEXT: movd %r9d, %xmm1
; SSSE3-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; SSSE3-NEXT: movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SSSE3-NEXT: pmuludq %xmm0, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,3,2,3]
; SSSE3-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,0],xmm2[0,0]
; SSSE3-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,0],xmm4[0,0]
; SSSE3-NEXT: pmuludq %xmm3, %xmm5
; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm5[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; SSSE3-NEXT: pxor %xmm2, %xmm2
; SSSE3-NEXT: pcmpeqd %xmm2, %xmm0
; SSSE3-NEXT: pcmpeqd %xmm3, %xmm3
; SSSE3-NEXT: pxor %xmm3, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm6[0,2,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
; SSSE3-NEXT: movd {{.*#+}} xmm5 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSSE3-NEXT: pmuludq %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm3[1,1,3,3]
; SSSE3-NEXT: pmuludq %xmm2, %xmm3
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,3,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSSE3-NEXT: pmuludq %xmm4, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
; SSSE3-NEXT: pxor %xmm4, %xmm4
; SSSE3-NEXT: pcmpeqd %xmm4, %xmm1
; SSSE3-NEXT: pcmpeqd %xmm5, %xmm5
; SSSE3-NEXT: pxor %xmm5, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
; SSSE3-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSSE3-NEXT: movd {{.*#+}} xmm6 = mem[0],zero,zero,zero
; SSSE3-NEXT: pmuludq %xmm5, %xmm6
; SSSE3-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,3,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm7 = xmm1[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1]
; SSSE3-NEXT: pcmpeqd %xmm2, %xmm7
; SSSE3-NEXT: pxor %xmm3, %xmm7
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1]
; SSSE3-NEXT: movq %xmm1, 16(%rcx)
; SSSE3-NEXT: movdqa %xmm4, (%rcx)
; SSSE3-NEXT: pmuludq %xmm2, %xmm6
; SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm6[1,3,2,3]
; SSSE3-NEXT: pshufd {{.*#+}} xmm7 = xmm0[1,3,2,3]
; SSSE3-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1]
; SSSE3-NEXT: pcmpeqd %xmm4, %xmm7
; SSSE3-NEXT: pxor %xmm5, %xmm7
; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
; SSSE3-NEXT: movq %xmm0, 16(%rcx)
; SSSE3-NEXT: movdqa %xmm3, (%rcx)
; SSSE3-NEXT: movq %xmm7, 16(%rdi)
; SSSE3-NEXT: movdqa %xmm0, (%rdi)
; SSSE3-NEXT: movdqa %xmm1, (%rdi)
; SSSE3-NEXT: retq
;
; SSE41-LABEL: umulo_v6i32:

View File

@@ -2492,77 +2492,68 @@ define i8 @test_v128i8(<128 x i8> %a0) {
;
; SSE41-LABEL: test_v128i8:
; SSE41: # %bb.0:
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm10 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm0[8],xmm5[9],xmm0[9],xmm5[10],xmm0[10],xmm5[11],xmm0[11],xmm5[12],xmm0[12],xmm5[13],xmm0[13],xmm5[14],xmm0[14],xmm5[15],xmm0[15]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm8 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm5, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [255,255,255,255,255,255,255,255]
; SSE41-NEXT: pmullw %xmm10, %xmm8
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm10 = xmm7[0],zero,xmm7[1],zero,xmm7[2],zero,xmm7[3],zero,xmm7[4],zero,xmm7[5],zero,xmm7[6],zero,xmm7[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8],xmm0[8],xmm7[9],xmm0[9],xmm7[10],xmm0[10],xmm7[11],xmm0[11],xmm7[12],xmm0[12],xmm7[13],xmm0[13],xmm7[14],xmm0[14],xmm7[15],xmm0[15]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm5 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm0[8],xmm3[9],xmm0[9],xmm3[10],xmm0[10],xmm3[11],xmm0[11],xmm3[12],xmm0[12],xmm3[13],xmm0[13],xmm3[14],xmm0[14],xmm3[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm7, %xmm3
; SSE41-NEXT: movdqa %xmm3, %xmm7
; SSE41-NEXT: pmullw %xmm1, %xmm3
; SSE41-NEXT: pand %xmm9, %xmm1
; SSE41-NEXT: pand %xmm9, %xmm8
; SSE41-NEXT: packuswb %xmm1, %xmm8
; SSE41-NEXT: pand %xmm9, %xmm7
; SSE41-NEXT: pmullw %xmm10, %xmm5
; SSE41-NEXT: pand %xmm9, %xmm5
; SSE41-NEXT: packuswb %xmm7, %xmm5
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm10 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero,xmm4[4],zero,xmm4[5],zero,xmm4[6],zero,xmm4[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm0[8],xmm4[9],xmm0[9],xmm4[10],xmm0[10],xmm4[11],xmm0[11],xmm4[12],xmm0[12],xmm4[13],xmm0[13],xmm4[14],xmm0[14],xmm4[15],xmm0[15]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm7 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; SSE41-NEXT: pmullw %xmm4, %xmm0
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm4 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero,xmm6[4],zero,xmm6[5],zero,xmm6[6],zero,xmm6[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm8 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero,xmm6[4],zero,xmm6[5],zero,xmm6[6],zero,xmm6[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm6 = xmm6[8],xmm0[8],xmm6[9],xmm0[9],xmm6[10],xmm0[10],xmm6[11],xmm0[11],xmm6[12],xmm0[12],xmm6[13],xmm0[13],xmm6[14],xmm0[14],xmm6[15],xmm0[15]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm1 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm9 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm6, %xmm2
; SSE41-NEXT: pmullw %xmm2, %xmm3
; SSE41-NEXT: pmullw %xmm0, %xmm3
; SSE41-NEXT: pand %xmm9, %xmm0
; SSE41-NEXT: pmullw %xmm10, %xmm7
; SSE41-NEXT: pand %xmm9, %xmm7
; SSE41-NEXT: packuswb %xmm0, %xmm7
; SSE41-NEXT: pand %xmm9, %xmm2
; SSE41-NEXT: pmullw %xmm4, %xmm1
; SSE41-NEXT: pand %xmm9, %xmm1
; SSE41-NEXT: packuswb %xmm2, %xmm1
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm1 = xmm7[0],zero,xmm7[1],zero,xmm7[2],zero,xmm7[3],zero,xmm7[4],zero,xmm7[5],zero,xmm7[6],zero,xmm7[7],zero
; SSE41-NEXT: pmullw %xmm0, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
; SSE41-NEXT: pshufb %xmm0, %xmm1
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm4 = xmm8[0],zero,xmm8[1],zero,xmm8[2],zero,xmm8[3],zero,xmm8[4],zero,xmm8[5],zero,xmm8[6],zero,xmm8[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm10 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero,xmm4[4],zero,xmm4[5],zero,xmm4[6],zero,xmm4[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm0[8],xmm4[9],xmm0[9],xmm4[10],xmm0[10],xmm4[11],xmm0[11],xmm4[12],xmm0[12],xmm4[13],xmm0[13],xmm4[14],xmm0[14],xmm4[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm2, %xmm4
; SSE41-NEXT: pshufb %xmm0, %xmm4
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: pand %xmm9, %xmm0
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm2 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero,xmm4[4],zero,xmm4[5],zero,xmm4[6],zero,xmm4[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
; SSE41-NEXT: pmullw %xmm2, %xmm1
; SSE41-NEXT: pand %xmm9, %xmm1
; SSE41-NEXT: packuswb %xmm0, %xmm1
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
; SSE41-NEXT: pmullw %xmm3, %xmm0
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm11 = xmm7[0],zero,xmm7[1],zero,xmm7[2],zero,xmm7[3],zero,xmm7[4],zero,xmm7[5],zero,xmm7[6],zero,xmm7[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8],xmm0[8],xmm7[9],xmm0[9],xmm7[10],xmm0[10],xmm7[11],xmm0[11],xmm7[12],xmm0[12],xmm7[13],xmm0[13],xmm7[14],xmm0[14],xmm7[15],xmm0[15]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm6 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm0[8],xmm3[9],xmm0[9],xmm3[10],xmm0[10],xmm3[11],xmm0[11],xmm3[12],xmm0[12],xmm3[13],xmm0[13],xmm3[14],xmm0[14],xmm3[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm7, %xmm3
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm7 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm0[8],xmm5[9],xmm0[9],xmm5[10],xmm0[10],xmm5[11],xmm0[11],xmm5[12],xmm0[12],xmm5[13],xmm0[13],xmm5[14],xmm0[14],xmm5[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm3, %xmm5
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
; SSE41-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
; SSE41-NEXT: pmullw %xmm5, %xmm1
; SSE41-NEXT: pmullw %xmm4, %xmm1
; SSE41-NEXT: pmullw %xmm0, %xmm1
; SSE41-NEXT: pmullw %xmm7, %xmm3
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: pmullw %xmm11, %xmm6
; SSE41-NEXT: pshufb %xmm0, %xmm6
; SSE41-NEXT: pmullw %xmm10, %xmm2
; SSE41-NEXT: pshufb %xmm0, %xmm2
; SSE41-NEXT: pmullw %xmm8, %xmm9
; SSE41-NEXT: pshufb %xmm0, %xmm9
; SSE41-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,255,255,255,255]
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm5 = xmm9[0],zero,xmm9[1],zero,xmm9[2],zero,xmm9[3],zero,xmm9[4],zero,xmm9[5],zero,xmm9[6],zero,xmm9[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
; SSE41-NEXT: pmullw %xmm5, %xmm2
; SSE41-NEXT: pshufb %xmm0, %xmm2
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm5 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero,xmm6[4],zero,xmm6[5],zero,xmm6[6],zero,xmm6[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
; SSE41-NEXT: pmullw %xmm5, %xmm3
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pand %xmm4, %xmm0
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
; SSE41-NEXT: pmullw %xmm3, %xmm2
; SSE41-NEXT: pand %xmm4, %xmm2
; SSE41-NEXT: packuswb %xmm0, %xmm2
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
; SSE41-NEXT: pmullw %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; SSE41-NEXT: pand %xmm9, %xmm0
; SSE41-NEXT: pand %xmm4, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm2
; SSE41-NEXT: packuswb %xmm2, %xmm0
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; SSE41-NEXT: pmullw %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
; SSE41-NEXT: pand %xmm9, %xmm0
; SSE41-NEXT: pand %xmm4, %xmm0
; SSE41-NEXT: packuswb %xmm2, %xmm0
; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; SSE41-NEXT: pmullw %xmm1, %xmm0
; SSE41-NEXT: pand %xmm9, %xmm0
; SSE41-NEXT: pand %xmm4, %xmm0
; SSE41-NEXT: packuswb %xmm2, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: psrlw $8, %xmm1