1
0
mirror of https://github.com/RPCS3/llvm-mirror.git synced 2024-10-19 11:02:59 +02:00

[X86][SSE] Regenerated sext/zext constant folding tests and added i686 tests

llvm-svn: 284837
This commit is contained in:
Simon Pilgrim 2016-10-21 16:22:16 +00:00
parent f5c4b15e16
commit 66a5e3ccb8

View File

@@ -1,5 +1,6 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-linux-gnu -mcpu=corei7-avx -mattr=+avx | FileCheck %s
; RUN: llc < %s -mtriple=i686-unknown-linux-gnu -mattr=+avx | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-unknown-linux-gnu -mattr=+avx | FileCheck %s --check-prefix=X64
; Verify that the backend correctly folds a sign/zero extend of a vector where
; elements are all constant values or UNDEFs.
@@ -7,11 +8,16 @@
; simple loads from constant pool of the result. That is because the resulting
; vector should be known at static time.
define <4 x i16> @test1() {
; CHECK-LABEL: test1:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = [0,4294967295,2,4294967293]
; CHECK-NEXT: retq
define <4 x i16> @test_sext_4i8_4i16() {
; X32-LABEL: test_sext_4i8_4i16:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = [0,4294967295,2,4294967293]
; X32-NEXT: retl
;
; X64-LABEL: test_sext_4i8_4i16:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = [0,4294967295,2,4294967293]
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -20,11 +26,16 @@ define <4 x i16> @test1() {
ret <4 x i16> %5
}
define <4 x i16> @test2() {
; CHECK-LABEL: test2:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <u,4294967295,u,4294967293>
; CHECK-NEXT: retq
define <4 x i16> @test_sext_4i8_4i16_undef() {
; X32-LABEL: test_sext_4i8_4i16_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <u,4294967295,u,4294967293>
; X32-NEXT: retl
;
; X64-LABEL: test_sext_4i8_4i16_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <u,4294967295,u,4294967293>
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 undef, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 undef, i32 2
@@ -33,11 +44,16 @@ define <4 x i16> @test2() {
ret <4 x i16> %5
}
define <4 x i32> @test3() {
; CHECK-LABEL: test3:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = [0,4294967295,2,4294967293]
; CHECK-NEXT: retq
define <4 x i32> @test_sext_4i8_4i32() {
; X32-LABEL: test_sext_4i8_4i32:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = [0,4294967295,2,4294967293]
; X32-NEXT: retl
;
; X64-LABEL: test_sext_4i8_4i32:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = [0,4294967295,2,4294967293]
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -46,11 +62,16 @@ define <4 x i32> @test3() {
ret <4 x i32> %5
}
define <4 x i32> @test4() {
; CHECK-LABEL: test4:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <u,4294967295,u,4294967293>
; CHECK-NEXT: retq
define <4 x i32> @test_sext_4i8_4i32_undef() {
; X32-LABEL: test_sext_4i8_4i32_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <u,4294967295,u,4294967293>
; X32-NEXT: retl
;
; X64-LABEL: test_sext_4i8_4i32_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <u,4294967295,u,4294967293>
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 undef, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 undef, i32 2
@@ -59,11 +80,18 @@ define <4 x i32> @test4() {
ret <4 x i32> %5
}
define <4 x i64> @test5() {
; CHECK-LABEL: test5:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = [0,18446744073709551615,2,18446744073709551613]
; CHECK-NEXT: retq
define <4 x i64> @test_sext_4i8_4i64() {
; X32-LABEL: test_sext_4i8_4i64:
; X32: # BB#0:
; X32-NEXT: vpmovsxbq {{\.LCPI.*}}, %xmm0
; X32-NEXT: vpmovsxbq {{\.LCPI.*}}, %xmm1
; X32-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X32-NEXT: retl
;
; X64-LABEL: test_sext_4i8_4i64:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = [0,18446744073709551615,2,18446744073709551613]
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -72,11 +100,19 @@ define <4 x i64> @test5() {
ret <4 x i64> %5
}
define <4 x i64> @test6() {
; CHECK-LABEL: test6:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = <u,18446744073709551615,u,18446744073709551613>
; CHECK-NEXT: retq
define <4 x i64> @test_sext_4i8_4i64_undef() {
; X32-LABEL: test_sext_4i8_4i64_undef:
; X32: # BB#0:
; X32-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; X32-NEXT: vpmovsxbq %xmm0, %xmm0
; X32-NEXT: vpmovsxbq {{\.LCPI.*}}, %xmm1
; X32-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X32-NEXT: retl
;
; X64-LABEL: test_sext_4i8_4i64_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = <u,18446744073709551615,u,18446744073709551613>
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 undef, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 undef, i32 2
@@ -85,11 +121,16 @@ define <4 x i64> @test6() {
ret <4 x i64> %5
}
define <8 x i16> @test7() {
; CHECK-LABEL: test7:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <0,65535,2,65533,u,u,u,u>
; CHECK-NEXT: retq
define <8 x i16> @test_sext_8i8_8i16() {
; X32-LABEL: test_sext_8i8_8i16:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <0,65535,2,65533,u,u,u,u>
; X32-NEXT: retl
;
; X64-LABEL: test_sext_8i8_8i16:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <0,65535,2,65533,u,u,u,u>
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 0, i32 0
%2 = insertelement <8 x i8> %1, i8 -1, i32 1
%3 = insertelement <8 x i8> %2, i8 2, i32 2
@@ -102,11 +143,16 @@ define <8 x i16> @test7() {
ret <8 x i16> %9
}
define <8 x i32> @test8() {
; CHECK-LABEL: test8:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = <0,4294967295,2,4294967293,u,u,u,u>
; CHECK-NEXT: retq
define <8 x i32> @test_sext_8i8_8i32() {
; X32-LABEL: test_sext_8i8_8i32:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} ymm0 = <0,4294967295,2,4294967293,u,u,u,u>
; X32-NEXT: retl
;
; X64-LABEL: test_sext_8i8_8i32:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = <0,4294967295,2,4294967293,u,u,u,u>
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 0, i32 0
%2 = insertelement <8 x i8> %1, i8 -1, i32 1
%3 = insertelement <8 x i8> %2, i8 2, i32 2
@@ -119,11 +165,16 @@ define <8 x i32> @test8() {
ret <8 x i32> %9
}
define <8 x i16> @test9() {
; CHECK-LABEL: test9:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <u,65535,u,65533,u,u,u,u>
; CHECK-NEXT: retq
define <8 x i16> @test_sext_8i8_8i16_undef() {
; X32-LABEL: test_sext_8i8_8i16_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <u,65535,u,65533,u,u,u,u>
; X32-NEXT: retl
;
; X64-LABEL: test_sext_8i8_8i16_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <u,65535,u,65533,u,u,u,u>
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 undef, i32 0
%2 = insertelement <8 x i8> %1, i8 -1, i32 1
%3 = insertelement <8 x i8> %2, i8 undef, i32 2
@@ -136,11 +187,16 @@ define <8 x i16> @test9() {
ret <8 x i16> %9
}
define <8 x i32> @test10() {
; CHECK-LABEL: test10:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = <0,u,2,u,u,u,u,u>
; CHECK-NEXT: retq
define <8 x i32> @test_sext_8i8_8i32_undef() {
; X32-LABEL: test_sext_8i8_8i32_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} ymm0 = <0,u,2,u,u,u,u,u>
; X32-NEXT: retl
;
; X64-LABEL: test_sext_8i8_8i32_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = <0,u,2,u,u,u,u,u>
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 0, i32 0
%2 = insertelement <8 x i8> %1, i8 undef, i32 1
%3 = insertelement <8 x i8> %2, i8 2, i32 2
@@ -153,11 +209,16 @@ define <8 x i32> @test10() {
ret <8 x i32> %9
}
define <4 x i16> @test11() {
; CHECK-LABEL: test11:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253]
; CHECK-NEXT: retq
define <4 x i16> @test_zext_4i8_4i16() {
; X32-LABEL: test_zext_4i8_4i16:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253]
; X32-NEXT: retl
;
; X64-LABEL: test_zext_4i8_4i16:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253]
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -166,11 +227,16 @@ define <4 x i16> @test11() {
ret <4 x i16> %5
}
define <4 x i32> @test12() {
; CHECK-LABEL: test12:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253]
; CHECK-NEXT: retq
define <4 x i32> @test_zext_4i8_4i32() {
; X32-LABEL: test_zext_4i8_4i32:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253]
; X32-NEXT: retl
;
; X64-LABEL: test_zext_4i8_4i32:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253]
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -179,11 +245,18 @@ define <4 x i32> @test12() {
ret <4 x i32> %5
}
define <4 x i64> @test13() {
; CHECK-LABEL: test13:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = [0,255,2,253]
; CHECK-NEXT: retq
define <4 x i64> @test_zext_4i8_4i64() {
; X32-LABEL: test_zext_4i8_4i64:
; X32: # BB#0:
; X32-NEXT: vpmovzxbq {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero,mem[1],zero,zero,zero,zero,zero,zero,zero
; X32-NEXT: vpmovzxbq {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero,mem[1],zero,zero,zero,zero,zero,zero,zero
; X32-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X32-NEXT: retl
;
; X64-LABEL: test_zext_4i8_4i64:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = [0,255,2,253]
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -192,11 +265,16 @@ define <4 x i64> @test13() {
ret <4 x i64> %5
}
define <4 x i16> @test14() {
; CHECK-LABEL: test14:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <u,255,u,253>
; CHECK-NEXT: retq
define <4 x i16> @test_zext_4i8_4i16_undef() {
; X32-LABEL: test_zext_4i8_4i16_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <u,255,u,253>
; X32-NEXT: retl
;
; X64-LABEL: test_zext_4i8_4i16_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <u,255,u,253>
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 undef, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 undef, i32 2
@@ -205,11 +283,16 @@ define <4 x i16> @test14() {
ret <4 x i16> %5
}
define <4 x i32> @test15() {
; CHECK-LABEL: test15:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <0,u,2,u>
; CHECK-NEXT: retq
define <4 x i32> @test_zext_4i8_4i32_undef() {
; X32-LABEL: test_zext_4i8_4i32_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <0,u,2,u>
; X32-NEXT: retl
;
; X64-LABEL: test_zext_4i8_4i32_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <0,u,2,u>
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 0, i32 0
%2 = insertelement <4 x i8> %1, i8 undef, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -218,11 +301,21 @@ define <4 x i32> @test15() {
ret <4 x i32> %5
}
define <4 x i64> @test16() {
; CHECK-LABEL: test16:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = <u,255,2,u>
; CHECK-NEXT: retq
define <4 x i64> @test_zext_4i8_4i64_undef() {
; X32-LABEL: test_zext_4i8_4i64_undef:
; X32: # BB#0:
; X32-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; X32-NEXT: vpmovzxbq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,zero,zero,zero,zero,xmm0[1],zero,zero,zero,zero,zero,zero,zero
; X32-NEXT: movl $2, %eax
; X32-NEXT: vmovd %eax, %xmm1
; X32-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
; X32-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X32-NEXT: retl
;
; X64-LABEL: test_zext_4i8_4i64_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = <u,255,2,u>
; X64-NEXT: retq
%1 = insertelement <4 x i8> undef, i8 undef, i32 0
%2 = insertelement <4 x i8> %1, i8 -1, i32 1
%3 = insertelement <4 x i8> %2, i8 2, i32 2
@@ -231,11 +324,16 @@ define <4 x i64> @test16() {
ret <4 x i64> %5
}
define <8 x i16> @test17() {
; CHECK-LABEL: test17:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253,4,251,6,249]
; CHECK-NEXT: retq
define <8 x i16> @test_zext_8i8_8i16() {
; X32-LABEL: test_zext_8i8_8i16:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253,4,251,6,249]
; X32-NEXT: retl
;
; X64-LABEL: test_zext_8i8_8i16:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = [0,255,2,253,4,251,6,249]
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 0, i32 0
%2 = insertelement <8 x i8> %1, i8 -1, i32 1
%3 = insertelement <8 x i8> %2, i8 2, i32 2
@@ -248,11 +346,16 @@ define <8 x i16> @test17() {
ret <8 x i16> %9
}
define <8 x i32> @test18() {
; CHECK-LABEL: test18:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = [0,255,2,253,4,251,6,249]
; CHECK-NEXT: retq
define <8 x i32> @test_zext_8i8_8i32() {
; X32-LABEL: test_zext_8i8_8i32:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} ymm0 = [0,255,2,253,4,251,6,249]
; X32-NEXT: retl
;
; X64-LABEL: test_zext_8i8_8i32:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = [0,255,2,253,4,251,6,249]
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 0, i32 0
%2 = insertelement <8 x i8> %1, i8 -1, i32 1
%3 = insertelement <8 x i8> %2, i8 2, i32 2
@@ -265,11 +368,16 @@ define <8 x i32> @test18() {
ret <8 x i32> %9
}
define <8 x i16> @test19() {
; CHECK-LABEL: test19:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} xmm0 = <u,255,u,253,u,251,u,249>
; CHECK-NEXT: retq
define <8 x i16> @test_zext_8i8_8i16_undef() {
; X32-LABEL: test_zext_8i8_8i16_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} xmm0 = <u,255,u,253,u,251,u,249>
; X32-NEXT: retl
;
; X64-LABEL: test_zext_8i8_8i16_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} xmm0 = <u,255,u,253,u,251,u,249>
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 undef, i32 0
%2 = insertelement <8 x i8> %1, i8 -1, i32 1
%3 = insertelement <8 x i8> %2, i8 undef, i32 2
@@ -282,11 +390,16 @@ define <8 x i16> @test19() {
ret <8 x i16> %9
}
define <8 x i32> @test20() {
; CHECK-LABEL: test20:
; CHECK: # BB#0:
; CHECK-NEXT: vmovaps {{.*#+}} ymm0 = <0,u,2,253,4,u,6,u>
; CHECK-NEXT: retq
define <8 x i32> @test_zext_8i8_8i32_undef() {
; X32-LABEL: test_zext_8i8_8i32_undef:
; X32: # BB#0:
; X32-NEXT: vmovaps {{.*#+}} ymm0 = <0,u,2,253,4,u,6,u>
; X32-NEXT: retl
;
; X64-LABEL: test_zext_8i8_8i32_undef:
; X64: # BB#0:
; X64-NEXT: vmovaps {{.*#+}} ymm0 = <0,u,2,253,4,u,6,u>
; X64-NEXT: retq
%1 = insertelement <8 x i8> undef, i8 0, i32 0
%2 = insertelement <8 x i8> %1, i8 undef, i32 1
%3 = insertelement <8 x i8> %2, i8 2, i32 2