Commit 3d80f1333d: Same as what was done for gather/scatter/load/store in r367489. Expandload/compressstore were delayed due to the lack of constant mask handling, which has since been fixed. (llvm-svn: 367738)

An illustrative compressstore sketch appears after the test file below.

; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S %s -scalarize-masked-mem-intrin -mtriple=x86_64-linux-gnu | FileCheck %s

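; Variable mask: each lane is tested individually, and the load pointer only
; advances past an element when that lane's mask bit is set.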
define <2 x i64> @scalarize_v2i64(i64* %p, <2 x i1> %mask, <2 x i64> %passthru) {
; CHECK-LABEL: @scalarize_v2i64(
; CHECK-NEXT: [[SCALAR_MASK:%.*]] = bitcast <2 x i1> [[MASK:%.*]] to i2
; CHECK-NEXT: [[TMP1:%.*]] = and i2 [[SCALAR_MASK]], 1
; CHECK-NEXT: [[TMP2:%.*]] = icmp ne i2 [[TMP1]], 0
; CHECK-NEXT: br i1 [[TMP2]], label [[COND_LOAD:%.*]], label [[ELSE:%.*]]
; CHECK: cond.load:
; CHECK-NEXT: [[TMP3:%.*]] = load i64, i64* [[P:%.*]], align 1
; CHECK-NEXT: [[TMP4:%.*]] = insertelement <2 x i64> [[PASSTHRU:%.*]], i64 [[TMP3]], i64 0
; CHECK-NEXT: [[TMP5:%.*]] = getelementptr inbounds i64, i64* [[P]], i32 1
; CHECK-NEXT: br label [[ELSE]]
; CHECK: else:
; CHECK-NEXT: [[RES_PHI_ELSE:%.*]] = phi <2 x i64> [ [[TMP4]], [[COND_LOAD]] ], [ [[PASSTHRU]], [[TMP0:%.*]] ]
; CHECK-NEXT: [[PTR_PHI_ELSE:%.*]] = phi i64* [ [[TMP5]], [[COND_LOAD]] ], [ [[P]], [[TMP0]] ]
; CHECK-NEXT: [[TMP6:%.*]] = and i2 [[SCALAR_MASK]], -2
; CHECK-NEXT: [[TMP7:%.*]] = icmp ne i2 [[TMP6]], 0
; CHECK-NEXT: br i1 [[TMP7]], label [[COND_LOAD1:%.*]], label [[ELSE2:%.*]]
; CHECK: cond.load1:
; CHECK-NEXT: [[TMP8:%.*]] = load i64, i64* [[PTR_PHI_ELSE]], align 1
; CHECK-NEXT: [[TMP9:%.*]] = insertelement <2 x i64> [[RES_PHI_ELSE]], i64 [[TMP8]], i64 1
; CHECK-NEXT: br label [[ELSE2]]
; CHECK: else2:
; CHECK-NEXT: [[RES_PHI_ELSE3:%.*]] = phi <2 x i64> [ [[TMP9]], [[COND_LOAD1]] ], [ [[RES_PHI_ELSE]], [[ELSE]] ]
; CHECK-NEXT: ret <2 x i64> [[RES_PHI_ELSE3]]
;
  %ret = call <2 x i64> @llvm.masked.expandload.v2i64.p0v2i64(i64* %p, <2 x i1> %mask, <2 x i64> %passthru)
  ret <2 x i64> %ret
}

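; All-ones constant mask: both lanes are loaded unconditionally from consecutive
; elements, with no branching.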
define <2 x i64> @scalarize_v2i64_ones_mask(i64* %p, <2 x i64> %passthru) {
; CHECK-LABEL: @scalarize_v2i64_ones_mask(
; CHECK-NEXT: [[TMP1:%.*]] = getelementptr inbounds i64, i64* [[P:%.*]], i32 0
; CHECK-NEXT: [[LOAD0:%.*]] = load i64, i64* [[TMP1]], align 1
; CHECK-NEXT: [[RES0:%.*]] = insertelement <2 x i64> [[PASSTHRU:%.*]], i64 [[LOAD0]], i64 0
; CHECK-NEXT: [[TMP2:%.*]] = getelementptr inbounds i64, i64* [[P]], i32 1
; CHECK-NEXT: [[LOAD1:%.*]] = load i64, i64* [[TMP2]], align 1
; CHECK-NEXT: [[RES1:%.*]] = insertelement <2 x i64> [[RES0]], i64 [[LOAD1]], i64 1
; CHECK-NEXT: ret <2 x i64> [[RES1]]
;
  %ret = call <2 x i64> @llvm.masked.expandload.v2i64.p0v2i64(i64* %p, <2 x i1> <i1 true, i1 true>, <2 x i64> %passthru)
  ret <2 x i64> %ret
}

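; All-zero constant mask: the expandload is folded away and the passthru value
; is returned unchanged.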
define <2 x i64> @scalarize_v2i64_zero_mask(i64* %p, <2 x i64> %passthru) {
; CHECK-LABEL: @scalarize_v2i64_zero_mask(
; CHECK-NEXT: ret <2 x i64> [[PASSTHRU:%.*]]
;
  %ret = call <2 x i64> @llvm.masked.expandload.v2i64.p0v2i64(i64* %p, <2 x i1> <i1 false, i1 false>, <2 x i64> %passthru)
  ret <2 x i64> %ret
}

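; Mixed constant mask <false, true>: only the active lane consumes memory, so
; lane 1 receives the first element at %p while lane 0 keeps its passthru value.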
define <2 x i64> @scalarize_v2i64_const_mask(i64* %p, <2 x i64> %passthru) {
; CHECK-LABEL: @scalarize_v2i64_const_mask(
; CHECK-NEXT: [[TMP1:%.*]] = getelementptr inbounds i64, i64* [[P:%.*]], i32 0
; CHECK-NEXT: [[LOAD1:%.*]] = load i64, i64* [[TMP1]], align 1
; CHECK-NEXT: [[RES1:%.*]] = insertelement <2 x i64> [[PASSTHRU:%.*]], i64 [[LOAD1]], i64 1
; CHECK-NEXT: ret <2 x i64> [[RES1]]
;
  %ret = call <2 x i64> @llvm.masked.expandload.v2i64.p0v2i64(i64* %p, <2 x i1> <i1 false, i1 true>, <2 x i64> %passthru)
  ret <2 x i64> %ret
}

declare <2 x i64> @llvm.masked.expandload.v2i64.p0v2i64(i64*, <2 x i1>, <2 x i64>)
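
; Illustrative sketch, not part of the autogenerated test above: the commit also
; scalarizes llvm.masked.compressstore, the store-side counterpart of expandload.
; The function name is hypothetical, the mangled intrinsic name is assumed to
; mirror the expandload declaration in this file, and CHECK lines are omitted
; because they would be regenerated with utils/update_test_checks.py.
define void @scalarize_v2i64_const_mask_compressstore(i64* %p, <2 x i64> %data) {
  call void @llvm.masked.compressstore.v2i64.p0v2i64(<2 x i64> %data, i64* %p, <2 x i1> <i1 false, i1 true>)
  ret void
}

declare void @llvm.masked.compressstore.v2i64.p0v2i64(<2 x i64>, i64*, <2 x i1>)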