
[LV] Common duplicate vector load/store address calculation (NFC)

Summary:
Commoning some obvious copy/paste code in
InnerLoopVectorizer::vectorizeMemoryInstruction

llvm-svn: 331076
author Daniel Neilson 2018-04-27 20:29:18 +00:00
parent 8ff594b7b3
commit b7b2ff1d78
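
The change below is purely structural: the per-part pointer calculation that had
been pasted into both the store path and the load path of
vectorizeMemoryInstruction now lives in a single local lambda (CreateVecPtr)
that both paths call. A minimal standalone sketch of that refactoring pattern,
with illustrative names and values that are not taken from the vectorizer
itself:

  #include <cstdio>

  int main() {
    const int VF = 4; // illustrative vector width

    // After the refactor, one lambda owns the shared per-part calculation...
    auto PartOffset = [&](int Part) { return Part * VF; };

    // ...and both former copy/paste sites simply call it.
    int StoreOff = PartOffset(0); // was: inline arithmetic in the store path
    int LoadOff  = PartOffset(1); // was: the same arithmetic in the load path

    std::printf("store part offset %d, load part offset %d\n", StoreOff, LoadOff);
    return 0;
  }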


@@ -3121,6 +3121,22 @@ void InnerLoopVectorizer::vectorizeMemoryInstruction(Instruction *Instr,
   if (isMaskRequired)
     Mask = *BlockInMask;
+  const auto CreateVecPtr = [&](unsigned Part, Value *Ptr) -> Value * {
+    // Calculate the pointer for the specific unroll-part.
+    Value *PartPtr = Builder.CreateGEP(Ptr, Builder.getInt32(Part * VF));
+    if (Reverse) {
+      // If the address is consecutive but reversed, then the
+      // wide store needs to start at the last vector element.
+      PartPtr = Builder.CreateGEP(Ptr, Builder.getInt32(-Part * VF));
+      PartPtr = Builder.CreateGEP(PartPtr, Builder.getInt32(1 - VF));
+      if (isMaskRequired) // Reverse of a null all-one mask is a null mask.
+        Mask[Part] = reverseVector(Mask[Part]);
+    }
+    return Builder.CreateBitCast(PartPtr, DataTy->getPointerTo(AddressSpace));
+  };
   // Handle Stores:
   if (SI) {
     setDebugLocFromInst(Builder, SI);
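
To make the reversed-address arithmetic in CreateVecPtr concrete: for a
consecutive access, unroll part Part starts at element Part * VF; for a
consecutive-but-reversed access it starts at -Part * VF + (1 - VF), so each
wide access still covers VF elements but begins at the last of them. A small
worked sketch with assumed values (VF = 4 and two unroll parts; the real
values come from the vectorizer's cost model):

  #include <cstdio>

  int main() {
    const int VF = 4; // assumed vector width
    const int UF = 2; // assumed unroll factor

    for (int Part = 0; Part < UF; ++Part) {
      int Fwd = Part * VF;              // consecutive access
      int Rev = -Part * VF + (1 - VF);  // consecutive but reversed
      std::printf("part %d: forward [%d..%d], reversed [%d..%d]\n",
                  Part, Fwd, Fwd + VF - 1, Rev, Rev + VF - 1);
    }
    return 0;
  }

With these values, part 0 covers elements [0..3] forward and [-3..0] reversed,
and part 1 covers [4..7] forward and [-7..-4] reversed: the reversed parts walk
the same region backwards from the base pointer, which is why the wide
store/load needs to start at the last vector element.
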
@@ -3134,30 +3150,14 @@ void InnerLoopVectorizer::vectorizeMemoryInstruction(Instruction *Instr,
         NewSI = Builder.CreateMaskedScatter(StoredVal, VectorGep, Alignment,
                                             MaskPart);
       } else {
-        // Calculate the pointer for the specific unroll-part.
-        Value *PartPtr =
-            Builder.CreateGEP(nullptr, Ptr, Builder.getInt32(Part * VF));
         if (Reverse) {
           // If we store to reverse consecutive memory locations, then we need
           // to reverse the order of elements in the stored value.
           StoredVal = reverseVector(StoredVal);
           // We don't want to update the value in the map as it might be used in
           // another expression. So don't call resetVectorValue(StoredVal).
-          // If the address is consecutive but reversed, then the
-          // wide store needs to start at the last vector element.
-          PartPtr =
-              Builder.CreateGEP(nullptr, Ptr, Builder.getInt32(-Part * VF));
-          PartPtr =
-              Builder.CreateGEP(nullptr, PartPtr, Builder.getInt32(1 - VF));
-          if (isMaskRequired) // Reverse of a null all-one mask is a null mask.
-            Mask[Part] = reverseVector(Mask[Part]);
         }
-        Value *VecPtr =
-            Builder.CreateBitCast(PartPtr, DataTy->getPointerTo(AddressSpace));
+        auto *VecPtr = CreateVecPtr(Part, Ptr);
         if (isMaskRequired)
           NewSI = Builder.CreateMaskedStore(StoredVal, VecPtr, Alignment,
                                             Mask[Part]);
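
When a mask is required, the wide store becomes a masked store that writes only
the lanes whose mask bit is set; that is also why Mask[Part] has to be reversed
together with the data for reversed accesses, while the absent all-ones mask
needs no reversal. A conceptual, element-wise sketch of that semantics
(illustrative values, not LLVM's implementation):

  #include <array>
  #include <cstdio>

  int main() {
    constexpr int VF = 4;
    std::array<int, VF> Memory    = {0, 0, 0, 0};
    std::array<int, VF> StoredVal = {10, 20, 30, 40};
    std::array<bool, VF> Mask     = {true, false, true, false};

    // A masked store writes only the enabled lanes; an all-ones mask
    // degenerates to an ordinary wide store.
    for (int Lane = 0; Lane < VF; ++Lane)
      if (Mask[Lane])
        Memory[Lane] = StoredVal[Lane];

    for (int V : Memory)
      std::printf("%d ", V); // prints: 10 0 30 0
    std::printf("\n");
    return 0;
  }
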
@@ -3181,21 +3181,7 @@ void InnerLoopVectorizer::vectorizeMemoryInstruction(Instruction *Instr,
                                            nullptr, "wide.masked.gather");
         addMetadata(NewLI, LI);
       } else {
-        // Calculate the pointer for the specific unroll-part.
-        Value *PartPtr =
-            Builder.CreateGEP(nullptr, Ptr, Builder.getInt32(Part * VF));
-        if (Reverse) {
-          // If the address is consecutive but reversed, then the
-          // wide load needs to start at the last vector element.
-          PartPtr = Builder.CreateGEP(nullptr, Ptr, Builder.getInt32(-Part * VF));
-          PartPtr = Builder.CreateGEP(nullptr, PartPtr, Builder.getInt32(1 - VF));
-          if (isMaskRequired) // Reverse of a null all-one mask is a null mask.
-            Mask[Part] = reverseVector(Mask[Part]);
-        }
-        Value *VecPtr =
-            Builder.CreateBitCast(PartPtr, DataTy->getPointerTo(AddressSpace));
+        auto *VecPtr = CreateVecPtr(Part, Ptr);
         if (isMaskRequired)
           NewLI = Builder.CreateMaskedLoad(VecPtr, Alignment, Mask[Part],
                                            UndefValue::get(DataTy),