1
0
mirror of https://github.com/RPCS3/llvm-mirror.git synced 2025-01-31 20:51:52 +01:00

WholeProgramDevirt: Move logic for finding devirtualizable call sites to Analysis.

The plan is to eventually make this logic simpler, however I expect it to
be a little tricky for the foreseeable future (at least until we're rid of
pointee types), so move it here so that it can be reused to build a summary
index for devirtualization.

Differential Revision: http://reviews.llvm.org/D20005

llvm-svn: 269081
This commit is contained in:
Peter Collingbourne 2016-05-10 17:34:21 +00:00
parent 95dd50f19c
commit 9c45cac6e6
4 changed files with 133 additions and 51 deletions

View File

@@ -0,0 +1,38 @@
//===- BitSetUtils.h - Utilities related to pointer bitsets ------*- C++ -*-==//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate bitsets for
// devirtualization.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_ANALYSIS_BITSETUTILS_H
#define LLVM_ANALYSIS_BITSETUTILS_H
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/CallSite.h"
namespace llvm {
// A call site that could be devirtualized: a virtual call reached through a
// vtable pointer tested by @llvm.bitset.test.
struct DevirtCallSite {
// The offset from the address point to the virtual function.
// (In bytes, as accumulated from constant-index GEPs off the vtable pointer.)
uint64_t Offset;
// The call site itself.
CallSite CS;
};
// Given a call to the intrinsic @llvm.bitset.test, find all devirtualizable
// call sites based on the call and return them in DevirtCalls.
void findDevirtualizableCalls(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
SmallVectorImpl<CallInst *> &Assumes,
CallInst *CI);
}
#endif

View File

@@ -0,0 +1,82 @@
//===- BitSetUtils.cpp - Utilities related to pointer bitsets -------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate bitsets for
// devirtualization.
//
//===----------------------------------------------------------------------===//
#include "llvm/Analysis/BitSetUtils.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
using namespace llvm;
// Search for virtual calls that call FPtr and add them to DevirtCalls.
//
// FPtr is a virtual function pointer (the result of a load from a vtable
// slot); Offset is the byte offset of that slot from the vtable's address
// point, accumulated by the caller.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          Value *FPtr, uint64_t Offset) {
  for (const Use &U : FPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      // Look through no-op pointer casts.
      findCallsAtConstantOffset(DevirtCalls, User, Offset);
    } else if (auto CI = dyn_cast<CallInst>(User)) {
      // Only record uses where FPtr is the called value. A call that merely
      // passes the function pointer as an argument is not a virtual call
      // through it, and treating it as devirtualizable would let the
      // transform rewrite the wrong callee.
      if (CallSite(CI).isCallee(&U))
        DevirtCalls.push_back({Offset, CI});
    } else if (auto II = dyn_cast<InvokeInst>(User)) {
      if (CallSite(II).isCallee(&U))
        DevirtCalls.push_back({Offset, II});
    }
  }
}
// Walk the uses of the vtable pointer VPtr — through pointer casts and
// constant-offset GEPs — and record virtual calls made through function
// pointers loaded from it.
static void
findLoadCallsAtConstantOffset(Module *M,
                              SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                              Value *VPtr, uint64_t Offset) {
  for (const Use &VPtrUse : VPtr->uses()) {
    Value *U = VPtrUse.getUser();
    // Pointer casts do not change the offset; keep scanning through them.
    if (isa<BitCastInst>(U)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, U, Offset);
      continue;
    }
    // A load of the vtable slot yields a virtual function pointer; any call
    // through it is a candidate call site at the current offset.
    if (isa<LoadInst>(U)) {
      findCallsAtConstantOffset(DevirtCalls, U, Offset);
      continue;
    }
    // A GEP with all-constant indices moves within the vtable; fold its byte
    // offset into Offset and keep walking from the GEP's result.
    auto GEP = dyn_cast<GetElementPtrInst>(U);
    if (!GEP || GEP->getPointerOperand() != VPtr ||
        !GEP->hasAllConstantIndices())
      continue;
    SmallVector<Value *, 8> Idxs(GEP->op_begin() + 1, GEP->op_end());
    uint64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
        GEP->getSourceElementType(), Idxs);
    findLoadCallsAtConstantOffset(M, DevirtCalls, GEP, Offset + GEPOffset);
  }
}
void llvm::findDevirtualizableCalls(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, CallInst *CI) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::bitset_test);

  // Collect every llvm.assume call guarded by the result of this
  // llvm.bitset.test call.
  for (const Use &CIU : CI->uses()) {
    auto AssumeCI = dyn_cast<CallInst>(CIU.getUser());
    if (!AssumeCI)
      continue;
    Function *Callee = AssumeCI->getCalledFunction();
    if (Callee && Callee->getIntrinsicID() == Intrinsic::assume)
      Assumes.push_back(AssumeCI);
  }

  // Without an assume the vtable pointer is not known to be in the bitset,
  // so there are no devirtualizable calls to report.
  if (Assumes.empty())
    return;

  // Search for virtual calls based on %p (the tested pointer, stripped of
  // casts) and add them to DevirtCalls.
  Module *M = CI->getParent()->getParent()->getParent();
  findLoadCallsAtConstantOffset(M, DevirtCalls,
                                CI->getArgOperand(0)->stripPointerCasts(), 0);
}

View File

@@ -5,6 +5,7 @@ add_llvm_library(LLVMAnalysis
Analysis.cpp
AssumptionCache.cpp
BasicAliasAnalysis.cpp
BitSetUtils.cpp
BlockFrequencyInfo.cpp
BlockFrequencyInfoImpl.cpp
BranchProbabilityInfo.cpp

View File

@@ -31,6 +31,7 @@
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/Analysis/BitSetUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
@@ -231,10 +232,6 @@ struct DevirtModule {
: M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
Int32Ty(Type::getInt32Ty(M.getContext())) {}
void findLoadCallsAtConstantOffset(Metadata *BitSet, Value *Ptr,
uint64_t Offset, Value *VTable);
void findCallsAtConstantOffset(Metadata *BitSet, Value *Ptr, uint64_t Offset,
Value *VTable);
void buildBitSets(std::vector<VTableBits> &Bits,
DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets);
@@ -283,43 +280,6 @@ ModulePass *llvm::createWholeProgramDevirtPass() {
return new WholeProgramDevirt;
}
// Search for virtual calls that call FPtr and add them to CallSlots.
// NOTE(review): this member version is the pre-refactor copy being removed;
// the same traversal now lives as a free function in Analysis/BitSetUtils.
void DevirtModule::findCallsAtConstantOffset(Metadata *BitSet, Value *FPtr,
uint64_t Offset, Value *VTable) {
for (const Use &U : FPtr->uses()) {
Value *User = U.getUser();
if (isa<BitCastInst>(User)) {
// Look through no-op pointer casts.
findCallsAtConstantOffset(BitSet, User, Offset, VTable);
} else if (auto CI = dyn_cast<CallInst>(User)) {
// Record the slot keyed by (bitset, offset) with its vtable and call.
CallSlots[{BitSet, Offset}].push_back({VTable, CI});
} else if (auto II = dyn_cast<InvokeInst>(User)) {
CallSlots[{BitSet, Offset}].push_back({VTable, II});
}
}
}
// Search for virtual calls that load from VPtr and add them to CallSlots.
// NOTE(review): this member version is the pre-refactor copy being removed;
// the same traversal now lives as a free function in Analysis/BitSetUtils.
void DevirtModule::findLoadCallsAtConstantOffset(Metadata *BitSet, Value *VPtr,
uint64_t Offset,
Value *VTable) {
for (const Use &U : VPtr->uses()) {
Value *User = U.getUser();
if (isa<BitCastInst>(User)) {
// Pointer casts preserve the offset; keep walking through them.
findLoadCallsAtConstantOffset(BitSet, User, Offset, VTable);
} else if (isa<LoadInst>(User)) {
// A load from the vtable yields a virtual function pointer; scan its
// uses for calls at the current offset.
findCallsAtConstantOffset(BitSet, User, Offset, VTable);
} else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
// Take into account the GEP offset.
if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
uint64_t GEPOffset = M.getDataLayout().getIndexedOffsetInType(
GEP->getSourceElementType(), Indices);
findLoadCallsAtConstantOffset(BitSet, User, Offset + GEPOffset, VTable);
}
}
}
}
void DevirtModule::buildBitSets(
std::vector<VTableBits> &Bits,
DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets) {
@@ -674,22 +634,23 @@ bool DevirtModule::run() {
if (!CI)
continue;
// Find llvm.assume intrinsics for this llvm.bitset.test call.
// Search for virtual calls based on %p and add them to DevirtCalls.
SmallVector<DevirtCallSite, 1> DevirtCalls;
SmallVector<CallInst *, 1> Assumes;
for (const Use &CIU : CI->uses()) {
auto AssumeCI = dyn_cast<CallInst>(CIU.getUser());
if (AssumeCI && AssumeCI->getCalledValue() == AssumeFunc)
Assumes.push_back(AssumeCI);
}
findDevirtualizableCalls(DevirtCalls, Assumes, CI);
// If we found any, search for virtual calls based on %p and add them to
// CallSlots.
// If we found any, add them to CallSlots. Only do this if we haven't seen
// the vtable pointer before, as it may have been CSE'd with pointers from
// other call sites, and we don't want to process call sites multiple times.
if (!Assumes.empty()) {
Metadata *BitSet =
cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
if (SeenPtrs.insert(Ptr).second)
findLoadCallsAtConstantOffset(BitSet, Ptr, 0, CI->getArgOperand(0));
if (SeenPtrs.insert(Ptr).second) {
for (DevirtCallSite Call : DevirtCalls)
CallSlots[{BitSet, Call.Offset}].push_back(
{CI->getArgOperand(0), Call.CS});
}
}
// We no longer need the assumes or the bitset test.