//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call. Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global. Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//

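// As an illustrative example (C-like pseudocode, not part of the analysis
// itself):
//
//   int *G;
//   void f(int *P) {
//     *P = 42;   // does not capture P: P is only dereferenced
//     G = P;     // captures P: a copy of P outlives the call
//   }
//
// Here the store to the global G captures P, while the store through P does
// not.
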
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/OrderedBasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

CaptureTracker::~CaptureTracker() {}

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

namespace {
  struct SimpleCaptureTracker : public CaptureTracker {
    explicit SimpleCaptureTracker(bool ReturnCaptures)
      : ReturnCaptures(ReturnCaptures), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      Captured = true;
      return true;
    }

    bool ReturnCaptures;

    bool Captured;
  };
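
  // Trackers plug into PointerMayBeCaptured(V, Tracker) below: shouldExplore()
  // filters which uses are walked, captured() is told about each potentially
  // capturing use (returning true stops the walk), and tooManyUses() is the
  // conservative bail-out. As an illustrative sketch (hypothetical, not part
  // of LLVM), a client that wants every capturing use rather than a single
  // flag could look like:
  //
  //   struct CollectingTracker : CaptureTracker {
  //     SmallVector<const Use *, 8> Captures; // hypothetical member
  //     bool GaveUp = false;
  //     void tooManyUses() override { GaveUp = true; }
  //     bool captured(const Use *U) override {
  //       Captures.push_back(U);
  //       return false; // keep walking to record all capturing uses
  //     }
  //   };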

  /// Only find pointer captures which happen before the given instruction.
  /// Uses the dominator tree to determine whether one instruction is before
  /// another. Only supports the case where the Value is defined in the same
  /// basic block as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI,
                   OrderedBasicBlock *IC)
      : OrderedBB(IC), BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      BasicBlock *BB = I->getParent();
      // We explore this usage only if the usage can reach "BeforeHere".
      // If the use is not reachable from entry, there is no need to explore.
      if (BeforeHere != I && !DT->isReachableFromEntry(BB))
        return true;

      // Compute the case where both instructions are inside the same basic
      // block. Since instructions in the same BB as BeforeHere are numbered in
      // 'OrderedBB', avoid using 'dominates' and 'isPotentiallyReachable'
      // which are very expensive for large basic blocks.
      if (BB == BeforeHere->getParent()) {
        // 'I' dominates 'BeforeHere' => not safe to prune.
        //
        // The value defined by an invoke dominates an instruction only
        // if it dominates every instruction in UseBB. A PHI is dominated only
        // if the instruction dominates every possible use in the UseBB. Since
        // UseBB == BB, avoid pruning.
        if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
          return false;
        if (!OrderedBB->dominates(BeforeHere, I))
          return false;

        // 'BeforeHere' comes before 'I', so it is safe to prune if we can
        // also guarantee that 'I' never reaches 'BeforeHere' through a
        // back-edge or via its successors, i.e., prune if:
        //
        // (1) BB is the entry block or has no successors.
        // (2) There is no path coming back through BB's successors.
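        //
        // For instance, if BB ends in "br label %BB" (a self-loop), a use in
        // BB can reach BeforeHere again on the next iteration, so condition
        // (2) fails and it is not safe to prune.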
        if (BB == &BB->getParent()->getEntryBlock() ||
            !BB->getTerminator()->getNumSuccessors())
          return true;

        SmallVector<BasicBlock *, 32> Worklist;
        Worklist.append(succ_begin(BB), succ_end(BB));
        return !isPotentiallyReachableFromMany(Worklist, BB, DT);
      }

      // If the value is defined in the same basic block as the use and
      // BeforeHere, there is no need to explore the use if BeforeHere
      // dominates it. Check whether there is a path from I to BeforeHere.
      if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
          !isPotentiallyReachable(I, BeforeHere, DT))
        return true;

      return false;
    }

    bool shouldExplore(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());

      if (BeforeHere == I && !IncludeI)
        return false;

      if (isSafeToPrune(I))
        return false;

      return true;
    }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      if (!shouldExplore(U))
        return false;

      Captured = true;
      return true;
    }

    OrderedBasicBlock *OrderedBB;
    const Instruction *BeforeHere;
    const DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured;
  };
}

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist). This routine can
/// be expensive, so consider caching the results. The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not. The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V,
                                bool ReturnCaptures, bool StoreCaptures) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do Fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT);
  return SCT.Captured;
}
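
// A minimal usage sketch (illustrative; 'Ptr' stands for some pointer-typed
// Value in the function being analyzed):
//
//   if (!PointerMayBeCaptured(Ptr, /*ReturnCaptures=*/true,
//                             /*StoreCaptures=*/true))
//     ...  // no copy of any part of Ptr outlives the enclosing function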

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results. The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not. The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not. An ordered basic block \p OBB can be used in order to speed up
/// queries about relative order among instructions in the same basic block.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      OrderedBasicBlock *OBB) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");
  bool UseNewOBB = OBB == nullptr;

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures);
  if (UseNewOBB)
    OBB = new OrderedBasicBlock(I->getParent());

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, OBB);
  PointerMayBeCaptured(V, &CB);

  if (UseNewOBB)
    delete OBB;
  return CB.Captured;
}
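
// Illustrative use (hypothetical names): a pass holding a DominatorTree DT
// can ask whether 'Ptr' may already be captured when execution reaches the
// instruction 'Before':
//
//   bool MayEscape = PointerMayBeCapturedBefore(
//       Ptr, /*ReturnCaptures=*/true, /*StoreCaptures=*/true, Before, DT,
//       /*IncludeI=*/false, /*OBB=*/nullptr);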

/// TODO: Write a new FunctionPass AliasAnalysis so that it can keep
/// a cache. Then we can move the code from BasicAliasAnalysis into
/// that path, and remove this threshold.
static int const Threshold = 20;

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  SmallVector<const Use *, Threshold> Worklist;
  SmallSet<const Use *, Threshold> Visited;
  int Count = 0;

  for (const Use &U : V->uses()) {
    // If there are lots of uses, conservatively say that the value
    // is captured to avoid taking too much compile time.
    if (Count++ >= Threshold)
      return Tracker->tooManyUses();

    if (!Tracker->shouldExplore(&U)) continue;
    Visited.insert(&U);
    Worklist.push_back(&U);
  }

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());
    V = U->get();

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      CallSite CS(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
        break;

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(I))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments. Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured. This is a subtle point considering that (for example)
      // the callee might return its own address. It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
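      //
      // For example (illustrative IR), the first call below does not capture
      // %p, while the second conservatively does:
      //
      //   call void @f(i8* nocapture %p)
      //   call void @g(i8* %p)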
      CallSite::data_operand_iterator B =
        CS.data_operands_begin(), E = CS.data_operands_end();
      for (CallSite::data_operand_iterator A = B; A != E; ++A)
        if (A->get() == V && !CS.doesNotCapture(A - B))
          // The parameter is not marked 'nocapture' - captured.
          if (Tracker->captured(U))
            return;
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
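      //
      // For example (illustrative IR), in
      //   atomicrmw xchg i8** %slot, i8* %p seq_cst
      // the stored value %p is captured, but the address %slot is not
      // (unless the operation is volatile).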
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      Count = 0;
      for (Use &UU : I->uses()) {
        // If there are lots of uses, conservatively say that the value
        // is captured to avoid taking too much compile time.
        if (Count++ >= Threshold)
          return Tracker->tooManyUses();

        if (Visited.insert(&UU).second)
          if (Tracker->shouldExplore(&UU))
            Worklist.push_back(&UU);
      }
      break;
    case Instruction::ICmp: {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (ConstantPointerNull *CPN =
          dyn_cast<ConstantPointerNull>(I->getOperand(1)))
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(V->stripPointerCasts()))
            break;
      // Comparison against a value stored in a global variable. Given that
      // the pointer does not escape, its value cannot have been guessed and
      // stored separately in a global variable, so the comparison reveals
      // nothing about it.
      unsigned OtherIndex = (I->getOperand(0) == V) ? 1 : 0;
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIndex));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}