
[Analysis/Transforms/Sanitizers] As part of using inclusive language

within the llvm project, migrate away from the use of blacklist and
whitelist.
Eric Christopher 2020-06-20 00:42:26 -07:00
parent 5111179a5e
commit 688a5b60e8
8 changed files with 18 additions and 17 deletions

llvm/include/llvm/Transforms/Instrumentation/AddressSanitizer.h

@@ -39,7 +39,7 @@ public:
     LocationMetadata SourceLoc;
     StringRef Name;
     bool IsDynInit = false;
-    bool IsBlacklisted = false;
+    bool IsExcluded = false;

     Entry() = default;
   };

llvm/lib/Analysis/InlineCost.cpp

@@ -1113,7 +1113,8 @@ bool CallAnalyzer::visitCastInst(CastInst &I) {
       }))
     return true;

-  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
+  // Disable SROA in the face of arbitrary casts we don't explicitly list
+  // elsewhere.
   disableSROA(I.getOperand(0));

   // If this is a floating-point cast, and the target says this operation
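To see what the comment means in practice, here is a minimal standalone sketch (hypothetical example code, not part of this commit) of a cast outside the explicitly listed cases: once a local aggregate's address escapes through an arbitrary pointer-to-integer cast, later uses become opaque to the analysis, so SROA has to be disabled for that allocation.

    #include <cstdint>

    // Hypothetical helper standing in for any call the optimizer cannot
    // see through; the address it receives could be turned back into a
    // pointer and dereferenced.
    static int opaque_use(std::uintptr_t p) { return static_cast<int>(p & 0xff); }

    int example() {
      int buf[4] = {0, 1, 2, 3};
      // The "arbitrary" cast: neither free nor one of the explicitly
      // listed patterns, so the analysis conservatively gives up on
      // breaking 'buf' into scalars.
      std::uintptr_t addr = reinterpret_cast<std::uintptr_t>(buf);
      return opaque_use(addr) + buf[0];
    }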

llvm/lib/Analysis/ObjCARCInstKind.cpp

@@ -153,7 +153,7 @@ ARCInstKind llvm::objcarc::GetFunctionClass(const Function *F) {
   }
 }

-// A whitelist of intrinsics that we know do not use objc pointers or decrement
+// A list of intrinsics that we know do not use objc pointers or decrement
 // ref counts.
 static bool isInertIntrinsic(unsigned ID) {
   // TODO: Make this into a covered switch.
@@ -192,7 +192,7 @@ static bool isInertIntrinsic(unsigned ID) {
   }
 }

-// A whitelist of intrinsics that we know do not use objc pointers or decrement
+// A list of intrinsics that we know do not use objc pointers or decrement
 // ref counts.
 static bool isUseOnlyIntrinsic(unsigned ID) {
   // We are conservative and even though intrinsics are unlikely to touch
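On the "covered switch" TODO above: the idiom is a switch over an enumeration that handles every enumerator and has no default case, so the compiler can warn when a newly added value goes unclassified. A minimal self-contained sketch with a made-up enum (the intrinsic IDs here take a plain unsigned parameter, which is why the TODO is nontrivial; the classification below is purely illustrative):

    #include <cstdint>

    // Made-up stand-in for a handful of intrinsic IDs.
    enum class IntrinsicID : std::uint8_t { Memcpy, Memmove, LifetimeStart, Donothing };

    // Covered switch: no default, every enumerator handled, so forgetting
    // to classify a new ID becomes a -Wswitch warning instead of a silent
    // fall-through to a default answer.
    static bool isInert(IntrinsicID ID) {
      switch (ID) {
      case IntrinsicID::LifetimeStart:
      case IntrinsicID::Donothing:
        return true;  // assumed not to touch pointers or ref counts
      case IntrinsicID::Memcpy:
      case IntrinsicID::Memmove:
        return false; // may move pointer bytes around; stay conservative
      }
      return false;   // unreachable once the switch covers the enum
    }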

llvm/lib/Transforms/IPO/GlobalOpt.cpp

@@ -195,10 +195,10 @@ CleanupPointerRootUsers(GlobalVariable *GV,
                         function_ref<TargetLibraryInfo &(Function &)> GetTLI) {
   // A brief explanation of leak checkers. The goal is to find bugs where
   // pointers are forgotten, causing an accumulating growth in memory
-  // usage over time. The common strategy for leak checkers is to whitelist the
-  // memory pointed to by globals at exit. This is popular because it also
-  // solves another problem where the main thread of a C++ program may shut down
-  // before other threads that are still expecting to use those globals. To
+  // usage over time. The common strategy for leak checkers is to explicitly
+  // allow the memory pointed to by globals at exit. This is popular because it
+  // also solves another problem where the main thread of a C++ program may shut
+  // down before other threads that are still expecting to use those globals. To
   // handle that case, we expect the program may create a singleton and never
   // destroy it.
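The never-destroyed singleton the comment refers to is a common C++ idiom; as a generic illustration (not code from GlobalOpt), it looks like this:

    #include <string>

    // Intentionally leaked singleton: threads that outlive main()'s static
    // destructors can still call sharedConfig() safely. A leak checker that
    // treats memory reachable from globals and statics at exit as live will
    // not report this allocation.
    static std::string &sharedConfig() {
      static std::string *Config = new std::string("defaults"); // never freed
      return *Config;
    }

    int main() { return sharedConfig().empty() ? 1 : 0; }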

llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp

@@ -1150,9 +1150,9 @@ GlobalsMetadata::GlobalsMetadata(Module &M) {
       E.Name = Name->getString();
     ConstantInt *IsDynInit = mdconst::extract<ConstantInt>(MDN->getOperand(3));
     E.IsDynInit |= IsDynInit->isOne();
-    ConstantInt *IsBlacklisted =
+    ConstantInt *IsExcluded =
         mdconst::extract<ConstantInt>(MDN->getOperand(4));
-    E.IsBlacklisted |= IsBlacklisted->isOne();
+    E.IsExcluded |= IsExcluded->isOne();
   }
 }
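For readers unfamiliar with the sanitizer metadata, the code above implies the shape of each llvm.asan.globals node: operand 2 carries the name, operand 3 the is-dynamically-initialized flag, and operand 4 the exclusion flag; reading operands 0 and 1 as the global itself and its source location is an inference from the Entry fields in the header above, not something this hunk shows. A plain-struct mirror of the decoded tuple, purely for orientation:

    #include <string>

    // Illustrative mirror of one llvm.asan.globals entry as decoded above.
    struct AsanGlobalEntry {
      // operand 0: the global variable itself (assumed)
      // operand 1: source-location metadata -> Entry::SourceLoc (assumed)
      std::string Name;        // operand 2 -> Entry::Name
      bool IsDynInit = false;  // operand 3 -> Entry::IsDynInit
      bool IsExcluded = false; // operand 4 -> Entry::IsExcluded
    };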
@@ -1814,7 +1814,7 @@ bool ModuleAddressSanitizer::shouldInstrumentGlobal(GlobalVariable *G) const {
   // FIXME: Metadata should be attched directly to the global directly instead
   // of being added to llvm.asan.globals.
-  if (GlobalsMD.get(G).IsBlacklisted) return false;
+  if (GlobalsMD.get(G).IsExcluded) return false;
   if (!Ty->isSized()) return false;
   if (!G->hasInitializer()) return false;
   // Only instrument globals of default address spaces
@@ -2265,19 +2265,19 @@ bool ModuleAddressSanitizer::InstrumentGlobals(IRBuilder<> &IRB, Module &M,
   // Build set of globals that are aliased by some GA, where
   // canInstrumentAliasedGlobal(GA) returns false.
-  SmallPtrSet<const GlobalVariable *, 16> AliasedGlobalBlacklist;
+  SmallPtrSet<const GlobalVariable *, 16> AliasedGlobalExclusions;
   if (CompileKernel) {
     for (auto &GA : M.aliases()) {
       if (const auto *GV = dyn_cast<GlobalVariable>(GA.getAliasee())) {
         if (!canInstrumentAliasedGlobal(GA))
-          AliasedGlobalBlacklist.insert(GV);
+          AliasedGlobalExclusions.insert(GV);
       }
     }
   }

   SmallVector<GlobalVariable *, 16> GlobalsToChange;
   for (auto &G : M.globals()) {
-    if (!AliasedGlobalBlacklist.count(&G) && shouldInstrumentGlobal(&G))
+    if (!AliasedGlobalExclusions.count(&G) && shouldInstrumentGlobal(&G))
       GlobalsToChange.push_back(&G);
   }
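The hunk above follows a common two-pass shape: pass one collects everything that must be skipped, pass two walks the whole population and keeps only entries outside that set. A generic sketch of the same shape (plain standard C++ in place of LLVM's SmallPtrSet and SmallVector):

    #include <set>
    #include <string>
    #include <vector>

    // 'exclusions' plays the role of AliasedGlobalExclusions above; the
    // filter keeps only candidates outside the set, mirroring the loop
    // over M.globals().
    static std::vector<std::string>
    collectCandidates(const std::vector<std::string> &all,
                      const std::set<std::string> &exclusions) {
      std::vector<std::string> out;
      for (const auto &name : all)
        if (exclusions.count(name) == 0) // skip anything from pass one
          out.push_back(name);
      return out;
    }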

llvm/lib/Transforms/Scalar/LoopRerollPass.cpp

@@ -1132,7 +1132,7 @@ static bool isIgnorableInst(const Instruction *I) {
       case Intrinsic::annotation:
       case Intrinsic::ptr_annotation:
       case Intrinsic::var_annotation:
-      // TODO: the following intrinsics may also be whitelisted:
+      // TODO: the following intrinsics may also be allowed:
       // lifetime_start, lifetime_end, invariant_start, invariant_end
         return true;
     }

llvm/lib/Transforms/Scalar/NaryReassociate.cpp

@@ -213,7 +213,7 @@ bool NaryReassociatePass::runImpl(Function &F, AssumptionCache *AC_,
   return Changed;
 }

-// Whitelist the instruction types NaryReassociate handles for now.
+// Explicitly list the instruction types NaryReassociate handles for now.
 static bool isPotentiallyNaryReassociable(Instruction *I) {
   switch (I->getOpcode()) {
   case Instruction::Add:

llvm/lib/Transforms/Scalar/SpeculativeExecution.cpp

@@ -247,7 +247,7 @@ static unsigned ComputeSpeculationCost(const Instruction *I,
     return TTI.getUserCost(I, TargetTransformInfo::TCK_SizeAndLatency);

   default:
-    return UINT_MAX; // Disallow anything not whitelisted.
+    return UINT_MAX; // Disallow anything not explicitly listed.
   }
 }
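A closing note on the UINT_MAX return above: it acts as an infinite-cost sentinel, so an instruction outside the explicitly listed opcodes can never fit any speculation budget. A hedged sketch of how a caller might consume such a cost (hypothetical helper, not LLVM's actual logic):

    #include <climits>

    // UINT_MAX means "never speculate": no finite budget admits it.
    static bool fitsSpeculationBudget(unsigned cost, unsigned budget) {
      return cost != UINT_MAX && cost <= budget;
    }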