
[ManagedStatic] Reimplement double-checked locking with std::atomic.

This gets rid of the memory fence in the hot path (dereferencing the
ManagedStatic), trading it for an extra mutex lock in the cold path (when
the ManagedStatic is still uninitialized). Since the cold path is only
taken on the first accesses, the cost shouldn't matter much. On strongly
ordered architectures such as x86 this removes all atomic instructions
from the hot path.

Also remove the tsan annotations; tsan understands how standard atomics
work, so they should be unnecessary now.

llvm-svn: 274131
Benjamin Kramer 2016-06-29 15:04:07 +00:00
parent 05f9468ad4
commit d9782db93e
2 changed files with 17 additions and 40 deletions
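Independently of the LLVM classes changed below, the double-checked locking pattern the commit switches to can be summarized in a few lines. The following is a minimal standalone sketch under the same acquire/release scheme, not the LLVM code; the names (LazyHolder, LazyWidget, get) are made up for illustration.

#include <atomic>
#include <mutex>

struct LazyWidget {};                        // hypothetical payload type

class LazyHolder {
  std::atomic<LazyWidget *> Ptr{nullptr};
  std::mutex InitLock;

public:
  LazyWidget &get() {
    // Hot path: a single acquire load, no fence. On strongly ordered
    // targets like x86 this compiles to a plain load, and tsan understands
    // the acquire/release pairing without extra annotations.
    if (LazyWidget *P = Ptr.load(std::memory_order_acquire))
      return *P;

    // Cold path: only reached before the first successful initialization.
    std::lock_guard<std::mutex> Guard(InitLock);
    if (!Ptr.load(std::memory_order_relaxed))
      // The release store pairs with the acquire load above, so a reader
      // that observes the pointer also observes the constructed object.
      // (Nothing frees the object here; ManagedStatic defers deletion to
      // llvm_shutdown.)
      Ptr.store(new LazyWidget(), std::memory_order_release);
    return *Ptr.load(std::memory_order_relaxed);
  }
};

The re-check under the mutex can be relaxed because the mutex itself orders competing initializers; only the publishing store needs release semantics.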

include/llvm/Support/ManagedStatic.h

@@ -14,9 +14,9 @@
 #ifndef LLVM_SUPPORT_MANAGEDSTATIC_H
 #define LLVM_SUPPORT_MANAGEDSTATIC_H
-#include "llvm/Support/Atomic.h"
 #include "llvm/Support/Compiler.h"
-#include "llvm/Support/Threading.h"
+#include <atomic>
+#include <cstddef>

 namespace llvm {

@@ -41,7 +41,7 @@ class ManagedStaticBase {
 protected:
   // This should only be used as a static variable, which guarantees that this
   // will be zero initialized.
-  mutable void *Ptr;
+  mutable std::atomic<void *> Ptr;
   mutable void (*DeleterFn)(void*);
   mutable const ManagedStaticBase *Next;
@@ -61,40 +61,26 @@ public:
 template<class C>
 class ManagedStatic : public ManagedStaticBase {
 public:
   // Accessors.
   C &operator*() {
-    void* tmp = Ptr;
-    if (llvm_is_multithreaded()) sys::MemoryFence();
-    if (!tmp) RegisterManagedStatic(object_creator<C>, object_deleter<C>::call);
-    TsanHappensAfter(this);
-    return *static_cast<C*>(Ptr);
+    void *Tmp = Ptr.load(std::memory_order_acquire);
+    if (!Tmp)
+      RegisterManagedStatic(object_creator<C>, object_deleter<C>::call);
+    return *static_cast<C *>(Ptr.load(std::memory_order_relaxed));
   }
-  C *operator->() {
-    void* tmp = Ptr;
-    if (llvm_is_multithreaded()) sys::MemoryFence();
-    if (!tmp) RegisterManagedStatic(object_creator<C>, object_deleter<C>::call);
-    TsanHappensAfter(this);
-    return static_cast<C*>(Ptr);
-  }
+  C *operator->() { return &**this; }
   const C &operator*() const {
-    void* tmp = Ptr;
-    if (llvm_is_multithreaded()) sys::MemoryFence();
-    if (!tmp) RegisterManagedStatic(object_creator<C>, object_deleter<C>::call);
-    TsanHappensAfter(this);
-    return *static_cast<C*>(Ptr);
+    void *Tmp = Ptr.load(std::memory_order_acquire);
+    if (!Tmp)
+      RegisterManagedStatic(object_creator<C>, object_deleter<C>::call);
+    return *static_cast<C *>(Ptr.load(std::memory_order_relaxed));
   }
-  const C *operator->() const {
-    void* tmp = Ptr;
-    if (llvm_is_multithreaded()) sys::MemoryFence();
-    if (!tmp) RegisterManagedStatic(object_creator<C>, object_deleter<C>::call);
-    TsanHappensAfter(this);
-    return static_cast<C*>(Ptr);
-  }
+  const C *operator->() const { return &**this; }
 };

 /// llvm_shutdown - Deallocate and destroy all ManagedStatic variables.
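For context, this is roughly how the rewritten accessors get exercised by callers; ManagedStatic and llvm_shutdown are the real LLVM APIs, but the MyRegistry payload below is a hypothetical example, not code from this commit.

#include "llvm/Support/ManagedStatic.h"
#include <vector>

namespace {
struct MyRegistry {                  // hypothetical payload type
  std::vector<int> Entries;
};
} // end anonymous namespace

static llvm::ManagedStatic<MyRegistry> Registry;

void recordEntry(int V) {
  // operator-> takes the acquire-load hot path once the object exists;
  // the very first call drops into RegisterManagedStatic under the mutex.
  Registry->Entries.push_back(V);
}

// At process teardown, llvm::llvm_shutdown() runs the registered deleter.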

lib/Support/ManagedStatic.cpp

@@ -13,7 +13,6 @@
 #include "llvm/Support/ManagedStatic.h"
 #include "llvm/Config/config.h"
-#include "llvm/Support/Atomic.h"
 #include "llvm/Support/Mutex.h"
 #include "llvm/Support/MutexGuard.h"
 #include "llvm/Support/Threading.h"

@@ -42,18 +41,10 @@ void ManagedStaticBase::RegisterManagedStatic(void *(*Creator)(),
   if (llvm_is_multithreaded()) {
     MutexGuard Lock(*getManagedStaticMutex());
-    if (!Ptr) {
-      void* tmp = Creator();
-      TsanHappensBefore(this);
-      sys::MemoryFence();
-      // This write is racy against the first read in the ManagedStatic
-      // accessors. The race is benign because it does a second read after a
-      // memory fence, at which point it isn't possible to get a partial value.
-      TsanIgnoreWritesBegin();
-      Ptr = tmp;
-      TsanIgnoreWritesEnd();
+    if (!Ptr.load(std::memory_order_relaxed)) {
+      void *Tmp = Creator();
+      Ptr.store(Tmp, std::memory_order_release);
       DeleterFn = Deleter;

       // Add to list of managed statics.