#pragma once

#include "util/types.hpp"
#include "util/atomic.hpp"
#include "util/shared_ptr.hpp"

#include <string>
#include <concepts>

#include "mutex.h"
#include "lockless.h"

// Hardware core layout
enum class native_core_arrangement : u32
{
	undefined,
	generic,
	intel_ht,
	amd_ccx
};

enum class thread_class : u32
{
	general,
	rsx,
	spu,
	ppu
};

enum class thread_state : u32
{
	created = 0, // Initial state
	aborting = 1, // The thread has been joined in the destructor or explicitly aborted
	errored = 2, // Set after the emergency_exit call
	finished = 3, // Final state, always set at the end of thread execution

	mask = 3
};

template <class Context>
class named_thread;

class thread_base;

template <typename Ctx, typename... Args>
struct result_storage
{
	static constexpr bool empty = true;

	using type = void;
};

template <typename Ctx, typename... Args> requires (!std::is_void_v<std::invoke_result_t<Ctx, Args&&...>>)
struct result_storage<Ctx, Args...>
{
	using T = std::invoke_result_t<Ctx, Args&&...>;

	static_assert(std::is_default_constructible_v<T>);

	alignas(T) std::byte data[sizeof(T)];

	static constexpr bool empty = false;

	using type = T;

	T* _get()
	{
		return reinterpret_cast<T*>(&data);
	}

	const T* _get() const
	{
		return reinterpret_cast<const T*>(&data);
	}

	void init() noexcept
	{
		new (data) T();
	}

	void destroy() noexcept
	{
		_get()->~T();
	}
};

template <typename T>
concept NamedThreadName = requires (const T&)
{
	std::string(T::thread_name);
};
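
// Illustrative sketch (assumption, not part of this header): a Context type that satisfies
// NamedThreadName, so named_thread<example_ctx> can take its default name from the type itself.
// The type and its members below are made up for the example.
//
//   struct example_ctx
//   {
//       static constexpr std::string_view thread_name = "Example Thread";
//
//       void operator()(); // thread body, run once by named_thread
//   };
//
//   // named_thread<example_ctx> thr; // the native thread would be named "Example Thread"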

// Base class for task queue (linked list)
class thread_future
{
	friend class thread_base;

	shared_ptr<thread_future> next{};

	thread_future* prev{};

protected:
	atomic_t<void(*)(thread_base*, thread_future*)> exec{};

	atomic_t<u32> done{0};

public:
	// Get a reference to the atomic variable, for inspection and waiting on
	const auto& get_wait() const
	{
		return done;
	}

	// Wait (preset)
	void wait() const
	{
		done.wait(0);
	}
};

// Thread base class
class thread_base
{
public:
	// Native thread entry point function type
#ifdef _WIN32
	using native_entry = uint(__stdcall*)(void* arg);
#else
	using native_entry = void*(*)(void* arg);
#endif

	const native_entry entry_point;

	// Set name for debugger
	static void set_name(std::string);

private:
	// Thread handle (platform-specific)
	atomic_t<u64> m_thread{0};

	// Thread cycles
	atomic_t<u64> m_cycles{0};

	atomic_t<u32> m_dummy{0};

	// Thread state
	atomic_t<u32> m_sync{0};

	// Thread name
	atomic_ptr<std::string> m_tname;

	// Thread task queue (reversed linked list)
	atomic_ptr<thread_future> m_taskq{};

	// Start thread
	void start();

	// Called at the thread start
	void initialize(void (*error_cb)());

	// Called at the thread end, returns self handle
	u64 finalize(thread_state result) noexcept;

	// Cleanup after possibly deleting the thread instance
	static native_entry finalize(u64 _self) noexcept;

	// Make entry point
	static native_entry make_trampoline(u64(*entry)(thread_base* _base));

	friend class thread_ctrl;

	template <class Context>
	friend class named_thread;

protected:
	thread_base(native_entry, std::string name);

	~thread_base();

public:
	// Get CPU cycles since last time this function was called. First call returns 0.
	u64 get_cycles();

	// Wait for the thread (it does NOT change thread state, and can be called from multiple threads)
	bool join(bool dtor = false) const;

	// Notify the thread
	void notify();

	// Get thread id
	u64 get_native_id() const;

	// Add work to the queue
	void push(shared_ptr<thread_future>);

private:
	// Clear task queue (execute unless aborting)
	void exec();
};

// Collection of global functions for the current thread
class thread_ctrl final
{
	// Current thread
	static thread_local thread_base* g_tls_this_thread;

	// Error handling details
	static thread_local void(*g_tls_error_callback)();

	// Target cpu core layout
	static atomic_t<native_core_arrangement> g_native_core_layout;

	friend class thread_base;

	// Optimized get_name() for logging
	static std::string get_name_cached();

public:
	// Get current thread name
	static std::string get_name()
	{
		if (!g_tls_this_thread)
		{
			return "not named_thread";
		}

		return *g_tls_this_thread->m_tname.load();
	}

	// Get thread name
	template <typename T>
	static std::string get_name(const named_thread<T>& thread)
	{
		return *static_cast<const thread_base&>(thread).m_tname.load();
	}

	// Set current thread name (not recommended)
	static void set_name(std::string name)
	{
		g_tls_this_thread->m_tname.store(make_single<std::string>(name));
		g_tls_this_thread->set_name(std::move(name));
	}

	// Set thread name (not recommended)
	template <typename T>
	static void set_name(named_thread<T>& thread, std::string name)
	{
		static_cast<thread_base&>(thread).m_tname.store(make_single<std::string>(name));

		if (g_tls_this_thread == std::addressof(static_cast<thread_base&>(thread)))
		{
			g_tls_this_thread->set_name(std::move(name));
		}
	}

	template <typename T>
	static u64 get_cycles(named_thread<T>& thread)
	{
		return static_cast<thread_base&>(thread).get_cycles();
	}

	template <typename T>
	static void notify(named_thread<T>& thread)
	{
		static_cast<thread_base&>(thread).notify();
	}

	template <typename T>
	static u64 get_native_id(named_thread<T>& thread)
	{
		return static_cast<thread_base&>(thread).get_native_id();
	}

	// Read current state, possibly executing some tasks
	static thread_state state();

	// Wait once with timeout. Infinite value is -1.
	static void wait_for(u64 usec, bool alert = true);

	// Waiting with accurate timeout
	static void wait_for_accurate(u64 usec);

	// Wait.
	static inline void wait()
	{
		wait_for(-1, true);
	}

	// Wait for both the thread sync var and the provided atomic var
	template <uint Max, typename Func>
	static inline void wait_on_custom(Func&& setter, u64 usec = -1)
	{
		auto _this = g_tls_this_thread;

		if (_this->m_sync.bit_test_reset(2) || _this->m_taskq)
		{
			return;
		}

		atomic_wait::list<Max + 2> list{};
		list.template set<Max>(_this->m_sync, 0);
		list.template set<Max + 1>(_this->m_taskq);
		setter(list);
		list.wait(atomic_wait_timeout{usec <= 0xffff'ffff'ffff'ffff / 1000 ? usec * 1000 : 0xffff'ffff'ffff'ffff});
	}

	template <typename T, typename U>
	static inline void wait_on(T& wait, U old, u64 usec = -1)
	{
		wait_on_custom<1>([&](atomic_wait::list<3>& list) { list.template set<0>(wait, old); }, usec);
	}

	template <typename T>
	static inline void wait_on(T& wait)
	{
		wait_on_custom<1>([&](atomic_wait::list<3>& list) { list.template set<0>(wait); });
	}
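
	// Illustrative sketch (assumption, not part of this header): a thread body that sleeps on its
	// own atomic flag via thread_ctrl::wait_on and still wakes up when the thread is notified or
	// aborted. The variable names below are made up for the example.
	//
	//   atomic_t<u32> g_flag{0};
	//
	//   while (thread_ctrl::state() != thread_state::aborting)
	//   {
	//       if (u32 v = g_flag.exchange(0))
	//       {
	//           // ... handle v ...
	//           continue;
	//       }
	//
	//       // Returns when g_flag changes from 0, the thread is notified, or it is being aborted
	//       thread_ctrl::wait_on(g_flag, 0);
	//   }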

	// Exit.
	[[noreturn]] static void emergency_exit(std::string_view reason);

	// Get current thread (may be nullptr)
	static thread_base* get_current()
	{
		return g_tls_this_thread;
	}

	// Detect layout
	static void detect_cpu_layout();

	// Returns a core affinity mask for the given thread class
	static u64 get_affinity_mask(thread_class group);

	// Sets the native thread priority
	static void set_native_priority(int priority);

	// Sets the preferred affinity mask for this thread
	static void set_thread_affinity_mask(u64 mask);

	// Get process affinity mask
	static u64 get_process_affinity_mask();

	// Miscellaneous
	static u64 get_thread_affinity_mask();

	// Get current thread stack addr and size
	static std::pair<void*, usz> get_thread_stack();

	// Sets the native thread priority and returns it to zero at destructor
	struct scoped_priority
	{
		explicit scoped_priority(int prio)
		{
			set_native_priority(prio);
		}

		scoped_priority(const scoped_priority&) = delete;

		scoped_priority& operator=(const scoped_priority&) = delete;

		~scoped_priority()
		{
			set_native_priority(0);
		}
	};
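
	// Illustrative sketch (assumption, not part of this header): temporarily raise the native
	// priority of the calling thread for a latency-sensitive section; the destructor restores
	// priority 0 when the scope is left.
	//
	//   {
	//       thread_ctrl::scoped_priority high(+1);
	//       // ... time-critical work ...
	//   } // priority reset to 0 here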

	// Get thread ID (works for all threads)
	static u64 get_tid();

	// Check whether current thread is main thread (usually Qt GUI)
	static bool is_main();

private:
	// Miscellaneous
	static const u64 process_affinity_mask;
};

// Used internally
template <bool Discard, typename Ctx, typename... Args>
class thread_future_t : public thread_future, result_storage<Ctx, std::conditional_t<Discard, int, void>, Args...>
{
	[[no_unique_address]] decltype(std::make_tuple(std::forward<Args>(std::declval<Args>())...)) m_args;

	[[no_unique_address]] Ctx m_func;

	using future = thread_future_t;

public:
	thread_future_t(Ctx&& func, Args&&... args)
		: m_args(std::forward<Args>(args)...)
		, m_func(std::forward<Ctx>(func))
	{
		thread_future::exec.raw() = +[](thread_base* tb, thread_future* tf)
		{
			const auto _this = static_cast<future*>(tf);

			if (!tb) [[unlikely]]
			{
				if constexpr (!future::empty && !Discard)
				{
					_this->init();
				}

				return;
			}

			if constexpr (future::empty || Discard)
			{
				std::apply(_this->m_func, std::move(_this->m_args));
			}
			else
			{
				new (_this->_get()) decltype(auto)(std::apply(_this->m_func, std::move(_this->m_args)));
			}
		};
	}

	~thread_future_t()
	{
		if constexpr (!future::empty && !Discard)
		{
			if (!this->exec)
			{
				this->destroy();
			}
		}
	}

	decltype(auto) get()
	{
		while (this->exec)
		{
			this->wait();
		}

		if constexpr (!future::empty && !Discard)
		{
			return *this->_get();
		}
	}

	decltype(auto) get() const
	{
		while (this->exec)
		{
			this->wait();
		}

		if constexpr (!future::empty && !Discard)
		{
			return *this->_get();
		}
	}
};

namespace stx
{
	struct launch_retainer;
}

// Derived from the callable object Context, possibly a lambda
template <class Context>
class named_thread final : public Context, result_storage<Context>, thread_base
{
	using result = result_storage<Context>;
	using thread = thread_base;

	static u64 entry_point(thread_base* _base)
	{
		return static_cast<named_thread*>(_base)->entry_point2();
	}

	u64 entry_point2()
	{
		thread::initialize([]()
		{
			if constexpr (!result::empty)
			{
				// Construct using default constructor in the case of failure
				static_cast<result*>(static_cast<named_thread*>(thread_ctrl::get_current()))->init();
			}
		});

		if constexpr (result::empty)
		{
			// No result
			if constexpr (std::is_invocable_v<Context>)
			{
				Context::operator()();
			}
			else
			{
				// Default event loop
				while (thread_ctrl::state() != thread_state::aborting)
				{
					thread_ctrl::wait();
				}
			}
		}
		else
		{
			// Construct the result using placement new (copy elision should happen)
			new (result::_get()) decltype(auto)(Context::operator()());
		}

		return thread::finalize(thread_state::finished);
	}

#if defined(ARCH_X64)
	static inline thread::native_entry trampoline = thread::make_trampoline(entry_point);
#else
	static void* trampoline(void* arg)
	{
		if (const auto next = thread_base::finalize(entry_point(static_cast<thread_base*>(arg))))
		{
			return next(thread_ctrl::get_current());
		}

		return nullptr;
	}
#endif

	friend class thread_ctrl;

public:
	// Forwarding constructor with default name (also potentially the default constructor)
	template <typename... Args> requires (std::is_constructible_v<Context, Args&&...>) && (!(std::is_same_v<std::remove_cvref_t<Args>, stx::launch_retainer> || ...)) && (NamedThreadName<Context>)
	named_thread(Args&&... args) noexcept
		: Context(std::forward<Args>(args)...)
		, thread(trampoline, std::string(Context::thread_name))
	{
		thread::start();
	}

	// Forwarding constructor with default name, does not automatically run the thread
	template <typename... Args> requires (std::is_constructible_v<Context, Args&&...>) && (NamedThreadName<Context>)
	named_thread(const stx::launch_retainer&, Args&&... args) noexcept
		: Context(std::forward<Args>(args)...)
		, thread(trampoline, std::string(Context::thread_name))
	{
		// Create a stand-by thread context
		m_sync |= static_cast<u32>(thread_state::finished);
	}

	// Normal forwarding constructor
	template <typename... Args> requires (std::is_constructible_v<Context, Args&&...>) && (!NamedThreadName<Context>)
	named_thread(std::string name, Args&&... args) noexcept
		: Context(std::forward<Args>(args)...)
		, thread(trampoline, std::move(name))
	{
		thread::start();
	}

	// Lambda constructor, also the implicit deduction guide candidate
	named_thread(std::string_view name, Context&& f) noexcept requires (!NamedThreadName<Context>)
		: Context(std::forward<Context>(f))
		, thread(trampoline, std::string(name))
	{
		thread::start();
	}

	named_thread(const named_thread&) = delete;

	named_thread& operator=(const named_thread&) = delete;

	// Wait for the completion and access result (if not void)
	[[nodiscard]] decltype(auto) operator()()
	{
		thread::join();

		if constexpr (!result::empty)
		{
			return *result::_get();
		}
	}

	// Wait for the completion and access result (if not void)
	[[nodiscard]] decltype(auto) operator()() const
	{
		thread::join();

		if constexpr (!result::empty)
		{
			return *result::_get();
		}
	}

	// Send command to the thread to invoke directly (references should be passed via std::ref())
	template <bool Discard = true, typename Arg, typename... Args>
	auto operator()(Arg&& arg, Args&&... args)
	{
		// Overloaded operator() of the Context.
		constexpr bool v1 = std::is_invocable_v<Context, Arg&&, Args&&...>;

		// Anything invocable, not necessarily involving the Context.
		constexpr bool v2 = std::is_invocable_v<Arg&&, Args&&...>;

		// Could be pointer to a non-static member function (or data member) of the Context.
		constexpr bool v3 = std::is_member_pointer_v<std::decay_t<Arg>> && std::is_invocable_v<Arg, Context&, Args&&...>;

		// Only one invocation type shall be valid, otherwise we don't know.
		static_assert((v1 + v2 + v3) == 1, "Ambiguous or invalid named_thread call.");

		if constexpr (v1)
		{
			using future = thread_future_t<Discard, Context&, Arg, Args...>;

			single_ptr<future> target = make_single<future>(*static_cast<Context*>(this), std::forward<Arg>(arg), std::forward<Args>(args)...);

			if constexpr (!Discard)
			{
				shared_ptr<future> result = std::move(target);

				// Copy result
				thread::push(result);
				return result;
			}
			else
			{
				// Move target
				thread::push(std::move(target));
				return;
			}
		}
		else if constexpr (v2)
		{
			using future = thread_future_t<Discard, Arg, Args...>;

			single_ptr<future> target = make_single<future>(std::forward<Arg>(arg), std::forward<Args>(args)...);

			if constexpr (!Discard)
			{
				shared_ptr<future> result = std::move(target);
				thread::push(result);
				return result;
			}
			else
			{
				thread::push(std::move(target));
				return;
			}
		}
		else if constexpr (v3)
		{
			using future = thread_future_t<Discard, Arg, Context&, Args...>;

			single_ptr<future> target = make_single<future>(std::forward<Arg>(arg), std::ref(*static_cast<Context*>(this)), std::forward<Args>(args)...);

			if constexpr (!Discard)
			{
				shared_ptr<future> result = std::move(target);
				thread::push(result);
				return result;
			}
			else
			{
				thread::push(std::move(target));
				return;
			}
		}
	}

	// Access thread state
	operator thread_state() const
	{
		return static_cast<thread_state>(thread::m_sync.load() & 3);
	}

	named_thread& operator=(thread_state s)
	{
		if (s == thread_state::created)
		{
			// Run thread
			ensure(operator thread_state() == thread_state::finished);
			thread::start();
			return *this;
		}

		bool notify_sync = false;

		// Try to abort by assigning thread_state::aborting/finished
		// Join thread by thread_state::finished
		if (s >= thread_state::aborting && thread::m_sync.fetch_op([](u32& v) { return !(v & 3) && (v |= 1); }).second)
		{
			notify_sync = true;
		}

		if constexpr (std::is_assignable_v<Context&, thread_state>)
		{
			static_cast<Context&>(*this) = s;
		}

		if (notify_sync)
		{
			// Notify after context abortion has been made, so that all conditions for wake-up are satisfied by the time of notification
			thread::m_sync.notify_all();
		}

		if (s == thread_state::finished)
		{
			// This participates in emulation stopping, use destruction-alike semantics
			thread::join(true);
		}

		return *this;
	}

	// Context type doesn't need virtual destructor
	~named_thread()
	{
		// Assign aborting state forcefully and join thread
		operator=(thread_state::finished);

		if constexpr (!result::empty)
		{
			result::destroy();
		}
	}
};
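
// Illustrative sketch (assumption, not part of this header): typical ways to use named_thread.
// The names "sum", "worker" and the lambdas below are made up for the example.
//
//   // Run a lambda on its own thread; CTAD picks named_thread<decltype(lambda)>
//   named_thread sum("Sum Thread", []() -> u64
//   {
//       u64 r = 0;
//       for (u64 i = 1; i <= 1000; i++) r += i;
//       return r;
//   });
//
//   const u64 value = sum(); // joins the thread and returns the stored result (500500)
//
//   // A non-invocable (or looping) context keeps running until aborted, e.g. by assignment:
//   // sum = thread_state::aborting;
//
//   // Send extra work to a running thread; with Discard=false a future-like handle is returned
//   // whose get() blocks until the command has been executed on the target thread.
//   named_thread worker("Worker", []()
//   {
//       while (thread_ctrl::state() != thread_state::aborting)
//       {
//           thread_ctrl::wait();
//       }
//   });
//
//   auto fut = worker.operator()<false>([]() { return 42; });
//   const int forty_two = fut->get();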

// Group of named threads, similar to named_thread
template <class Context>
class named_thread_group final
{
	using Thread = named_thread<Context>;

	u32 m_count = 0;

	Thread* m_threads;

	void init_threads()
	{
		m_threads = static_cast<Thread*>(::operator new(sizeof(Thread) * m_count, std::align_val_t{alignof(Thread)}));
	}

public:
	// Lambda constructor, also the implicit deduction guide candidate
	named_thread_group(std::string_view name, u32 count, Context&& f) noexcept
		: m_count(count)
		, m_threads(nullptr)
	{
		if (count == 0)
		{
			return;
		}

		init_threads();

		// Create all threads
		for (u32 i = 0; i < m_count - 1; i++)
		{
			// Copy the context
			new (static_cast<void*>(m_threads + i)) Thread(std::string(name) + std::to_string(i + 1), static_cast<const Context&>(f));
		}

		// Move the context (if movable); the last thread takes the next sequential name suffix
		new (static_cast<void*>(m_threads + m_count - 1)) Thread(std::string(name) + std::to_string(m_count), std::forward<Context>(f));
	}

	// Constructor with a function performed before adding more threads
	template <typename CheckAndPrepare>
	named_thread_group(std::string_view name, u32 count, Context&& f, CheckAndPrepare&& check) noexcept
		: m_count(count)
		, m_threads(nullptr)
	{
		if (count == 0)
		{
			return;
		}

		init_threads();
		m_count = 0;

		// Create all threads
		for (u32 i = 0; i < count - 1; i++)
		{
			// Copy the context
			std::remove_cvref_t<Context> context(static_cast<const Context&>(f));

			// Perform the check and additional preparations for each context
			if (!std::invoke(std::forward<CheckAndPrepare>(check), i, context))
			{
				return;
			}

			m_count++;
			new (static_cast<void*>(m_threads + i)) Thread(std::string(name) + std::to_string(i + 1), std::move(context));
		}

		// Move the context (if movable)
		std::remove_cvref_t<Context> context(std::forward<Context>(f));

		// Perform the check and additional preparations for the last context (its index is m_count == count - 1 here)
		if (!std::invoke(std::forward<CheckAndPrepare>(check), m_count, context))
		{
			return;
		}

		m_count++;
		new (static_cast<void*>(m_threads + m_count - 1)) Thread(std::string(name) + std::to_string(m_count), std::move(context));
	}

	// Default constructor
	named_thread_group(std::string_view name, u32 count) noexcept
		: m_count(count)
		, m_threads(nullptr)
	{
		if (count == 0)
		{
			return;
		}

		init_threads();

		// Create all threads
		for (u32 i = 0; i < m_count; i++)
		{
			new (static_cast<void*>(m_threads + i)) Thread(std::string(name) + std::to_string(i + 1));
		}
	}

	named_thread_group(const named_thread_group&) = delete;

	named_thread_group& operator=(const named_thread_group&) = delete;

	// Wait for completion
	bool join() const
	{
		bool result = true;

		for (u32 i = 0; i < m_count; i++)
		{
			std::as_const(*std::launder(m_threads + i))();

			if (std::as_const(*std::launder(m_threads + i)) != thread_state::finished)
				result = false;
		}

		return result;
	}

	// Join and access specific thread
	auto operator[](u32 index) const
	{
		return std::as_const(*std::launder(m_threads + index))();
	}

	// Join and access specific thread
	auto operator[](u32 index)
	{
		return (*std::launder(m_threads + index))();
	}

	// Dumb iterator
	auto begin()
	{
		return std::launder(m_threads);
	}

	// Dumb iterator
	auto end()
	{
		return m_threads + m_count;
	}

	u32 size() const
	{
		return m_count;
	}

	~named_thread_group() noexcept
	{
		// Destroy all threads (it should join them)
		for (u32 i = m_count - 1; i < m_count; i--)
		{
			std::launder(m_threads + i)->~Thread();
		}

		::operator delete(static_cast<void*>(m_threads), std::align_val_t{alignof(Thread)});
	}
};
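
// Illustrative sketch (assumption, not part of this header): spawn a small pool of identical
// worker threads and wait for all of them. The name "pool" and the lambda are made up.
//
//   named_thread_group pool("Worker ", 4, []()
//   {
//       // Each copy of this context runs on its own thread, named "Worker 1".."Worker 4"
//   });
//
//   // Blocks until every thread has completed and reports whether all reached 'finished'
//   const bool all_done = pool.join();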