// rpcs3/Utilities/GNU.h

#pragma once
#include <emmintrin.h>
#include <cstdint>
#include <cstring>
#include <string>
#ifdef _WIN32
#define thread_local __declspec(thread)
#elif __APPLE__
#define thread_local __thread
#endif
#ifdef _WIN32
#define __noinline __declspec(noinline)
#else
#define __noinline __attribute__((noinline))
#endif
template<size_t size>
void strcpy_trunc(char(&dst)[size], const std::string& src)
{
const size_t count = (src.size() >= size) ? size - 1 /* truncation */ : src.size();
memcpy(dst, src.c_str(), count);
dst[count] = 0;
}
template<size_t size, size_t rsize>
void strcpy_trunc(char(&dst)[size], const char(&src)[rsize])
{
const size_t count = (rsize >= size) ? size - 1 /* truncation */ : rsize;
memcpy(dst, src, count);
dst[count] = 0;
}
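// Illustrative usage only (not part of the original header): both overloads
// guarantee a NUL-terminated result that fits the destination buffer, e.g.
//
//   char name[8];
//   strcpy_trunc(name, std::string("PlayStation")); // name == "PlaySta" (truncated)
//   strcpy_trunc(name, "PS3");                      // name == "PS3"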
#if defined(__GNUG__)
#include <cmath>
#include <stdlib.h>
#include <cstdint>
#ifndef __APPLE__
#include <malloc.h>
#endif
#define _fpclass(x) std::fpclassify(x)
#define __forceinline __attribute__((always_inline)) inline
#define _byteswap_ushort(x) __builtin_bswap16(x)
#define _byteswap_ulong(x) __builtin_bswap32(x)
#define _byteswap_uint64(x) __builtin_bswap64(x)
#define INFINITE 0xFFFFFFFF
#define _CRT_ALIGN(x) __attribute__((aligned(x)))
inline uint64_t __umulh(uint64_t a, uint64_t b)
{
uint64_t result;
__asm__("mulq %[b]" : "=d" (result) : [a] "a" (a), [b] "rm" (b));
return result;
}
inline int64_t __mulh(int64_t a, int64_t b)
{
int64_t result;
__asm__("imulq %[b]" : "=d" (result) : [a] "a" (a), [b] "rm" (b));
return result;
}
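// Illustrative only: these provide GCC/Clang equivalents of MSVC's __umulh/__mulh
// intrinsics, i.e. the upper 64 bits of the full 128-bit product, e.g.
//
//   __umulh(0x8000000000000000ull, 2) == 1
//   __umulh(0xFFFFFFFFFFFFFFFFull, 0xFFFFFFFFFFFFFFFFull) == 0xFFFFFFFFFFFFFFFEull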
void * _aligned_malloc(size_t size, size_t alignment);
#ifdef __APPLE__
int clock_gettime(int foo, struct timespec *ts);
#define wxIsNaN(x) ((x) != (x))
#ifndef CLOCK_MONOTONIC
#define CLOCK_MONOTONIC 0
#endif /* !CLOCK_MONOTONIC */
#endif /* __APPLE__ */
#define _aligned_free free
#define DWORD int32_t
#endif
#ifndef InterlockedCompareExchange
static __forceinline uint8_t InterlockedCompareExchange(volatile uint8_t* dest, uint8_t exch, uint8_t comp)
{
#if defined(__GNUG__)
return __sync_val_compare_and_swap(dest, comp, exch);
#else
return _InterlockedCompareExchange8((volatile char*)dest, exch, comp);
#endif
}
static __forceinline uint16_t InterlockedCompareExchange(volatile uint16_t* dest, uint16_t exch, uint16_t comp)
{
#if defined(__GNUG__)
return __sync_val_compare_and_swap(dest, comp, exch);
#else
return _InterlockedCompareExchange16((volatile short*)dest, exch, comp);
#endif
}
static __forceinline uint32_t InterlockedCompareExchange(volatile uint32_t* dest, uint32_t exch, uint32_t comp)
{
#if defined(__GNUG__)
return __sync_val_compare_and_swap(dest, comp, exch);
#else
return _InterlockedCompareExchange((volatile long*)dest, exch, comp);
#endif
}
static __forceinline uint64_t InterlockedCompareExchange(volatile uint64_t* dest, uint64_t exch, uint64_t comp)
{
#if defined(__GNUG__)
return __sync_val_compare_and_swap(dest, comp, exch);
#else
return _InterlockedCompareExchange64((volatile long long*)dest, exch, comp);
#endif
}
#endif
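// Illustrative only (hypothetical variable names): the usual lock-free
// read-modify-write loop built on InterlockedCompareExchange, e.g.
//
//   volatile uint32_t flags = 0;
//   uint32_t old_value, new_value;
//   do
//   {
//       old_value = flags;
//       new_value = old_value | 0x1;
//   }
//   while (InterlockedCompareExchange(&flags, new_value, old_value) != old_value);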
static __forceinline bool InterlockedCompareExchangeTest(volatile uint8_t* dest, uint8_t exch, uint8_t comp)
{
#if defined(__GNUG__)
return __sync_bool_compare_and_swap(dest, comp, exch);
#else
return (uint8_t)_InterlockedCompareExchange8((volatile char*)dest, exch, comp) == comp;
#endif
}
static __forceinline bool InterlockedCompareExchangeTest(volatile uint16_t* dest, uint16_t exch, uint16_t comp)
{
#if defined(__GNUG__)
return __sync_bool_compare_and_swap(dest, comp, exch);
#else
return (uint16_t)_InterlockedCompareExchange16((volatile short*)dest, exch, comp) == comp;
#endif
}
static __forceinline bool InterlockedCompareExchangeTest(volatile uint32_t* dest, uint32_t exch, uint32_t comp)
{
#if defined(__GNUG__)
return __sync_bool_compare_and_swap(dest, comp, exch);
#else
return (uint32_t)_InterlockedCompareExchange((volatile long*)dest, exch, comp) == comp;
#endif
}
static __forceinline bool InterlockedCompareExchangeTest(volatile uint64_t* dest, uint64_t exch, uint64_t comp)
{
#if defined(__GNUG__)
return __sync_bool_compare_and_swap(dest, comp, exch);
#else
return (uint64_t)_InterlockedCompareExchange64((volatile long long*)dest, exch, comp) == comp;
#endif
}
#ifndef InterlockedExchange
static __forceinline uint8_t InterlockedExchange(volatile uint8_t* dest, uint8_t value)
{
#if defined(__GNUG__)
return __sync_lock_test_and_set(dest, value);
#else
return _InterlockedExchange8((volatile char*)dest, value);
#endif
}
static __forceinline uint16_t InterlockedExchange(volatile uint16_t* dest, uint16_t value)
{
#if defined(__GNUG__)
return __sync_lock_test_and_set(dest, value);
#else
return _InterlockedExchange16((volatile short*)dest, value);
#endif
}
static __forceinline uint32_t InterlockedExchange(volatile uint32_t* dest, uint32_t value)
{
#if defined(__GNUG__)
return __sync_lock_test_and_set(dest, value);
#else
return _InterlockedExchange((volatile long*)dest, value);
#endif
}
static __forceinline uint64_t InterlockedExchange(volatile uint64_t* dest, uint64_t value)
{
#if defined(__GNUG__)
return __sync_lock_test_and_set(dest, value);
#else
return _InterlockedExchange64((volatile long long*)dest, value);
#endif
}
#endif
#ifndef InterlockedOr
static __forceinline uint8_t InterlockedOr(volatile uint8_t* dest, uint8_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_or(dest, value);
#else
return _InterlockedOr8((volatile char*)dest, value);
#endif
}
static __forceinline uint16_t InterlockedOr(volatile uint16_t* dest, uint16_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_or(dest, value);
#else
return _InterlockedOr16((volatile short*)dest, value);
#endif
}
static __forceinline uint32_t InterlockedOr(volatile uint32_t* dest, uint32_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_or(dest, value);
#else
return _InterlockedOr((volatile long*)dest, value);
#endif
}
static __forceinline uint64_t InterlockedOr(volatile uint64_t* dest, uint64_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_or(dest, value);
#else
return _InterlockedOr64((volatile long long*)dest, value);
#endif
}
#endif
#ifndef InterlockedAnd
static __forceinline uint8_t InterlockedAnd(volatile uint8_t* dest, uint8_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_and(dest, value);
#else
return _InterlockedAnd8((volatile char*)dest, value);
#endif
}
static __forceinline uint16_t InterlockedAnd(volatile uint16_t* dest, uint16_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_and(dest, value);
#else
return _InterlockedAnd16((volatile short*)dest, value);
#endif
}
static __forceinline uint32_t InterlockedAnd(volatile uint32_t* dest, uint32_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_and(dest, value);
#else
return _InterlockedAnd((volatile long*)dest, value);
#endif
}
static __forceinline uint64_t InterlockedAnd(volatile uint64_t* dest, uint64_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_and(dest, value);
#else
return _InterlockedAnd64((volatile long long*)dest, value);
#endif
}
#endif
#ifndef InterlockedXor
static __forceinline uint8_t InterlockedXor(volatile uint8_t* dest, uint8_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_xor(dest, value);
#else
return _InterlockedXor8((volatile char*)dest, value);
#endif
}
static __forceinline uint16_t InterlockedXor(volatile uint16_t* dest, uint16_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_xor(dest, value);
#else
return _InterlockedXor16((volatile short*)dest, value);
#endif
}
static __forceinline uint32_t InterlockedXor(volatile uint32_t* dest, uint32_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_xor(dest, value);
#else
return _InterlockedXor((volatile long*)dest, value);
#endif
}
static __forceinline uint64_t InterlockedXor(volatile uint64_t* dest, uint64_t value)
{
#if defined(__GNUG__)
return __sync_fetch_and_xor(dest, value);
#else
return _InterlockedXor64((volatile long long*)dest, value);
#endif
}
#endif
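// Illustrative only: like the Windows intrinsics they mirror, InterlockedExchange,
// InterlockedOr, InterlockedAnd and InterlockedXor return the value the destination
// held before the operation, e.g.
//
//   volatile uint8_t flags = 0x0F;
//   uint8_t prev = InterlockedAnd(&flags, (uint8_t)0x03); // prev == 0x0F, flags == 0x03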
static __forceinline uint32_t cntlz32(uint32_t arg)
{
#if defined(__GNUG__)
return arg ? __builtin_clz(arg) : 32;
#else
unsigned long res;
if (!_BitScanReverse(&res, arg))
{
return 32;
}
else
{
return res ^ 31;
}
#endif
}
static __forceinline uint64_t cntlz64(uint64_t arg)
{
#if defined(__GNUG__)
return arg ? __builtin_clzll(arg) : 64;
#else
unsigned long res;
if (!_BitScanReverse64(&res, arg))
{
return 64;
}
else
{
return res ^ 63;
}
#endif
}
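// Illustrative only: both helpers count leading zero bits and are well defined
// for a zero argument (unlike the raw __builtin_clz* builtins), e.g.
//
//   cntlz32(0x00010000) == 15
//   cntlz32(0)          == 32
//   cntlz64(1)          == 63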
// compare 16 packed unsigned bytes (greater than)
static __forceinline __m128i _mm_cmpgt_epu8(__m128i A, __m128i B)
{
// (A xor 0x80) > (B xor 0x80)
return _mm_cmpgt_epi8(_mm_xor_si128(A, _mm_set1_epi8(-128)), _mm_xor_si128(B, _mm_set1_epi8(-128)));
}
// compare 16 packed unsigned bytes (less or equal)
static __forceinline __m128i _mm_cmple_epu8(__m128i A, __m128i B)
{
// ((B xor 0x80) > (A xor 0x80)) || A == B
return _mm_or_si128(_mm_cmpgt_epu8(B, A), _mm_cmpeq_epi8(A, B));
}
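// Illustrative only: like the native SSE comparisons, these helpers return 0xFF in
// every byte lane where the predicate holds and 0x00 elsewhere, e.g.
//
//   __m128i a = _mm_set1_epi8((char)0xFF); // 255 in every byte
//   __m128i b = _mm_set1_epi8(1);
//   __m128i r = _mm_cmpgt_epu8(a, b);      // all lanes 0xFF: 255 > 1 as unsigned,
//                                          // whereas signed _mm_cmpgt_epi8 would give 0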