diff options
author | pquerna <pquerna@13f79535-47bb-0310-9956-ffa450edef68> | 2006-07-15 07:40:39 +0000 |
---|---|---|
committer | pquerna <pquerna@13f79535-47bb-0310-9956-ffa450edef68> | 2006-07-15 07:40:39 +0000 |
commit | 1dcaf383698cf84b82a9de245be78f0fdc780c17 (patch) | |
tree | cf42e6149c699c72eae81980cbebf0a5ae47c8ee /atomic | |
parent | b43ca2d1316214e110b2d49b6129183fad2dd4b5 (diff) | |
download | libapr-1dcaf383698cf84b82a9de245be78f0fdc780c17.tar.gz |
Add __MINGW32__ versions of the Win32 Atomic functions, that all do their own evil casting -- but it allows 100% of the testatomic cases to pass, when previously, it aborted if you tried to call any of the atomic functions.
git-svn-id: http://svn.apache.org/repos/asf/apr/apr/trunk@422168 13f79535-47bb-0310-9956-ffa450edef68
Diffstat (limited to 'atomic')
-rw-r--r-- | atomic/win32/apr_atomic.c | 16 |
1 file changed, 15 insertions, 1 deletion
diff --git a/atomic/win32/apr_atomic.c b/atomic/win32/apr_atomic.c index 3b664a212..907476269 100644 --- a/atomic/win32/apr_atomic.c +++ b/atomic/win32/apr_atomic.c @@ -43,6 +43,8 @@ APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint3 { #if (defined(_M_IA64) || defined(_M_AMD64)) return InterlockedExchangeAdd(mem, val); +#elif defined(__MINGW32__) + return InterlockedExchangeAdd((long *)mem, val); #else return ((apr_atomic_win32_ptr_val_fn)InterlockedExchangeAdd)(mem, val); #endif @@ -55,6 +57,8 @@ APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val) { #if (defined(_M_IA64) || defined(_M_AMD64)) InterlockedExchangeAdd(mem, -val); +#elif defined(__MINGW32__) + InterlockedExchangeAdd((long *)mem, -val); #else ((apr_atomic_win32_ptr_val_fn)InterlockedExchangeAdd)(mem, -val); #endif @@ -65,6 +69,8 @@ APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem) /* we return old value, win32 returns new value :( */ #if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED) return InterlockedIncrement(mem) - 1; +#elif defined(__MINGW32__) + return InterlockedIncrement((long *)mem) - 1; #else return ((apr_atomic_win32_ptr_fn)InterlockedIncrement)(mem) - 1; #endif @@ -74,6 +80,8 @@ APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem) { #if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED) return InterlockedDecrement(mem); +#elif defined(__MINGW32__) + return InterlockedDecrement((long *)mem); #else return ((apr_atomic_win32_ptr_fn)InterlockedDecrement)(mem); #endif @@ -83,6 +91,8 @@ APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val) { #if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED) InterlockedExchange(mem, val); +#elif defined(__MINGW32__) + InterlockedExchange((long*)mem, val); #else ((apr_atomic_win32_ptr_val_fn)InterlockedExchange)(mem, val); #endif @@ -98,6 +108,8 @@ APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile 
apr_uint32_t *mem, apr_uint3 { #if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED) return InterlockedCompareExchange(mem, with, cmp); +#elif defined(__MINGW32__) + return InterlockedCompareExchange((long*)mem, with, cmp); #else return ((apr_atomic_win32_ptr_val_val_fn)InterlockedCompareExchange)(mem, with, cmp); #endif @@ -105,7 +117,7 @@ APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint3 APR_DECLARE(void *) apr_atomic_casptr(volatile void **mem, void *with, const void *cmp) { -#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED) +#if (defined(_M_IA64) || defined(_M_AMD64) || defined(__MINGW32__)) && !defined(RC_INVOKED) return InterlockedCompareExchangePointer((void**)mem, with, (void*)cmp); #else /* Too many VC6 users have stale win32 API files, stub this */ @@ -117,6 +129,8 @@ APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint { #if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED) return InterlockedExchange(mem, val); +#elif defined(__MINGW32__) + return InterlockedExchange((long *)mem, val); #else return ((apr_atomic_win32_ptr_val_fn)InterlockedExchange)(mem, val); #endif |