author     Mikael Ronstrom <mikael@mysql.com>    2009-12-18 09:29:18 +0100
committer  Mikael Ronstrom <mikael@mysql.com>    2009-12-18 09:29:18 +0100
commit     3ad70924c90bdacec58dd27abe68daaa678c0322 (patch)
tree       45f7650d6e8a9039ffc8f765c30b8619d19dc56d /include/atomic
parent     4e8d1c6bf30abfd45a993b058ff2a33d4671b73d (diff)
download   mariadb-git-3ad70924c90bdacec58dd27abe68daaa678c0322.tar.gz
Added extra checks of 64-bit atomic support on GCC and Solaris; also added the missing 64-bit support in solaris.h
Diffstat (limited to 'include/atomic')
-rw-r--r--  include/atomic/solaris.h  46
1 file changed, 45 insertions(+), 1 deletion(-)
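
The configure-side probe for native 64-bit atomics mentioned in the commit message is not part of this diff (it is limited to include/atomic). A minimal stand-alone sketch of such a check, assuming a GCC-style __sync builtin probe rather than the exact macro used by the build system, could look like this: if the program below compiles, links, and exits 0, the toolchain supports 64-bit compare-and-swap natively.

#include <stdint.h>

int main(void)
{
  /* A build-system check would compile and run a program like this and
     inspect the link result / exit status. */
  volatile int64_t target= 0;
  int64_t desired= 1;
  /* __sync_bool_compare_and_swap() returns nonzero when the swap happened. */
  return __sync_bool_compare_and_swap(&target, (int64_t)0, desired) ? 0 : 1;
}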
diff --git a/include/atomic/solaris.h b/include/atomic/solaris.h
index 45efd9faaba..34c0c6de0ed 100644
--- a/include/atomic/solaris.h
+++ b/include/atomic/solaris.h
@@ -1,4 +1,4 @@
-/* Copyright (C) 2008 MySQL AB
+/* Copyright (C) 2008 MySQL AB, 2009 Sun Microsystems, Inc
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -61,6 +61,18 @@ my_atomic_cas32(int32 volatile *a, int32 *cmp, int32 set)
}
STATIC_INLINE int
+my_atomic_cas64(int64 volatile *a, int64 *cmp, int64 set)
+{
+ int ret;
+ int64 sav;
+ sav = (int64) atomic_cas_64((volatile uint64_t *)a, (uint64_t)*cmp,
+ (uint64_t)set);
+ if (! (ret = (sav == *cmp)))
+ *cmp = sav;
+ return ret;
+}
+
+STATIC_INLINE int
my_atomic_casptr(void * volatile *a, void **cmp, void *set)
{
int ret;
@@ -97,6 +109,14 @@ my_atomic_add32(int32 volatile *a, int32 v)
return (nv - v);
}
+STATIC_INLINE int64
+my_atomic_add64(int64 volatile *a, int64 v)
+{
+ int64 nv;
+ nv = atomic_add_64_nv((volatile uint64_t *)a, v);
+ return (nv - v);
+}
+
/* ------------------------------------------------------------------------ */
#ifdef MY_ATOMIC_MODE_DUMMY
@@ -110,6 +130,9 @@ my_atomic_load16(int16 volatile *a) { return (*a); }
STATIC_INLINE int32
my_atomic_load32(int32 volatile *a) { return (*a); }
+STATIC_INLINE int64
+my_atomic_load64(int64 volatile *a) { return (*a); }
+
STATIC_INLINE void *
my_atomic_loadptr(void * volatile *a) { return (*a); }
@@ -125,6 +148,9 @@ STATIC_INLINE void
my_atomic_store32(int32 volatile *a, int32 v) { *a = v; }
STATIC_INLINE void
+my_atomic_store64(int64 volatile *a, int64 v) { *a = v; }
+
+STATIC_INLINE void
my_atomic_storeptr(void * volatile *a, void *v) { *a = v; }
/* ------------------------------------------------------------------------ */
@@ -149,6 +175,12 @@ my_atomic_load32(int32 volatile *a)
return ((int32) atomic_or_32_nv((volatile uint32_t *)a, 0));
}
+STATIC_INLINE int64
+my_atomic_load64(int64 volatile *a)
+{
+ return ((int64) atomic_or_64_nv((volatile uint64_t *)a, 0));
+}
+
STATIC_INLINE void *
my_atomic_loadptr(void * volatile *a)
{
@@ -176,6 +208,12 @@ my_atomic_store32(int32 volatile *a, int32 v)
}
STATIC_INLINE void
+my_atomic_store64(int64 volatile *a, int64 v)
+{
+ (void) atomic_swap_64((volatile uint64_t *)a, (uint64_t)v);
+}
+
+STATIC_INLINE void
my_atomic_storeptr(void * volatile *a, void *v)
{
(void) atomic_swap_ptr(a, v);
@@ -203,6 +241,12 @@ my_atomic_fas32(int32 volatile *a, int32 v)
return ((int32) atomic_swap_32((volatile uint32_t *)a, (uint32_t)v));
}
+STATIC_INLINE int64
+my_atomic_fas64(int64 volatile *a, int64 v)
+{
+ return ((int64) atomic_swap_64((volatile uint64_t *)a, (uint64_t)v));
+}
+
STATIC_INLINE void *
my_atomic_fasptr(void * volatile *a, void *v)
{