summaryrefslogtreecommitdiff
path: root/include/atomic/rwlock.h
blob: ca5be29ab9b5a076eddd928d51c6929e3491d17d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
/* Copyright (C) 2006 MySQL AB

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */

/*
  Wrapper around a POSIX read-write lock. A pointer to one of these is
  passed to every my_atomic_* operation below to serialize access when
  native atomic instructions are not available (NULL means "no locking").
*/
typedef struct {pthread_rwlock_t rw;} my_atomic_rwlock_t;

/*
  Debug aid: under MY_ATOMIC_EXTRA_DEBUG each atomic variable remembers
  (in a->rw) the rwlock it was first used with; CHECK_RW asserts that the
  same lock is passed on every subsequent use, recording it on first use.
  Relies on the enclosing function's parameter names being exactly
  'a' and 'rw'.

  FIX: wrapped in do { } while (0) with explicit braces so that
  "CHECK_RW;" is always a single statement and the if/else pairing is
  explicit -- the original expansion relied on dangling-else binding and
  would break if the macro were ever used as the body of an if/else.
*/
#ifdef MY_ATOMIC_EXTRA_DEBUG
#define CHECK_RW do {                                                   \
    if (rw)                                                             \
    {                                                                   \
      if (a->rw)                                                        \
        assert(rw == a->rw);                                            \
      else                                                              \
        a->rw= rw;                                                      \
    }                                                                   \
  } while (0)
#else
#define CHECK_RW do { } while (0)
#endif

#ifdef MY_ATOMIC_MODE_DUMMY
/*
  The following can never be enabled by ./configure; one needs to put a
  #define in a source file to trigger the warning below. The resulting
  code will be broken: it only makes sense to do this to see how
  test_atomic detects broken implementations (another way is to run a UP
  build on an SMP box).
*/
#warning MY_ATOMIC_MODE_DUMMY and MY_ATOMIC_MODE_RWLOCKS are incompatible
/* dummy mode: every lock operation expands to nothing (no protection) */
#define my_atomic_rwlock_destroy(name)
#define my_atomic_rwlock_init(name)
#define my_atomic_rwlock_rdlock(name)
#define my_atomic_rwlock_wrlock(name)
#define my_atomic_rwlock_rdunlock(name)
#define my_atomic_rwlock_wrunlock(name)
#else
/* normal mode: map each operation onto the embedded POSIX rwlock */
#define my_atomic_rwlock_destroy(name)     pthread_rwlock_destroy(& (name)->rw)
#define my_atomic_rwlock_init(name)        pthread_rwlock_init(& (name)->rw, 0)
#define my_atomic_rwlock_rdlock(name)      pthread_rwlock_rdlock(& (name)->rw)
#define my_atomic_rwlock_wrlock(name)      pthread_rwlock_wrlock(& (name)->rw)
#define my_atomic_rwlock_rdunlock(name)    pthread_rwlock_unlock(& (name)->rw)
#define my_atomic_rwlock_wrunlock(name)    pthread_rwlock_unlock(& (name)->rw)
#endif

#ifdef HAVE_INLINE

/*
  make_atomic_add(S): generates my_atomic_addS(), a locked fetch-and-add.
  Adds v to *a under the write lock and returns the value *a held before
  the addition. A NULL rw performs the update without any locking.
  (Parameter names 'a' and 'rw' are required by CHECK_RW.)
*/
#define make_atomic_add(S)						\
static inline uint ## S my_atomic_add ## S(				\
        my_atomic_ ## S ## _t *a, uint ## S v, my_atomic_rwlock_t *rw)	\
{									\
  uint ## S old;							\
  CHECK_RW;								\
  if (rw)								\
    my_atomic_rwlock_wrlock(rw);					\
  old= a->val;								\
  a->val= old + v;							\
  if (rw)								\
    my_atomic_rwlock_wrunlock(rw);					\
  return old;								\
}

/*
  make_atomic_swap(S): generates my_atomic_swapS(), a locked exchange.
  Stores v into *a under the write lock and returns the previous value.
  A NULL rw performs the exchange without any locking.
  (Parameter names 'a' and 'rw' are required by CHECK_RW.)
*/
#define make_atomic_swap(S)						\
static inline uint ## S my_atomic_swap ## S(				\
        my_atomic_ ## S ## _t *a, uint ## S v, my_atomic_rwlock_t *rw)	\
{									\
  uint ## S old;							\
  CHECK_RW;								\
  if (rw)								\
    my_atomic_rwlock_wrlock(rw);					\
  old= a->val;								\
  a->val= v;								\
  if (rw)								\
    my_atomic_rwlock_wrunlock(rw);					\
  return old;								\
}

/*
  make_atomic_cas(S): generates my_atomic_casS(), a locked
  compare-and-swap. If *a equals *cmp, set is stored into *a and a
  non-zero value is returned; otherwise the current value of *a is copied
  back into *cmp and 0 is returned. A NULL rw skips locking.
  (Parameter names 'a' and 'rw' are required by CHECK_RW.)

  FIX: the comparison result is now computed in its own statement instead
  of being assigned inside the if condition ("if (ret= ...)"), which is an
  error-prone idiom that -Wparentheses rightly flags. Behavior unchanged.
*/
#define make_atomic_cas(S)						\
static inline uint my_atomic_cas ## S(my_atomic_ ## S ## _t *a,		\
        uint ## S *cmp, uint ## S set, my_atomic_rwlock_t *rw)		\
{									\
  uint ret;								\
  CHECK_RW;								\
  if (rw) my_atomic_rwlock_wrlock(rw);					\
  ret= (a->val == *cmp);						\
  if (ret)								\
    a->val= set;							\
  else									\
    *cmp= a->val;							\
  if (rw) my_atomic_rwlock_wrunlock(rw);				\
  return ret;								\
}

/*
  make_atomic_load(S): generates my_atomic_loadS(), a locked read that
  returns the current value of *a. A NULL rw skips locking.
  (Parameter names 'a' and 'rw' are required by CHECK_RW.)

  NOTE(review): the WRITE lock is taken even though this only reads.
  Conservative, but relaxing it to rdlock is only safe once every writer
  in this file holds the write lock -- verify before changing.
*/
#define make_atomic_load(S)						\
static inline uint ## S my_atomic_load ## S(				\
        my_atomic_ ## S ## _t *a, my_atomic_rwlock_t *rw)		\
{									\
  uint ## S cur;							\
  CHECK_RW;								\
  if (rw)								\
    my_atomic_rwlock_wrlock(rw);					\
  cur= a->val;								\
  if (rw)								\
    my_atomic_rwlock_wrunlock(rw);					\
  return cur;								\
}

/*
  make_atomic_store(S): generates my_atomic_storeS(), a locked write of v
  into *a. A NULL rw skips locking.
  (Parameter names 'a' and 'rw' are required by CHECK_RW.)

  BUG FIX: the original took only the READ lock around the store. A POSIX
  rwlock admits multiple concurrent rdlock holders, so two simultaneous
  stores could both acquire the lock and write a->val at the same time --
  a data race. A store mutates the variable and must hold the lock
  exclusively, like add/swap/cas above.
*/
#define make_atomic_store(S)						\
static inline void my_atomic_store ## S(				\
        my_atomic_ ## S ## _t *a, uint ## S v, my_atomic_rwlock_t *rw)	\
{									\
  CHECK_RW;								\
  if (rw) my_atomic_rwlock_wrlock(rw);					\
  (a)->val= (v);							\
  if (rw) my_atomic_rwlock_wrunlock(rw);				\
}

#else /* no inline functions */

/*
  Out-of-line variants: when inline functions are not available
  (!HAVE_INLINE), each make_atomic_* macro emits only an extern prototype;
  the matching definitions are compiled once in a separate source file.
  The semantics are the same as the inline versions above.
*/
#define make_atomic_add(S)						\
extern uint ## S my_atomic_add ## S(					\
        my_atomic_ ## S ## _t *a, uint ## S v, my_atomic_rwlock_t *rw);

#define make_atomic_swap(S)						\
extern uint ## S my_atomic_swap ## S(					\
        my_atomic_ ## S ## _t *a, uint ## S v, my_atomic_rwlock_t *rw);

/* cas returns plain uint: it is a boolean success flag, not a value of width S */
#define make_atomic_cas(S)						\
extern uint my_atomic_cas ## S(my_atomic_ ## S ## _t *a,		\
        uint ## S *cmp, uint ## S set, my_atomic_rwlock_t *rw);

#define make_atomic_load(S)						\
extern uint ## S my_atomic_load ## S(					\
        my_atomic_ ## S ## _t *a, my_atomic_rwlock_t *rw);

#define make_atomic_store(S)						\
extern void my_atomic_store ## S(					\
        my_atomic_ ## S ## _t *a, uint ## S v, my_atomic_rwlock_t *rw);

#endif

/*
  Instantiate every operation for 8-, 16-, 32- and 64-bit variables
  (the leading space in "( 8)" is harmless: macro arguments are trimmed
  before ## pasting, yielding e.g. uint8/my_atomic_add8). The generator
  macros and CHECK_RW are #undef'ed afterwards so they cannot leak into
  files that include this header.
*/
make_atomic_add( 8)
make_atomic_add(16)
make_atomic_add(32)
make_atomic_add(64)
make_atomic_cas( 8)
make_atomic_cas(16)
make_atomic_cas(32)
make_atomic_cas(64)
make_atomic_load( 8)
make_atomic_load(16)
make_atomic_load(32)
make_atomic_load(64)
make_atomic_store( 8)
make_atomic_store(16)
make_atomic_store(32)
make_atomic_store(64)
make_atomic_swap( 8)
make_atomic_swap(16)
make_atomic_swap(32)
make_atomic_swap(64)
#undef make_atomic_add
#undef make_atomic_cas
#undef make_atomic_load
#undef make_atomic_store
#undef make_atomic_swap
#undef CHECK_RW