This source file includes the following definitions:
- atomic_fetch_add (i386 and x86-64 variants)
- atomic_cmp_set (i386, x86-64, and 32/64-bit SPARC v9 variants)
- atomic_cas_64
- atomic_cas_32
- fpm_spinlock
#ifndef FPM_ATOMIC_H
#define FPM_ATOMIC_H 1

#if HAVE_INTTYPES_H
# include <inttypes.h>
#else
# include <stdint.h>
#endif
#include <sched.h>

#ifdef HAVE_BUILTIN_ATOMIC
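
/* gcc's atomic builtins are preferred when available:
 * __sync_bool_compare_and_swap(a, b, c) stores c into *a iff *a == b,
 * returns nonzero on success, and acts as a full memory barrier */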

typedef volatile unsigned long atomic_t;
#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#elif ( __i386__ || __i386 )

typedef int32_t atomic_int_t;
typedef uint32_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;

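/* atomic fetch-and-add via lock xadd: adds `add` to *value and returns the
 * value *value held before the addition */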
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add)
{
	__asm__ volatile ( "lock;" "xaddl %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}

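/* atomic compare-and-set via lock cmpxchg: if *lock == old, store `set`;
 * returns nonzero iff the swap happened */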
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgl %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}

#elif ( __amd64__ || __amd64 || __x86_64__ )

typedef int64_t atomic_int_t;
typedef uint64_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;

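/* 64-bit variant of atomic_fetch_add above, using xaddq */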
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add)
{
	__asm__ volatile ( "lock;" "xaddq %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}

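/* 64-bit variant of atomic_cmp_set above, using cmpxchgq */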
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgq %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}

#elif ( __arm__ || __arm )

/* the __sync builtin used below requires gcc >= 4.2 */
#if (__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 2))
#error gcc >= 4.2 is required for the atomic builtins used on ARM
#endif

#if (__arch64__ || __arch64)
typedef int64_t atomic_int_t;
typedef uint64_t atomic_uint_t;
#else
typedef int32_t atomic_int_t;
typedef uint32_t atomic_uint_t;
#endif

typedef volatile atomic_uint_t atomic_t;

#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#elif ( __sparc__ || __sparc )

#if (__sparcv9 || __sparcv9__)

#if (__arch64__ || __arch64)
typedef uint64_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;

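/* casx compares *lock with `old` and, on a match, swaps in `new`; either way
 * it returns the value *lock held before the operation */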
static inline atomic_uint_t atomic_cas_64(atomic_t *lock, atomic_uint_t old, atomic_uint_t new)
{
	__asm__ __volatile__("casx [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old): "memory");

	return new;
}

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
	return (atomic_cas_64(lock, old, set) == old);
}

#else
typedef uint32_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;

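/* 32-bit counterpart of atomic_cas_64 above, using cas */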
static inline atomic_uint_t atomic_cas_32(atomic_t *lock, atomic_uint_t old, atomic_uint_t new)
{
	__asm__ __volatile__("cas [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old): "memory");

	return new;
}

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
	return (atomic_cas_32(lock, old, set) == old);
}

#endif

#else
#error Sparc v8 and predecessors are not and will not be supported (see bug report 53310)
#endif

#else

#error Unsupported processor. Please open a bug report (bugs.php.net).

#endif

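/* spin until the lock is taken: each attempt tries to flip *lock from 0 to 1
 * with atomic_cmp_set, calling sched_yield() between failed attempts; when
 * try_once is nonzero a single attempt is made, returning 1 on success and
 * 0 on failure */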
static inline int fpm_spinlock(atomic_t *lock, int try_once)
{
	if (try_once) {
		return atomic_cmp_set(lock, 0, 1) ? 1 : 0;
	}

	for (;;) {

		if (atomic_cmp_set(lock, 0, 1)) {
			break;
		}

		sched_yield();
	}

	return 1;
}
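/* release the lock with a plain store of 0; note the macro takes the lock
 * variable itself, not a pointer to it */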
#define fpm_unlock(lock) lock = 0

#endif
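
Typical usage, as an illustrative sketch rather than part of the header (the
with_lock() wrapper is hypothetical; in FPM the lock word lives in shared
memory, e.g. the scoreboard):

	static atomic_t lock = 0;

	static void with_lock(void)
	{
		fpm_spinlock(&lock, 0);   /* spins, yielding the CPU, until acquired */
		/* ... critical section ... */
		fpm_unlock(lock);         /* releases by storing 0 */
	}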