/* apr_atomic.c */
  1. /* Licensed to the Apache Software Foundation (ASF) under one or more
  2. * contributor license agreements. See the NOTICE file distributed with
  3. * this work for additional information regarding copyright ownership.
  4. * The ASF licenses this file to You under the Apache License, Version 2.0
  5. * (the "License"); you may not use this file except in compliance with
  6. * the License. You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "apr.h"
  17. #include "apr_atomic.h"
  18. #include "apr_thread_mutex.h"
  19. #include "apr_private.h"
  20. #include <stdlib.h>
  21. #if defined(__GNUC__) && defined(__STRICT_ANSI__) && !defined(USE_GENERIC_ATOMICS)
  22. /* force use of generic atomics if building e.g. with -std=c89, which
  23. * doesn't allow inline asm */
  24. #define USE_GENERIC_ATOMICS
  25. #endif
  26. #if (defined(__i386__) || defined(__x86_64__)) \
  27. && defined(__GNUC__) && !defined(USE_GENERIC_ATOMICS)
/* Compare-and-swap: if *mem == cmp, store 'with' into *mem.  Returns the
 * value *mem held before the operation (equal to cmp iff the swap happened). */
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem,
                                           apr_uint32_t with,
                                           apr_uint32_t cmp)
{
    apr_uint32_t prev;
    /* CMPXCHG compares EAX (loaded from cmp via the "0" constraint) with
     * *mem; on match it stores 'with', otherwise it loads *mem into EAX.
     * Either way EAX ends up holding the previous value, captured by "=a". */
    asm volatile ("lock; cmpxchgl %1, %2"
                  : "=a" (prev)
                  : "r" (with), "m" (*(mem)), "0"(cmp)
                  : "memory", "cc");
    return prev;
}
  39. #define APR_OVERRIDE_ATOMIC_CAS32
  40. static apr_uint32_t inline intel_atomic_add32(volatile apr_uint32_t *mem,
  41. apr_uint32_t val)
  42. {
  43. asm volatile ("lock; xaddl %0,%1"
  44. : "=r"(val), "=m"(*mem) /* outputs */
  45. : "0"(val), "m"(*mem) /* inputs */
  46. : "memory", "cc");
  47. return val;
  48. }
  49. APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem,
  50. apr_uint32_t val)
  51. {
  52. return intel_atomic_add32(mem, val);
  53. }
  54. #define APR_OVERRIDE_ATOMIC_ADD32
  55. APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
  56. {
  57. asm volatile ("lock; subl %1, %0"
  58. :
  59. : "m" (*(mem)), "r" (val)
  60. : "memory", "cc");
  61. }
  62. #define APR_OVERRIDE_ATOMIC_SUB32
/* Atomically decrement *mem.  Returns zero iff the decremented value is
 * zero, nonzero otherwise — the usual "refcount hit zero" test. */
APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
{
    unsigned char prev;
    /* DECL sets ZF when the result is zero; SETNZ then writes 0/1 into AL,
     * which the "=a" output returns through prev. */
    asm volatile ("lock; decl %1;\n\t"
                  "setnz %%al"
                  : "=a" (prev)
                  : "m" (*(mem))
                  : "memory", "cc");
    return prev;
}
  73. #define APR_OVERRIDE_ATOMIC_DEC32
/* Atomic increment; returns the value *mem held before the increment
 * (delegates to the XADD-based add helper). */
APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
{
    return intel_atomic_add32(mem, 1);
}
  78. #define APR_OVERRIDE_ATOMIC_INC32
/* Set *mem to val.  A plain volatile store suffices here because naturally
 * aligned 32-bit stores are atomic on x86; no lock prefix is needed. */
APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    *mem = val;
}
  83. #define APR_OVERRIDE_ATOMIC_SET32
/* Atomically exchange *mem with val; returns the old value of *mem. */
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = val;
    /* XCHG with a memory operand asserts the bus lock implicitly, so the
     * explicit "lock;" prefix is redundant but harmless.  prev goes in via
     * "0" and comes back out via "=r" holding the previous *mem. */
    asm volatile ("lock; xchgl %0, %1"
                  : "=r" (prev)
                  : "m" (*(mem)), "0"(prev)
                  : "memory");
    return prev;
}
  93. #define APR_OVERRIDE_ATOMIC_XCHG32
  94. /*#define apr_atomic_init(pool) APR_SUCCESS*/
#endif /* (__i386__ || __x86_64__) && __GNUC__ && !USE_GENERIC_ATOMICS */
  96. #if (defined(__PPC__) || defined(__ppc__)) && defined(__GNUC__) \
  97. && !defined(USE_GENERIC_ATOMICS)
/* PowerPC compare-and-swap built from load-reserved / store-conditional:
 * if *mem == cmp, store 'swap'; returns the value observed in *mem. */
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem,
                                           apr_uint32_t swap,
                                           apr_uint32_t cmp)
{
    apr_uint32_t prev;
    asm volatile ("0:\n\t"                  /* retry local label      */
                  "lwarx %0,0,%1\n\t"       /* load prev and reserve  */
                  "cmpw %0,%3\n\t"          /* does it match cmp?     */
                  "bne- 1f\n\t"             /* ...no, bail out        */
                  "stwcx. %2,0,%1\n\t"      /* ...yes, conditionally
                                               store swap             */
                  "bne- 0b\n\t"             /* start over if we lost
                                               the reservation        */
                  "1:"                      /* exit local label       */
                  : "=&r"(prev)                     /* output         */
                  : "b" (mem), "r" (swap), "r"(cmp) /* inputs         */
                  : "memory", "cc");                /* clobbered      */
    return prev;
}
  117. #define APR_OVERRIDE_ATOMIC_CAS32
/* PowerPC atomic add via lwarx/stwcx. retry loop; returns the value *mem
 * held before 'delta' was added. */
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem,
                                           apr_uint32_t delta)
{
    apr_uint32_t prev, temp;
    asm volatile ("0:\n\t"                  /* retry local label      */
                  "lwarx %0,0,%2\n\t"       /* load prev and reserve  */
                  "add %1,%0,%3\n\t"        /* temp = prev + delta    */
                  "stwcx. %1,0,%2\n\t"      /* conditionally store    */
                  "bne- 0b"                 /* start over if we lost
                                               the reservation        */
                  /* XXX find a cleaner way to define the temp;
                   * it's not an output */
                  : "=&r" (prev), "=&r" (temp) /* output, temp        */
                  : "b" (mem), "r" (delta)     /* inputs              */
                  : "memory", "cc");           /* clobbered           */
    return prev;
}
  136. #define APR_OVERRIDE_ATOMIC_ADD32
  137. #endif /* __PPC__ && __GNUC__ */
  138. #if !defined(APR_OVERRIDE_ATOMIC_INIT)
  139. #if APR_HAS_THREADS
  140. #define NUM_ATOMIC_HASH 7
  141. /* shift by 2 to get rid of alignment issues */
  142. #define ATOMIC_HASH(x) (unsigned int)(((unsigned long)(x)>>2)%(unsigned int)NUM_ATOMIC_HASH)
  143. static apr_thread_mutex_t **hash_mutex;
  144. #endif /* APR_HAS_THREADS */
  145. apr_status_t apr_atomic_init(apr_pool_t *p)
  146. {
  147. #if APR_HAS_THREADS
  148. int i;
  149. apr_status_t rv;
  150. hash_mutex = apr_palloc(p, sizeof(apr_thread_mutex_t*) * NUM_ATOMIC_HASH);
  151. for (i = 0; i < NUM_ATOMIC_HASH; i++) {
  152. rv = apr_thread_mutex_create(&(hash_mutex[i]),
  153. APR_THREAD_MUTEX_DEFAULT, p);
  154. if (rv != APR_SUCCESS) {
  155. return rv;
  156. }
  157. }
  158. #endif /* APR_HAS_THREADS */
  159. return APR_SUCCESS;
  160. }
  161. #endif /* !defined(APR_OVERRIDE_ATOMIC_INIT) */
  162. /* abort() if 'x' does not evaluate to APR_SUCCESS. */
  163. #define CHECK(x) do { if ((x) != APR_SUCCESS) abort(); } while (0)
  164. #if !defined(APR_OVERRIDE_ATOMIC_ADD32)
  165. #if defined(APR_OVERRIDE_ATOMIC_CAS32)
  166. apr_uint32_t apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
  167. {
  168. apr_uint32_t old_value, new_value;
  169. do {
  170. old_value = *mem;
  171. new_value = old_value + val;
  172. } while (apr_atomic_cas32(mem, new_value, old_value) != old_value);
  173. return old_value;
  174. }
  175. #else
  176. apr_uint32_t apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
  177. {
  178. apr_uint32_t old_value;
  179. #if APR_HAS_THREADS
  180. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  181. CHECK(apr_thread_mutex_lock(lock));
  182. old_value = *mem;
  183. *mem += val;
  184. CHECK(apr_thread_mutex_unlock(lock));
  185. #else
  186. old_value = *mem;
  187. *mem += val;
  188. #endif /* APR_HAS_THREADS */
  189. return old_value;
  190. }
  191. #endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
  192. #endif /* !defined(APR_OVERRIDE_ATOMIC_ADD32) */
  193. #if !defined(APR_OVERRIDE_ATOMIC_SUB32)
  194. #if defined(APR_OVERRIDE_ATOMIC_CAS32)
  195. void apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
  196. {
  197. apr_uint32_t old_value, new_value;
  198. do {
  199. old_value = *mem;
  200. new_value = old_value - val;
  201. } while (apr_atomic_cas32(mem, new_value, old_value) != old_value);
  202. }
  203. #else
  204. void apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
  205. {
  206. #if APR_HAS_THREADS
  207. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  208. CHECK(apr_thread_mutex_lock(lock));
  209. *mem -= val;
  210. CHECK(apr_thread_mutex_unlock(lock));
  211. #else
  212. *mem -= val;
  213. #endif /* APR_HAS_THREADS */
  214. }
  215. #endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
  216. #endif /* !defined(APR_OVERRIDE_ATOMIC_SUB32) */
  217. #if !defined(APR_OVERRIDE_ATOMIC_SET32)
  218. void apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
  219. {
  220. #if APR_HAS_THREADS
  221. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  222. CHECK(apr_thread_mutex_lock(lock));
  223. *mem = val;
  224. CHECK(apr_thread_mutex_unlock(lock));
  225. #else
  226. *mem = val;
  227. #endif /* APR_HAS_THREADS */
  228. }
  229. #endif /* !defined(APR_OVERRIDE_ATOMIC_SET32) */
  230. #if !defined(APR_OVERRIDE_ATOMIC_INC32)
/* Generic increment: an atomic add of one; returns the pre-increment value
 * (whatever apr_atomic_add32 returns for this platform). */
apr_uint32_t apr_atomic_inc32(volatile apr_uint32_t *mem)
{
    return apr_atomic_add32(mem, 1);
}
  235. #endif /* !defined(APR_OVERRIDE_ATOMIC_INC32) */
  236. #if !defined(APR_OVERRIDE_ATOMIC_DEC32)
  237. #if defined(APR_OVERRIDE_ATOMIC_CAS32)
  238. int apr_atomic_dec32(volatile apr_uint32_t *mem)
  239. {
  240. apr_uint32_t old_value, new_value;
  241. do {
  242. old_value = *mem;
  243. new_value = old_value - 1;
  244. } while (apr_atomic_cas32(mem, new_value, old_value) != old_value);
  245. return old_value != 1;
  246. }
  247. #else
  248. int apr_atomic_dec32(volatile apr_uint32_t *mem)
  249. {
  250. #if APR_HAS_THREADS
  251. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  252. apr_uint32_t new;
  253. CHECK(apr_thread_mutex_lock(lock));
  254. (*mem)--;
  255. new = *mem;
  256. CHECK(apr_thread_mutex_unlock(lock));
  257. return new;
  258. #else
  259. (*mem)--;
  260. return *mem;
  261. #endif /* APR_HAS_THREADS */
  262. }
  263. #endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
  264. #endif /* !defined(APR_OVERRIDE_ATOMIC_DEC32) */
  265. #if !defined(APR_OVERRIDE_ATOMIC_CAS32)
  266. apr_uint32_t apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
  267. apr_uint32_t cmp)
  268. {
  269. apr_uint32_t prev;
  270. #if APR_HAS_THREADS
  271. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  272. CHECK(apr_thread_mutex_lock(lock));
  273. prev = *mem;
  274. if (prev == cmp) {
  275. *mem = with;
  276. }
  277. CHECK(apr_thread_mutex_unlock(lock));
  278. #else
  279. prev = *mem;
  280. if (prev == cmp) {
  281. *mem = with;
  282. }
  283. #endif /* APR_HAS_THREADS */
  284. return prev;
  285. }
  286. #endif /* !defined(APR_OVERRIDE_ATOMIC_CAS32) */
  287. #if !defined(APR_OVERRIDE_ATOMIC_XCHG32)
  288. #if defined(APR_OVERRIDE_ATOMIC_CAS32)
  289. apr_uint32_t apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
  290. {
  291. apr_uint32_t prev;
  292. do {
  293. prev = *mem;
  294. } while (apr_atomic_cas32(mem, val, prev) != prev);
  295. return prev;
  296. }
  297. #else
  298. apr_uint32_t apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
  299. {
  300. apr_uint32_t prev;
  301. #if APR_HAS_THREADS
  302. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  303. CHECK(apr_thread_mutex_lock(lock));
  304. prev = *mem;
  305. *mem = val;
  306. CHECK(apr_thread_mutex_unlock(lock));
  307. #else
  308. prev = *mem;
  309. *mem = val;
  310. #endif /* APR_HAS_THREADS */
  311. return prev;
  312. }
  313. #endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
  314. #endif /* !defined(APR_OVERRIDE_ATOMIC_XCHG32) */
  315. #if !defined(APR_OVERRIDE_ATOMIC_CASPTR)
  316. void *apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
  317. {
  318. void *prev;
  319. #if APR_HAS_THREADS
  320. apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
  321. CHECK(apr_thread_mutex_lock(lock));
  322. prev = *(void **)mem;
  323. if (prev == cmp) {
  324. *mem = with;
  325. }
  326. CHECK(apr_thread_mutex_unlock(lock));
  327. #else
  328. prev = *(void **)mem;
  329. if (prev == cmp) {
  330. *mem = with;
  331. }
  332. #endif /* APR_HAS_THREADS */
  333. return prev;
  334. }
  335. #endif /* !defined(APR_OVERRIDE_ATOMIC_CASPTR) */
  336. #if !defined(APR_OVERRIDE_ATOMIC_READ32)
/* Plain volatile load of *mem.  Atomicity relies on naturally aligned
 * 32-bit reads being single accesses on the target — NOTE(review): confirm
 * for each platform this generic fallback is compiled on. */
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
{
    return *mem;
}
  341. #endif