/* prng.c */

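/*
 * Unit tests for jemalloc's pseudo-random number generator helpers
 * (prng_lg_range_* and prng_range_*), covering both the non-atomic and
 * atomic state variants.
 */
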
  1. #include "test/jemalloc_test.h"
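
/*
 * Verify basic properties of prng_lg_range_u32(): identical seeds produce
 * identical results (whether reusing one state or seeding a second one),
 * consecutive full-width draws from the same state differ, and a draw of
 * lg_range bits equals the high lg_range bits of the full-width draw, with
 * all bits above lg_range zero.
 */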
static void
test_prng_lg_range_u32(bool atomic) {
    atomic_u32_t sa, sb;
    uint32_t ra, rb;
    unsigned lg_range;

    atomic_store_u32(&sa, 42, ATOMIC_RELAXED);
    ra = prng_lg_range_u32(&sa, 32, atomic);
    atomic_store_u32(&sa, 42, ATOMIC_RELAXED);
    rb = prng_lg_range_u32(&sa, 32, atomic);
    assert_u32_eq(ra, rb,
        "Repeated generation should produce repeated results");

    atomic_store_u32(&sb, 42, ATOMIC_RELAXED);
    rb = prng_lg_range_u32(&sb, 32, atomic);
    assert_u32_eq(ra, rb,
        "Equivalent generation should produce equivalent results");

    atomic_store_u32(&sa, 42, ATOMIC_RELAXED);
    ra = prng_lg_range_u32(&sa, 32, atomic);
    rb = prng_lg_range_u32(&sa, 32, atomic);
    assert_u32_ne(ra, rb,
        "Full-width results must not immediately repeat");

    atomic_store_u32(&sa, 42, ATOMIC_RELAXED);
    ra = prng_lg_range_u32(&sa, 32, atomic);
    for (lg_range = 31; lg_range > 0; lg_range--) {
        atomic_store_u32(&sb, 42, ATOMIC_RELAXED);
        rb = prng_lg_range_u32(&sb, lg_range, atomic);
        assert_u32_eq((rb & (UINT32_C(0xffffffff) << lg_range)),
            0, "High order bits should be 0, lg_range=%u", lg_range);
        assert_u32_eq(rb, (ra >> (32 - lg_range)),
            "Expected high order bits of full-width result, "
            "lg_range=%u", lg_range);
    }
}
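
/*
 * The same determinism and bit-range checks for the non-atomic 64-bit
 * generator, prng_lg_range_u64().
 */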
static void
test_prng_lg_range_u64(void) {
    uint64_t sa, sb, ra, rb;
    unsigned lg_range;

    sa = 42;
    ra = prng_lg_range_u64(&sa, 64);
    sa = 42;
    rb = prng_lg_range_u64(&sa, 64);
    assert_u64_eq(ra, rb,
        "Repeated generation should produce repeated results");

    sb = 42;
    rb = prng_lg_range_u64(&sb, 64);
    assert_u64_eq(ra, rb,
        "Equivalent generation should produce equivalent results");

    sa = 42;
    ra = prng_lg_range_u64(&sa, 64);
    rb = prng_lg_range_u64(&sa, 64);
    assert_u64_ne(ra, rb,
        "Full-width results must not immediately repeat");

    sa = 42;
    ra = prng_lg_range_u64(&sa, 64);
    for (lg_range = 63; lg_range > 0; lg_range--) {
        sb = 42;
        rb = prng_lg_range_u64(&sb, lg_range);
        assert_u64_eq((rb & (UINT64_C(0xffffffffffffffff) << lg_range)),
            0, "High order bits should be 0, lg_range=%u", lg_range);
        assert_u64_eq(rb, (ra >> (64 - lg_range)),
            "Expected high order bits of full-width result, "
            "lg_range=%u", lg_range);
    }
}
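
/*
 * The same checks for prng_lg_range_zu(); the full width here is
 * ZU(1) << (3 + LG_SIZEOF_PTR), i.e. the number of bits in a size_t.
 */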
static void
test_prng_lg_range_zu(bool atomic) {
    atomic_zu_t sa, sb;
    size_t ra, rb;
    unsigned lg_range;

    atomic_store_zu(&sa, 42, ATOMIC_RELAXED);
    ra = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);
    atomic_store_zu(&sa, 42, ATOMIC_RELAXED);
    rb = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);
    assert_zu_eq(ra, rb,
        "Repeated generation should produce repeated results");

    atomic_store_zu(&sb, 42, ATOMIC_RELAXED);
    rb = prng_lg_range_zu(&sb, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);
    assert_zu_eq(ra, rb,
        "Equivalent generation should produce equivalent results");

    atomic_store_zu(&sa, 42, ATOMIC_RELAXED);
    ra = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);
    rb = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);
    assert_zu_ne(ra, rb,
        "Full-width results must not immediately repeat");

    atomic_store_zu(&sa, 42, ATOMIC_RELAXED);
    ra = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);
    for (lg_range = (ZU(1) << (3 + LG_SIZEOF_PTR)) - 1; lg_range > 0;
        lg_range--) {
        atomic_store_zu(&sb, 42, ATOMIC_RELAXED);
        rb = prng_lg_range_zu(&sb, lg_range, atomic);
        assert_zu_eq((rb & (SIZE_T_MAX << lg_range)),
            0, "High order bits should be 0, lg_range=%u", lg_range);
        assert_zu_eq(rb, (ra >> ((ZU(1) << (3 + LG_SIZEOF_PTR)) -
            lg_range)), "Expected high order bits of full-width "
            "result, lg_range=%u", lg_range);
    }
}
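
/*
 * Wrap the lg_range helpers as test cases, covering the non-atomic and
 * (where applicable) atomic state variants.
 */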
TEST_BEGIN(test_prng_lg_range_u32_nonatomic) {
    test_prng_lg_range_u32(false);
}
TEST_END

TEST_BEGIN(test_prng_lg_range_u32_atomic) {
    test_prng_lg_range_u32(true);
}
TEST_END

TEST_BEGIN(test_prng_lg_range_u64_nonatomic) {
    test_prng_lg_range_u64();
}
TEST_END

TEST_BEGIN(test_prng_lg_range_zu_nonatomic) {
    test_prng_lg_range_zu(false);
}
TEST_END

TEST_BEGIN(test_prng_lg_range_zu_atomic) {
    test_prng_lg_range_zu(true);
}
TEST_END
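
/*
 * Sweep prng_range_u32() over a series of range limits and verify that
 * every generated value is strictly less than the requested range.
 */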
static void
test_prng_range_u32(bool atomic) {
    uint32_t range;
#define MAX_RANGE 10000000
#define RANGE_STEP 97
#define NREPS 10

    for (range = 2; range < MAX_RANGE; range += RANGE_STEP) {
        atomic_u32_t s;
        unsigned rep;

        atomic_store_u32(&s, range, ATOMIC_RELAXED);
        for (rep = 0; rep < NREPS; rep++) {
            uint32_t r = prng_range_u32(&s, range, atomic);

            assert_u32_lt(r, range, "Out of range");
        }
    }
}
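
/* The same range-bound check for the non-atomic 64-bit generator. */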
static void
test_prng_range_u64(void) {
    uint64_t range;
#define MAX_RANGE 10000000
#define RANGE_STEP 97
#define NREPS 10

    for (range = 2; range < MAX_RANGE; range += RANGE_STEP) {
        uint64_t s;
        unsigned rep;

        s = range;
        for (rep = 0; rep < NREPS; rep++) {
            uint64_t r = prng_range_u64(&s, range);

            assert_u64_lt(r, range, "Out of range");
        }
    }
}
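
/* The same range-bound check for the size_t generator. */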
static void
test_prng_range_zu(bool atomic) {
    size_t range;
#define MAX_RANGE 10000000
#define RANGE_STEP 97
#define NREPS 10

    for (range = 2; range < MAX_RANGE; range += RANGE_STEP) {
        atomic_zu_t s;
        unsigned rep;

        atomic_store_zu(&s, range, ATOMIC_RELAXED);
        for (rep = 0; rep < NREPS; rep++) {
            size_t r = prng_range_zu(&s, range, atomic);

            assert_zu_lt(r, range, "Out of range");
        }
    }
}
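
/* Wrap the range helpers as test cases. */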
TEST_BEGIN(test_prng_range_u32_nonatomic) {
    test_prng_range_u32(false);
}
TEST_END

TEST_BEGIN(test_prng_range_u32_atomic) {
    test_prng_range_u32(true);
}
TEST_END

TEST_BEGIN(test_prng_range_u64_nonatomic) {
    test_prng_range_u64();
}
TEST_END

TEST_BEGIN(test_prng_range_zu_nonatomic) {
    test_prng_range_zu(false);
}
TEST_END

TEST_BEGIN(test_prng_range_zu_atomic) {
    test_prng_range_zu(true);
}
TEST_END
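
/* Run all test cases through the test harness. */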
int
main(void) {
    return test(
        test_prng_lg_range_u32_nonatomic,
        test_prng_lg_range_u32_atomic,
        test_prng_lg_range_u64_nonatomic,
        test_prng_lg_range_zu_nonatomic,
        test_prng_lg_range_zu_atomic,
        test_prng_range_u32_nonatomic,
        test_prng_range_u32_atomic,
        test_prng_range_u64_nonatomic,
        test_prng_range_zu_nonatomic,
        test_prng_range_zu_atomic);
}