/* test/unit/prof_reset.c */
#include "test/jemalloc_test.h"
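
/*
 * Redirect heap profile dumps to /dev/null so the tests can exercise the
 * dump path without writing files to disk.
 */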
static int
prof_dump_open_intercept(bool propagate_err, const char *filename) {
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return fd;
}
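
/* Enable or disable profiling sampling via the "prof.active" mallctl. */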
static void
set_prof_active(bool active) {
	assert_d_eq(mallctl("prof.active", NULL, NULL, (void *)&active,
	    sizeof(active)), 0, "Unexpected mallctl failure");
}
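
/* Read the current sample rate (lg base 2) from "prof.lg_sample". */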
static size_t
get_lg_prof_sample(void) {
	size_t lg_prof_sample;
	size_t sz = sizeof(size_t);

	assert_d_eq(mallctl("prof.lg_sample", (void *)&lg_prof_sample, &sz,
	    NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	return lg_prof_sample;
}
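
/*
 * Reset profile data via "prof.reset" while requesting a new sample rate,
 * then verify that the new rate took effect.
 */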
static void
do_prof_reset(size_t lg_prof_sample) {
	assert_d_eq(mallctl("prof.reset", NULL, NULL,
	    (void *)&lg_prof_sample, sizeof(size_t)), 0,
	    "Unexpected mallctl failure while resetting profile data");
	assert_zu_eq(lg_prof_sample, get_lg_prof_sample(),
	    "Expected profile sample rate change");
}
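
/*
 * Verify that "prof.reset" preserves the sample rate when no new rate is
 * supplied, and changes it when one is.
 */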
TEST_BEGIN(test_prof_reset_basic) {
	size_t lg_prof_sample_orig, lg_prof_sample, lg_prof_sample_next;
	size_t sz;
	unsigned i;

	test_skip_if(!config_prof);

	sz = sizeof(size_t);
	assert_d_eq(mallctl("opt.lg_prof_sample", (void *)&lg_prof_sample_orig,
	    &sz, NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	assert_zu_eq(lg_prof_sample_orig, 0,
	    "Unexpected profiling sample rate");
	lg_prof_sample = get_lg_prof_sample();
	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
	    "\"prof.lg_sample\"");

	/* Test simple resets. */
	for (i = 0; i < 2; i++) {
		assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
		    "Unexpected mallctl failure while resetting profile data");
		lg_prof_sample = get_lg_prof_sample();
		assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
		    "Unexpected profile sample rate change");
	}

	/* Test resets with prof.lg_sample changes. */
	lg_prof_sample_next = 1;
	for (i = 0; i < 2; i++) {
		do_prof_reset(lg_prof_sample_next);
		lg_prof_sample = get_lg_prof_sample();
		assert_zu_eq(lg_prof_sample, lg_prof_sample_next,
		    "Expected profile sample rate change");
		lg_prof_sample_next = lg_prof_sample_orig;
	}

	/* Make sure the test code restored prof.lg_sample. */
	lg_prof_sample = get_lg_prof_sample();
	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
	    "\"prof.lg_sample\"");
}
TEST_END
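
/*
 * State shared with prof_dump_header_intercept(): whether the intercept ran,
 * and a snapshot of the aggregate counters it was handed.
 */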
bool prof_dump_header_intercepted = false;
prof_cnt_t cnt_all_copy = {0, 0, 0, 0};

static bool
prof_dump_header_intercept(tsdn_t *tsdn, bool propagate_err,
    const prof_cnt_t *cnt_all) {
	prof_dump_header_intercepted = true;
	memcpy(&cnt_all_copy, cnt_all, sizeof(prof_cnt_t));

	return false;
}
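
/*
 * Verify that "prof.reset" zeroes the live-object counters reported by a
 * subsequent dump, while a backtrace with live allocations survives the
 * reset.
 */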
TEST_BEGIN(test_prof_reset_cleanup) {
	void *p;
	prof_dump_header_t *prof_dump_header_orig;

	test_skip_if(!config_prof);

	set_prof_active(true);

	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	prof_dump_header_orig = prof_dump_header;
	prof_dump_header = prof_dump_header_intercept;
	assert_false(prof_dump_header_intercepted, "Unexpected intercept");

	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_true(prof_dump_header_intercepted, "Expected intercept");
	assert_u64_eq(cnt_all_copy.curobjs, 1, "Expected 1 allocation");

	assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
	    "Unexpected error while resetting heap profile data");
	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_u64_eq(cnt_all_copy.curobjs, 0, "Expected 0 allocations");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	prof_dump_header = prof_dump_header_orig;

	dallocx(p, 0);
	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");

	set_prof_active(false);
}
TEST_END
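
/* Parameters for the multi-threaded reset stress test. */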
#define NTHREADS 4
#define NALLOCS_PER_THREAD (1U << 13)
#define OBJ_RING_BUF_COUNT 1531
#define RESET_INTERVAL (1U << 10)
#define DUMP_INTERVAL 3677
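
/*
 * Worker that interleaves allocation, deallocation, "prof.reset", and
 * "prof.dump" so that resets race with the other operations.
 */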
static void *
thd_start(void *varg) {
	unsigned thd_ind = *(unsigned *)varg;
	unsigned i;
	void *objs[OBJ_RING_BUF_COUNT];

	memset(objs, 0, sizeof(objs));

	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		if (i % RESET_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0),
			    0, "Unexpected error while resetting heap profile "
			    "data");
		}

		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}
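
		/*
		 * Rotate through a ring buffer of live objects: free the
		 * previous occupant of this slot, then allocate a replacement
		 * with a backtrace unique to this (thread, iteration) pair.
		 */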
		{
			void **pp = &objs[i % OBJ_RING_BUF_COUNT];
			if (*pp != NULL) {
				dallocx(*pp, 0);
				*pp = NULL;
			}
			*pp = btalloc(1, thd_ind*NALLOCS_PER_THREAD + i);
			assert_ptr_not_null(*pp,
			    "Unexpected btalloc() failure");
		}
	}

	/* Clean up any remaining objects. */
	for (i = 0; i < OBJ_RING_BUF_COUNT; i++) {
		void **pp = &objs[i % OBJ_RING_BUF_COUNT];
		if (*pp != NULL) {
			dallocx(*pp, 0);
			*pp = NULL;
		}
	}

	return NULL;
}
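
/*
 * Stress "prof.reset" from NTHREADS threads, then verify that no backtraces
 * or tdata structures remain once the threads exit.
 */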
TEST_BEGIN(test_prof_reset) {
	size_t lg_prof_sample_orig;
	thd_t thds[NTHREADS];
	unsigned thd_args[NTHREADS];
	unsigned i;
	size_t bt_count, tdata_count;

	test_skip_if(!config_prof);

	bt_count = prof_bt_count();
	assert_zu_eq(bt_count, 0,
	    "Unexpected pre-existing backtraces");
	tdata_count = prof_tdata_count();

	lg_prof_sample_orig = get_lg_prof_sample();
	do_prof_reset(5);

	set_prof_active(true);

	for (i = 0; i < NTHREADS; i++) {
		thd_args[i] = i;
		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
	}
	for (i = 0; i < NTHREADS; i++) {
		thd_join(thds[i], NULL);
	}

	assert_zu_eq(prof_bt_count(), bt_count,
	    "Unexpected backtrace count change");
	assert_zu_eq(prof_tdata_count(), tdata_count,
	    "Unexpected remaining tdata structures");

	set_prof_active(false);
	do_prof_reset(lg_prof_sample_orig);
}
TEST_END

#undef NTHREADS
#undef NALLOCS_PER_THREAD
#undef OBJ_RING_BUF_COUNT
#undef RESET_INTERVAL
#undef DUMP_INTERVAL

/* Test sampling at the same allocation site across resets. */
#define NITER 10
TEST_BEGIN(test_xallocx) {
	size_t lg_prof_sample_orig;
	unsigned i;
	void *ptrs[NITER];

	test_skip_if(!config_prof);

	lg_prof_sample_orig = get_lg_prof_sample();
	set_prof_active(true);

	/* Reset profiling. */
	do_prof_reset(0);

	for (i = 0; i < NITER; i++) {
		void *p;
		size_t sz, nsz;

		/* Reset profiling. */
		do_prof_reset(0);

		/* Allocate small object (which will be promoted). */
		p = ptrs[i] = mallocx(1, 0);
		assert_ptr_not_null(p, "Unexpected mallocx() failure");

		/* Reset profiling. */
		do_prof_reset(0);

		/* Perform successful xallocx(). */
		sz = sallocx(p, 0);
		assert_zu_eq(xallocx(p, sz, 0, 0), sz,
		    "Unexpected xallocx() failure");

		/* Perform unsuccessful xallocx(). */
		nsz = nallocx(sz+1, 0);
		assert_zu_eq(xallocx(p, nsz, 0, 0), sz,
		    "Unexpected xallocx() success");
	}

	for (i = 0; i < NITER; i++) {
		/* dallocx. */
		dallocx(ptrs[i], 0);
	}

	set_prof_active(false);
	do_prof_reset(lg_prof_sample_orig);
}
TEST_END
#undef NITER

int
main(void) {
	/* Intercept dumping prior to running any tests. */
	prof_dump_open = prof_dump_open_intercept;

	return test_no_reentrancy(
	    test_prof_reset_basic,
	    test_prof_reset_cleanup,
	    test_prof_reset,
	    test_xallocx);
}