junk.c

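/*
 * Tests for jemalloc's junk filling: freed memory is filled with 0x5a, newly
 * allocated memory with 0xa5, and redzone corruption is detected on
 * deallocation. The deallocation-side checks work by intercepting the
 * arena/huge junk-fill hooks.
 */
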
#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
#  ifndef JEMALLOC_TEST_JUNK_OPT
#    define JEMALLOC_TEST_JUNK_OPT "junk:true"
#  endif
const char *malloc_conf =
    "abort:false,zero:false,redzone:true,quarantine:0," JEMALLOC_TEST_JUNK_OPT;
#endif
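
/*
 * Saved pointers to the original junk-fill hooks, so that the intercepts
 * below can call through to them and test_junk() can restore them.
 */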
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static arena_dalloc_junk_large_t *arena_dalloc_junk_large_orig;
static huge_dalloc_junk_t *huge_dalloc_junk_orig;
static void *watch_for_junking;
static bool saw_junking;
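
/* Arm the watcher: remember p and clear the flag until p is junked. */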
static void
watch_junking(void *p)
{
    watch_for_junking = p;
    saw_junking = false;
}
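
/*
 * Intercepts for the small and large deallocation junk-fill hooks: call
 * through to the saved originals, verify that every byte of the freed region
 * was filled with 0x5a, and note whether the watched pointer was junked.
 */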
static void
arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
{
    size_t i;

    arena_dalloc_junk_small_orig(ptr, bin_info);
    for (i = 0; i < bin_info->reg_size; i++) {
        assert_c_eq(((char *)ptr)[i], 0x5a,
            "Missing junk fill for byte %zu/%zu of deallocated region",
            i, bin_info->reg_size);
    }
    if (ptr == watch_for_junking)
        saw_junking = true;
}

static void
arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
{
    size_t i;

    arena_dalloc_junk_large_orig(ptr, usize);
    for (i = 0; i < usize; i++) {
        assert_c_eq(((char *)ptr)[i], 0x5a,
            "Missing junk fill for byte %zu/%zu of deallocated region",
            i, usize);
    }
    if (ptr == watch_for_junking)
        saw_junking = true;
}

static void
huge_dalloc_junk_intercept(void *ptr, size_t usize)
{
    huge_dalloc_junk_orig(ptr, usize);
    /*
     * The conditions under which junk filling actually occurs are nuanced
     * enough that it doesn't make sense to duplicate the decision logic in
     * test code, so don't actually check that the region is junk-filled.
     */
    if (ptr == watch_for_junking)
        saw_junking = true;
}
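
/*
 * Walk the size classes from sz_min to sz_max: check that newly allocated
 * bytes are junk-filled with 0xa5 (when junk-on-alloc is enabled), that
 * previously written bytes survive in-place reallocation, and that freed or
 * moved regions are junk-filled with 0x5a (when junk-on-free is enabled).
 */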
static void
test_junk(size_t sz_min, size_t sz_max)
{
    char *s;
    size_t sz_prev, sz, i;

    if (opt_junk_free) {
        arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
        arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
        arena_dalloc_junk_large_orig = arena_dalloc_junk_large;
        arena_dalloc_junk_large = arena_dalloc_junk_large_intercept;
        huge_dalloc_junk_orig = huge_dalloc_junk;
        huge_dalloc_junk = huge_dalloc_junk_intercept;
    }

    sz_prev = 0;
    s = (char *)mallocx(sz_min, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

    for (sz = sallocx(s, 0); sz <= sz_max;
        sz_prev = sz, sz = sallocx(s, 0)) {
        if (sz_prev > 0) {
            assert_c_eq(s[0], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                ZU(0), sz_prev);
            assert_c_eq(s[sz_prev-1], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                sz_prev-1, sz_prev);
        }

        for (i = sz_prev; i < sz; i++) {
            if (opt_junk_alloc) {
                assert_c_eq(s[i], 0xa5,
                    "Newly allocated byte %zu/%zu isn't "
                    "junk-filled", i, sz);
            }
            s[i] = 'a';
        }

        if (xallocx(s, sz+1, 0, 0) == sz) {
            /*
             * xallocx() couldn't grow in place, so rallocx() will
             * move the region and the old one should be junked.
             */
            watch_junking(s);
            s = (char *)rallocx(s, sz+1, 0);
            assert_ptr_not_null((void *)s,
                "Unexpected rallocx() failure");
            assert_true(!opt_junk_free || saw_junking,
                "Expected region of size %zu to be junk-filled",
                sz);
        }
    }

    watch_junking(s);
    dallocx(s, 0);
    assert_true(!opt_junk_free || saw_junking,
        "Expected region of size %zu to be junk-filled", sz);

    if (opt_junk_free) {
        arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
        arena_dalloc_junk_large = arena_dalloc_junk_large_orig;
        huge_dalloc_junk = huge_dalloc_junk_orig;
    }
}
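
/* Exercise junk filling separately for the small, large, and huge classes. */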
TEST_BEGIN(test_junk_small)
{
    test_skip_if(!config_fill);
    test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

TEST_BEGIN(test_junk_large)
{
    test_skip_if(!config_fill);
    test_junk(SMALL_MAXCLASS+1, large_maxclass);
}
TEST_END

TEST_BEGIN(test_junk_huge)
{
    test_skip_if(!config_fill);
    test_junk(large_maxclass+1, chunksize*2);
}
TEST_END
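
/*
 * Machinery for test_junk_large_ralloc_shrink: intercept the large-ralloc
 * junk hook so the test can observe which region got trimmed.
 */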
static arena_ralloc_junk_large_t *arena_ralloc_junk_large_orig;
static void *most_recently_trimmed;
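
/*
 * Find the largest request smaller than size whose usize differs, i.e. one
 * that actually lands in a smaller size class.
 */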
static size_t
shrink_size(size_t size)
{
    size_t shrink_size;

    for (shrink_size = size - 1; nallocx(shrink_size, 0) == size;
        shrink_size--)
        ; /* Do nothing. */

    return (shrink_size);
}
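
/* Verify the trim parameters, then record which region was trimmed. */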
static void
arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize)
{
    arena_ralloc_junk_large_orig(ptr, old_usize, usize);
    assert_zu_eq(old_usize, large_maxclass, "Unexpected old_usize");
    assert_zu_eq(usize, shrink_size(large_maxclass), "Unexpected usize");
    most_recently_trimmed = ptr;
}
TEST_BEGIN(test_junk_large_ralloc_shrink)
{
    void *p1, *p2;

    p1 = mallocx(large_maxclass, 0);
    assert_ptr_not_null(p1, "Unexpected mallocx() failure");

    arena_ralloc_junk_large_orig = arena_ralloc_junk_large;
    arena_ralloc_junk_large = arena_ralloc_junk_large_intercept;
    p2 = rallocx(p1, shrink_size(large_maxclass), 0);
    assert_ptr_eq(p1, p2, "Unexpected move during shrink");
    arena_ralloc_junk_large = arena_ralloc_junk_large_orig;

    assert_ptr_eq(most_recently_trimmed, p1,
        "Expected trimmed portion of region to be junk-filled");
}
TEST_END
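
/*
 * Replacement (rather than a pass-through) for the redzone corruption
 * reporter: record that corruption was detected instead of reporting it, so
 * the test below can corrupt redzones on purpose.
 */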
static bool detected_redzone_corruption;

static void
arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after,
    size_t offset, uint8_t byte)
{
    detected_redzone_corruption = true;
}

TEST_BEGIN(test_junk_redzone)
{
    char *s;
    arena_redzone_corruption_t *arena_redzone_corruption_orig;

    test_skip_if(!config_fill);
    test_skip_if(!opt_junk_alloc || !opt_junk_free);

    arena_redzone_corruption_orig = arena_redzone_corruption;
    arena_redzone_corruption = arena_redzone_corruption_replacement;

    /* Test underflow. */
    detected_redzone_corruption = false;
    s = (char *)mallocx(1, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
    s[-1] = 0xbb;
    dallocx(s, 0);
    assert_true(detected_redzone_corruption,
        "Did not detect redzone corruption");

    /* Test overflow. */
    detected_redzone_corruption = false;
    s = (char *)mallocx(1, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
    s[sallocx(s, 0)] = 0xbb;
    dallocx(s, 0);
    assert_true(detected_redzone_corruption,
        "Did not detect redzone corruption");

    arena_redzone_corruption = arena_redzone_corruption_orig;
}
TEST_END
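
/* The configured malloc_conf must enable at least one flavor of junking. */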
int
main(void)
{
    assert(!config_fill || opt_junk_alloc || opt_junk_free);

    return (test(
        test_junk_small,
        test_junk_large,
        test_junk_huge,
        test_junk_large_ralloc_shrink,
        test_junk_redzone));
}