1 #include <darwintest.h>
2 #include <darwintest_utils.h>
3
4 #include <mach/mach.h>
5 #include <sys/mman.h>
6
7 T_GLOBAL_META(
8 T_META_NAMESPACE("xnu.vm"),
9 T_META_RADAR_COMPONENT_NAME("xnu"),
10 T_META_RADAR_COMPONENT_VERSION("zalloc"));
11
12 #undef __abortlike
13 #define __abortlike
14 #define panic(fmt, ...) ({ T_FAIL(fmt, __VA_ARGS__); abort(); })
15
16 #define __security_const_late
17 #define ZALLOC_TEST 1
18 #include "../osfmk/kern/zalloc.c"
19
20 #define ZBA_TEST_SIZE (1ul << 20)
21
22 static void
zba_populate_any(vm_address_t addr,vm_size_t size)23 zba_populate_any(vm_address_t addr, vm_size_t size)
24 {
25 int rc = mprotect((void *)addr, size, PROT_READ | PROT_WRITE);
26 T_QUIET; T_ASSERT_POSIX_SUCCESS(rc, "mprotect");
27 }
28
29 static void
zba_populate_nope(vm_address_t addr,vm_size_t size)30 zba_populate_nope(vm_address_t addr, vm_size_t size)
31 {
32 #pragma unused(addr, size)
33 T_FAIL("Trying to extend the storage");
34 T_END;
35 }
36
37 static void
zba_test_allow_extension(void)38 zba_test_allow_extension(void)
39 {
40 zba_test_info.zbats_populate = zba_populate_any;
41 }
42
43 static void
zba_test_disallow_extension(void)44 zba_test_disallow_extension(void)
45 {
46 zba_test_info.zbats_populate = zba_populate_nope;
47 }
48
/*
 * One-time arena setup for the buddy-allocator tests.
 *
 * Reserves VA for the arena, aligns it, maps it PROT_NONE so that only
 * ranges explicitly populated via the zbats_populate callback become
 * accessible, then populates and initializes chunk 0.
 */
static void
zba_test_setup(void)
{
	kern_return_t kr;
	int rc;

	/*
	 * Over-allocate by one chunk so that the base can be rounded up to a
	 * ZBA_CHUNK_SIZE boundary below while still covering ZBA_TEST_SIZE.
	 */
	kr = vm_allocate(mach_task_self(), &zba_test_info.zbats_base,
	    ZBA_TEST_SIZE + ZBA_CHUNK_SIZE, VM_FLAGS_ANYWHERE);
	T_ASSERT_MACH_SUCCESS(kr, "vm_allocate()");

	/*
	 * Align the arena base. The unaligned prefix of the reservation is
	 * simply left unused (never deallocated — fine for a test process).
	 */
	zba_test_info.zbats_base = roundup(zba_test_info.zbats_base,
	    ZBA_CHUNK_SIZE);

	/*
	 * Start fully inaccessible: any access outside an explicit populate
	 * callback faults, which catches the allocator touching memory it
	 * never asked for.
	 */
	rc = mprotect(zba_base_header(), ZBA_TEST_SIZE, PROT_NONE);
	T_ASSERT_POSIX_SUCCESS(rc, "mprotect");

	T_LOG("SETUP allocator with base at %p", zba_base_header());

	/* Populate and initialize the first chunk so allocations can start. */
	zba_test_allow_extension();
	zba_populate(0);
	zba_init_chunk(0);
}
71
T_DECL(zone_buddy_allocator_encodings, "test the buddy allocator formulas")
{
	/* Shadow bitmap mirroring zbah_bits, used to detect duplicate nodes. */
	uint8_t bits[sizeof(zba_base_header()->zbah_bits)] = { };

	/* Walk every order from ZBA_MAX_ORDER down to 0 ... */
	for (uint32_t o = ZBA_MAX_ORDER + 1; o-- > 0;) {
		/* ... and every block position of that order within a chunk. */
		for (vm_address_t pos = 0; pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE << o) {
			struct zone_bits_chain *zbc;
			size_t node = zba_node(pos, o);

			/*
			 * zba_chain_for_node() must invert zba_node(): mapping
			 * (pos, order) -> node -> chain yields pos back.
			 */
			zbc = zba_chain_for_node(NULL, node, o);
			T_QUIET; T_ASSERT_EQ(pos, (vm_offset_t)zbc,
			    "zba_node / zba_chain_for_node is reversible (pos: %lx, node %zd)",
			    pos, node);


			if (o == 0) {
				// leaf nodes aren't represented in the bitmap
				continue;
			}
			/*
			 * Every non-leaf node must fit in the header bitmap,
			 * and no two (pos, order) pairs may map to the same
			 * node index (each bit is seen exactly once).
			 */
			T_QUIET; T_ASSERT_LT(node, 8 * sizeof(bits), "fits in bitfield: %zd", pos);
			T_QUIET; T_ASSERT_EQ(0, bits[node / 8] & (1 << (node % 8)), "never seen");
			bits[node / 8] ^= 1 << (node % 8);
		}
	}

	T_PASS("zba_node, zba_chain_for_node look sane");
}
99
T_DECL(zone_buddy_allocator, "test the zone bits setup")
{
	vm_address_t base, pos;

	zba_test_setup();

	/* Chunk 0 is populated by setup; growing beyond it must not happen. */
	zba_test_disallow_extension();

	/*
	 * Exhaust chunk 0 one granule at a time: allocations are expected to
	 * come back in strictly increasing address order, starting right
	 * after the chunk header. Scribble on each block to catch the
	 * allocator keeping state inside supposedly-free granules.
	 */
	base = (vm_address_t)zba_slot_base();
	for (pos = zba_chunk_header_size(0); pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE) {
		T_QUIET; T_ASSERT_EQ(base + pos, zba_alloc(0), "alloc");
		*(uint64_t *)(base + pos) = ~0ull;
	}
	/* Give everything back ... */
	for (pos = zba_chunk_header_size(0); pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE) {
		zba_free(base + pos, 0);
	}

	/* ... then re-exhaust: the same addresses must be handed out again. */
	for (pos = zba_chunk_header_size(0); pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE) {
		T_QUIET; T_ASSERT_EQ(base + pos, zba_alloc(0), "alloc");
		*(uint64_t *)(base + pos) = ~0ull;
	}
	/* Chunk 0 is full; the next allocation must grow into chunk 1. */
	zba_test_allow_extension();

	/*
	 * Exhaust chunk 1. Note: header size queried with index 1 — secondary
	 * chunks presumably carry a smaller header than chunk 0; confirm
	 * against zba_chunk_header_size() in zalloc.c.
	 */
	base += ZBA_CHUNK_SIZE;
	for (pos = zba_chunk_header_size(1); pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE) {
		T_QUIET; T_ASSERT_EQ(base + pos, zba_alloc(0), "alloc");
		*(uint64_t *)(base + pos) = ~0ull;
	}

	/* Free both chunks completely, chunk 1 first, then chunk 0. */
	for (pos = zba_chunk_header_size(1); pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE) {
		zba_free(base + pos, 0);
	}
	base -= ZBA_CHUNK_SIZE;
	for (pos = zba_chunk_header_size(0); pos < ZBA_CHUNK_SIZE; pos += ZBA_GRANULE) {
		zba_free(base + pos, 0);
	}
}
137