/home/andy/git/oilshell/oil/mycpp/cheney_heap.cc
#include <sys/mman.h>  // mmap

#include "mycpp/runtime.h"

void Space::Init(int num_bytes) {
  void* requested_addr = nullptr;

  int fd = -1;  // `man 2 mmap` notes that a portable application should set
                // the fd argument to -1 with MAP_ANONYMOUS because some impls
                // require it.

  int offset = 0;  // `man 2 mmap` specifies this must be 0 with MAP_ANONYMOUS

  void* p = mmap(requested_addr, num_bytes, PROT_READ | PROT_WRITE,
                 MAP_PRIVATE | MAP_ANONYMOUS, fd, offset);

  if (p == MAP_FAILED) {
    assert(!"mmap() failed, infinite sadness.");
  } else {
    begin_ = static_cast<char*>(p);
    size_ = num_bytes;
  }
}

void Space::Free() {
  munmap(begin_, size_);
}

Obj* CheneyHeap::Relocate(Obj* obj, Obj* header) {
  // Move an object from one space to another.
  // If there's no vtable, then obj == header. Otherwise header points to the
  // Obj header, which is right after the vtable.
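
  // Three possible outcomes, depending on the header tag:
  //   - Tag::Forwarded: the object was already copied earlier in this
  //     collection; its old header records the new address, so return that.
  //   - Tag::Global: the object lives outside the heap and is never moved.
  //   - otherwise: copy the object into to-space, then overwrite the old
  //     header with a LayoutForwarded stamp so later references find the
  //     same copy.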
  switch (header->heap_tag_) {
    case Tag::Forwarded: {
      auto f = reinterpret_cast<LayoutForwarded*>(header);
      return f->new_location;
    }

    case Tag::Global: {  // e.g. GlobalStr isn't copied or forwarded
      // log("*** GLOBAL POINTER");
      return obj;
    }

    default: {
      assert(header->heap_tag_ == Tag::Opaque ||
             header->heap_tag_ == Tag::FixedSize ||
             header->heap_tag_ == Tag::Scanned);

      auto new_location = reinterpret_cast<Obj*>(free_);
      // Note: if we wanted to save space on ASDL records, we could calculate
      // their length from the field_mask here. How much would it slow down GC?
      int n = header->obj_len_;
      assert(n > 0);  // detect common problem
      memcpy(new_location, obj, n);
      // log("memcpy %d bytes from %p -> %p", n, obj, new_location);
#if 0
      if (obj->heap_tag_ == Tag::Opaque) {
        Str* s = static_cast<Str*>(obj);
        log("from = %s", s->data_);
        Str* s2 = static_cast<Str*>(new_location);
        log("to = %s", s2->data_);
      }
#endif
      // aligned() like Heap::Allocate()
      free_ += aligned(n);
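      // Both the copy above and the scan loop in Collect() advance by
      // aligned(n), so copied objects stay packed at the same alignment that
      // Allocate() uses and the scan pointer lands exactly on each copied
      // object's header.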

#if GC_STATS
      num_live_objs_++;
#endif

      auto f = reinterpret_cast<LayoutForwarded*>(header);
      f->heap_tag_ = Tag::Forwarded;
      f->new_location = new_location;
      return new_location;
    }
  }  // switch
}

void CheneyHeap::Collect(int to_space_size) {
#if GC_STATS
  log("--> COLLECT with %d roots", roots_top_);
  num_collections_++;
#endif

  if (to_space_size == 0) {
    to_space_size = from_space_.size_;
  }

  assert(to_space_size >= from_space_.size_);

  to_space_.Init(to_space_size);

  if (to_space_.size_ < from_space_.size_) {
    InvalidCodePath();
  }

  char* scan = to_space_.begin_;  // boundary between black and gray
  free_ = scan;                   // where to copy new entries
  limit_ = scan + to_space_.size_;
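
  // Classic Cheney invariant: everything below scan has been fully scanned
  // ("black"), everything between scan and free_ has been copied but not yet
  // scanned ("gray"). Collection is done when scan catches up to free_.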

#if GC_STATS
  num_live_objs_ = 0;
#endif

  for (int i = 0; i < roots_top_; ++i) {
    Obj** handle = roots_[i];
    auto root = *handle;
#if GC_VERBOSE
    log("%d. handle %p", i, handle);
    log(" root %p", root);
#endif

    if (root) {  // could be nullptr
      auto header = ObjHeader(root);

      // This updates the underlying Str/List/Dict with a forwarding pointer,
      // i.e. for other objects that are pointing to it
      Obj* new_location = Relocate(root, header);
#if TODO_BUG
      for (int j = 0; j < roots_top_; ++j) {
        Obj** handle2 = roots_[j];
        auto root2 = *handle2;
        if (root2) {
          switch (root2->heap_tag_) {
            case Tag::Forwarded:
            case Tag::Global:
            case Tag::Opaque:
            case Tag::FixedSize:
            case Tag::Scanned:
              break;
            default:
              assert(0);  // NOTE(Jesse): Pretty sure this is InvalidCodePath();
          }
        }
      }
#endif

      // log(" new location %p", new_location);

      // This update is for the "double indirection", so future accesses to a
      // local variable use the new location
      *handle = new_location;
    }
  }
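
  // Every root handle now points into to-space. The scan loop below is the
  // breadth-first phase: it walks the copied objects in order and relocates
  // their children, which appends more gray objects at free_.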

  while (scan < free_) {
    auto obj = reinterpret_cast<Obj*>(scan);
    auto header = ObjHeader(obj);

    switch (header->heap_tag_) {
      case Tag::FixedSize: {
        auto fixed = reinterpret_cast<LayoutFixed*>(header);
        int mask = fixed->field_mask_;
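        // Bit i of field_mask_ marks children_[i] as a heap pointer that must
        // be traced; unset bits are non-pointer fields or unused slots. For
        // example, a mask of 0b0101 means only children_[0] and children_[2]
        // get relocated.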
        for (int i = 0; i < 16; ++i) {
          if (mask & (1 << i)) {
            Obj* child = fixed->children_[i];
            // log("i = %d, p = %p, heap_tag = %d", i, child, child->heap_tag_);
            if (child) {
              auto child_header = ObjHeader(child);
              // log(" fixed: child %d from %p", i, child);
              fixed->children_[i] = Relocate(child, child_header);
              // log(" to %p", fixed->children_[i]);
            }
          }
        }
        break;
      }
      case Tag::Scanned: {
        assert(header == obj);  // no inheritance
        auto slab = reinterpret_cast<Slab<void*>*>(header);
        int n = (slab->obj_len_ - kSlabHeaderSize) / sizeof(void*);
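        // obj_len_ counts the whole slab including its header, so stripping
        // kSlabHeaderSize and dividing by the pointer size gives the number
        // of item slots. For example, if kSlabHeaderSize were 8 and pointers
        // are 8 bytes, obj_len_ == 40 would mean (40 - 8) / 8 == 4 slots.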
        for (int i = 0; i < n; ++i) {
          Obj* child = reinterpret_cast<Obj*>(slab->items_[i]);
          if (child) {  // note: List<> may have nullptr; Dict is sparse
            auto child_header = ObjHeader(child);
            slab->items_[i] = Relocate(child, child_header);
          }
        }
        break;
      }
      default: {
        assert(header->heap_tag_ == Tag::Forwarded ||
               header->heap_tag_ == Tag::Global ||
               header->heap_tag_ == Tag::Opaque);
      }

      // other tags like Tag::Opaque have no children
    }
    // aligned() like Heap::Allocate()
    scan += aligned(header->obj_len_);
  }

  Swap();

  to_space_.Free();

#if GC_STATS
  Report();
#endif
}

#if GC_STATS
void ShowFixedChildren(Obj* obj) {
  assert(obj->heap_tag_ == Tag::FixedSize);
  auto fixed = reinterpret_cast<LayoutFixed*>(obj);
  log("MASK:");

  // Note: can this be optimized with the equivalent x & (x-1) trick?
  // We need the index.
  // There is a de Bruijn sequence solution?
  // https://stackoverflow.com/questions/757059/position-of-least-significant-bit-that-is-set
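  //
  // A possible sketch (not what this function currently does): on GCC/Clang,
  // __builtin_ctz() returns the index of the lowest set bit, so the loop
  // below could be written as
  //
  //   for (int m = fixed->field_mask_; m != 0; m &= m - 1) {
  //     int i = __builtin_ctz(m);  // index of lowest set bit
  //     ...
  //   }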

  int mask = fixed->field_mask_;
  for (int i = 0; i < 16; ++i) {
    if (mask & (1 << i)) {
      Obj* child = fixed->children_[i];
      if (child) {
        // make sure we get Tag::Opaque, Tag::Scanned, etc.
        log("i = %d, p = %p, heap_tag = %d", i, child, child->heap_tag_);
      }
    }
  }
}
#endif

void* CheneyHeap::Allocate(int num_bytes) {
  int n = aligned(num_bytes);
  // log("n = %d, p = %p", n, p);

  // This must be at least sizeof(LayoutForwarded), which happens to be 16
  // bytes, because the GC pointer forwarding requires 16 bytes. If we
  // allocated less than 16 bytes, the GC would overwrite the adjacent object
  // when it went to forward the pointer.
  assert(n >= static_cast<int>(sizeof(LayoutForwarded)));

#if GC_EVERY_ALLOC
  Collect();  // force collection to find problems early
#endif

  if (free_ + n <= limit_) {  // Common case: we have space for it.
    return Bump(n);
  }
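
  // Fast path above: allocation is just a bounds check plus a pointer bump;
  // a collection happens only when bumping free_ by n would cross limit_.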

#if GC_STATS
  // log("GC free_ %p, from_space_ %p, space_size_ %d", free_, from_space_,
  // space_size_);
#endif

  Collect();  // Try to free some space.

  // log("after GC: from begin %p, free_ %p, n %d, limit_ %p",
  //     from_space_.begin_, free_, n, limit_);

  if (free_ + n <= limit_) {  // Now we have space for it.
    return Bump(n);
  }

  // It's still too small. Grow the heap.
  int multiple = 2;
  Collect((from_space_.size_ + n) * multiple);
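
  // Growth policy: if the same-size collection above didn't make room,
  // collect again into a to-space of (from_space_.size_ + n) * 2 bytes,
  // after which the final Bump() below is expected to succeed.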

#if GC_STATS
  num_forced_growths_++;
#endif

  return Bump(n);
}

#if MARK_SWEEP
#else
CheneyHeap gHeap;
#endif