/home/andy/git/oilshell/oil/mycpp/marksweep_heap.cc
Line | Count | Source (jump to first uncovered line) |
1 | | #include "mycpp/runtime.h" |
2 | | |
3 | 39 | void MarkSweepHeap::Init(int collection_thresh) { |
4 | 39 | this->collection_thresh_ = collection_thresh; |
5 | 39 | } |
6 | | |
7 | 4.38k | void* MarkSweepHeap::Allocate(int byte_count) { |
8 | | |
9 | | #if GC_EVERY_ALLOC |
10 | | Collect(); |
11 | | #endif |
12 | | |
13 | | #if GC_STATS |
14 | | this->num_live_objs_++; |
15 | | #endif |
16 | | |
17 | | |
18 | 4.38k | this->current_heap_bytes_ += byte_count; |
19 | 4.38k | if (this->current_heap_bytes_ > this->collection_thresh_) { |
20 | 14 | Collect(); |
21 | 14 | } |
22 | | |
23 | | // TODO: collection policy isn't correct, as this->current_heap_bytes_ isn't |
24 | | // updated on collection. |
25 | | |
26 | 4.38k | if (this->current_heap_bytes_ > this->collection_thresh_) { |
27 | | // |
28 | | // NOTE(Jesse): Generally, doubling results in a lot of wasted space. I've |
29 | | // observed growing by a factor of 1.5x, or even 1.3x, to be a good |
30 | | // time/space tradeoff in the past. Unclear if that's good for a typical |
31 | | // Oil workload, but we've got to start somewhere. |
32 | | // |
33 | | // 1.5x = (3/2) |
34 | | // 1.3x = (13/10) |
35 | | // |
36 | 14 | this->collection_thresh_ = this->current_heap_bytes_ * 3 / 2; |
37 | 14 | } |
38 | | |
39 | 4.38k | void* result = calloc(byte_count, 1); |
40 | 4.38k | assert(result); |
41 | | |
42 | 0 | this->all_allocations_.insert(result); |
43 | | |
44 | 4.38k | return result; |
45 | 4.38k | } |
46 | | |
47 | 47 | void MarkSweepHeap::MarkAllReferences(Obj* obj) { |
48 | 47 | auto header = ObjHeader(obj); |
49 | | |
50 | 47 | this->marked_allocations_.insert(static_cast<void*>(obj)); |
51 | | |
52 | 47 | switch (header->heap_tag_) { |
53 | 6 | case Tag::FixedSize: { |
54 | 6 | auto fixed = reinterpret_cast<LayoutFixed*>(header); |
55 | 6 | int mask = fixed->field_mask_; |
56 | | |
57 | | // TODO(Jesse): Put the 16 in a #define |
58 | 102 | for (int i = 0; i < 16; ++i) { |
59 | 96 | if (mask & (1 << i)) { |
60 | 8 | Obj* child = fixed->children_[i]; |
61 | 8 | if (child) { |
62 | 7 | MarkAllReferences(child); |
63 | 7 | } |
64 | 8 | } |
65 | 96 | } |
66 | | |
67 | 6 | break; |
68 | 0 | } |
69 | | |
70 | 4 | case Tag::Scanned: { |
71 | 4 | assert(header == obj); |
72 | | |
73 | 0 | auto slab = reinterpret_cast<Slab<void*>*>(header); |
74 | | |
75 | | // TODO(Jesse): Give this a name |
76 | 4 | int n = (slab->obj_len_ - kSlabHeaderSize) / sizeof(void*); |
77 | | |
78 | 32 | for (int i = 0; i < n; ++i) { |
79 | 28 | Obj* child = reinterpret_cast<Obj*>(slab->items_[i]); |
80 | 28 | if (child) { |
81 | 5 | MarkAllReferences(child); |
82 | 5 | } |
83 | 28 | } |
84 | | |
85 | 4 | break; |
86 | 0 | } |
87 | | |
88 | 37 | default: { |
89 | 37 | assert(header->heap_tag_ == Tag::Forwarded || |
90 | 37 | header->heap_tag_ == Tag::Global || |
91 | 37 | header->heap_tag_ == Tag::Opaque); |
92 | 37 | } |
93 | | |
94 | | // other tags like Tag::Opaque have no children |
95 | 47 | } |
96 | 47 | } |
97 | | |
98 | 58 | void MarkSweepHeap::Collect() { |
99 | 103 | for (int root_index = 0; root_index < this->roots_top_; ++root_index) { |
100 | | // NOTE(Jesse): This is dereferencing again because I didn't want to |
101 | | // rewrite the stackroots class for this implementation. Realistically we |
102 | | // should do that such that we don't store indirected pointers here. |
103 | 45 | Obj* root = *(this->roots_[root_index]); |
104 | | |
105 | 45 | if (root) { |
106 | 35 | MarkAllReferences(root); |
107 | 35 | } |
108 | 45 | } |
109 | | |
110 | 3.38k | for (auto it = all_allocations_.begin(); it != all_allocations_.end(); ++it) { |
111 | 3.33k | void* alloc = *it; |
112 | | |
113 | 3.33k | auto marked_alloc = marked_allocations_.find(alloc); |
114 | 3.33k | bool alloc_is_dead = marked_alloc == marked_allocations_.end(); |
115 | | |
116 | 3.33k | if (alloc_is_dead) { |
117 | 3.30k | free(alloc); |
118 | | |
119 | | #if GC_STATS |
120 | | this->num_live_objs_--; |
121 | | #endif |
122 | 3.30k | } |
123 | 3.33k | } |
124 | | |
125 | 58 | all_allocations_.clear(); |
126 | | |
127 | 99 | for (auto it = marked_allocations_.begin(); it != marked_allocations_.end(); |
128 | 58 | ++it) { |
129 | 41 | Obj* obj = reinterpret_cast<Obj*>(*it); |
130 | 41 | if (obj->heap_tag_ != Tag::Global) { |
131 | 28 | all_allocations_.insert(*it); |
132 | 28 | } |
133 | 41 | } |
134 | | |
135 | 58 | marked_allocations_.clear(); |
136 | 58 | } |
137 | | |
#if MARK_SWEEP
// Process-global heap instance, defined only in mark-sweep builds.
MarkSweepHeap gHeap;
#endif