1
1
use core:: mem;
2
2
use crate :: mutex:: MutexGuard ;
3
-
3
+ use core :: cmp :: max ;
4
4
use super :: Word ;
5
5
use super :: mutex:: Mutex ;
6
6
7
7
const HEAP_SEG_HEADER_SIZE : usize = mem:: size_of :: < HeapSegment > ( ) ;
8
8
9
9
type SegmentLink = Option < & ' static mut HeapSegment > ;
10
10
11
- pub ( crate ) struct HeapSegment {
11
+ /*
12
+ HeapSegments are the 'header' of each memory block that is NOT allocated on the
13
+ Heap. This representation allows us to store the heap as a linked list of
14
+ memory segments.
15
+ */
16
+
17
+ pub struct HeapSegment {
12
18
size : usize ,
13
19
next : SegmentLink ,
14
20
}
15
21
22
+ /*
23
+ The Heap simply contains a reference to the first available block of memory
24
+ */
25
+
16
26
pub struct Heap {
17
27
head : SegmentLink ,
18
28
}
19
29
30
+ /*
31
+ This type wraps an Heap into a mutex, providing mutual access to it. It is
32
+ needed to implement the trait `GlobalAllocator`, more on GlobalAllocator at
33
+ the end of the file.
34
+ */
35
+
20
36
pub struct LockedHeap {
21
37
heap : Mutex < Heap > ,
22
38
}
23
39
40
+ /*
41
+ HeapIterator is used to iterate through the heap. More on that towards the
42
+ end of the file.
43
+ */
44
+
45
+ pub struct HeapIterator < ' a > {
46
+ next : Option < & ' a HeapSegment >
47
+ }
48
+
24
49
impl LockedHeap {
25
50
pub const fn new ( ) -> Self {
26
51
Self { heap : Mutex :: new ( Heap :: new ( ) ) }
@@ -30,37 +55,60 @@ impl LockedHeap {
30
55
self . heap . lock ( )
31
56
}
32
57
33
- pub fn init ( & self , start_address : Word , size : usize ) {
58
+ pub fn init ( & self , start_address : usize , size : usize ) {
34
59
self . lock ( ) . add_free_segment ( start_address, size) ;
35
60
}
36
61
}
37
62
38
63
impl Heap {
64
+
39
65
const fn new ( ) -> Self {
40
66
Self { head : None }
41
67
}
42
- fn init ( & mut self , start_address : Word , size : usize ) {
68
+
69
+ /* Initializes the heap as a single empty memory block */
70
+
71
+ fn init ( & mut self , start_address : usize , size : usize ) {
43
72
self . add_free_segment ( start_address, size) ;
44
73
}
74
+
75
+ /* Creates the iterator */
76
+
77
+ fn iter ( & self ) -> HeapIterator {
78
+ HeapIterator { next : self . head . as_deref ( ) }
79
+ }
80
+
81
+ /* Allocates to the caller a memory segment */
82
+
45
83
fn allocate_segment ( self : & mut Self , size : usize ) -> Option < * mut u8 > {
84
+ // There is no available memory left
46
85
if self . head . is_none ( ) {
47
86
return None ;
48
87
}
49
- assert ! ( size > HEAP_SEG_HEADER_SIZE ) ;
88
+
89
+ // The heap never allocates segments smaller than HEAP_SEG_HEADER_SIZE
90
+ let actual_size = max ( size, HEAP_SEG_HEADER_SIZE ) ;
50
91
51
- let actual_size = size - HEAP_SEG_HEADER_SIZE ;
52
- if self . head . as_ref ( ) . unwrap ( ) . size >= size {
53
- let mut head = self . head . take ( ) . unwrap ( ) ;
54
- Self :: trim_segment ( head, actual_size) ;
55
- self . head = head. next . take ( ) ;
56
- return Some ( head. end_address ( ) as * mut u8 ) ;
92
+ // Check the head first
93
+ if self . head . as_ref ( ) . unwrap ( ) . size >= actual_size {
94
+ let mut old_head = self . head . take ( ) . unwrap ( ) ;
95
+
96
+ // The segment is split into two new ones, and the firt one is
97
+ // allocated
98
+ Self :: trim_segment ( old_head, actual_size) ;
99
+ self . head = old_head. next . take ( ) ;
100
+ return Some ( old_head. start_address ( ) as * mut u8 ) ;
57
101
}
58
102
59
103
let mut cursor = self . head . as_mut ( ) . unwrap ( ) ;
60
104
let mut advance = true ;
105
+
106
+ // Iterate through the list until a large enough block/segment is found
61
107
while advance {
62
108
advance = match cursor. next . as_ref ( ) {
63
109
None => {
110
+ // The end of the list is reached, there is no large
111
+ // enough segment available
64
112
return None ;
65
113
}
66
114
Some ( next) => {
@@ -72,21 +120,38 @@ impl Heap {
72
120
}
73
121
}
74
122
123
+ // The segment is split into two new ones, and the first one is
124
+ // allocated
75
125
let mut next = cursor. next . take ( ) . unwrap ( ) ;
76
126
Self :: trim_segment ( next, actual_size) ;
77
127
cursor. next = next. next . take ( ) ;
78
128
79
- self . compaction ( ) ;
80
- Some ( next. end_address ( ) as * mut u8 )
129
+ Some ( next. start_address ( ) as * mut u8 )
81
130
}
82
- fn free_segment ( self : & mut Self , start_address : Word , size : usize ) {
83
- self . add_free_segment ( start_address, size + HEAP_SEG_HEADER_SIZE ) ;
131
+
132
+ /*
133
+ When a segment is freed, it is put back into the list of
134
+ free segments
135
+ */
136
+
137
+ fn free_segment ( self : & mut Self , start_address : usize , size : usize ) {
138
+ self . add_free_segment ( start_address, size) ;
139
+
140
+ // Adjacent segments are merged
84
141
self . compaction ( ) ;
85
142
}
86
- fn add_free_segment ( self : & mut Self , address : Word , size : usize ) {
87
- assert ! ( size > 0 ) ;
143
+
144
+ /*
145
+ The functions inserts a segment into the free list, in the correct
146
+ position
147
+ */
148
+
149
+ fn add_free_segment ( self : & mut Self , address : usize , size : usize ) {
150
+ // The heap should never allocate segments of size less than
151
+ // HEAP_SEG_HEADER_SIZE
152
+ assert ! ( size > HEAP_SEG_HEADER_SIZE ) ;
88
153
89
- let mut new_seg = unsafe { Self :: init_segment ( HeapSegment :: new ( size - HEAP_SEG_HEADER_SIZE ) , address) } ;
154
+ let mut new_seg = unsafe { Self :: init_segment ( HeapSegment :: new ( size) , address) } ;
90
155
if self . head . is_none ( ) || self . head . as_ref ( ) . unwrap ( ) . start_address ( ) > address {
91
156
new_seg. next = self . head . take ( ) ;
92
157
self . head = Some ( new_seg) ;
@@ -96,6 +161,9 @@ impl Heap {
96
161
let mut cursor = self . head . as_mut ( ) . unwrap ( ) ;
97
162
let mut advance = true ;
98
163
while advance {
164
+ // Iterate through the list until a segment starting at a greater address
165
+ // than the new one is found
166
+
99
167
advance = match cursor. next . as_ref ( ) {
100
168
None => {
101
169
false
@@ -107,11 +175,17 @@ impl Heap {
107
175
if advance {
108
176
cursor = cursor. next . as_mut ( ) . unwrap ( ) ;
109
177
} else {
178
+ // The segment is inserted into the list
110
179
new_seg. next = cursor. next . take ( ) ;
111
180
}
112
181
}
113
182
cursor. next = Some ( new_seg) ;
114
183
}
184
+
185
+ /*
186
+ The function looks for adjacent segments and merges them into a single one
187
+ */
188
+
115
189
fn compaction ( self : & mut Self ) {
116
190
if self . head . is_none ( ) {
117
191
return ;
@@ -122,11 +196,16 @@ impl Heap {
122
196
let node_start = cursor. start_address ( ) ;
123
197
let compacted = match cursor. next . as_mut ( ) {
124
198
None => {
199
+ // The end of the list was reached, there are no more
200
+ // segments to merge
125
201
return ;
126
202
}
127
203
Some ( next) => {
204
+ // If the following segment starts the byte after the
205
+ // end of the current segment, the two are merged
206
+
128
207
if next. start_address ( )
129
- == node_start + ( HEAP_SEG_HEADER_SIZE + cursor. size ) as Word
208
+ == node_start + cursor. size
130
209
{
131
210
cursor. size = cursor. size + HEAP_SEG_HEADER_SIZE + next. size ;
132
211
cursor. next = next. next . take ( ) ;
@@ -136,40 +215,85 @@ impl Heap {
136
215
}
137
216
}
138
217
} ;
218
+
219
+ // If two segments were merged, the cursor does not need to be
220
+ // advanced, as it might be possible to merge the following
221
+ // segment
139
222
if !compacted {
140
223
cursor = cursor. next . as_mut ( ) . unwrap ( ) ;
141
224
}
142
225
}
143
226
}
144
- unsafe fn init_segment ( seg : HeapSegment , address : Word ) -> & ' static mut HeapSegment {
227
+
228
+ /*
229
+ This function copies an `HeapSegment` struct at the desired address, while
230
+ returning a mutable reference to it.
231
+ */
232
+
233
+ unsafe fn init_segment ( seg : HeapSegment , address : usize ) -> & ' static mut HeapSegment {
145
234
let address_ptr = address as * mut HeapSegment ;
146
235
address_ptr. write ( seg) ;
147
236
& mut * address_ptr
148
237
}
238
+
239
+ /*
240
+ The function trims down a segment splitting it into two new ones of sizes
241
+ <target_size> and <size - target_size>
242
+ */
243
+
149
244
fn trim_segment ( seg : & mut HeapSegment , target_size : usize ) {
150
- let new_seg_addr = seg. start_address ( ) + ( HEAP_SEG_HEADER_SIZE + target_size) as Word ;
245
+ let new_seg_addr = seg. start_address ( ) + target_size;
151
246
let new_seg_size = seg. size - target_size;
247
+
248
+ // The segment gets trimmed only if both the new segments would
249
+ // be larger than HEAP_SEG_HEADER_SIZE
152
250
if new_seg_size > HEAP_SEG_HEADER_SIZE {
153
251
seg. size = target_size;
154
- let mut new_seg = unsafe { Self :: init_segment ( HeapSegment :: new ( new_seg_size - HEAP_SEG_HEADER_SIZE ) , new_seg_addr) } ;
252
+ let mut new_seg = unsafe { Self :: init_segment ( HeapSegment :: new ( new_seg_size) , new_seg_addr) } ;
155
253
new_seg. next = seg. next . take ( ) ;
156
254
seg. next = Some ( new_seg) ;
157
255
}
158
256
}
159
257
}
160
258
259
+ /*
260
+ HeapSegments
261
+ */
262
+
161
263
impl HeapSegment {
162
264
pub const fn new ( size : usize ) -> Self {
163
265
Self { size, next : None }
164
266
}
165
- pub fn start_address ( self : & Self ) -> Word {
166
- self as * const Self as Word
267
+ pub fn start_address ( self : & Self ) -> usize {
268
+ self as * const Self as usize
167
269
}
168
- pub fn end_address ( self : & Self ) -> Word {
169
- self as * const Self as Word + ( self . size + HEAP_SEG_HEADER_SIZE ) as Word
270
+ pub fn end_address ( self : & Self ) -> usize {
271
+ self as * const Self as usize + self . size
170
272
}
171
273
}
172
274
275
+ /*
276
+ HeapIterator implements the Iterator trait, which allows us to iterate
277
+ through heap segments with the `for el in HEAP` construct
278
+ */
279
+
280
+ impl < ' a > Iterator for HeapIterator < ' a > {
281
+ type Item = & ' a HeapSegment ;
282
+
283
+ fn next ( & mut self ) -> Option < Self :: Item > {
284
+ self . next . map ( |node| {
285
+ self . next = node. next . as_deref ( ) ;
286
+ node
287
+ } )
288
+ }
289
+ }
290
+
291
+ /*
292
+ LockedHeap implements the GlobalAlloc interface. Because that allows Rust
293
+ to know how to allocate memory dynamically, we can use standard library types
294
+ like `Box`, `Vec`, ... and so on.
295
+ */
296
+
173
297
use alloc:: alloc:: { GlobalAlloc , Layout } ;
174
298
use core:: ptr;
175
299
@@ -185,10 +309,12 @@ unsafe impl GlobalAlloc for LockedHeap {
185
309
186
310
unsafe fn dealloc ( & self , _ptr : * mut u8 , _layout : Layout ) {
187
311
let mut heap = self . lock ( ) ;
188
- heap. add_free_segment ( _ptr as Word , _layout. size ( ) ) ;
312
+ heap. add_free_segment ( _ptr as usize , _layout. size ( ) ) ;
189
313
}
190
314
}
191
315
316
+ /* The allocation error handler, needed by the `alloc` crate */
317
+
192
318
#[ alloc_error_handler]
193
319
fn alloc_error_handler ( layout : alloc:: alloc:: Layout ) -> ! {
194
320
panic ! ( "allocation error: {:?}" , layout)
0 commit comments