Skip to content

Commit 780fb7b

Browse files
committed
Commented and debugged the code, and added Iterator implementation
1 parent e9ac6dd commit 780fb7b

File tree

1 file changed

+153
-27
lines changed

1 file changed

+153
-27
lines changed

kernel/src/allocator/mod.rs

Lines changed: 153 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,51 @@
11
use core::mem;
22
use crate::mutex::MutexGuard;
3-
3+
use core::cmp::max;
44
use super::Word;
55
use super::mutex::Mutex;
66

77
const HEAP_SEG_HEADER_SIZE: usize = mem::size_of::<HeapSegment>();
88

99
type SegmentLink = Option<&'static mut HeapSegment>;
1010

11-
pub(crate) struct HeapSegment {
11+
/*
12+
HeapSegments are the 'header' of each memory block that is NOT allocated on the
13+
Heap. This representation allows us to store the heap as a linked list of
14+
memory segments.
15+
*/
16+
17+
pub struct HeapSegment {
1218
size: usize,
1319
next: SegmentLink,
1420
}
1521

22+
/*
23+
The Heap simply contains a reference to the first available block of memory
24+
*/
25+
1626
pub struct Heap {
1727
head: SegmentLink,
1828
}
1929

30+
/*
31+
This type wraps a Heap in a mutex, providing mutually exclusive access to it. It is
32+
needed to implement the trait `GlobalAllocator`, more on GlobalAllocator at
33+
the end of the file.
34+
*/
35+
2036
pub struct LockedHeap {
2137
heap: Mutex<Heap>,
2238
}
2339

40+
/*
41+
HeapIterator is used to iterate through the heap. More on that towards the
42+
end of the file.
43+
*/
44+
45+
pub struct HeapIterator<'a> {
46+
next: Option<&'a HeapSegment>
47+
}
48+
2449
impl LockedHeap {
2550
pub const fn new() -> Self {
2651
Self { heap: Mutex::new(Heap::new()) }
@@ -30,37 +55,60 @@ impl LockedHeap {
3055
self.heap.lock()
3156
}
3257

33-
pub fn init(&self, start_address: Word, size: usize) {
58+
pub fn init(&self, start_address: usize, size: usize) {
3459
self.lock().add_free_segment(start_address, size);
3560
}
3661
}
3762

3863
impl Heap {
64+
3965
const fn new() -> Self {
4066
Self { head: None }
4167
}
42-
fn init(&mut self, start_address: Word, size: usize) {
68+
69+
/* Initializes the heap as a single empty memory block */
70+
71+
fn init(&mut self, start_address: usize, size: usize) {
4372
self.add_free_segment(start_address, size);
4473
}
74+
75+
/* Creates the iterator */
76+
77+
fn iter(&self) -> HeapIterator {
78+
HeapIterator { next: self.head.as_deref() }
79+
}
80+
81+
/* Allocates to the caller a memory segment */
82+
4583
fn allocate_segment(self: &mut Self, size: usize) -> Option<*mut u8> {
84+
// There is no available memory left
4685
if self.head.is_none() {
4786
return None;
4887
}
49-
assert!(size > HEAP_SEG_HEADER_SIZE);
88+
89+
// The heap never allocates segments smaller than HEAP_SEG_HEADER_SIZE
90+
let actual_size = max(size,HEAP_SEG_HEADER_SIZE);
5091

51-
let actual_size = size - HEAP_SEG_HEADER_SIZE;
52-
if self.head.as_ref().unwrap().size >= size {
53-
let mut head = self.head.take().unwrap();
54-
Self::trim_segment(head, actual_size);
55-
self.head = head.next.take();
56-
return Some(head.end_address() as *mut u8);
92+
// Check the head first
93+
if self.head.as_ref().unwrap().size >= actual_size {
94+
let mut old_head = self.head.take().unwrap();
95+
96+
// The segment is split into two new ones, and the first one is
97+
// allocated
98+
Self::trim_segment(old_head, actual_size);
99+
self.head = old_head.next.take();
100+
return Some(old_head.start_address() as *mut u8);
57101
}
58102

59103
let mut cursor = self.head.as_mut().unwrap();
60104
let mut advance = true;
105+
106+
// Iterate through the list until a large enough block/segment is found
61107
while advance {
62108
advance = match cursor.next.as_ref() {
63109
None => {
110+
// The end of the list is reached, there is no large
111+
// enough segment available
64112
return None;
65113
}
66114
Some(next) => {
@@ -72,21 +120,38 @@ impl Heap {
72120
}
73121
}
74122

123+
// The segment is split into two new ones, and the first one is
124+
// allocated
75125
let mut next = cursor.next.take().unwrap();
76126
Self::trim_segment(next, actual_size);
77127
cursor.next = next.next.take();
78128

79-
self.compaction();
80-
Some(next.end_address() as *mut u8)
129+
Some(next.start_address() as *mut u8)
81130
}
82-
fn free_segment(self: &mut Self, start_address: Word, size: usize) {
83-
self.add_free_segment(start_address, size + HEAP_SEG_HEADER_SIZE);
131+
132+
/*
133+
When a segment is freed, it is put back into the list of
134+
free segments
135+
*/
136+
137+
fn free_segment(self: &mut Self, start_address: usize, size: usize) {
138+
self.add_free_segment(start_address, size);
139+
140+
// Adjacent segments are merged
84141
self.compaction();
85142
}
86-
fn add_free_segment(self: &mut Self, address: Word, size: usize) {
87-
assert!(size > 0);
143+
144+
/*
145+
The functions inserts a segment into the free list, in the correct
146+
position
147+
*/
148+
149+
fn add_free_segment(self: &mut Self, address: usize, size: usize) {
150+
// The heap should never allocate segments of size less than
151+
// HEAP_SEG_HEADER_SIZE
152+
assert!(size > HEAP_SEG_HEADER_SIZE);
88153

89-
let mut new_seg = unsafe{Self::init_segment(HeapSegment::new(size - HEAP_SEG_HEADER_SIZE), address)};
154+
let mut new_seg = unsafe{Self::init_segment(HeapSegment::new(size), address)};
90155
if self.head.is_none() || self.head.as_ref().unwrap().start_address() > address {
91156
new_seg.next = self.head.take();
92157
self.head = Some(new_seg);
@@ -96,6 +161,9 @@ impl Heap {
96161
let mut cursor = self.head.as_mut().unwrap();
97162
let mut advance = true;
98163
while advance {
164+
// Iterate through the list until a segment starting at a greater address
165+
// than the new one is found
166+
99167
advance = match cursor.next.as_ref() {
100168
None => {
101169
false
@@ -107,11 +175,17 @@ impl Heap {
107175
if advance {
108176
cursor = cursor.next.as_mut().unwrap();
109177
} else {
178+
// The segment is inserted into the list
110179
new_seg.next = cursor.next.take();
111180
}
112181
}
113182
cursor.next = Some(new_seg);
114183
}
184+
185+
/*
186+
The function looks for adjacent segments and merges them into a single one
187+
*/
188+
115189
fn compaction(self: &mut Self) {
116190
if self.head.is_none() {
117191
return;
@@ -122,11 +196,16 @@ impl Heap {
122196
let node_start = cursor.start_address();
123197
let compacted = match cursor.next.as_mut() {
124198
None => {
199+
// The end of the list was reached, there are no more
200+
// segments to merge
125201
return;
126202
}
127203
Some(next) => {
204+
// If the following segment starts the byte after the
205+
// end of the current segment, the two are merged
206+
128207
if next.start_address()
129-
== node_start + (HEAP_SEG_HEADER_SIZE + cursor.size) as Word
208+
== node_start + cursor.size
130209
{
131210
cursor.size = cursor.size + HEAP_SEG_HEADER_SIZE + next.size;
132211
cursor.next = next.next.take();
@@ -136,40 +215,85 @@ impl Heap {
136215
}
137216
}
138217
};
218+
219+
// If two segments were merged, the cursor does not need to be
220+
// advanced, as it might be possible to merge the following
221+
// segment
139222
if !compacted {
140223
cursor = cursor.next.as_mut().unwrap();
141224
}
142225
}
143226
}
144-
unsafe fn init_segment(seg: HeapSegment, address: Word) -> &'static mut HeapSegment {
227+
228+
/*
229+
This function copies a `HeapSegment` struct to the desired address, while
230+
returning a mutable reference to it.
231+
*/
232+
233+
unsafe fn init_segment(seg: HeapSegment, address: usize) -> &'static mut HeapSegment {
145234
let address_ptr = address as *mut HeapSegment;
146235
address_ptr.write(seg);
147236
&mut *address_ptr
148237
}
238+
239+
/*
240+
The function trims down a segment splitting it into two new ones of sizes
241+
<target_size> and <size - target_size>
242+
*/
243+
149244
fn trim_segment(seg: &mut HeapSegment, target_size: usize) {
150-
let new_seg_addr = seg.start_address() + (HEAP_SEG_HEADER_SIZE + target_size) as Word;
245+
let new_seg_addr = seg.start_address() + target_size;
151246
let new_seg_size = seg.size - target_size;
247+
248+
// The segment gets trimmed only if both the new segments would
249+
// be larger than HEAP_SEG_HEADER_SIZE
152250
if new_seg_size > HEAP_SEG_HEADER_SIZE {
153251
seg.size = target_size;
154-
let mut new_seg = unsafe{Self::init_segment(HeapSegment::new(new_seg_size - HEAP_SEG_HEADER_SIZE), new_seg_addr)};
252+
let mut new_seg = unsafe{Self::init_segment(HeapSegment::new(new_seg_size), new_seg_addr)};
155253
new_seg.next = seg.next.take();
156254
seg.next = Some(new_seg);
157255
}
158256
}
159257
}
160258

259+
/*
260+
HeapSegments
261+
*/
262+
161263
impl HeapSegment {
162264
pub const fn new(size: usize) -> Self {
163265
Self { size, next: None }
164266
}
165-
pub fn start_address(self: &Self) -> Word {
166-
self as *const Self as Word
267+
pub fn start_address(self: &Self) -> usize {
268+
self as *const Self as usize
167269
}
168-
pub fn end_address(self: &Self) -> Word {
169-
self as *const Self as Word + (self.size + HEAP_SEG_HEADER_SIZE) as Word
270+
pub fn end_address(self: &Self) -> usize {
271+
self as *const Self as usize + self.size
170272
}
171273
}
172274

275+
/*
276+
HeapIterator implements the Iterator trait, which allows us to iterate
277+
through heap segments with the `for el in HEAP` construct
278+
*/
279+
280+
impl<'a> Iterator for HeapIterator<'a> {
281+
type Item = &'a HeapSegment;
282+
283+
fn next(&mut self) -> Option<Self::Item> {
284+
self.next.map(|node| {
285+
self.next = node.next.as_deref();
286+
node
287+
})
288+
}
289+
}
290+
291+
/*
292+
LockedHeap implements the GlobalAlloc interface. Because this lets Rust
293+
to know how to allocate memory dynamically, we can use standard library types
294+
like `Box`, `Vec`, ... and so on.
295+
*/
296+
173297
use alloc::alloc::{GlobalAlloc, Layout};
174298
use core::ptr;
175299

@@ -185,10 +309,12 @@ unsafe impl GlobalAlloc for LockedHeap {
185309

186310
unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
187311
let mut heap = self.lock();
188-
heap.add_free_segment(_ptr as Word, _layout.size());
312+
heap.add_free_segment(_ptr as usize, _layout.size());
189313
}
190314
}
191315

316+
/* The allocation error handler, needed by the `alloc` crate */
317+
192318
#[alloc_error_handler]
193319
fn alloc_error_handler(layout: alloc::alloc::Layout) -> ! {
194320
panic!("allocation error: {:?}", layout)

0 commit comments

Comments
 (0)