Skip to content

Commit 7bd1e9c

Browse files
authored
Add functions to get used and free heap size (#32)
1 parent db3110c commit 7bd1e9c

File tree

2 files changed

+40
-15
lines changed

2 files changed

+40
-15
lines changed

Changelog.md

+3
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
2+
- Add functions to get used and free heap size ([#32](https://github.com/phil-opp/linked-list-allocator/pull/32))
3+
14
# 0.8.3
25

36
- Prevent writing to heap memory range when size too small ([#31](https://github.com/phil-opp/linked-list-allocator/pull/31))

src/lib.rs

+37-15
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ extern crate alloc;
1414
use alloc::alloc::Layout;
1515
#[cfg(feature = "alloc_ref")]
1616
use alloc::alloc::{AllocErr, AllocInit, AllocRef, MemoryBlock};
17+
#[cfg(feature = "use_spin")]
1718
use core::alloc::GlobalAlloc;
1819
use core::mem;
1920
#[cfg(feature = "use_spin")]
@@ -31,6 +32,7 @@ mod test;
3132
pub struct Heap {
3233
bottom: usize,
3334
size: usize,
35+
used: usize,
3436
holes: HoleList,
3537
}
3638

@@ -40,6 +42,7 @@ impl Heap {
4042
Heap {
4143
bottom: 0,
4244
size: 0,
45+
used: 0,
4346
holes: HoleList::empty(),
4447
}
4548
}
@@ -53,6 +56,7 @@ impl Heap {
5356
pub unsafe fn init(&mut self, heap_bottom: usize, heap_size: usize) {
5457
self.bottom = heap_bottom;
5558
self.size = heap_size;
59+
self.used = 0;
5660
self.holes = HoleList::new(heap_bottom, heap_size);
5761
}
5862

@@ -67,25 +71,37 @@ impl Heap {
6771
Heap {
6872
bottom: heap_bottom,
6973
size: heap_size,
74+
used: 0,
7075
holes: HoleList::new(heap_bottom, heap_size),
7176
}
7277
}
7378
}
7479

75-
/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
76-
/// beginning of that chunk if it was successful. Else it returns `None`.
77-
/// This function scans the list of free memory blocks and uses the first block that is big
78-
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
79-
/// reasonably fast for small allocations.
80-
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
80+
/// Align layout. Returns a layout with size increased to
81+
/// fit at least `HoleList::min_size` and proper alignment of a `Hole`.
82+
fn align_layout(layout: Layout) -> Layout {
8183
let mut size = layout.size();
8284
if size < HoleList::min_size() {
8385
size = HoleList::min_size();
8486
}
8587
let size = align_up(size, mem::align_of::<Hole>());
8688
let layout = Layout::from_size_align(size, layout.align()).unwrap();
8789

88-
self.holes.allocate_first_fit(layout)
90+
layout
91+
}
92+
93+
/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
94+
/// beginning of that chunk if it was successful. Else it returns `None`.
95+
/// This function scans the list of free memory blocks and uses the first block that is big
96+
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
97+
/// reasonably fast for small allocations.
98+
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
99+
let aligned_layout = Self::align_layout(layout);
100+
let res = self.holes.allocate_first_fit(aligned_layout);
101+
if res.is_ok() {
102+
self.used += aligned_layout.size();
103+
}
104+
res
89105
}
90106

91107
/// Frees the given allocation. `ptr` must be a pointer returned
@@ -96,14 +112,9 @@ impl Heap {
96112
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
97113
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
98114
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
99-
let mut size = layout.size();
100-
if size < HoleList::min_size() {
101-
size = HoleList::min_size();
102-
}
103-
let size = align_up(size, mem::align_of::<Hole>());
104-
let layout = Layout::from_size_align(size, layout.align()).unwrap();
105-
106-
self.holes.deallocate(ptr, layout);
115+
let aligned_layout = Self::align_layout(layout);
116+
self.holes.deallocate(ptr, aligned_layout);
117+
self.used -= aligned_layout.size();
107118
}
108119

109120
/// Returns the bottom address of the heap.
@@ -121,6 +132,16 @@ impl Heap {
121132
self.bottom + self.size
122133
}
123134

135+
/// Returns the size of the used part of the heap
136+
pub fn used(&self) -> usize {
137+
self.used
138+
}
139+
140+
/// Returns the size of the free part of the heap
141+
pub fn free(&self) -> usize {
142+
self.size - self.used
143+
}
144+
124145
/// Extends the size of the heap by creating a new hole at the end
125146
///
126147
/// # Unsafety
@@ -182,6 +203,7 @@ impl LockedHeap {
182203
LockedHeap(Spinlock::new(Heap {
183204
bottom: heap_bottom,
184205
size: heap_size,
206+
used: 0,
185207
holes: HoleList::new(heap_bottom, heap_size),
186208
}))
187209
}

0 commit comments

Comments
 (0)