1use crate::prelude::*;
2use alloc::collections::BTreeMap;
3use core::{alloc::Layout, num::NonZeroU32, ops::Bound};
4
/// A simple first-fit free list managing a `u32`-indexed range of space.
///
/// All block indices and lengths are kept as multiples of the alignment
/// (`ALIGN_U32`); index zero is never handed out, so allocation results can
/// be `NonZeroU32`.
pub(crate) struct FreeList {
    /// The total capacity, in bytes, of the range this free list manages.
    capacity: usize,
    /// Map from each free block's start index to that block's length.
    /// `BTreeMap` keeps blocks sorted by index, which `dealloc` relies on to
    /// find and merge neighboring free blocks.
    free_block_index_to_len: BTreeMap<u32, u32>,
}
29
/// The alignment that every block index and block length in the free list is
/// kept to, as a `u32`.
const ALIGN_U32: u32 = 16;
/// The same alignment as a `usize`, for use with `Layout` and capacities.
const ALIGN_USIZE: usize = ALIGN_U32 as usize;
35
36impl FreeList {
37 pub fn layout(size: usize) -> Layout {
40 Layout::from_size_align(size, ALIGN_USIZE).unwrap()
41 }
42
43 pub fn new(capacity: usize) -> Self {
46 log::debug!("FreeList::new({capacity})");
47
48 let mut free_list = FreeList {
49 capacity,
50 free_block_index_to_len: BTreeMap::new(),
51 };
52
53 let end = u32::try_from(free_list.capacity).unwrap_or_else(|_| {
54 assert!(free_list.capacity > usize::try_from(u32::MAX).unwrap());
55 u32::MAX
56 });
57
58 let start = ALIGN_U32;
62
63 let len = round_u32_down_to_pow2(end.saturating_sub(start), ALIGN_U32);
64
65 let entire_range = if len >= ALIGN_U32 {
66 Some((start, len))
67 } else {
68 None
69 };
70
71 free_list.free_block_index_to_len.extend(entire_range);
72
73 free_list
74 }
75
    /// Grow the managed capacity by `additional` bytes, making the newly
    /// available range allocatable by handing it to `dealloc`.
    pub fn add_capacity(&mut self, additional: usize) {
        let old_cap = self.capacity;
        self.capacity = self.capacity.saturating_add(additional);
        log::debug!(
            "FreeList::add_capacity({additional:#x}): capacity growing from {old_cap:#x} to {:#x}",
            self.capacity
        );

        // The new free block begins at the old capacity, rounded down to our
        // alignment.
        let old_cap_rounded = round_usize_down_to_pow2(old_cap, ALIGN_USIZE);

        // If the old capacity already exceeded the `u32` index space, there
        // is no addressable room to add.
        let Ok(old_cap_rounded) = u32::try_from(old_cap_rounded) else {
            return;
        };

        // Never hand out index zero: when the old capacity rounds down to
        // zero, start the new block at the first alignment unit instead.
        let index = NonZeroU32::new(old_cap_rounded).unwrap_or(
            NonZeroU32::new(ALIGN_U32).unwrap(),
        );

        // Clamp the new end of the managed range to the `u32` index space
        // and round it down to the alignment.
        let new_cap = u32::try_from(self.capacity).unwrap_or(u32::MAX);
        let new_cap = round_u32_down_to_pow2(new_cap, ALIGN_U32);

        if index.get() > new_cap {
            return;
        }

        let size = new_cap - index.get();
        debug_assert_eq!(size % ALIGN_U32, 0);
        if size == 0 {
            return;
        }

        let Ok(layout) = Layout::from_size_align(usize::try_from(size).unwrap(), ALIGN_USIZE)
        else {
            return;
        };

        log::trace!(
            "FreeList::add_capacity(..): adding block {index:#x}..{:#x}",
            index.get() + size
        );
        // `dealloc` inserts the new range into the free list, merging it with
        // a trailing free block when possible.
        self.dealloc(index, layout);
    }
137
138 #[cfg(test)]
139 fn max_size(&self) -> usize {
140 let cap = core::cmp::min(self.capacity, usize::try_from(u32::MAX).unwrap());
141 round_usize_down_to_pow2(cap.saturating_sub(ALIGN_USIZE), ALIGN_USIZE)
142 }
143
    /// Validate `layout` against this free list's constraints and return the
    /// allocation size rounded up to the free list's alignment.
    ///
    /// # Errors
    ///
    /// Returns an error when the requested alignment exceeds the maximum
    /// supported alignment, or when the (rounded) size does not fit in a
    /// `u32`.
    fn check_layout(&self, layout: Layout) -> Result<u32> {
        ensure!(
            layout.align() <= ALIGN_USIZE,
            "requested allocation's alignment of {} is greater than max supported \
             alignment of {ALIGN_USIZE}",
            layout.align(),
        );

        // Sizes must fit in the `u32` index space; surface oversized
        // requests as an allocation-too-large trap.
        let alloc_size = u32::try_from(layout.size()).map_err(|e| {
            let trap = crate::Trap::AllocationTooLarge;
            let err = anyhow::Error::from(trap);
            err.context(e)
                .context("requested allocation's size does not fit in a u32")
        })?;
        // Rounding up to the alignment can overflow `u32` for sizes near
        // `u32::MAX`; treat that the same as an oversized request.
        alloc_size
            .checked_next_multiple_of(ALIGN_U32)
            .ok_or_else(|| {
                let trap = crate::Trap::AllocationTooLarge;
                let err = anyhow::Error::from(trap);
                let err = err.context(format!(
                    "failed to round allocation size of {alloc_size} up to next \
                     multiple of {ALIGN_USIZE}"
                ));
                err
            })
    }
172
173 fn first_fit(&mut self, alloc_size: u32) -> Option<(u32, u32)> {
176 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
177
178 let (&block_index, &block_len) = self
179 .free_block_index_to_len
180 .iter()
181 .find(|(_idx, len)| **len >= alloc_size)?;
182
183 debug_assert_eq!(block_index % ALIGN_U32, 0);
184 debug_assert_eq!(block_len % ALIGN_U32, 0);
185
186 let entry = self.free_block_index_to_len.remove(&block_index);
187 debug_assert!(entry.is_some());
188
189 Some((block_index, block_len))
190 }
191
192 fn maybe_split(&mut self, alloc_size: u32, block_index: u32, block_len: u32) -> u32 {
197 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
198 debug_assert_eq!(block_index % ALIGN_U32, 0);
199 debug_assert_eq!(block_len % ALIGN_U32, 0);
200
201 if block_len - alloc_size < ALIGN_U32 {
202 return block_len;
204 }
205
206 let new_block_len = alloc_size;
209 let split_start = block_index + alloc_size;
210 let split_len = block_len - alloc_size;
211
212 debug_assert_eq!(new_block_len % ALIGN_U32, 0);
213 debug_assert_eq!(split_start % ALIGN_U32, 0);
214 debug_assert_eq!(split_len % ALIGN_U32, 0);
215
216 self.free_block_index_to_len.insert(split_start, split_len);
217
218 new_block_len
219 }
220
221 pub fn alloc(&mut self, layout: Layout) -> Result<Option<NonZeroU32>> {
232 log::trace!("FreeList::alloc({layout:?})");
233 let alloc_size = self.check_layout(layout)?;
234 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
235
236 let (block_index, block_len) = match self.first_fit(alloc_size) {
237 None => return Ok(None),
238 Some(tup) => tup,
239 };
240 debug_assert_ne!(block_index, 0);
241 debug_assert_eq!(block_index % ALIGN_U32, 0);
242 debug_assert!(block_len >= alloc_size);
243 debug_assert_eq!(block_len % ALIGN_U32, 0);
244
245 let block_len = self.maybe_split(alloc_size, block_index, block_len);
246 debug_assert!(block_len >= alloc_size);
247 debug_assert_eq!(block_len % ALIGN_U32, 0);
248
249 #[cfg(debug_assertions)]
251 self.check_integrity();
252
253 log::trace!("FreeList::alloc({layout:?}) -> {block_index:#x}");
254 Ok(Some(unsafe { NonZeroU32::new_unchecked(block_index) }))
255 }
256
    /// Return the block at `index` — previously obtained from `alloc` with
    /// the same `layout` — to the free list, merging it with the free blocks
    /// on either side whenever the gap between them is too small to ever
    /// hold another aligned block.
    pub fn dealloc(&mut self, index: NonZeroU32, layout: Layout) {
        log::trace!("FreeList::dealloc({index:#x}, {layout:?})");

        let index = index.get();
        debug_assert_eq!(index % ALIGN_U32, 0);

        // Recompute the rounded-up allocation size; the layout was already
        // validated when this block was allocated, so this cannot fail.
        let alloc_size = self.check_layout(layout).unwrap();
        debug_assert_eq!(alloc_size % ALIGN_U32, 0);

        // The closest free block before `index`, if any.
        let prev_block = self
            .free_block_index_to_len
            .range((Bound::Unbounded, Bound::Excluded(index)))
            .next_back()
            .map(|(idx, len)| (*idx, *len));

        // The closest free block after `index`, if any.
        let next_block = self
            .free_block_index_to_len
            .range((Bound::Excluded(index), Bound::Unbounded))
            .next()
            .map(|(idx, len)| (*idx, *len));

        match (prev_block, next_block) {
            // Contiguous with both neighbors: fold the freed block and the
            // next block into the previous block.
            (Some((prev_index, prev_len)), Some((next_index, next_len)))
                if blocks_are_contiguous(prev_index, prev_len, index)
                    && blocks_are_contiguous(index, alloc_size, next_index) =>
            {
                log::trace!(
                    "merging blocks {prev_index:#x}..{prev_len:#x}, {index:#x}..{index_end:#x}, {next_index:#x}..{next_end:#x}",
                    prev_len = prev_index + prev_len,
                    index_end = index + u32::try_from(layout.size()).unwrap(),
                    next_end = next_index + next_len,
                );
                self.free_block_index_to_len.remove(&next_index);
                let merged_block_len = next_index + next_len - prev_index;
                debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
                *self.free_block_index_to_len.get_mut(&prev_index).unwrap() = merged_block_len;
            }

            // Contiguous with the previous block only: grow it to cover the
            // freed block.
            (Some((prev_index, prev_len)), _)
                if blocks_are_contiguous(prev_index, prev_len, index) =>
            {
                log::trace!(
                    "merging blocks {prev_index:#x}..{prev_len:#x}, {index:#x}..{index_end:#x}",
                    prev_len = prev_index + prev_len,
                    index_end = index + u32::try_from(layout.size()).unwrap(),
                );
                let merged_block_len = index + alloc_size - prev_index;
                debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
                *self.free_block_index_to_len.get_mut(&prev_index).unwrap() = merged_block_len;
            }

            // Contiguous with the next block only: replace it with a single
            // block starting at `index`.
            (_, Some((next_index, next_len)))
                if blocks_are_contiguous(index, alloc_size, next_index) =>
            {
                log::trace!(
                    "merging blocks {index:#x}..{index_end:#x}, {next_index:#x}..{next_end:#x}",
                    index_end = index + u32::try_from(layout.size()).unwrap(),
                    next_end = next_index + next_len,
                );
                self.free_block_index_to_len.remove(&next_index);
                let merged_block_len = next_index + next_len - index;
                debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
                self.free_block_index_to_len.insert(index, merged_block_len);
            }

            // No contiguous neighbor on either side: insert a standalone
            // free block.
            (_, _) => {
                log::trace!("cannot merge blocks");
                self.free_block_index_to_len.insert(index, alloc_size);
            }
        }

        #[cfg(debug_assertions)]
        self.check_integrity();
    }
342
343 #[cfg(debug_assertions)]
353 fn check_integrity(&self) {
354 let mut prev_end = None;
355 for (&index, &len) in self.free_block_index_to_len.iter() {
356 let end = index + len;
358 assert!(usize::try_from(end).unwrap() <= self.capacity);
359
360 if let Some(prev_end) = prev_end {
362 assert!(prev_end < index);
368 }
369
370 assert_eq!(index % ALIGN_U32, 0);
372
373 assert_eq!(len % ALIGN_U32, 0);
375
376 prev_end = Some(end);
377 }
378 }
379}
380
381#[inline]
382fn blocks_are_contiguous(prev_index: u32, prev_len: u32, next_index: u32) -> bool {
383 let end_of_prev = prev_index + prev_len;
389 debug_assert!(
390 next_index >= end_of_prev,
391 "overlapping blocks: \n\
392 \t prev_index = {prev_index:#x}\n\
393 \t prev_len = {prev_len:#x}\n\
394 \tend_of_prev = {end_of_prev:#x}\n\
395 \t next_index = {next_index:#x}\n\
396 `next_index` should be >= `end_of_prev`"
397 );
398 let delta_to_next = next_index - end_of_prev;
399 delta_to_next < ALIGN_U32
400}
401
/// Round `value` down to the nearest multiple of `divisor`, which must be a
/// nonzero power of two.
#[inline]
fn round_u32_down_to_pow2(value: u32, divisor: u32) -> u32 {
    debug_assert!(divisor > 0);
    debug_assert!(divisor.is_power_of_two());
    // For a power-of-two divisor, `-divisor` is the mask with the low bits
    // cleared, so this equals `(value / divisor) * divisor`.
    value & divisor.wrapping_neg()
}
408
/// Round `value` down to the nearest multiple of `divisor`, which must be a
/// nonzero power of two.
#[inline]
fn round_usize_down_to_pow2(value: usize, divisor: usize) -> usize {
    debug_assert!(divisor > 0);
    debug_assert!(divisor.is_power_of_two());
    // Same trick as the `u32` variant: `-divisor` masks off the low bits.
    value & divisor.wrapping_neg()
}
415
#[cfg(test)]
mod tests {
    use super::*;
    use crate::hash_map::HashMap;
    use proptest::prelude::*;
    use std::num::NonZeroUsize;

    // Snapshot of the free list's shape: the number of free blocks and the
    // size of the first block, if any. Used to compare the state before and
    // after a workload.
    fn free_list_block_len_and_size(free_list: &FreeList) -> (usize, Option<usize>) {
        let len = free_list.free_block_index_to_len.len();
        let size = free_list
            .free_block_index_to_len
            .values()
            .next()
            .map(|s| usize::try_from(*s).unwrap());
        (len, size)
    }

    proptest! {
        // Property: after every successful allocation has been deallocated,
        // the free list must have merged everything back into its initial
        // shape — i.e. no fragmentation is leaked.
        #[test]
        #[cfg_attr(miri, ignore)]
        fn check_no_fragmentation((capacity, ops) in ops()) {
            let _ = env_logger::try_init();

            // Successful allocations, keyed by operation id.
            let mut live = HashMap::new();

            // Deallocs whose matching alloc hadn't happened yet (the op
            // sequence is shuffled); replayed after the main loop.
            let mut deferred = vec![];

            let mut free_list = FreeList::new(capacity.get());

            let (initial_len, initial_size) = free_list_block_len_and_size(&free_list);
            assert!(initial_len == 0 || initial_len == 1);
            assert!(initial_size.unwrap_or(0) <= capacity.get());
            assert_eq!(initial_size.unwrap_or(0), free_list.max_size());

            for (id, op) in ops {
                match op {
                    Op::Alloc(layout) => {
                        if let Ok(Some(ptr)) = free_list.alloc(layout) {
                            live.insert(id, ptr);
                        }
                    }
                    Op::Dealloc(layout) => {
                        if let Some(ptr) = live.remove(&id) {
                            free_list.dealloc(ptr, layout);
                        } else {
                            deferred.push((id, layout));
                        }
                    }
                }
            }

            // Free anything that was allocated after its dealloc op ran.
            for (id, layout) in deferred {
                if let Some(ptr) = live.remove(&id) {
                    free_list.dealloc(ptr, layout);
                }
            }

            assert!(live.is_empty());

            let (final_len, final_size) = free_list_block_len_and_size(&free_list);

            assert_eq!(final_len, initial_len);

            assert_eq!(final_size, initial_size);
        }
    }

    // A single fuzzed free-list operation; the paired `usize` id links an
    // `Alloc` with its matching `Dealloc`.
    #[derive(Clone, Debug)]
    enum Op {
        Alloc(Layout),
        Dealloc(Layout),
    }

    // Deterministically map the fuzz input `x` to a power of two in
    // `1..=max`.
    fn clamp_to_pow2_in_range(x: usize, max: usize) -> usize {
        let log_x = max.ilog2() as usize;
        if log_x == 0 {
            return 1;
        }
        let divisor = usize::MAX / log_x;
        let y = 1_usize << (x / divisor);
        assert!(y.is_power_of_two(), "{y} is not a power of two");
        assert!(y <= max, "{y} is larger than {max}");
        y
    }

    // Build a valid `Layout` from raw fuzz inputs, clamped to the free
    // list's supported alignment and the given maximum size.
    fn arbitrary_layout(max_size: NonZeroUsize, size: usize, align: usize) -> Layout {
        let max_size = std::cmp::min(max_size.get(), usize::try_from(isize::MAX).unwrap());

        let align = clamp_to_pow2_in_range(align, super::ALIGN_USIZE);

        let size = size % (max_size + 1);

        let size = round_usize_down_to_pow2(size, align);
        assert!(size <= max_size);

        assert_ne!(align, 0);
        assert!(align.is_power_of_two());
        assert_eq!(size % align, 0);
        assert!(size <= usize::try_from(isize::MAX).unwrap());

        Layout::from_size_align(size, align).unwrap()
    }

    // Strategy: an arbitrary capacity plus a shuffled sequence of paired
    // alloc/dealloc operations against it.
    fn ops() -> impl Strategy<Value = (NonZeroUsize, Vec<(usize, Op)>)> {
        any::<usize>().prop_flat_map(|capacity| {
            let capacity =
                NonZeroUsize::new(capacity).unwrap_or_else(|| NonZeroUsize::new(1 << 31).unwrap());

            (
                Just(capacity),
                (any::<usize>(), any::<usize>(), any::<usize>())
                    .prop_flat_map(move |(id, size, align)| {
                        let layout = arbitrary_layout(capacity, size, align);
                        vec![
                            Just((id, Op::Alloc(layout))),
                            Just((id, Op::Dealloc(layout))),
                        ]
                    })
                    .prop_shuffle(),
            )
        })
    }

    // An allocation that exactly consumes the single free block should not
    // split it.
    #[test]
    fn allocate_no_split() {
        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 2);

        assert_eq!(free_list.free_block_index_to_len.len(), 1);
        assert_eq!(free_list.max_size(), ALIGN_USIZE * 2);

        free_list
            .alloc(Layout::from_size_align(ALIGN_USIZE + ALIGN_USIZE, ALIGN_USIZE).unwrap())
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");

        assert_eq!(free_list.free_block_index_to_len.len(), 0);
    }

    // An allocation smaller than the free block should split off the tail as
    // a new free block.
    #[test]
    fn allocate_and_split() {
        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 3);

        assert_eq!(free_list.free_block_index_to_len.len(), 1);
        assert_eq!(free_list.max_size(), ALIGN_USIZE * 3);

        free_list
            .alloc(Layout::from_size_align(ALIGN_USIZE + ALIGN_USIZE, ALIGN_USIZE).unwrap())
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");

        assert_eq!(free_list.free_block_index_to_len.len(), 1);
    }

    // Freeing a block surrounded by free space on both sides should merge
    // all three into one block.
    #[test]
    fn dealloc_merge_prev_and_next() {
        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`"
        );

        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `b`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(b, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have merged `a` and `b` blocks with the rest to form a \
             single, contiguous free block after deallocating `b`"
        );
    }

    // Freeing a block whose previous neighbor is free (but next is not)
    // should merge only with the previous block.
    #[test]
    fn dealloc_merge_with_prev_and_not_next() {
        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let c = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`, `b`, and `c`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(b, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have merged `a` and `b` blocks, but not merged with the \
             rest of the free space"
        );

        let _ = c;
    }

    // Freeing a block whose next neighbor is free (but previous is not)
    // should merge only with the next block.
    #[test]
    fn dealloc_merge_with_next_and_not_prev() {
        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let c = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`, `b`, and `c`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(c, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have merged `c` block with rest of the free space, but not \
             with `a` block"
        );

        let _ = b;
    }

    // Freeing a block with live allocations on both sides should insert a
    // standalone free block with no merging.
    #[test]
    fn dealloc_no_merge() {
        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let c = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let d = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`, `b`, `c`, and `d`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(c, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            3,
            "should not have merged `c` block `a` block or rest of the free \
             space"
        );

        let _ = (b, d);
    }

    // An over-large (but representable) request yields `Ok(None)`, not an
    // error.
    #[test]
    fn alloc_size_too_large() {
        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 10);
        assert_eq!(free_list.max_size(), ALIGN_USIZE * 10);

        assert!(
            free_list
                .alloc(Layout::from_size_align(ALIGN_USIZE * 20, ALIGN_USIZE).unwrap())
                .unwrap()
                .is_none()
        );
    }

    // An alignment beyond the supported maximum is a hard error, not a
    // `None`.
    #[test]
    fn alloc_align_too_large() {
        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 10);
        assert_eq!(free_list.max_size(), ALIGN_USIZE * 10);

        assert!(
            free_list
                .alloc(Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE * 2).unwrap(),)
                .is_err()
        );
    }

    // Exercise every interleaving of two alloc/dealloc pairs.
    #[test]
    fn all_pairwise_alloc_dealloc_orderings() {
        let tests: &[fn(&mut FreeList, Layout)] = &[
            |f, l| {
                let a = f.alloc(l).unwrap().unwrap();
                let b = f.alloc(l).unwrap().unwrap();
                f.dealloc(a, l);
                f.dealloc(b, l);
            },
            |f, l| {
                let a = f.alloc(l).unwrap().unwrap();
                let b = f.alloc(l).unwrap().unwrap();
                f.dealloc(b, l);
                f.dealloc(a, l);
            },
            |f, l| {
                let a = f.alloc(l).unwrap().unwrap();
                f.dealloc(a, l);
                let b = f.alloc(l).unwrap().unwrap();
                f.dealloc(b, l);
            },
        ];

        let l = Layout::from_size_align(16, 8).unwrap();
        for test in tests {
            let mut f = FreeList::new(0x100);
            test(&mut f, l);
        }
    }

    // Growing capacity incrementally eventually makes allocation succeed,
    // and freed blocks from different growth steps merge together.
    #[test]
    fn add_capacity() {
        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(0);
        assert!(free_list.alloc(layout).unwrap().is_none(), "no capacity");

        free_list.add_capacity(ALIGN_USIZE);
        assert!(
            free_list.alloc(layout).unwrap().is_none(),
            "still not enough capacity because we won't allocate the zero index"
        );

        free_list.add_capacity(1);
        assert!(
            free_list.alloc(layout).unwrap().is_none(),
            "still not enough capacity because allocations are multiples of the alignment"
        );

        free_list.add_capacity(ALIGN_USIZE - 1);
        let a = free_list
            .alloc(layout)
            .unwrap()
            .expect("now we have enough capacity for one");
        assert!(
            free_list.alloc(layout).unwrap().is_none(),
            "but not enough capacity for two"
        );

        free_list.add_capacity(ALIGN_USIZE);
        let b = free_list
            .alloc(layout)
            .unwrap()
            .expect("now we have enough capacity for two");

        free_list.dealloc(a, layout);
        free_list.dealloc(b, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "`dealloc` should merge blocks from different `add_capacity` calls together"
        );

        free_list.add_capacity(ALIGN_USIZE);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "`add_capacity` should eagerly merge new capacity into the last block \
             in the free list, when possible"
        );
    }

    // One-byte capacity increments don't allow an allocation until a full
    // (aligned) block beyond the reserved prefix exists.
    #[test]
    fn add_capacity_not_enough_for_first_alloc() {
        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(0);
        assert!(free_list.alloc(layout).unwrap().is_none(), "no capacity");

        for _ in 1..2 * ALIGN_USIZE {
            free_list.add_capacity(1);
            assert!(
                free_list.alloc(layout).unwrap().is_none(),
                "not enough capacity"
            );
        }

        free_list.add_capacity(1);
        free_list
            .alloc(layout)
            .unwrap()
            .expect("now we have enough capacity for one");
        assert!(
            free_list.alloc(layout).unwrap().is_none(),
            "but not enough capacity for two"
        );
    }
}