[Zion] Use slab allocators immediately during heap init.

This commit is contained in:
Drew Galbraith 2023-11-15 13:02:34 -08:00
parent 659f122c9e
commit cc16210e90
4 changed files with 17 additions and 30 deletions

View File

@@ -34,15 +34,9 @@ bool IsSlab(void* addr) {
// Construct the kernel heap and publish it through the global pointer so
// free functions (kmalloc et al.) can reach the active heap instance.
KernelHeap::KernelHeap() {
  gKernelHeap = this;
}
// Create the fixed-size slab allocators backing small heap allocations.
// Each allocator serves exactly one size class; order of construction is
// irrelevant since the three members are independent.
void KernelHeap::InitializeSlabAllocators() {
  slab_32_ = glcr::MakeUnique<SlabAllocator>(32);
  slab_16_ = glcr::MakeUnique<SlabAllocator>(16);
  slab_8_ = glcr::MakeUnique<SlabAllocator>(8);
}
void* KernelHeap::Allocate(uint64_t size) {
if ((size <= 8) && slab_8_) {
auto ptr_or = slab_8_->Allocate();
if (size <= 8) {
auto ptr_or = slab_8_.Allocate();
if (ptr_or.ok()) {
return ptr_or.value();
}
@@ -50,8 +44,8 @@ void* KernelHeap::Allocate(uint64_t size) {
dbgln("Skipped allocation (slab 8): {x}", ptr_or.error());
#endif
}
if ((size <= 16) && slab_16_) {
auto ptr_or = slab_16_->Allocate();
if (size <= 16) {
auto ptr_or = slab_16_.Allocate();
if (ptr_or.ok()) {
return ptr_or.value();
}
@@ -59,8 +53,8 @@ void* KernelHeap::Allocate(uint64_t size) {
dbgln("Skipped allocation (slab 16): {x}", ptr_or.error());
#endif
}
if ((size <= 32) && slab_32_) {
auto ptr_or = slab_32_->Allocate();
if (size <= 32) {
auto ptr_or = slab_32_.Allocate();
if (ptr_or.ok()) {
return ptr_or.value();
}
@@ -98,18 +92,12 @@ void KernelHeap::DumpDebugDataInternal() {
dbgln("Active Allocations: {}", alloc_count_);
dbgln("Slab Statistics:");
if (slab_8_) {
dbgln("Slab 8: {} slabs, {} allocs", slab_8_->SlabCount(),
slab_8_->Allocations());
}
if (slab_16_) {
dbgln("Slab 16: {} slabs, {} allocs", slab_16_->SlabCount(),
slab_16_->Allocations());
}
if (slab_32_) {
dbgln("Slab 32: {} slabs, {} allocs", slab_32_->SlabCount(),
slab_32_->Allocations());
}
dbgln("Slab 8: {} slabs, {} allocs", slab_8_.SlabCount(),
slab_8_.Allocations());
dbgln("Slab 16: {} slabs, {} allocs", slab_16_.SlabCount(),
slab_16_.Allocations());
dbgln("Slab 32: {} slabs, {} allocs", slab_32_.SlabCount(),
slab_32_.Allocations());
dbgln("");
dbgln("Size Distributions of non slab-allocated.");

View File

@@ -23,9 +23,9 @@ class KernelHeap {
uint64_t alloc_count_ = 0;
glcr::UniquePtr<SlabAllocator> slab_8_;
glcr::UniquePtr<SlabAllocator> slab_16_;
glcr::UniquePtr<SlabAllocator> slab_32_;
SlabAllocator slab_8_{8};
SlabAllocator slab_16_{16};
SlabAllocator slab_32_{32};
// Distribution collection for the purpose of investigating a slab allocator.
// 0: 0-8B

View File

@@ -140,10 +140,10 @@ void InitBootstrapPageAllocation() {
// if we limit the number of pages this should be fine.
// Currently set to the minimum of 3 for one kernel heap allocation:
// PageDirectory + PageTable + Page
if (entry.type == 0 && entry.length >= 0x3000) {
if (entry.type == 0 && entry.length >= 0x5000) {
gBootstrap.init_page = entry.base;
gBootstrap.next_page = entry.base;
gBootstrap.max_page = entry.base + 0x3000;
gBootstrap.max_page = entry.base + 0x4000;
gBootstrapEnabled = true;
return;
}

View File

@@ -25,7 +25,6 @@ extern "C" void zion() {
phys_mem::InitBootstrapPageAllocation();
KernelHeap heap;
phys_mem::InitPhysicalMemoryManager();
heap.InitializeSlabAllocators();
phys_mem::DumpRegions();
dbgln("[boot] Memory allocations available now.");