diff --git a/source/Allocator.cpp b/source/Allocator.cpp index d827773..9f8e8fd 100644 --- a/source/Allocator.cpp +++ b/source/Allocator.cpp @@ -214,8 +214,10 @@ namespace Langulus::Fractalloc /// @param chainStart - [in/out] the start of the chain void Allocator::CollectGarbageChain(Pool*& chainStart) { while (chainStart) { - if (chainStart->IsInUse()) + if (chainStart->IsInUse()) { + chainStart->Trim(); break; + } #if LANGULUS_FEATURE(MEMORY_STATISTICS) mStatistics.DelPool(chainStart); @@ -233,6 +235,7 @@ namespace Langulus::Fractalloc auto pool = chainStart->mNext; while (pool) { if (pool->IsInUse()) { + pool->Trim(); prev = pool; pool = pool->mNext; continue; @@ -631,10 +634,19 @@ namespace Langulus::Fractalloc Logger::Push, Logger::Green, pool->mEntries, Logger::Pop, Logger::Tabs {} ); - Count ecounter {}; + Count consecutiveEmpties = 0; + Count ecounter = 0; do { const auto entry = pool->AllocationFromIndex(ecounter); if (entry->mReferences) { + if (consecutiveEmpties) { + if (consecutiveEmpties == 1) + Logger::Info(ecounter-1, "] ", Logger::Red, "unused entry"); + else + Logger::Info(ecounter - consecutiveEmpties, '-', ecounter-1, "] ", Logger::Red, consecutiveEmpties, " unused entries"); + consecutiveEmpties = 0; + } + Logger::Info(ecounter, "] ", Logger::Green, entry->mAllocatedBytes, " bytes, "); Logger::Append(entry->mReferences, " references: `"); auto raw = entry->GetBlockStart(); @@ -650,9 +662,17 @@ namespace Langulus::Fractalloc else Logger::Append('`'); } - else Logger::Info(ecounter, "] ", Logger::Red, "unused entry"); + else ++consecutiveEmpties; } while (++ecounter < pool->mEntries); + + if (consecutiveEmpties) { + if (consecutiveEmpties == 1) + Logger::Info(ecounter-1, "] ", Logger::Red, "unused entry"); + else + Logger::Info(ecounter - consecutiveEmpties, '-', ecounter-1, "] ", Logger::Red, consecutiveEmpties, " unused entries"); + consecutiveEmpties = 0; + } } } diff --git a/source/Pool.hpp b/source/Pool.hpp index c878943..ee70f12 100644 --- 
a/source/Pool.hpp +++ b/source/Pool.hpp @@ -93,6 +93,7 @@ namespace Langulus::Fractalloc void FreePoolChain(); void Null(); void Touch(); + void Trim(); NOD() Size ThresholdFromIndex(Offset) const noexcept; NOD() const Allocation* AllocationFromIndex(Offset) const noexcept; diff --git a/source/Pool.inl b/source/Pool.inl index 52984a0..7faa1c8 100644 --- a/source/Pool.inl +++ b/source/Pool.inl @@ -168,12 +168,8 @@ namespace Langulus::Fractalloc } // Always adapt min threshold if bigger entry is introduced - if (bytesWithPadding > mThresholdMin) { + if (bytesWithPadding > mThresholdMin) mThresholdMin = Roof2(bytesWithPadding); - //TODO everytime min threshold changes, - // part of the freed entry chain may get invalid? - // traverse and stitch here? - } LANGULUS_ASSUME(DevAssumes, mAllocatedByFrontend + bytesWithPadding >= mAllocatedByFrontend, @@ -234,12 +230,8 @@ namespace Langulus::Fractalloc if (newtotal > mThreshold) return false; - if (newtotal > mThresholdMin) { + if (newtotal > mThresholdMin) mThresholdMin = Roof2(newtotal); - //TODO everytime min threshold changes, - // part of the freed entry chain may get invalid? - // traverse abd stitch here? - } mAllocatedByFrontend.mSize += addition; } @@ -302,6 +294,60 @@ namespace Langulus::Fractalloc it += 4096; } } + + /// Remove all empty entries at the end and increase threshold as much + /// as possible + LANGULUS(INLINED) + void Pool::Trim() { + LANGULUS_ASSUME(DevAssumes, mEntries, "Should have at least one entry"); + + const Allocation* entry; + Count ecounter = mEntries; + do { + entry = AllocationFromIndex(ecounter - 1); //TODO could be optimized further + if (entry->mReferences) + break; + } + while (--ecounter > 0); + + mEntries = ecounter; + mThreshold = ThresholdFromIndex(mEntries - 1); + mThresholdPrevious = mThreshold != mAllocatedByBackend + ? 
Size(mThreshold * 2) : mThreshold; + mNextEntry = const_cast<Byte*>(reinterpret_cast<const Byte*>(entry)) + + mThresholdPrevious; + + if (mNextEntry >= mMemoryEnd) { + // Reset carriage and shift level when it goes beyond + mThresholdPrevious = mThreshold; + mThreshold.mSize >>= size_t(1); + mNextEntry = mMemory + mThreshold; + } + + // Scan all unused entries up to mEntries and chain them + mLastFreed = nullptr; + ecounter = 0; + do { + entry = AllocationFromIndex(ecounter); + if (not entry->mReferences) { + mLastFreed = const_cast<Allocation*>(entry); + break; + } + } while (++ecounter < mEntries - 1); + + auto prev = mLastFreed; + do { + entry = AllocationFromIndex(ecounter - 1); + if (entry->mReferences) + continue; + + prev->mNextFreeEntry = const_cast<Allocation*>(entry); + prev = prev->mNextFreeEntry; + } while (++ecounter < mEntries - 1); + + if (prev) + prev->mNextFreeEntry = nullptr; + } /// Get threshold associated with an index /// @attention assumes index is not zero