// power_play/src/base/base_arena.c
// (225 lines, 5.7 KiB, C)

////////////////////////////////////////////////////////////
//~ Arena management
// Create a new arena: reserve `reserve` usable bytes of address space
// (rounded up to whole blocks, header included), commit the first block,
// and place the Arena header at the start of the mapping.
// Panics on reservation or commit failure.
Arena *AcquireArena(u64 reserve)
{
    // Account for the header, then round up to a whole number of blocks
    reserve += ArenaHeaderSize;
    u64 remainder = reserve % ArenaBlockSize;
    if (remainder != 0)
    {
        reserve += ArenaBlockSize - remainder;
    }
    u64 reserved = reserve;
    u8 *base = ReserveMemory(reserved);
    if (!base)
    {
        Panic(Lit("Failed to reserve memory"));
    }
    AddGstat(ArenaMemoryReserved, reserved);
    // Commit the first block eagerly so the header itself is writable
    base = CommitMemory(base, ArenaBlockSize);
    if (!base)
    {
        Panic(Lit("Failed to commit initial memory block: System may be out of memory"));
    }
    Assert(((u64)base & 0xFFF) == 0);                // Base should be 4k aligned
    StaticAssert(ArenaHeaderSize <= ArenaBlockSize); // Header must fit in first block
    StaticAssert(sizeof(Arena) <= ArenaHeaderSize);  // Arena struct must fit in header
    // Everything past the struct starts out poisoned; pushes unpoison on demand
    AsanPoison(base + sizeof(Arena), ArenaBlockSize - sizeof(Arena));
    AddGstat(ArenaMemoryCommitted, ArenaBlockSize);
    AddGstat(NumArenas, 1);
    // The arena header lives at the very start of the mapping
    Arena *arena = (Arena *)base;
    ZeroStruct(arena);
    arena->committed = ArenaBlockSize - ArenaHeaderSize;
    arena->reserved = reserved;
    return arena;
}
// Release all memory owned by `arena` (header included) back to the system.
// The arena pointer is invalid after this call.
void ReleaseArena(Arena *arena)
{
    AsanUnpoison(arena, arena->committed + ArenaHeaderSize);
    // `arena->committed` counts usable bytes only; the total committed for
    // this arena is `committed + ArenaHeaderSize` (AcquireArena records
    // ArenaBlockSize while setting committed = ArenaBlockSize - ArenaHeaderSize,
    // and every push adds equally to both). The header must be ADDED here,
    // not subtracted, or ArenaMemoryCommitted leaks 2*ArenaHeaderSize per arena.
    AddGstat(ArenaMemoryCommitted, -(i64)(arena->committed + ArenaHeaderSize));
    AddGstat(ArenaMemoryReserved, -(i64)(arena->reserved));
    AddGstat(NumArenas, -1);
    ReleaseMemory(arena);
}
// Copy the memory from src to dst, replacing old content.
// Dst arena will expand if necessary.
void CopyArena(Arena *dst, Arena *src)
{
ResetArena(dst);
u64 data_size = src->pos;
u8 *data_src = ArenaFirst(src, u8);
u8 *data_dst = PushBytesNoZero(dst, data_size, 1);
CopyBytes(data_dst, data_src, data_size);
}
// Release committed-but-unused blocks back to the OS.
// TODO: not implemented yet — any call is currently a programming error.
void ShrinkArena(Arena *arena)
{
// Not implemented
Assert(0);
}
// Write-protect the arena's entire committed range, header included.
void SetArenaReadonly(Arena *arena)
{
    u64 protected_size = ArenaHeaderSize + arena->committed;
    SetMemoryReadonly(arena, protected_size);
}
// Restore read/write access to the arena's committed range, header included.
// Inverse of SetArenaReadonly.
void SetArenaReadWrite(Arena *arena)
{
    u64 protected_size = ArenaHeaderSize + arena->committed;
    SetMemoryReadWrite(arena, protected_size);
}
// Discard everything pushed onto the arena and return a pointer to its
// (now empty) data region.
void *ResetArena(Arena *arena)
{
    void *first = ArenaFirst(arena, u8); // Data-region address is independent of pos
    PopTo(arena, 0);
    return first;
}
////////////////////////////////////////////////////////////
//~ Push / pop
// Push `size` bytes onto `arena`, aligned to `align`, without zeroing them.
// Commits additional blocks on demand; panics if the reservation or system
// memory is exhausted. Returns a pointer to the new bytes.
void *PushBytesNoZero(Arena *arena, u64 size, u64 align)
{
    Assert(align > 0);
    u8 *base = ArenaFirst(arena, u8);
    u64 start_pos = AlignU64(arena->pos, align);
    u64 end_pos = start_pos + size;
    Assert(end_pos >= start_pos); // Guard against u64 overflow of the position
    void *result = base + start_pos;
    // Commit new block(s) if the allocation runs past the committed frontier
    if (size > 0 && end_pos > arena->committed)
    {
        u64 blocks_needed = (end_pos - arena->committed + ArenaBlockSize - 1) / ArenaBlockSize;
        u64 commit_bytes = blocks_needed * ArenaBlockSize;
        u64 new_capacity = arena->committed + commit_bytes;
        // `committed`/`new_capacity` are offsets into the data region, which
        // begins ArenaHeaderSize into the mapping, while `reserved` is the
        // total mapping size — include the header in the bound check so the
        // commit frontier can never pass the end of the reservation.
        if (new_capacity + ArenaHeaderSize > arena->reserved)
        {
            // Hard fail if we overflow reserved memory for now
            Panic(Lit("Failed to commit new memory block: Overflow of reserved memory"));
        }
        void *commit_address = base + arena->committed;
        if (!CommitMemory(commit_address, commit_bytes))
        {
            // Hard fail on memory allocation failure for now
            Panic(Lit("Failed to commit new memory block: System may be out of memory"));
        }
        arena->committed += commit_bytes;
        AddGstat(ArenaMemoryCommitted, commit_bytes);
        // Freshly committed memory starts poisoned; the returned range is
        // unpoisoned just below
        AsanPoison(commit_address, commit_bytes);
    }
    AsanUnpoison(result, size);
    arena->pos = end_pos;
    return result;
}
// Push `size` zero-initialized bytes onto `arena`, aligned to `align`.
void *PushBytes(Arena *arena, u64 size, u64 align)
{
    void *result = PushBytesNoZero(arena, size, align);
    ZeroBytes(result, size);
    return result;
}
// Advance the arena position so the next push starts on an `align` boundary.
// Returns a pointer to the padding bytes pushed (possibly zero of them).
void *PushAlign(Arena *arena, u64 align)
{
    u64 pad = AlignU64(arena->pos, align) - arena->pos;
    return PushStructsNoZero(arena, u8, pad);
}
// Rewind the arena to absolute position `pos`; the discarded range is
// re-poisoned so stale reads trip ASAN.
void PopTo(Arena *arena, u64 pos)
{
    Assert(pos <= arena->pos);
    u64 discarded = arena->pos - pos;
    AsanPoison(ArenaFirst(arena, u8) + pos, discarded);
    arena->pos = pos;
}
// Pop `size` bytes off the top of the arena, discarding their contents.
void PopBytesNoCopy(Arena *arena, u64 size)
{
    Assert(size <= arena->pos);
    arena->pos -= size;
    // Re-poison the popped range so stale reads trip ASAN
    AsanPoison(ArenaFirst(arena, u8) + arena->pos, size);
}
// Pop `size` bytes off the top of the arena, copying them into `copy_dst`
// before the range is poisoned. `copy_dst` must hold at least `size` bytes.
void PopBytes(Arena *arena, u64 size, void *copy_dst)
{
    Assert(size <= arena->pos);
    u64 new_pos = arena->pos - size;
    u8 *top = ArenaFirst(arena, u8) + new_pos;
    CopyBytes(copy_dst, top, size);
    AsanPoison(top, size);
    arena->pos = new_pos;
}
// Address of the arena's data region (just past the header), rounded up
// to `align`. Backing helper for the ArenaFirst macro.
void *ArenaFirst_(Arena *arena, u64 align)
{
    u64 first = (u64)arena + ArenaHeaderSize;
    return (void *)AlignU64(first, align);
}
// Address where the next push would land, rounded up to `align`.
// Backing helper for the ArenaNext macro.
void *ArenaNext_(Arena *arena, u64 align)
{
    u64 next = (u64)arena + ArenaHeaderSize + arena->pos;
    return (void *)AlignU64(next, align);
}
////////////////////////////////////////////////////////////
//~ Temp arena helpers
// Snapshot the arena's current position so EndTempArena can restore it.
TempArena BeginTempArena(Arena *arena)
{
    TempArena temp = Zi;
    temp.start_pos = arena->pos;
    temp.arena = arena;
    return temp;
}
// Pop everything pushed since the matching BeginTempArena, restoring the
// arena to the captured position.
void EndTempArena(TempArena temp)
{
    PopTo(temp.arena, temp.start_pos);
}
////////////////////////////////////////////////////////////
//~ Scratch arena helpers
// Begin a temp region on one of the thread-local scratch arenas, picking
// whichever one is NOT `potential_conflict` so nested scratch usage on the
// same thread cannot clobber the caller's allocations.
TempArena BeginScratch(Arena *potential_conflict)
{
    // This function is currently hard-coded for 2 thread-local scratch arenas
    StaticAssert(countof(Base_tl.arenas.scratch) == 2);
    // Use `BeginScratchNoConflict` if no conflicts are present
    Assert(potential_conflict != 0);
    u64 index = (Base_tl.arenas.scratch[0] == potential_conflict) ? 1 : 0;
    return BeginTempArena(Base_tl.arenas.scratch[index]);
}
// Begin a temp region on the first thread-local scratch arena. Only valid
// when the caller holds no arena that could alias it (see BeginScratch).
TempArena BeginScratchNoConflict_(void)
{
    return BeginTempArena(Base_tl.arenas.scratch[0]);
}
// End a scratch region begun with BeginScratch / BeginScratchNoConflict,
// restoring the scratch arena's position.
void EndScratch(TempArena scratch_temp)
{
    EndTempArena(scratch_temp);
}