263 lines
7.1 KiB
C
263 lines
7.1 KiB
C
/* Per-thread arena context; holds the scratch arenas consulted by
 * BeginScratch / BeginScratchNoConflict_ below. Zero-initialized. */
ThreadLocal ThreadArenasCtx t_arena_ctx = ZI;
|
|
|
|
////////////////////////////////////////////////////////////
|
|
//~ Arena management
|
|
|
|
/* Reserve a new arena with at least `reserve` usable bytes.
 * The reservation is padded by the header and rounded up to a whole number
 * of blocks; only the first block is committed up front (PushBytesNoZero
 * commits more on demand). Panics on reserve/commit failure. */
Arena *AcquireArena(u64 reserve)
{
    /* Total reservation = requested bytes + header, rounded up to a
     * multiple of ArenaBlockSize. */
    u64 total = reserve + ArenaHeaderSize;
    u64 remainder = total % ArenaBlockSize;
    if (remainder != 0)
    {
        total += ArenaBlockSize - remainder;
    }

    u8 *base = ReserveMemory(total);
    if (!base)
    {
        Panic(Lit("Failed to reserve memory"));
    }
    AddGstat(GSTAT_MEMORY_RESERVED, total);

    /* Commit only the first block; growth happens lazily in PushBytesNoZero. */
    base = CommitMemory(base, ArenaBlockSize);
    if (!base)
    {
        Panic(Lit("Failed to commit initial memory block: System may be out of memory"));
    }

    Assert(((u64)base & 0xFFF) == 0); /* Base should be 4k aligned */
    StaticAssert(ArenaHeaderSize <= ArenaBlockSize); /* Header must fit in first block */
    StaticAssert(sizeof(Arena) <= ArenaHeaderSize); /* Arena struct must fit in header */

    /* Everything past the Arena struct starts poisoned; pushes unpoison
     * exactly the ranges they hand out. */
    AsanPoison(base + sizeof(Arena), ArenaBlockSize - sizeof(Arena));
    AddGstat(GSTAT_MEMORY_COMMITTED, ArenaBlockSize);
    AddGstat(GSTAT_NUM_ARENAS, 1);

    /* The header lives at the start of the reservation; the usable data
     * region follows it, so `committed` excludes the header bytes. */
    Arena *arena = (Arena *)base;
    ZeroStruct(arena);
    arena->committed = ArenaBlockSize - ArenaHeaderSize;
    arena->reserved = total;
    return arena;
}
|
|
|
|
/* Release an arena acquired with AcquireArena, returning its memory to the OS.
 *
 * Gstat bookkeeping: AcquireArena adds ArenaBlockSize (== initial
 * arena->committed + ArenaHeaderSize) to GSTAT_MEMORY_COMMITTED, and every
 * on-demand commit in PushBytesNoZero adds commit_bytes while growing
 * arena->committed by the same amount.  The total committed for this arena
 * is therefore arena->committed + ArenaHeaderSize — matching the unpoison
 * range below.
 *
 * BUG FIX: the decrement previously used (committed - ArenaHeaderSize),
 * leaking 2*ArenaHeaderSize from GSTAT_MEMORY_COMMITTED per arena lifetime. */
void ReleaseArena(Arena *arena)
{
    AsanUnpoison(arena, arena->committed + ArenaHeaderSize);
    AddGstat(GSTAT_MEMORY_COMMITTED, -(i64)(arena->committed + ArenaHeaderSize));
    AddGstat(GSTAT_MEMORY_RESERVED, -(i64)(arena->reserved));
    AddGstat(GSTAT_NUM_ARENAS, -1);
    ReleaseMemory(arena);
}
|
|
|
|
/* Copy the memory from src to dst, replacing old contents.
|
|
* Dst will expand if necessary. */
|
|
void CopyArena(Arena *dst, Arena *src)
|
|
{
|
|
ResetArena(dst);
|
|
u64 data_size = src->pos;
|
|
u8 *data_src = ArenaFirst(src, u8);
|
|
u8 *data_dst = PushBytesNoZero(dst, data_size, 1);
|
|
CopyBytes(data_dst, data_src, data_size);
|
|
}
|
|
|
|
/* Intended to decommit unused tail memory of `arena`; not implemented yet.
 * Calling it trips the assert in debug builds (and is a no-op in builds
 * where Assert compiles out). */
void ShrinkArena(Arena *arena)
{
    /* Not implemented */
    Assert(0);
}
|
|
|
|
/* Mark the arena's entire committed range (header included) read-only. */
void SetArenaReadonly(Arena *arena)
{
    u64 protect_size = arena->committed + ArenaHeaderSize;
    SetMemoryReadonly(arena, protect_size);
}
|
|
|
|
/* Restore read/write access to the arena's entire committed range
 * (header included); inverse of SetArenaReadonly. */
void SetArenaReadWrite(Arena *arena)
{
    u64 protect_size = arena->committed + ArenaHeaderSize;
    SetMemoryReadWrite(arena, protect_size);
}
|
|
|
|
/* Discard all pushed data (the arena keeps its committed memory) and return
 * a pointer to the start of the now-empty data region. */
void *ResetArena(Arena *arena)
{
    PopTo(arena, 0);
    return ArenaFirst(arena, u8);
}
|
|
|
|
////////////////////////////////////////////////////////////
|
|
//~ Push / pop
|
|
|
|
/* Push `size` bytes onto `arena` at the given alignment without zeroing
 * them.  Returns a pointer to the allocation (or the current position when
 * size == 0).  Commits additional whole blocks on demand; panics if the
 * reservation would be exceeded or a commit fails.  `align` must be > 0. */
void *PushBytesNoZero(Arena *arena, u64 size, u64 align)
{
    Assert(align > 0);

    void *result = 0;
    u8 *base = ArenaFirst(arena, u8);

    /* Check to avoid aligning when size = 0 */
    if (size > 0)
    {
        /* Round pos up to the next multiple of align; the modulo form works
         * for any align, not just powers of two. */
        u64 aligned_start_pos = (arena->pos + (align - 1));
        aligned_start_pos -= aligned_start_pos % align;

        u64 new_pos = aligned_start_pos + size;
        if (new_pos > arena->committed)
        {
            /* Commit new block(s) */
            u64 blocks_needed = (new_pos - arena->committed + ArenaBlockSize - 1) / ArenaBlockSize;
            u64 commit_bytes = blocks_needed * ArenaBlockSize;
            u64 new_capacity = arena->committed + commit_bytes;
            /* NOTE: `committed` excludes the header while `reserved` includes
             * it; since committed is always k*ArenaBlockSize - ArenaHeaderSize
             * and reserved is a block multiple, this check still stops the
             * commit exactly at the end of the reservation. */
            if (new_capacity > arena->reserved)
            {
                /* Hard fail if we overflow reserved memory for now */
                Panic(Lit("Failed to commit new memory block: Overflow of reserved memory"));
            }
            void *commit_address = base + arena->committed;
            if (!CommitMemory(commit_address, commit_bytes))
            {
                /* Hard fail on memory allocation failure for now */
                Panic(Lit("Failed to commit new memory block: System may be out of memory"));
            }
            arena->committed += commit_bytes;
            AddGstat(GSTAT_MEMORY_COMMITTED, commit_bytes);
            /* Freshly committed memory starts poisoned; only the returned
             * range is unpoisoned below. */
            AsanPoison(commit_address, commit_bytes);
        }

        result = base + aligned_start_pos;
        /* Alignment padding (pos .. aligned_start_pos) stays poisoned. */
        AsanUnpoison(result, new_pos - aligned_start_pos);
        arena->pos = new_pos;
    }
    else
    {
        /* size == 0: hand back the current (unaligned) position without
         * moving it or touching poison state. */
        result = base + arena->pos;
    }

    return result;
}
|
|
|
|
/* Push `size` zero-initialized bytes onto `arena` at the given alignment. */
void *PushBytes(Arena *arena, u64 size, u64 align)
{
    void *mem = PushBytesNoZero(arena, size, align);
    ZeroBytes(mem, size);
    return mem;
}
|
|
|
|
/* Advance the arena position to the next multiple of `align` (pushing
 * padding bytes if needed) and return a pointer to the aligned position.
 * `align` may be any value > 0 (not restricted to powers of two);
 * align == 0 is a caller error (asserted).
 *
 * BUG FIX: previously, when padding was required, the function returned the
 * pointer produced by PushStructsNoZero — i.e. the *pre-alignment* position
 * (the start of the padding), not the aligned address — inconsistent with
 * the already-aligned branch.  Both paths now return the aligned current
 * position. */
void *PushAlign(Arena *arena, u64 align)
{
    void *result = 0;
    if (align > 0)
    {
        u64 aligned_start_pos = (arena->pos + (align - 1));
        aligned_start_pos -= aligned_start_pos % align;
        u64 align_bytes = aligned_start_pos - arena->pos;
        if (align_bytes > 0)
        {
            /* Push the padding; its (unaligned) pointer is deliberately
             * discarded. */
            PushStructsNoZero(arena, u8, align_bytes);
        }
        /* arena->pos is now aligned_start_pos. */
        result = (void *)(ArenaFirst(arena, u8) + arena->pos);
    }
    else
    {
        /* 0 alignment */
        Assert(0);
        result = (void *)(ArenaFirst(arena, u8) + arena->pos);
    }
    return result;
}
|
|
|
|
/* Rewind the arena to absolute position `pos` (must not exceed the current
 * position), re-poisoning the popped range. */
void PopTo(Arena *arena, u64 pos)
{
    Assert(arena->pos >= pos);
    u8 *popped = ArenaFirst(arena, u8) + pos;
    AsanPoison(popped, arena->pos - pos);
    arena->pos = pos;
}
|
|
|
|
/* Pop `size` bytes off the top of the arena, discarding their contents. */
void PopBytesNoCopy(Arena *arena, u64 size)
{
    Assert(arena->pos >= size);
    u64 target = arena->pos - size;
    u8 *popped = ArenaFirst(arena, u8) + target;
    AsanPoison(popped, size);
    arena->pos = target;
}
|
|
|
|
/* Pop `size` bytes off the top of the arena, copying them into `copy_dst`
 * before the popped range is poisoned. */
void PopBytes(Arena *arena, u64 size, void *copy_dst)
{
    Assert(arena->pos >= size);
    u64 target = arena->pos - size;
    u8 *top = ArenaFirst(arena, u8) + target;
    CopyBytes(copy_dst, top, size);
    AsanPoison(top, size);
    arena->pos = target;
}
|
|
|
|
/* Return a pointer to the first byte of the arena's data region (just past
 * the header).
 *
 * SIMPLIFICATION: the old offset computation
 *     aligned_start_pos = (align - 1) - ((align - 1) % align)
 * always evaluates to 0 for align >= 1 (and divides by zero — UB — for
 * align == 0), so the data region always starts immediately after the
 * header.  The `align` parameter is kept for interface compatibility with
 * the ArenaFirst macro. */
void *ArenaFirst_(Arena *arena, u64 align)
{
    (void)align; /* offset from the header is always 0; see note above */
    return (u8 *)arena + ArenaHeaderSize;
}
|
|
|
|
/* Return the address at which the next push with `align` alignment would
 * land.  Does not modify the arena; mirrors the rounding in
 * PushBytesNoZero. */
void *ArenaNext_(Arena *arena, u64 align)
{
    u64 next_pos = arena->pos + (align - 1);
    next_pos -= next_pos % align;
    u8 *data = (u8 *)arena + ArenaHeaderSize;
    return data + next_pos;
}
|
|
|
|
////////////////////////////////////////////////////////////
|
|
//~ Temp arena helpers
|
|
|
|
/* Capture the arena's current position so EndTempArena can rewind to it. */
TempArena BeginTempArena(Arena *arena)
{
    TempArena temp = ZI;
    temp.arena = arena;
    temp.start_pos = arena->pos;
    return temp;
}
|
|
|
|
/* Rewind the arena captured by BeginTempArena back to its recorded
 * position, discarding everything pushed since. */
void EndTempArena(TempArena temp)
{
    PopTo(temp.arena, temp.start_pos);
}
|
|
|
|
////////////////////////////////////////////////////////////
|
|
//~ Scratch arena helpers
|
|
|
|
/* Begin a scratch scope on one of the thread-local scratch arenas, picking
 * whichever one is NOT `potential_conflict` so a caller already holding a
 * scratch arena doesn't get the same one back. */
TempArena BeginScratch(Arena *potential_conflict)
{
    /* This function is currently hard-coded to search through 2 scratch arenas */
    StaticAssert(ScratchArenasPerCtx == 2);

    /* Use `BeginScratchNoConflict` if no conflicts are present */
    Assert(potential_conflict != 0);

    u64 index = (t_arena_ctx.scratch_arenas[0] == potential_conflict) ? 1 : 0;
    return BeginTempArena(t_arena_ctx.scratch_arenas[index]);
}
|
|
|
|
TempArena BeginScratchNoConflict_(void)
|
|
{
|
|
Arena *scratch_arena = t_arena_ctx.scratch_arenas[0];
|
|
TempArena temp = BeginTempArena(scratch_arena);
|
|
return temp;
|
|
}
|
|
|
|
|
|
/* Close a scratch scope opened by BeginScratch / BeginScratchNoConflict,
 * rewinding the scratch arena to where the scope began. */
void EndScratch(TempArena scratch_temp)
{
    EndTempArena(scratch_temp);
}
|