get rid of sprite tctx

This commit is contained in:
jacob 2025-05-14 04:12:17 -05:00
parent d0be0d8908
commit c93b2829ce
2 changed files with 88 additions and 111 deletions

View File

@ -8,7 +8,6 @@
#include "util.h"
#include "work.h"
#include "atomic.h"
#include "thread_local.h"
#include "app.h"
#include "renderer.h"
#include "math.h"
@ -17,7 +16,7 @@
#define CACHE_MEMORY_BUDGET (MEGABYTE(256))
#define CACHE_BINS_COUNT 1024
#define MAX_LOADER_THREADS 4
#define MAX_SCOPE_REFERENCES 1024
/* How long between evictor thread scans */
#define EVICTOR_CYCLE_INTERVAL_NS NS_FROM_SECONDS(0.500)
@ -25,8 +24,6 @@
/* Cycles a cache entry spends unused until it's considered evictable */
#define EVICTOR_GRACE_PERIOD_CYCLES (NS_FROM_SECONDS(10.000) / EVICTOR_CYCLE_INTERVAL_NS)
#define TCTX_ARENA_RESERVE MEGABYTE(64)
/* Texture arena only used to store texture struct at the moment. Actual image data is allocated on GPU. */
#define TEXTURE_ARENA_RESERVE MEGABYTE(1)
@ -115,7 +112,6 @@ struct cache {
/* One scope-held reference to a cache node. References for a scope are
 * chained per cache bin (see `reference_bins` on struct sprite_scope) so a
 * scope can check in O(chain length) whether it already holds a node. */
struct sprite_scope_reference {
struct cache_node *cache_node;          /* node this reference pins (refcount was incremented for it) */
struct sprite_scope_reference *next_in_bin;  /* next reference in the same cache bin's chain */
struct sprite_scope_reference *next_free;    /* free-list link when the reference is recycled */
};
/* ========================== *
@ -137,6 +133,11 @@ GLOBAL struct {
struct arena load_cmds_arena;
struct load_cmd *first_free_load_cmd;
/* Scopes */
struct atomic_i32 scopes_lock;
struct arena scopes_arena;
struct sprite_scope *first_free_scope;
/* Evictor thread */
struct atomic_i32 evictor_cycle;
b32 evictor_shutdown;
@ -146,36 +147,6 @@ GLOBAL struct {
struct sys_thread evictor_thread;
} G = ZI, DEBUG_ALIAS(G, G_sprite);
/* ========================== *
* Thread local state
* ========================== */
/* Per-thread sprite state: a private arena plus free lists for recycling
 * scopes and scope references, so scope begin/end on the hot path avoids
 * any cross-thread locking. (This commit removes this struct in favor of
 * globally pooled scopes guarded by G.scopes_lock.) */
struct sprite_tctx {
struct arena arena;                                  /* backing storage for scopes/references allocated by this thread */
struct sprite_scope *first_free_scope;               /* recycled scopes, singly linked via next_free */
struct sprite_scope_reference *first_free_reference; /* recycled references, singly linked via next_free */
#if RTC
u32 thread_id;  /* owning thread, used by sys_thread_assert to catch cross-thread use */
#endif
};
/* Thread-local init hook for tl_sprite_tctx: reserves the per-thread arena
 * and (in RTC builds) records the owning thread id for later assertions.
 * `vtctx` is the untyped thread-local slot provided by the macro framework. */
INTERNAL THREAD_LOCAL_VAR_ALLOC_FUNC_DEF(sprite_tctx_alloc, vtctx)
{
struct sprite_tctx *tctx = (struct sprite_tctx *)vtctx;
/* NOTE(review): 64 MB mirrors TCTX_ARENA_RESERVE — presumably should use
 * that macro rather than repeating the literal; confirm. */
tctx->arena = arena_alloc(MEGABYTE(64));
#if RTC
tctx->thread_id = sys_thread_id();
#endif
}
/* Thread-local teardown hook for tl_sprite_tctx: releases the per-thread
 * arena (which also frees all scopes/references carved from it). */
INTERNAL THREAD_LOCAL_VAR_RELEASE_FUNC_DEF(sprite_tctx_release, vtctx)
{
struct sprite_tctx *tctx = (struct sprite_tctx *)vtctx;
arena_release(&tctx->arena);
}
GLOBAL THREAD_LOCAL_VAR_DEF(tl_sprite_tctx, struct sprite_tctx, sprite_tctx_alloc, sprite_tctx_release);
/* ========================== *
* Purple-black image
* ========================== */
@ -269,6 +240,8 @@ struct sprite_startup_receipt sprite_startup(struct renderer_startup_receipt *re
G.load_cmds_arena = arena_alloc(GIGABYTE(64));
G.load_cmds_mutex = sys_mutex_alloc();
G.scopes_arena = arena_alloc(GIGABYTE(64));
G.evictor_mutex = sys_mutex_alloc();
G.evictor_cv = sys_condition_variable_alloc();
atomic_i32_eval_exchange(&G.evictor_cycle, 1);
@ -321,28 +294,6 @@ INTERNAL struct cache_node_hash cache_node_hash_from_tag_hash(u64 tag_hash, enum
return (struct cache_node_hash) { .v = rand_u64_from_seed(tag_hash + kind) };
}
/* ========================== *
* Refcount
* ========================== */
/* Atomically adjust a cache node's reference count by `amount` (negative to
 * release) and stamp it with the current evictor cycle, so the evictor can
 * judge how recently the node was used (see EVICTOR_GRACE_PERIOD_CYCLES).
 *
 * The count and last-used cycle live packed in a single 64-bit word
 * (n->refcount_struct) and are updated together with a compare-exchange
 * retry loop, so concurrent add/release calls never lose an update.
 *
 * NOTE(review): the casts type-pun the packed u64 through
 * struct cache_node_refcount; this assumes that struct is exactly 8 bytes
 * and relies on the project's aliasing conventions — confirm. */
INTERNAL void node_refcount_add(struct cache_node *n, i32 amount)
{
i32 evictor_cycle = atomic_i32_eval(&G.evictor_cycle);
struct atomic_u64 *refcount_atomic = &n->refcount_struct;
u64 old_refcount_uncast = atomic_u64_eval(refcount_atomic);
do {
/* Build the desired packed value from our snapshot of the old one. */
struct cache_node_refcount new_refcount = *(struct cache_node_refcount *)&old_refcount_uncast;
new_refcount.count += amount;
new_refcount.last_ref_cycle = evictor_cycle;
u64 v = atomic_u64_eval_compare_exchange(refcount_atomic, old_refcount_uncast, *(u64 *)&new_refcount);
if (v != old_refcount_uncast) {
/* Lost the race: another thread changed the word. Retry from its value. */
old_refcount_uncast = v;
} else {
break;
}
} while (true);
}
/* ========================== *
* Load
* ========================== */
@ -699,12 +650,28 @@ INTERNAL void cache_node_load_sheet(struct cache_node *n, struct sprite_tag tag)
* Scope
* ========================== */
/* Atomically adjust a cache node's reference count by `amount` (negative to
 * release) and stamp it with the current evictor cycle, so the evictor can
 * judge how recently the node was used.
 *
 * Count and last-used cycle are packed into one 64-bit word
 * (n->refcount_struct) and updated together via a compare-exchange retry
 * loop so concurrent callers never lose an update.
 *
 * NOTE(review): type-puns the packed u64 through struct cache_node_refcount;
 * assumes that struct is exactly 8 bytes — confirm against its definition. */
INTERNAL void refcount_add(struct cache_node *n, i32 amount)
{
i32 evictor_cycle = atomic_i32_eval(&G.evictor_cycle);
struct atomic_u64 *refcount_atomic = &n->refcount_struct;
u64 old_refcount_uncast = atomic_u64_eval(refcount_atomic);
do {
/* Compose the replacement packed value from our snapshot. */
struct cache_node_refcount new_refcount = *(struct cache_node_refcount *)&old_refcount_uncast;
new_refcount.count += amount;
new_refcount.last_ref_cycle = evictor_cycle;
u64 v = atomic_u64_eval_compare_exchange(refcount_atomic, old_refcount_uncast, *(u64 *)&new_refcount);
if (v != old_refcount_uncast) {
/* CAS failed: adopt the fresher value and retry. */
old_refcount_uncast = v;
} else {
break;
}
} while (true);
}
/* Returns the slot at which the reference pointer should exist in the sprite scope.
* If the pointed to slot points to NULL, then the reference does not exist in the scope for the node. */
INTERNAL struct sprite_scope_reference **scope_get_reference_slot(struct sprite_scope *scope, struct cache_node *cache_node, u64 cache_bin_index)
INTERNAL struct sprite_scope_reference **sprite_scope_reference_slot_from_node(struct sprite_scope *scope, struct cache_node *cache_node, u64 cache_bin_index)
{
sys_thread_assert(scope->tctx->thread_id);
struct sprite_scope_reference **ref_slot = &scope->reference_bins[cache_bin_index];
while (*ref_slot) {
if ((*ref_slot)->cache_node == cache_node) {
@ -717,66 +684,75 @@ INTERNAL struct sprite_scope_reference **scope_get_reference_slot(struct sprite_
return ref_slot;
}
INTERNAL struct sprite_scope_reference *scope_reference_alloc(struct sprite_scope *scope, struct cache_node *cache_node)
/* `ref_slot` is result from `sprite_scope_reference_slot_from_node` */
INTERNAL void sprite_scope_insert_reference(struct sprite_scope *scope, struct cache_node *cache_node, struct sprite_scope_reference **ref_slot)
{
sys_thread_assert(scope->tctx->thread_id);
if (scope->num_references >= MAX_SCOPE_REFERENCES) {
sys_panic(LIT("Max sprite scope references reached"));
}
ASSERT(*ref_slot == NULL); /* Ref slot should not already have a reference present */
/* Increment refcount */
node_refcount_add(cache_node, 1);
refcount_add(cache_node, 1);
/* Add reference to scope */
struct sprite_tctx *tctx = scope->tctx;
struct sprite_scope_reference *ref;
if (tctx->first_free_reference) {
ref = tctx->first_free_reference;
tctx->first_free_reference = ref->next_free;
/* Grab reference from pool */
struct sprite_scope_reference *ref = &scope->reference_pool[scope->num_references++];
MEMZERO_STRUCT(ref);
} else {
ref = arena_push_zero(&tctx->arena, struct sprite_scope_reference);
}
ref->cache_node = cache_node;
return ref;
if ((ref_slot) == &ref->next_in_bin) {
DEBUGBREAKABLE;
}
*ref_slot = ref;
}
struct sprite_scope *sprite_scope_begin(void)
{
struct sprite_tctx *tctx = thread_local_var_eval(&tl_sprite_tctx);
/* Alloc scope */
struct sprite_scope *res = NULL;
if (tctx->first_free_scope) {
res = tctx->first_free_scope;
tctx->first_free_scope = res->next_free;
MEMZERO(res->reference_bins, sizeof(*res->reference_bins) * CACHE_BINS_COUNT);
*res = (struct sprite_scope) {
.reference_bins = res->reference_bins
};
struct sprite_scope_reference **bins = NULL;
struct sprite_scope_reference *pool = NULL;
{
while (atomic_i32_eval_compare_exchange(&G.scopes_lock, 0, 1) != 0) ix_pause();
{
if (G.first_free_scope) {
res = G.first_free_scope;
G.first_free_scope = res->next_free;
bins = res->reference_bins;
pool = res->reference_pool;
} else {
res = arena_push_zero(&tctx->arena, struct sprite_scope);
res->reference_bins = arena_push_array_zero(&tctx->arena, struct sprite_scope_reference *, CACHE_BINS_COUNT);
res = arena_push(&G.scopes_arena, struct sprite_scope);
bins = arena_push_array(&G.scopes_arena, struct sprite_scope_reference *, CACHE_BINS_COUNT);
pool = arena_push_array(&G.scopes_arena, struct sprite_scope_reference, MAX_SCOPE_REFERENCES);
}
res->tctx = tctx;
}
atomic_i32_eval_exchange(&G.scopes_lock, 0);
}
MEMZERO_STRUCT(res);
MEMZERO(bins, sizeof(*bins) * CACHE_BINS_COUNT);
res->reference_bins = bins;
res->reference_pool = pool;
return res;
}
void sprite_scope_end(struct sprite_scope *scope)
{
sys_thread_assert(scope->tctx->thread_id);
struct sprite_tctx *tctx = scope->tctx;
for (u64 i = 0; i < CACHE_BINS_COUNT; ++i) {
struct sprite_scope_reference *ref = scope->reference_bins[i];
while (ref) {
/* Decrement refcount */
node_refcount_add(ref->cache_node, -1);
/* Add reference to free list */
ref->next_free = tctx->first_free_reference;
tctx->first_free_reference = ref;
ref = ref->next_in_bin;
/* Dereference nodes */
u64 num_references = scope->num_references;
for (u64 i = 0; i < num_references; ++i) {
struct sprite_scope_reference *ref = &scope->reference_pool[i];
refcount_add(ref->cache_node, -1);
}
/* Release scope */
while (atomic_i32_eval_compare_exchange(&G.scopes_lock, 0, 1) != 0) ix_pause();
{
scope->next_free = G.first_free_scope;
G.first_free_scope = scope;
}
scope->next_free = tctx->first_free_scope;
tctx->first_free_scope = scope;
atomic_i32_eval_exchange(&G.scopes_lock, 0);
}
/* ========================== *
@ -804,7 +780,8 @@ INTERNAL struct cache_node *node_lookup_touch(struct sprite_scope *scope, struct
while (n) {
b32 match = false;
if (n->hash.v == hash.v) {
struct sprite_scope_reference **ref_slot = scope_get_reference_slot(scope, n, cache_bin_index);
struct sprite_scope_reference **ref_slot = sprite_scope_reference_slot_from_node(scope, n, cache_bin_index);
#if RESOURCE_RELOADING
if (*ref_slot) {
match = true;
@ -813,12 +790,12 @@ INTERNAL struct cache_node *node_lookup_touch(struct sprite_scope *scope, struct
/* If node is out of date and the scope doesn't already hold a reference to it, then ignore node */
} else {
match = true;
*ref_slot = scope_reference_alloc(scope, n);
sprite_scope_insert_reference(scope, n, ref_slot);
}
}
#else
if (!(*ref_slot)) {
*ref_slot = scope_reference_alloc(scope, n);
*ref_slot = sprite_scope_insert_reference(scope, n);
}
match = true;
#endif
@ -854,9 +831,9 @@ INTERNAL struct cache_node *node_lookup_touch(struct sprite_scope *scope, struct
}
/* Init node and add to bin */
struct sprite_scope_reference **ref_slot = scope_get_reference_slot(scope, n, cache_bin_index);
struct sprite_scope_reference **ref_slot = sprite_scope_reference_slot_from_node(scope, n, cache_bin_index);
if (!(*ref_slot)) {
*ref_slot = scope_reference_alloc(scope, n);
sprite_scope_insert_reference(scope, n, ref_slot);
}
*nonmatching_next = n;
if (nonmatching) {
@ -932,7 +909,7 @@ INTERNAL void *data_from_tag_internal(struct sprite_scope *scope, struct sprite_
}
/* Cmd holds reference to node */
node_refcount_add(n, 1);
refcount_add(n, 1);
}
sys_mutex_unlock(&lock);
@ -1069,7 +1046,7 @@ INTERNAL WORK_TASK_FUNC_DEF(sprite_load_task, arg)
}
/* Free cmd */
node_refcount_add(n, -1);
refcount_add(n, -1);
{
struct sys_lock lock = sys_mutex_lock_e(&G.load_cmds_mutex);
cmd->next_free = G.first_free_load_cmd;
@ -1185,7 +1162,6 @@ INTERNAL SYS_THREAD_ENTRY_POINT_FUNC_DEF(sprite_evictor_thread_entry_point, arg)
}
}
n = n->next_in_bin;
}
}

View File

@ -32,8 +32,9 @@ b32 sprite_tag_eq(struct sprite_tag t1, struct sprite_tag t2);
* ========================== */
/* A sprite scope: pins cache nodes (via refcounts) for the duration of a
 * begin/end pair so they cannot be evicted while in use.
 * NOTE(review): this view is a diff — `tctx` is the field being removed by
 * the commit while `reference_pool`/`num_references` are being added. */
struct sprite_scope {
struct sprite_tctx *tctx;                       /* owning thread's sprite state (removed by this commit) */
struct sprite_scope_reference **reference_bins; /* CACHE_BINS_COUNT chain heads, indexed by cache bin */
struct sprite_scope_reference *reference_pool;  /* flat pool of MAX_SCOPE_REFERENCES entries, bump-allocated */
u64 num_references;                             /* entries used in reference_pool; panics at MAX_SCOPE_REFERENCES */
struct sprite_scope *next_free;                 /* free-list link when the scope is recycled */
};