/* tls.c — lazy, lock-protected registry of thread-local variables backed by a per-thread arena */
#include "tls.h"

#include "arena.h"
#include "atomic.h"
#include "intrinsics.h"
#include "sys.h"

/* Virtual reservation size for each thread's TLS data arena */
#define THREAD_LOCAL_TABLE_RESERVE (MEGABYTE(1))
/* Process-global registry of thread-local variable metadata.
 * Writes are serialized by the spinlock below; entries are append-only
 * and never modified after publication, so ids remain stable. */
GLOBAL struct {
    struct atomic_i64 tls_metas_lock_flag;  /* spinlock flag: 0 = unlocked, 1 = held */
    u64 tls_metas_count;                    /* number of registered vars; indexes into tls_metas */
    struct thread_local_var_meta tls_metas[MAX_THREAD_LOCAL_VARS];  /* copies of registered metas, indexed by id */
} L = { 0 }, DEBUG_LVAR(L_tls);
INTERNAL void tls_metas_lock(void)
|
|
{
|
|
while (atomic_i64_eval_compare_exchange(&L.tls_metas_lock_flag, 0, 1) == 0) {
|
|
/* Spinlock */
|
|
ix_pause();
|
|
}
|
|
}
|
|
|
|
INTERNAL void tls_metas_unlock(void)
|
|
{
|
|
atomic_i64_eval_exchange(&L.tls_metas_lock_flag, 0);
|
|
}
|
|
|
|
struct thread_local_store thread_local_store_alloc(void)
|
|
{
|
|
__prof;
|
|
struct thread_local_store t = { 0 };
|
|
t.arena = arena_alloc(THREAD_LOCAL_TABLE_RESERVE);
|
|
return t;
|
|
}
|
|
|
|
void thread_local_store_release(struct thread_local_store *t)
|
|
{
|
|
__prof;
|
|
/* Release allocated tls data in reverse order */
|
|
tls_metas_lock();
|
|
{
|
|
for (u64 i = t->allocation_order_count; i-- > 0;) {
|
|
u64 id = t->allocation_order[i];
|
|
void *data = t->lookup[id];
|
|
struct thread_local_var_meta *meta = &L.tls_metas[id];
|
|
if (meta->release) {
|
|
meta->release(data);
|
|
}
|
|
}
|
|
}
|
|
tls_metas_unlock();
|
|
|
|
arena_release(&t->arena);
|
|
}
|
|
|
|
void *_thread_local_eval(struct thread_local_var_meta *meta)
|
|
{
|
|
/* Register var if unregistered */
|
|
u64 id;
|
|
{
|
|
u64 id_plus_one = atomic_u64_eval(&meta->id_plus_one);
|
|
if (id_plus_one == 0) {
|
|
__profscope(_thread_local_eval__REGISTER);
|
|
tls_metas_lock();
|
|
{
|
|
id = L.tls_metas_count++;
|
|
if (id >= MAX_THREAD_LOCAL_VARS) {
|
|
sys_panic_raw("Maximum number of thread local variables reached");
|
|
}
|
|
atomic_u64_eval_exchange(&meta->id_plus_one, id + 1);
|
|
L.tls_metas[id] = *meta;
|
|
}
|
|
tls_metas_unlock();
|
|
} else {
|
|
id = id_plus_one - 1;
|
|
}
|
|
}
|
|
|
|
/* Allocate var for thread if unallocated */
|
|
struct thread_local_store *t = sys_thread_get_thread_local_store();
|
|
void **data_slot = &t->lookup[id];
|
|
if (!*data_slot) {
|
|
__profscope(_thread_local_eval__ALLOC);
|
|
/* Allocate */
|
|
arena_align(&t->arena, meta->align);
|
|
*data_slot = arena_push_array(&t->arena, u8, meta->size);
|
|
if (meta->alloc) {
|
|
meta->alloc(*data_slot);
|
|
} else {
|
|
MEMZERO(*data_slot, meta->size);
|
|
}
|
|
t->allocation_order[t->allocation_order_count++] = id;
|
|
}
|
|
|
|
return *data_slot;
|
|
}
|