@@ -214,21 +214,21 @@ static MemNode *__SplitMemNode(MemNode *const node, const size_t bytes)
    MemNode *const r = (MemNode *)(n + (node->size - bytes));
    node->size -= bytes;
    r->size = bytes;

    return r;
}
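
// A sketch of the split arithmetic above (sizes hypothetical): the remainder
// stays at the node's base address and the returned block is carved off the
// node's top end.
//
//   [ node: size 256 ]  --__SplitMemNode(node, 64)-->  [ node: 192 ][ r: 64 ]
//   where r == (MemNode *)((uintptr_t)node + 192)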

static void __InsertMemNodeBefore(AllocList *const list, MemNode *const insert, MemNode *const curr)
{
    insert->next = curr;

    if (curr->prev == NULL) list->head = insert;
    else
    {
        insert->prev = curr->prev;
        curr->prev->next = insert;
    }

    curr->prev = insert;
}
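
// Pointer rewiring done above, sketched for a list A <-> C when inserting B
// before C (hypothetical nodes, not from this patch):
//
//   B->next = C;  B->prev = A;  A->next = B;  C->prev = B;
//
// When C is the head (C->prev == NULL), B becomes the new head instead; note
// B->prev is left untouched in that branch, so callers presumably pass nodes
// whose prev is already cleared.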

@@ -259,9 +259,9 @@ static MemNode *__RemoveMemNode(AllocList *const list, MemNode *const node)
        if (list->tail != NULL) list->tail->next = NULL;
        else list->head = NULL;
    }

    list->len--;

    return node;
}
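
// Note the unlink above never touches the removed node's own prev/next
// pointers; callers presumably rewire or overwrite them. In the tail case
// shown, removing the last remaining node nulls both list->tail and
// list->head, and list->len drops to zero.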

@@ -270,12 +270,12 @@ static MemNode *__FindMemNode(AllocList *const list, const size_t bytes)
    for (MemNode *node = list->head; node != NULL; node = node->next)
    {
        if (node->size < bytes) continue;

        // Close in size - reduce fragmentation by not splitting.
        else if (node->size <= bytes + MEM_SPLIT_THRESHOLD) return __RemoveMemNode(list, node);
        else return __SplitMemNode(node, bytes);
    }

    return NULL;
}
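
// First-fit with an anti-fragmentation guard: a node within
// MEM_SPLIT_THRESHOLD bytes of the request is handed out whole rather than
// leaving behind a sliver too small to ever satisfy a later request.
// A sketch, assuming MEM_SPLIT_THRESHOLD were 16:
//
//   request 100, node->size 110  ->  node returned whole (within threshold)
//   request 100, node->size 200  ->  node split; a 100-byte piece is returned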

@@ -295,19 +295,19 @@ static void __InsertMemNode(MemPool *const mempool, AllocList *const list, MemNo
            mempool->arena.offs += iter->size;
            __RemoveMemNode(list, iter);
            iter = list->head;

            if (iter == NULL)
            {
                list->head = node;
                list->len++;
                return;
            }
        }
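
// The branch above gives memory back to the arena instead of the free list:
// when a free node sits exactly at arena.offs (allocation moves offs downward
// elsewhere in this file), its bytes are reabsorbed by bumping offs back up.
// Restarting the scan from list->head is what lets several such nodes be
// reclaimed in one pass.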

        const uintptr_t inode = (uintptr_t)node;
        const uintptr_t iiter = (uintptr_t)iter;
        const uintptr_t iter_end = iiter + iter->size;
        const uintptr_t node_end = inode + node->size;

        if (iter == node) return;
        else if (iter < node)
        {

@@ -317,7 +317,7 @@ static void __InsertMemNode(MemPool *const mempool, AllocList *const list, MemNo
            {
                // If we can coalesce, do so.
                iter->size += node->size;

                return;
            }
            else if (iter->next == NULL)

@@ -326,8 +326,8 @@ static void __InsertMemNode(MemPool *const mempool, AllocList *const list, MemNo
                iter->next = node;
                node->prev = iter;
                list->len++;

                return;
            }
        }
        else if (iter > node)

@@ -352,7 +352,7 @@ static void __InsertMemNode(MemPool *const mempool, AllocList *const list, MemNo
                    list->head = node;
                    list->len++;
                }

                return;
            }
            else if ((iter_end == inode) && !is_bucket)
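
// Taken together, the branches above keep each free list sorted by address
// and merge adjacent nodes where they touch; the !is_bucket guards skip
// merging on bucket lists, presumably because those hold fixed-size nodes.
// A sketch of the outcomes for a new node N against a list node I:
//
//   I ends where N begins  ->  I->size += N->size (merge into I)
//   N ends where I begins  ->  N absorbs I and takes its place
//   no adjacency           ->  N is linked in ascending address order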

@@ -385,14 +385,14 @@ MemPool CreateMemPool(const size_t size)
    {
        // Align the mempool size to at least the size of an alloc node.
        uint8_t *const restrict buf = malloc(size*sizeof *buf);

        if (buf == NULL) return mempool;
        else
        {
            mempool.arena.size = size;
            mempool.arena.mem = (uintptr_t)buf;
            mempool.arena.offs = mempool.arena.mem + mempool.arena.size;

            return mempool;
        }
    }
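
// A minimal usage sketch, kept in a comment since it is not part of this
// patch (DestroyMemPool is assumed to exist alongside CreateMemPool):
//
//   MemPool pool = CreateMemPool(8192);
//   if (pool.arena.mem != 0)
//   {
//       int *nums = MemPoolAlloc(&pool, 100*sizeof *nums);   // zeroed block
//       MemPoolFree(&pool, nums);
//       DestroyMemPool(&pool);
//   }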

@@ -401,14 +401,14 @@ MemPool CreateMemPool(const size_t size)
MemPool CreateMemPoolFromBuffer(void *const restrict buf, const size_t size)
{
    MemPool mempool = { 0 };

    if ((size == 0) || (buf == NULL) || (size <= sizeof(MemNode))) return mempool;
    else
    {
        mempool.arena.size = size;
        mempool.arena.mem = (uintptr_t)buf;
        mempool.arena.offs = mempool.arena.mem + mempool.arena.size;

        return mempool;
    }
}
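
// The buffer-backed variant never allocates, so it suits static or
// stack-backed arenas; the guard above also requires size > sizeof(MemNode).
// A sketch with a hypothetical static buffer:
//
//   static uint8_t backing[4096];
//   MemPool pool = CreateMemPoolFromBuffer(backing, sizeof backing);
//
// Whoever owns the buffer owns its lifetime; such a pool must not be freed.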

@@ -471,7 +471,7 @@ void *MemPoolAlloc(MemPool *const mempool, const size_t size)
        // --------------
        new_mem->next = new_mem->prev = NULL;
        uint8_t *const restrict final_mem = (uint8_t *)new_mem + sizeof *new_mem;

        return memset(final_mem, 0, new_mem->size - sizeof *new_mem);
    }
}
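
// Layout note for the return path above: every allocation is preceded by its
// MemNode header, so the caller receives (uint8_t *)new_mem + sizeof *new_mem
// and new_mem->size - sizeof *new_mem usable bytes, zeroed by the memset.
// MemPoolFree and MemPoolRealloc recover the header by stepping the same
// header size back from the user pointer.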

@@ -487,14 +487,14 @@ void *MemPoolRealloc(MemPool *const restrict mempool, void *const ptr, const siz
    MemNode *const node = (MemNode *)((uint8_t *)ptr - sizeof *node);
    const size_t NODE_SIZE = sizeof *node;
    uint8_t *const resized_block = MemPoolAlloc(mempool, size);

    if (resized_block == NULL) return NULL;
    else
    {
        MemNode *const resized = (MemNode *)(resized_block - sizeof *resized);
        memmove(resized_block, ptr, (node->size > resized->size)? (resized->size - NODE_SIZE) : (node->size - NODE_SIZE));
        MemPoolFree(mempool, ptr);

        return resized_block;
    }
}
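
// Realloc here is allocate-copy-free rather than in-place growth: the memmove
// copies min(old, new) payload bytes, both measured minus the NODE_SIZE
// header. A consequence, with hypothetical sizes:
//
//   void *p = MemPoolAlloc(&pool, 64);
//   p = MemPoolRealloc(&pool, p, 128);   // address generally changes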

@@ -503,7 +503,7 @@ void *MemPoolRealloc(MemPool *const restrict mempool, void *const ptr, const siz
void MemPoolFree(MemPool *const restrict mempool, void *const ptr)
{
    const uintptr_t p = (uintptr_t)ptr;

    if ((ptr == NULL) || (p - sizeof(MemNode) < mempool->arena.mem)) return;
    else
    {

@@ -556,13 +556,13 @@ void MemPoolReset(MemPool *const mempool)
{
    mempool->large.head = mempool->large.tail = NULL;
    mempool->large.len = 0;

    for (size_t i = 0; i < MEMPOOL_BUCKET_SIZE; i++)
    {
        mempool->buckets[i].head = mempool->buckets[i].tail = NULL;
        mempool->buckets[i].len = 0;
    }

    mempool->arena.offs = mempool->arena.mem + mempool->arena.size;
}
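
// Reset is the cheap "free everything" path: every bucket and the large-block
// list are emptied, and arena.offs returns to the top of the arena (offs
// counts down as blocks are handed out), so all outstanding pointers are
// invalidated at once with no per-node work.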

@@ -573,13 +573,13 @@ void MemPoolReset(MemPool *const mempool)
ObjPool CreateObjPool(const size_t objsize, const size_t len)
{
    ObjPool objpool = { 0 };

    if ((len == 0) || (objsize == 0)) return objpool;
    else
    {
        const size_t aligned_size = __AlignSize(objsize, sizeof(size_t));
        uint8_t *const restrict buf = calloc(len, aligned_size);

        if (buf == NULL) return objpool;
        objpool.objSize = aligned_size;
        objpool.memSize = objpool.freeBlocks = len;

@@ -602,7 +602,7 @@ ObjPool CreateObjPoolFromBuffer(void *const restrict buf, const size_t objsize,
    // If the object size isn't large enough to align to a size_t, then we can't use it.
    const size_t aligned_size = __AlignSize(objsize, sizeof(size_t));

    if ((buf == NULL) || (len == 0) || (objsize < sizeof(size_t)) || (objsize*len != aligned_size*len)) return objpool;
    else
    {
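
// Since len > 0 here, the objsize*len != aligned_size*len guard reduces to
// objsize != aligned_size: the caller's buffer was sized with the raw object
// size, so any objsize not already a multiple of sizeof(size_t) is rejected.
// A hypothetical example on a 64-bit target:
//
//   typedef struct { float x, y, z; } Vec3;   // 12 bytes
//   // __AlignSize(12, 8) == 16, so a tightly packed Vec3 buffer is rejected;
//   // size the buffer with 16-byte slots before handing it over.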

@@ -628,7 +628,7 @@ void DestroyObjPool(ObjPool *const restrict objpool)
    {
        void *const restrict ptr = (void *)objpool->mem;
        free(ptr);

        *objpool = (ObjPool){ 0 };
    }
}

@@ -646,7 +646,7 @@ void *ObjPoolAlloc(ObjPool *const objpool)
        // After allocating, we set head to the address of the index that *Head holds:
        // Head = &pool[*Head * pool.objsize];
        objpool->offs = (objpool->freeBlocks != 0)? objpool->mem + (*block*objpool->objSize) : 0;

        return memset(block, 0, objpool->objSize);
    }
    else return NULL;
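
// The free list lives inside the unused slots themselves: each free slot's
// first size_t holds the index of the next free slot, so the alloc above pops
// the current head, follows the stored index to find the new head (or 0 when
// the pool empties), and returns the slot zeroed. No bookkeeping exists
// outside the pool's own buffer.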

@@ -655,7 +655,7 @@ void *ObjPoolAlloc(ObjPool *const objpool)
void ObjPoolFree(ObjPool *const restrict objpool, void *const ptr)
{
    uintptr_t block = (uintptr_t)ptr;

    if ((ptr == NULL) || (block < objpool->mem) || (block > objpool->mem + objpool->memSize*objpool->objSize)) return;
    else
    {

@@ -687,7 +687,7 @@ void ObjPoolCleanUp(ObjPool *const restrict objpool, void **const restrict ptrre
BiStack CreateBiStack(const size_t len)
{
    BiStack destack = { 0 };

    if (len == 0) return destack;

    uint8_t *const buf = malloc(len*sizeof *buf);

@@ -696,21 +696,21 @@ BiStack CreateBiStack(const size_t len)
    destack.mem = (uintptr_t)buf;
    destack.front = destack.mem;
    destack.back = destack.mem + len;

    return destack;
}

BiStack CreateBiStackFromBuffer(void *const buf, const size_t len)
{
    BiStack destack = { 0 };

    if ((len == 0) || (buf == NULL)) return destack;
    else
    {
        destack.size = len;
        destack.mem = destack.front = (uintptr_t)buf;
        destack.back = destack.mem + len;

        return destack;
    }
}
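
// A double-ended stack: the front cursor grows up from mem, the back cursor
// grows down from mem + len, and the two meet in the middle. A usage sketch
// with a hypothetical buffer:
//
//   static uint8_t scratch[1024];
//   BiStack bi = CreateBiStackFromBuffer(scratch, sizeof scratch);
//   uint8_t *persistent = BiStackAllocFront(&bi, 128);  // long-lived end
//   uint8_t *transient  = BiStackAllocBack(&bi, 64);    // scratch end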

@@ -738,7 +738,7 @@ void *BiStackAllocFront(BiStack *const restrict destack, const size_t len)
    {
        uint8_t *const restrict ptr = (uint8_t *)destack->front;
        destack->front += ALIGNED_LEN;

        return ptr;
    }
}

@@ -756,7 +756,7 @@ void *BiStackAllocBack(BiStack *const restrict destack, const size_t len)
    {
        destack->back -= ALIGNED_LEN;
        uint8_t *const restrict ptr = (uint8_t *)destack->back;

        return ptr;
    }
}
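
// The two allocators are symmetric: the front path returns the old cursor and
// then advances it, the back path retreats the cursor and then returns it,
// both by the same ALIGNED_LEN. The bounds checks fall outside these hunks,
// but are presumably what fail the allocation once the cursors would cross,
// letting each end be reset independently of the other.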