hekate/bdk/mem/heap.c

/*
 * Copyright (c) 2018 naehrwert
 * Copyright (c) 2018-2020 CTCaer
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <string.h>
#include "heap.h"
#include <gfx_utils.h>
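// Global allocator state: heap base address plus first/last pointers of the
// doubly linked node list that all entry points below operate on.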
heap_t _heap;
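// Reset the heap bookkeeping: record the base address and clear the node
// list. Reached through heap_init() before any allocation is made.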
static void _heap_create(void *start)
{
	_heap.start = start;
	_heap.first = NULL;
	_heap.last = NULL;
}

// Node info is before node address.
static void *_heap_alloc(u32 size)
{
	hnode_t *node, *new_node;

	// Align to cache line size.
	size = ALIGN(size, sizeof(hnode_t));

	// First allocation.
	if (!_heap.first)
	{
		node = (hnode_t *)_heap.start;
		node->used = 1;
		node->size = size;
		node->prev = NULL;
		node->next = NULL;

		_heap.first = node;
		_heap.last = node;

		return (void *)node + sizeof(hnode_t);
	}
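	// With BDK_MALLOC_NO_DEFRAG, freed nodes are never reused and new
	// allocations always go after the last node. Otherwise do a first-fit
	// scan of the node list, splitting an oversized free node when the
	// leftover space is at least 4x sizeof(hnode_t).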
#ifdef BDK_MALLOC_NO_DEFRAG
	// Get the last allocated block.
	node = _heap.last;
#else
	// Get first block and find the first available one.
	node = _heap.first;
	while (true)
	{
		// Check if there's available unused node.
		if (!node->used && (size <= node->size))
		{
			// Size and offset of the new unused node.
			u32 new_size = node->size - size;
			new_node = (hnode_t *)((void *)node + sizeof(hnode_t) + size);

			// If there's aligned unused space from the old node,
			// create a new one and set the leftover size.
			if (new_size >= (sizeof(hnode_t) << 2))
			{
				new_node->size = new_size - sizeof(hnode_t);
				new_node->used = 0;
				new_node->next = node->next;
				// Relink the next node's prev pointer, if the reused node was not the last one.
				if (new_node->next)
					new_node->next->prev = new_node;
				new_node->prev = node;
				node->next = new_node;
			}
			else // Unused node size is just enough.
				size += new_size;

			node->size = size;
			node->used = 1;

			return (void *)node + sizeof(hnode_t);
		}

		// No unused node found, try the next one.
		if (node->next)
			node = node->next;
		else
			break;
	}
#endif
	// No unused node found, create a new one.
	new_node = (hnode_t *)((void *)node + sizeof(hnode_t) + node->size);
	new_node->used = 1;
	new_node->size = size;
	new_node->prev = node;
	new_node->next = NULL;

	node->next = new_node;
	_heap.last = new_node;

	return (void *)new_node + sizeof(hnode_t);
}
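// Mark the node owning addr as unused and, unless BDK_MALLOC_NO_DEFRAG is
// set, walk the list and merge every free node into its free predecessor.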
static void _heap_free(void *addr)
{
	hnode_t *node = (hnode_t *)(addr - sizeof(hnode_t));
	node->used = 0;

	node = _heap.first;

#ifndef BDK_MALLOC_NO_DEFRAG
	// Do simple defragmentation on next blocks.
	while (node)
	{
		if (!node->used)
		{
			if (node->prev && !node->prev->used)
			{
				node->prev->size += node->size + sizeof(hnode_t);
				node->prev->next = node->next;

				if (node->next)
					node->next->prev = node->prev;
			}
		}

		node = node->next;
	}
#endif
}
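// Set up the global heap starting at the given base address.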
void heap_init(void *base)
{
	_heap_create(base);
}
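// Replace the active heap state wholesale with an externally provided copy
// (e.g. a previously saved heap_t that should be restored).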
void heap_set(heap_t *heap)
{
	memcpy(&_heap, heap, sizeof(heap_t));
}
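// Standard-style allocation entry point; wraps the internal allocator.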
void *malloc(u32 size)
{
	return _heap_alloc(size);
}
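// Allocate num * size bytes and zero the whole aligned block before returning it.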
void *calloc(u32 num, u32 size)
{
	void *res = (void *)_heap_alloc(num * size);

	memset(res, 0, ALIGN(num * size, sizeof(hnode_t))); // Clear the aligned size.

	return res;
}
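// Only pointers at or above the heap base are freed; lower addresses
// (e.g. NULL) are silently ignored.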
void free(void *buf)
{
	if (buf >= _heap.start)
		_heap_free(buf);
}
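// Walk the node list and fill mon with used/total byte counts and node
// counts; optionally print each node's state, address and size to the gfx console.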
void heap_monitor(heap_monitor_t *mon, bool print_node_stats)
{
	u32 count = 0;
	memset(mon, 0, sizeof(heap_monitor_t));

	hnode_t *node = _heap.first;
	while (true)
	{
		if (node->used)
		{
			mon->nodes_used++;
			mon->used += node->size + sizeof(hnode_t);
		}
		else
			mon->total += node->size + sizeof(hnode_t);

		if (print_node_stats)
			gfx_printf("%3d - %d, addr: 0x%08X, size: 0x%X\n",
				count, node->used, (u32)node + sizeof(hnode_t), node->size);

		count++;

		if (node->next)
			node = node->next;
		else
			break;
	}
	mon->total += mon->used;

	mon->nodes_total = count;
}