lib/os/heap: some code simplification in sys_heap_aligned_alloc()

It is clearer to apply the alignment in the memory address space
rather than the chunk space.

Signed-off-by: Nicolas Pitre <npitre@baylibre.com>
This commit is contained in:
Nicolas Pitre 2020-06-24 01:10:14 -04:00 committed by Andrew Boie
commit 8a6b02b5bf

View file

@ -157,14 +157,19 @@ static void free_chunks(struct z_heap *h, chunkid_t c)
free_list_add(h, c); free_list_add(h, c);
} }
/* Map an allocated memory address back to its chunk id.
 *
 * The pointer handed out to users sits chunk_header_bytes(h) past the
 * start of its chunk inside the heap buffer, so subtract the buffer
 * base and the header size, then scale down by CHUNK_UNIT.
 */
static chunkid_t mem_to_chunkid(struct z_heap *h, void *p)
{
	uint8_t *base = (uint8_t *)chunk_buf(h);
	uint8_t *mem = p;

	return (mem - base - chunk_header_bytes(h)) / CHUNK_UNIT;
}
void sys_heap_free(struct sys_heap *heap, void *mem) void sys_heap_free(struct sys_heap *heap, void *mem)
{ {
if (mem == NULL) { if (mem == NULL) {
return; /* ISO C free() semantics */ return; /* ISO C free() semantics */
} }
struct z_heap *h = heap->heap; struct z_heap *h = heap->heap;
chunkid_t c = ((uint8_t *)mem - chunk_header_bytes(h) chunkid_t c = mem_to_chunkid(h, mem);
- (uint8_t *)chunk_buf(h)) / CHUNK_UNIT;
/* /*
* This should catch many double-free cases. * This should catch many double-free cases.
@ -251,46 +256,53 @@ void *sys_heap_aligned_alloc(struct sys_heap *heap, size_t align, size_t bytes)
CHECK((align & (align - 1)) == 0); CHECK((align & (align - 1)) == 0);
CHECK(big_heap(h)); CHECK(big_heap(h));
if (align <= CHUNK_UNIT) {
return sys_heap_alloc(heap, bytes);
}
if (bytes == 0) { if (bytes == 0) {
return NULL; return NULL;
} }
/* Find a free block that is guaranteed to fit */ /*
size_t chunksz = bytes_to_chunksz(h, bytes); * Find a free block that is guaranteed to fit.
size_t mask = (align / CHUNK_UNIT) - 1; * We over-allocate to account for alignment and then free
size_t padsz = MAX(CHUNK_UNIT, chunksz + mask); * the extra allocations afterwards.
chunkid_t c0 = alloc_chunks(h, padsz); */
size_t alloc_sz = bytes_to_chunksz(h, bytes);
size_t padded_sz = bytes_to_chunksz(h, bytes + align - 1);
chunkid_t c0 = alloc_chunks(h, padded_sz);
if (c0 == 0) { if (c0 == 0) {
return NULL; return NULL;
} }
/* Align within memory, using "chunk index" units. Remember /* Align allocated memory */
* the block we're aligning starts in the chunk AFTER the void *mem = chunk_mem(h, c0);
* header! mem = (void *) ROUND_UP(mem, align);
*/
size_t c0i = ((size_t) &chunk_buf(h)[c0 + 1]) / CHUNK_UNIT;
size_t ci = ((c0i + mask) & ~mask);
chunkid_t c = c0 + (ci - c0i);
CHECK(c >= c0 && c < c0 + padsz); /* Get corresponding chunk */
CHECK((((size_t) chunk_mem(h, c)) & (align - 1)) == 0); chunkid_t c = mem_to_chunkid(h, mem);
CHECK(c >= c0 && c < c0 + padded_sz);
/* Split and free unused prefix */ /* Split and free unused prefix */
if (c > c0) { if (c > c0) {
split_chunks(h, c0, c); split_chunks(h, c0, c);
set_chunk_used(h, c, true); /* this can't be merged */
free_chunks(h, c0); CHECK(chunk_used(h, left_chunk(h, c0)));
free_list_add(h, c0);
} }
/* Split and free unused suffix */ /* Split and free unused suffix */
if (chunksz < chunk_size(h, c)) { if (alloc_sz < chunk_size(h, c)) {
split_chunks(h, c, c + chunksz); split_chunks(h, c, c + alloc_sz);
set_chunk_used(h, c, true);
free_chunks(h, c + alloc_sz);
} else {
set_chunk_used(h, c, true); set_chunk_used(h, c, true);
free_chunks(h, c + chunksz);
} }
return chunk_mem(h, c); return mem;
} }
void sys_heap_init(struct sys_heap *heap, void *mem, size_t bytes) void sys_heap_init(struct sys_heap *heap, void *mem, size_t bytes)