lib/os/heap: add an additional validation criterion

One fundamental validation criterion is that there must never be two
consecutive free chunks. If that ever happens, it means we failed to merge
them. In other words, a free chunk must always be surrounded by used chunks.
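
To make the invariant concrete, here is an illustrative sketch of the free
path that is meant to guarantee it (not the allocator's actual code:
illustrative_free() and coalesce() are hypothetical, while chunk_used(),
left_chunk() and right_chunk() are the real helpers used in the diff below):

static void illustrative_free(struct z_heap *h, chunkid_t c)
{
	/* merge with a free left neighbour, if any */
	if (!chunk_used(h, left_chunk(h, c))) {
		c = coalesce(h, left_chunk(h, c), c);
	}
	/* merge with a free right neighbour, if any */
	if (!chunk_used(h, right_chunk(h, c))) {
		c = coalesce(h, c, right_chunk(h, c));
	}
	/* ... then mark c free and put it on its bucket's free list,
	 * knowing both of its neighbours are now used chunks.
	 */
}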

Extending valid_chunk() with new rules is a pain as it stands, so a
VALIDATE() macro is introduced to make things easier to work with. It also
isolates each test, making it possible to turn VALIDATE() into __ASSERT()
to determine exactly which test is tripping when debugging.
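
For instance, a debugging build could redefine the macro along these lines
(a sketch only, not part of this commit; __ASSERT() is Zephyr's standard
assertion macro, and the <sys/__assert.h> header path is assumed):

#include <sys/__assert.h>

#undef VALIDATE
#define VALIDATE(cond) __ASSERT(cond, "heap validation failed: " #cond)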

Finally, because of that new validation rule, sys_heap_validate() must be
modified so as not to use valid_chunk() while it is flipping all the
"used" flags. So let's run valid_chunk() up front, before altering chunk
headers.

Now sys_heap_validate() has become justifiably more expensive, and a few
emulated targets are about to bust the tests/lib/heap test timeout, so bump
that timeout as well.
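
For context, the call pattern such a test performs looks roughly like this
(a hypothetical sketch, not the actual test code; the header paths are an
assumption for this era of the tree):

#include <sys/printk.h>
#include <sys/sys_heap.h>

static char heap_mem[2048];
static struct sys_heap heap;

void heap_smoke_test(void)
{
	sys_heap_init(&heap, heap_mem, sizeof(heap_mem));

	void *p = sys_heap_alloc(&heap, 64);

	/* check all chunk and free-list invariants after each step */
	if (!sys_heap_validate(&heap)) {
		printk("heap corrupted after alloc\n");
	}

	sys_heap_free(&heap, p);

	if (!sys_heap_validate(&heap)) {
		printk("heap corrupted after free\n");
	}
}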

Signed-off-by: Nicolas Pitre <npitre@baylibre.com>
Authored by Nicolas Pitre on 2020-06-26 10:37:40 -04:00; committed by Anas Nashif
parent 9b617755d2
commit 130963ad2f
2 changed files with 35 additions and 19 deletions

@@ -22,22 +22,30 @@ static size_t max_chunkid(struct z_heap *h)
 	return h->len - min_chunk_size(h);
 }
 
+#define VALIDATE(cond) do { if (!(cond)) { return false; } } while (0)
+
 static bool in_bounds(struct z_heap *h, chunkid_t c)
 {
-	return (c >= right_chunk(h, 0))
-		&& (c <= max_chunkid(h))
-		&& (chunk_size(h, c) < h->len);
+	VALIDATE(c >= right_chunk(h, 0));
+	VALIDATE(c <= max_chunkid(h));
+	VALIDATE(chunk_size(h, c) < h->len);
+	return true;
 }
 
 static bool valid_chunk(struct z_heap *h, chunkid_t c)
 {
-	return (chunk_size(h, c) > 0
-		&& (c + chunk_size(h, c) <= h->len)
-		&& in_bounds(h, c)
-		&& (right_chunk(h, left_chunk(h, c)) == c)
-		&& (left_chunk(h, right_chunk(h, c)) == c)
-		&& (chunk_used(h, c) || in_bounds(h, prev_free_chunk(h, c)))
-		&& (chunk_used(h, c) || in_bounds(h, next_free_chunk(h, c))));
+	VALIDATE(chunk_size(h, c) > 0);
+	VALIDATE(c + chunk_size(h, c) <= h->len);
+	VALIDATE(in_bounds(h, c));
+	VALIDATE(right_chunk(h, left_chunk(h, c)) == c);
+	VALIDATE(left_chunk(h, right_chunk(h, c)) == c);
+	if (!chunk_used(h, c)) {
+		VALIDATE(chunk_used(h, left_chunk(h, c)));
+		VALIDATE(chunk_used(h, right_chunk(h, c)));
+		VALIDATE(in_bounds(h, prev_free_chunk(h, c)));
+		VALIDATE(in_bounds(h, next_free_chunk(h, c)));
+	}
+	return true;
 }
 
 /* Validate multiple state dimensions for the bucket "next" pointer
@@ -65,6 +73,18 @@ bool sys_heap_validate(struct sys_heap *heap)
 	struct z_heap *h = heap->heap;
 	chunkid_t c;
 
+	/*
+	 * Walk through the chunks linearly, verifying sizes and end pointer.
+	 */
+	for (c = right_chunk(h, 0); c <= max_chunkid(h); c = right_chunk(h, c)) {
+		if (!valid_chunk(h, c)) {
+			return false;
+		}
+	}
+	if (c != h->len) {
+		return false; /* Should have exactly consumed the buffer */
+	}
+
 	/* Check the free lists: entry count should match, empty bit
 	 * should be correct, and all chunk entries should point into
 	 * valid unused chunks. Mark those chunks USED, temporarily.
@@ -95,17 +115,13 @@ bool sys_heap_validate(struct sys_heap *heap)
 		}
 	}
 
-	/* Walk through the chunks linearly, verifying sizes and end
-	 * pointer and that the all chunks are now USED (i.e. all free
-	 * blocks were found during enumeration). Mark all blocks
-	 * UNUSED
+	/*
+	 * Walk through the chunks linearly again, verifying that all chunks
+	 * are now USED (i.e. all free blocks were found during enumeration).
+	 * Mark all such blocks UNUSED.
 	 */
 	chunkid_t prev_chunk = 0;
-
 	for (c = right_chunk(h, 0); c <= max_chunkid(h); c = right_chunk(h, c)) {
-		if (!valid_chunk(h, c)) {
-			return false;
-		}
 		if (!chunk_used(h, c)) {
 			return false;
 		}

@@ -8,4 +8,4 @@ tests:
     tags: heap
     platform_exclude: m2gl025_miv qemu_riscv32
     filter: not CONFIG_SOC_NSIM
-    timeout: 120
+    timeout: 240