diff --git a/src/liballocator/mem-pool.c b/src/liballocator/mem-pool.c index e162df809..5863ebae8 100644 --- a/src/liballocator/mem-pool.c +++ b/src/liballocator/mem-pool.c @@ -36,11 +36,6 @@ */ static const uint8_t mem_pool_free_chunk_magic_num = 0x71; -/** - * Number of bits in a single bitmap's bits' block. - */ -static const mword_t mem_bitmap_bits_in_block = sizeof (mword_t) * JERRY_BITSINBYTE; - static void mem_check_pool( mem_pool_state_t *pool_p); /** @@ -48,74 +43,70 @@ static void mem_check_pool( mem_pool_state_t *pool_p); * * Pool will be located in the segment [pool_start; pool_start + pool_size). - * Part of pool space will be used for bitmap and the rest will store chunks. + * Whole pool space is used for chunks; free chunks form a list headed in the pool's state. - * - * Warning: - * it is incorrect to suppose, that chunk number = pool_size / chunk_size. */ void mem_pool_init(mem_pool_state_t *pool_p, /**< pool */ - size_t chunk_size, /**< size of one chunk */ - uint8_t *pool_start, /**< start of pool space */ - size_t pool_size) /**< pool space size */ + size_t chunk_size, /**< size of pool's chunk */ + uint8_t *pool_start, /**< start of pool space */ + size_t pool_size) /**< pool space size */ { JERRY_ASSERT( pool_p != NULL ); JERRY_ASSERT( (uintptr_t) pool_start % MEM_ALIGNMENT == 0); - JERRY_ASSERT( chunk_size % MEM_ALIGNMENT == 0 ); pool_p->pool_start_p = pool_start; pool_p->pool_size = pool_size; - pool_p->chunk_size = chunk_size; - const size_t bits_in_byte = JERRY_BITSINBYTE; - const size_t bitmap_area_size_alignment = JERRY_MAX( sizeof (mword_t), MEM_ALIGNMENT); + switch ( chunk_size ) + { + case 4: + pool_p->chunk_size_log = 2; + break; - /* - * Calculation chunks number - */ + case 8: + pool_p->chunk_size_log = 3; + break; - size_t bitmap_area_size = 0; - size_t chunks_area_size = JERRY_ALIGNDOWN( pool_size - bitmap_area_size, chunk_size); + case 16: + pool_p->chunk_size_log = 4; + break; + + case 32: + pool_p->chunk_size_log = 5; + break; + + case 64: + pool_p->chunk_size_log = 6; + break; + + default: + JERRY_UNREACHABLE(); + } + 
+ JERRY_ASSERT( chunk_size % MEM_ALIGNMENT == 0 ); + JERRY_ASSERT( chunk_size >= sizeof(mem_pool_chunk_offset_t) ); + + size_t chunks_area_size = JERRY_ALIGNDOWN( pool_size, chunk_size); size_t chunks_number = chunks_area_size / chunk_size; - /* while there is not enough area to hold state of all chunks*/ - while ( bitmap_area_size * bits_in_byte < chunks_number ) - { - JERRY_ASSERT( bitmap_area_size + chunks_area_size <= pool_size ); + JERRY_ASSERT( ( (mem_pool_chunk_offset_t) chunks_number ) == chunks_number ); + pool_p->chunks_number = (mem_pool_chunk_offset_t) chunks_number; - /* correct bitmap area's size and, accordingly, chunks' area's size*/ - - size_t new_bitmap_area_size = bitmap_area_size + bitmap_area_size_alignment; - size_t new_chunks_area_size = JERRY_ALIGNDOWN( pool_size - new_bitmap_area_size, chunk_size); - size_t new_chunks_number = new_chunks_area_size / chunk_size; - - bitmap_area_size = new_bitmap_area_size; - chunks_area_size = new_chunks_area_size; - chunks_number = new_chunks_number; - } - - /* - * Final calculation checks - */ - JERRY_ASSERT( bitmap_area_size * bits_in_byte >= chunks_number ); - JERRY_ASSERT( chunks_area_size >= chunks_number * chunk_size ); - JERRY_ASSERT( bitmap_area_size + chunks_area_size <= pool_size ); - - pool_p->bitmap_p = (mword_t*) pool_start; - pool_p->chunks_p = pool_start + bitmap_area_size; - - JERRY_ASSERT( (uintptr_t) pool_p->chunks_p % MEM_ALIGNMENT == 0 ); - - pool_p->chunks_number = chunks_number; + pool_p->first_free_chunk = 0; /* * All chunks are free right after initialization */ - pool_p->free_chunks_number = chunks_number; - __memset( pool_p->bitmap_p, 0, bitmap_area_size); + pool_p->free_chunks_number = pool_p->chunks_number; -#ifndef JERRY_NDEBUG - __memset( pool_p->chunks_p, mem_pool_free_chunk_magic_num, chunks_area_size); -#endif /* JERRY_NDEBUG */ + for ( uint32_t chunk_index = 0; + chunk_index < chunks_number; + chunk_index++ ) + { + mem_pool_chunk_offset_t *next_free_chunk_offset_p = + 
(mem_pool_chunk_offset_t*) ( pool_p->pool_start_p + chunk_size * chunk_index ); + + *next_free_chunk_offset_p = chunk_index + 1; + } mem_check_pool( pool_p); } /* mem_pool_init */ @@ -126,57 +117,27 @@ mem_pool_init(mem_pool_state_t *pool_p, /**< pool */ uint8_t* mem_pool_alloc_chunk(mem_pool_state_t *pool_p) /**< pool */ { - mem_check_pool( pool_p); + mem_check_pool( pool_p); - if ( pool_p->free_chunks_number == 0 ) + if ( unlikely( pool_p->free_chunks_number == 0 ) ) { - return NULL; + JERRY_ASSERT( pool_p->first_free_chunk == pool_p->chunks_number ); + + return NULL; } - size_t chunk_index = 0; - size_t bitmap_block_index = 0; + JERRY_ASSERT( pool_p->first_free_chunk < pool_p->chunks_number ); - while ( chunk_index < pool_p->chunks_number ) - { - if ( ~pool_p->bitmap_p[ bitmap_block_index ] != 0 ) - { - break; - } else - { - bitmap_block_index++; - chunk_index += mem_bitmap_bits_in_block; - } - } + mem_pool_chunk_offset_t chunk_index = pool_p->first_free_chunk; + uint8_t *chunk_p = pool_p->pool_start_p + ( chunk_index << pool_p->chunk_size_log ); - if ( chunk_index >= pool_p->chunks_number ) - { - /* no free chunks */ - return NULL; - } + mem_pool_chunk_offset_t *next_free_chunk_offset_p = (mem_pool_chunk_offset_t*) chunk_p; + pool_p->first_free_chunk = *next_free_chunk_offset_p; + pool_p->free_chunks_number--; - /* found bitmap block with a zero bit */ + mem_check_pool( pool_p); - mword_t bit = 1; - for ( size_t bit_index = 0; - bit_index < mem_bitmap_bits_in_block && chunk_index < pool_p->chunks_number; - bit_index++, chunk_index++, bit <<= 1 ) - { - if ( ~pool_p->bitmap_p[ bitmap_block_index ] & bit ) - { - /* found free chunk */ - pool_p->bitmap_p[ bitmap_block_index ] |= bit; - - uint8_t *chunk_p = &pool_p->chunks_p[ chunk_index * pool_p->chunk_size ]; - pool_p->free_chunks_number--; - - mem_check_pool( pool_p); - - return chunk_p; - } - } - - /* that zero bit is at the end of the bitmap and doesn't correspond to any chunk */ - return NULL; + return 
chunk_p; } /* mem_pool_alloc_chunk */ /** @@ -184,25 +145,21 @@ mem_pool_alloc_chunk(mem_pool_state_t *pool_p) /**< pool */ */ void mem_pool_free_chunk(mem_pool_state_t *pool_p, /**< pool */ - uint8_t *chunk_p) /**< chunk pointer */ + uint8_t *chunk_p) /**< chunk pointer */ { JERRY_ASSERT( pool_p->free_chunks_number < pool_p->chunks_number ); - JERRY_ASSERT( chunk_p >= pool_p->chunks_p && chunk_p <= pool_p->chunks_p + pool_p->chunks_number * pool_p->chunk_size ); - JERRY_ASSERT( ( (uintptr_t) chunk_p - (uintptr_t) pool_p->chunks_p ) % pool_p->chunk_size == 0 ); + JERRY_ASSERT( chunk_p >= pool_p->pool_start_p && chunk_p <= pool_p->pool_start_p + pool_p->chunks_number * ( 1u << pool_p->chunk_size_log ) ); + JERRY_ASSERT( ( (uintptr_t) chunk_p - (uintptr_t) pool_p->pool_start_p ) % ( 1u << pool_p->chunk_size_log ) == 0 ); mem_check_pool( pool_p); - size_t chunk_index = (size_t) (chunk_p - pool_p->chunks_p) / pool_p->chunk_size; - size_t bitmap_block_index = chunk_index / mem_bitmap_bits_in_block; - size_t bitmap_bit_in_block = chunk_index % mem_bitmap_bits_in_block; - mword_t bit_mask = ( 1lu << bitmap_bit_in_block ); + const size_t chunk_byte_offset = (size_t) (chunk_p - pool_p->pool_start_p); + const mem_pool_chunk_offset_t chunk_index = (mem_pool_chunk_offset_t) (chunk_byte_offset >> pool_p->chunk_size_log); + mem_pool_chunk_offset_t *next_free_chunk_offset_p = (mem_pool_chunk_offset_t*) chunk_p; -#ifndef JERRY_NDEBUG - __memset( (uint8_t*) chunk_p, mem_pool_free_chunk_magic_num, pool_p->chunk_size); -#endif /* JERRY_NDEBUG */ - JERRY_ASSERT( pool_p->bitmap_p[ bitmap_block_index ] & bit_mask ); + *next_free_chunk_offset_p = pool_p->first_free_chunk; - pool_p->bitmap_p[ bitmap_block_index ] &= ~bit_mask; + pool_p->first_free_chunk = chunk_index; pool_p->free_chunks_number++; mem_check_pool( pool_p); @@ -216,35 +173,20 @@ mem_check_pool( mem_pool_state_t __unused *pool_p) /**< pool (unused #ifdef JERR { #ifndef JERRY_NDEBUG JERRY_ASSERT( pool_p->chunks_number != 0 ); 
+ JERRY_ASSERT( pool_p->chunks_number * ( 1u << pool_p->chunk_size_log ) <= pool_p->pool_size ); JERRY_ASSERT( pool_p->free_chunks_number <= pool_p->chunks_number ); - JERRY_ASSERT( (uint8_t*) pool_p->bitmap_p == pool_p->pool_start_p ); - JERRY_ASSERT( (uint8_t*) pool_p->chunks_p > pool_p->pool_start_p ); - - uint8_t free_chunk_template[ pool_p->chunk_size ]; - __memset( &free_chunk_template, mem_pool_free_chunk_magic_num, sizeof (free_chunk_template)); size_t met_free_chunks_number = 0; + mem_pool_chunk_offset_t chunk_index = pool_p->first_free_chunk; - for ( size_t chunk_index = 0, bitmap_block_index = 0; - chunk_index < pool_p->chunks_number; - bitmap_block_index++ ) + while ( chunk_index != pool_p->chunks_number ) { - JERRY_ASSERT( (uint8_t*) & pool_p->bitmap_p[ bitmap_block_index ] < pool_p->chunks_p ); + uint8_t *chunk_p = pool_p->pool_start_p + ( 1u << pool_p->chunk_size_log ) * chunk_index; + mem_pool_chunk_offset_t *next_free_chunk_offset_p = (mem_pool_chunk_offset_t*) chunk_p; - mword_t bitmap_block = pool_p->bitmap_p[ bitmap_block_index ]; + met_free_chunks_number++; - mword_t bit_mask = 1; - for ( size_t bitmap_bit_in_block = 0; - chunk_index < pool_p->chunks_number && bitmap_bit_in_block < mem_bitmap_bits_in_block; - bitmap_bit_in_block++, bit_mask <<= 1, chunk_index++ ) - { - if ( ~bitmap_block & bit_mask ) - { - met_free_chunks_number++; - - JERRY_ASSERT( __memcmp( &pool_p->chunks_p[ chunk_index * pool_p->chunk_size ], free_chunk_template, pool_p->chunk_size) == 0 ); - } - } + chunk_index = *next_free_chunk_offset_p; } JERRY_ASSERT( met_free_chunks_number == pool_p->free_chunks_number ); @@ -254,4 +196,4 @@ mem_check_pool( mem_pool_state_t __unused *pool_p) /**< pool (unused #ifdef JERR /** * @} * @} - */ \ No newline at end of file + */ diff --git a/src/liballocator/mem-pool.h b/src/liballocator/mem-pool.h index 493b7b645..47dd300ec 100644 --- a/src/liballocator/mem-pool.h +++ b/src/liballocator/mem-pool.h @@ -24,6 +24,8 @@ #ifndef JERRY_MEM_POOL_H 
#define JERRY_MEM_POOL_H +typedef uint32_t mem_pool_chunk_offset_t; + /** * State of a memory pool * @@ -34,16 +36,16 @@ typedef struct mem_pool_state_t { uint8_t *pool_start_p; /**< first address of pool space */ size_t pool_size; /**< pool space size */ - size_t chunk_size; /**< size of one chunk */ + size_t chunk_size_log; /**< log of size of one chunk */ - mword_t *bitmap_p; /**< bitmap - pool chunks' state */ - uint8_t *chunks_p; /**< chunks with data */ + mem_pool_chunk_offset_t chunks_number; /**< number of chunks */ + mem_pool_chunk_offset_t free_chunks_number; /**< number of free chunks */ - size_t chunks_number; /**< number of chunks */ - size_t free_chunks_number; /**< number of free chunks */ + mem_pool_chunk_offset_t first_free_chunk; /**< offset of first free chunk + from the beginning of the pool */ struct mem_pool_state_t *next_pool_p; /**< pointer to the next pool with same chunk size */ -} mem_pool_state_t; +} __attribute__((aligned(64))) mem_pool_state_t; extern void mem_pool_init(mem_pool_state_t *pool_p, size_t chunk_size, uint8_t *pool_start, size_t pool_size); extern uint8_t* mem_pool_alloc_chunk(mem_pool_state_t *pool_p); diff --git a/src/liballocator/mem-poolman.c b/src/liballocator/mem-poolman.c index 545ee31e7..ca5449069 100644 --- a/src/liballocator/mem-poolman.c +++ b/src/liballocator/mem-poolman.c @@ -93,31 +93,30 @@ mem_get_chunk_size( mem_pool_chunk_type_t chunk_type) /**< chunk type */ void mem_pools_init(void) { - for ( uint32_t i = 0; i < MEM_POOL_CHUNK_TYPE__COUNT; i++ ) + for ( uint32_t i = 0; i < MEM_POOL_CHUNK_TYPE__COUNT; i++ ) { - mem_pools[ i ] = NULL; - mem_free_chunks_number[ i ] = 0; + mem_pools[ i ] = NULL; + mem_free_chunks_number[ i ] = 0; } - /** - * Space, at least for four pool headers and a bitmap entry. - * - * TODO: Research. - */ - size_t pool_space_size = mem_heap_recommend_allocation_size( 4 * sizeof (mem_pool_state_t) + sizeof (mword_t) ); + /** + * Space, at least for four pool headers. 
+ * + * TODO: Research. + */ + size_t pool_space_size = mem_heap_recommend_allocation_size( 4 * sizeof (mem_pool_state_t) ); - mem_space_for_pool_for_pool_headers = mem_heap_alloc_block(pool_space_size, - MEM_HEAP_ALLOC_LONG_TERM); + mem_space_for_pool_for_pool_headers = mem_heap_alloc_block( pool_space_size, MEM_HEAP_ALLOC_LONG_TERM); - /* - * Get chunk type, checking that there is a type corresponding to specified size. - */ - const mem_pool_chunk_type_t chunk_type = mem_size_to_pool_chunk_type( sizeof(mem_pool_state_t)); - - mem_pool_init(&mem_pool_for_pool_headers, - mem_get_chunk_size( chunk_type), - mem_space_for_pool_for_pool_headers, - pool_space_size); + /* + * Get chunk type, checking that there is a type corresponding to specified size. + */ + const mem_pool_chunk_type_t chunk_type = mem_size_to_pool_chunk_type( sizeof(mem_pool_state_t)); + + mem_pool_init(&mem_pool_for_pool_headers, + mem_get_chunk_size( chunk_type), + mem_space_for_pool_for_pool_headers, + pool_space_size); mem_pools_stat_init(); } /* mem_pools_init */ @@ -238,7 +237,7 @@ mem_pools_free(mem_pool_chunk_type_t chunk_type, /**< the chunk type */ /** * Search for the pool containing specified chunk. 
*/ - while ( !( chunk_p >= pool_state->chunks_p + while ( !( chunk_p >= pool_state->pool_start_p && chunk_p <= pool_state->pool_start_p + pool_state->pool_size ) ) { prev_pool_state = pool_state; diff --git a/tests/unit/test_pool.c b/tests/unit/test_pool.c index ba13627f1..3c47ed0a1 100644 --- a/tests/unit/test_pool.c +++ b/tests/unit/test_pool.c @@ -18,6 +18,7 @@ #include "globals.h" #include "mem-allocator.h" #include "mem-pool.h" +#include "mem-poolman.h" extern void srand (unsigned int __seed); extern int rand (void); @@ -51,7 +52,7 @@ main( int __unused argc, mem_pool_state_t pool; uint8_t test_pool[test_pool_area_size] __attribute__((aligned(MEM_ALIGNMENT))); - const size_t chunk_size = MEM_ALIGNMENT * ( ( (size_t) rand() % test_max_chunk_size_divided_by_alignment ) + 1 ); + const size_t chunk_size = mem_get_chunk_size( rand() % MEM_POOL_CHUNK_TYPE__COUNT ); mem_pool_init( &pool, chunk_size, test_pool, sizeof (test_pool));