Style fixes in liballocator, libecmaobjects, libecmaoperations: space between function name and opening parenthesis, no space after opening parenthesis/before closing parenthesis.

This commit is contained in:
Ruben Ayrapetyan
2014-08-11 19:27:07 +04:00
parent e53be2b441
commit b02eefd4ae
16 changed files with 761 additions and 761 deletions
+20 -20
View File
@@ -26,42 +26,42 @@
/**
* Area for heap
*/
static uint8_t mem_heap_area[ MEM_HEAP_AREA_SIZE ] __attribute__((aligned(MEM_ALIGNMENT)));
static uint8_t mem_heap_area[ MEM_HEAP_AREA_SIZE ] __attribute__ ((aligned (MEM_ALIGNMENT)));
/**
 * Check that heap area is less than or equal to 64K.
*/
JERRY_STATIC_ASSERT( MEM_HEAP_AREA_SIZE <= 64 * 1024 );
JERRY_STATIC_ASSERT(MEM_HEAP_AREA_SIZE <= 64 * 1024);
/**
* Initialize memory allocators.
*/
void
mem_init( void)
mem_init (void)
{
mem_heap_init( mem_heap_area, sizeof (mem_heap_area));
mem_pools_init();
mem_heap_init (mem_heap_area, sizeof (mem_heap_area));
mem_pools_init ();
} /* mem_init */
/**
* Finalize memory allocators.
*/
void
mem_finalize( bool is_show_mem_stats) /**< show heap memory stats
mem_finalize (bool is_show_mem_stats) /**< show heap memory stats
before finalization? */
{
mem_pools_finalize();
mem_pools_finalize ();
if (is_show_mem_stats)
{
mem_heap_print( false, false, true);
mem_heap_print (false, false, true);
#ifdef MEM_STATS
mem_pools_stats_t stats;
mem_pools_get_stats( &stats);
mem_pools_get_stats (&stats);
__printf("Pools stats:\n");
__printf(" Chunk size: %u\n"
__printf ("Pools stats:\n");
__printf (" Chunk size: %u\n"
" Pools: %lu\n"
" Allocated chunks: %lu\n"
" Free chunks: %lu\n"
@@ -76,14 +76,14 @@ mem_finalize( bool is_show_mem_stats) /**< show heap memory stats
#endif /* MEM_STATS */
}
mem_heap_finalize();
mem_heap_finalize ();
} /* mem_finalize */
/**
* Get base pointer for allocation area.
*/
static uintptr_t
mem_get_base_pointer( void)
mem_get_base_pointer (void)
{
return (uintptr_t) mem_heap_area;
} /* mem_get_base_pointer */
@@ -92,20 +92,20 @@ mem_get_base_pointer( void)
* Compress pointer.
*/
uintptr_t
mem_compress_pointer(void *pointer) /**< pointer to compress */
mem_compress_pointer (void *pointer) /**< pointer to compress */
{
JERRY_ASSERT( pointer != NULL );
JERRY_ASSERT(pointer != NULL);
uintptr_t int_ptr = (uintptr_t) pointer;
JERRY_ASSERT(int_ptr % MEM_ALIGNMENT == 0);
int_ptr -= mem_get_base_pointer();
int_ptr -= mem_get_base_pointer ();
int_ptr >>= MEM_ALIGNMENT_LOG;
JERRY_ASSERT((int_ptr & ~((1u << MEM_HEAP_OFFSET_LOG) - 1)) == 0);
JERRY_ASSERT( int_ptr != MEM_COMPRESSED_POINTER_NULL );
JERRY_ASSERT(int_ptr != MEM_COMPRESSED_POINTER_NULL);
return int_ptr;
} /* mem_compress_pointer */
@@ -114,14 +114,14 @@ mem_compress_pointer(void *pointer) /**< pointer to compress */
* Decompress pointer.
*/
void*
mem_decompress_pointer(uintptr_t compressed_pointer) /**< pointer to decompress */
mem_decompress_pointer (uintptr_t compressed_pointer) /**< pointer to decompress */
{
JERRY_ASSERT( compressed_pointer != MEM_COMPRESSED_POINTER_NULL );
JERRY_ASSERT(compressed_pointer != MEM_COMPRESSED_POINTER_NULL);
uintptr_t int_ptr = compressed_pointer;
int_ptr <<= MEM_ALIGNMENT_LOG;
int_ptr += mem_get_base_pointer();
int_ptr += mem_get_base_pointer ();
return (void*) int_ptr;
} /* mem_decompress_pointer */
+5 -5
View File
@@ -40,13 +40,13 @@
/**
* Width of compressed memory pointer
*/
#define MEM_COMPRESSED_POINTER_WIDTH ( MEM_HEAP_OFFSET_LOG - MEM_ALIGNMENT_LOG )
#define MEM_COMPRESSED_POINTER_WIDTH (MEM_HEAP_OFFSET_LOG - MEM_ALIGNMENT_LOG)
extern void mem_init(void);
extern void mem_finalize(bool is_show_mem_stats);
extern void mem_init (void);
extern void mem_finalize (bool is_show_mem_stats);
extern uintptr_t mem_compress_pointer(void *pointer);
extern void* mem_decompress_pointer(uintptr_t compressed_pointer);
extern uintptr_t mem_compress_pointer (void *pointer);
extern void* mem_decompress_pointer (uintptr_t compressed_pointer);
#endif /* !JERRY_MEM_ALLOCATOR_H */
+194 -194
View File
@@ -36,19 +36,19 @@
#ifndef JERRY_NVALGRIND
# include "memcheck.h"
# define VALGRIND_NOACCESS_STRUCT( s) (void)VALGRIND_MAKE_MEM_NOACCESS( ( s ), sizeof( *( s ) ) )
# define VALGRIND_UNDEFINED_STRUCT( s) (void)VALGRIND_MAKE_MEM_UNDEFINED( ( s ), sizeof( *( s ) ) )
# define VALGRIND_DEFINED_STRUCT( s) (void)VALGRIND_MAKE_MEM_DEFINED( ( s ), sizeof( *( s ) ) )
# define VALGRIND_NOACCESS_SPACE( p, s) (void)VALGRIND_MAKE_MEM_NOACCESS( ( p ), ( s ) )
# define VALGRIND_UNDEFINED_SPACE( p, s) (void)VALGRIND_MAKE_MEM_UNDEFINED( ( p ), ( s ) )
# define VALGRIND_DEFINED_SPACE( p, s) (void)VALGRIND_MAKE_MEM_DEFINED( ( p ), ( s ) )
# define VALGRIND_NOACCESS_STRUCT(s) (void)VALGRIND_MAKE_MEM_NOACCESS((s), sizeof (*(s)))
# define VALGRIND_UNDEFINED_STRUCT(s) (void)VALGRIND_MAKE_MEM_UNDEFINED((s), sizeof (*(s)))
# define VALGRIND_DEFINED_STRUCT(s) (void)VALGRIND_MAKE_MEM_DEFINED((s), sizeof (*(s)))
# define VALGRIND_NOACCESS_SPACE(p, s) (void)VALGRIND_MAKE_MEM_NOACCESS((p), (s))
# define VALGRIND_UNDEFINED_SPACE(p, s) (void)VALGRIND_MAKE_MEM_UNDEFINED((p), (s))
# define VALGRIND_DEFINED_SPACE(p, s) (void)VALGRIND_MAKE_MEM_DEFINED((p), (s))
#else /* !JERRY_NVALGRIND */
# define VALGRIND_NOACCESS_STRUCT( s)
# define VALGRIND_UNDEFINED_STRUCT( s)
# define VALGRIND_DEFINED_STRUCT( s)
# define VALGRIND_NOACCESS_SPACE( p, s)
# define VALGRIND_UNDEFINED_SPACE( p, s)
# define VALGRIND_DEFINED_SPACE( p, s)
# define VALGRIND_NOACCESS_STRUCT(s)
# define VALGRIND_UNDEFINED_STRUCT(s)
# define VALGRIND_DEFINED_STRUCT(s)
# define VALGRIND_NOACCESS_SPACE(p, s)
# define VALGRIND_UNDEFINED_SPACE(p, s)
# define VALGRIND_DEFINED_SPACE(p, s)
#endif /* !JERRY_NVALGRIND */
/**
@@ -95,12 +95,12 @@ typedef struct mem_block_header_t
/**
* Chunk should have enough space for block header
*/
JERRY_STATIC_ASSERT( MEM_HEAP_CHUNK_SIZE >= sizeof (mem_block_header_t) );
JERRY_STATIC_ASSERT(MEM_HEAP_CHUNK_SIZE >= sizeof (mem_block_header_t));
/**
* Chunk size should satisfy the required alignment value
*/
JERRY_STATIC_ASSERT( MEM_HEAP_CHUNK_SIZE % MEM_ALIGNMENT == 0 );
JERRY_STATIC_ASSERT(MEM_HEAP_CHUNK_SIZE % MEM_ALIGNMENT == 0);
/**
* Description of heap state
@@ -118,16 +118,16 @@ typedef struct
*/
mem_heap_state_t mem_heap;
static size_t mem_get_block_chunks_count( const mem_block_header_t *block_header_p);
static size_t mem_get_block_data_space_size( const mem_block_header_t *block_header_p);
static size_t mem_get_block_chunks_count_from_data_size( size_t block_allocated_size);
static size_t mem_get_block_chunks_count (const mem_block_header_t *block_header_p);
static size_t mem_get_block_data_space_size (const mem_block_header_t *block_header_p);
static size_t mem_get_block_chunks_count_from_data_size (size_t block_allocated_size);
static void mem_init_block_header( uint8_t *first_chunk_p,
static void mem_init_block_header (uint8_t *first_chunk_p,
size_t size_in_chunks,
mem_block_state_t block_state,
mem_block_header_t *prev_block_p,
mem_block_header_t *next_block_p);
static void mem_check_heap( void);
static void mem_check_heap (void);
#ifdef MEM_STATS
/**
@@ -135,17 +135,17 @@ static void mem_check_heap( void);
*/
static mem_heap_stats_t mem_heap_stats;
static void mem_heap_stat_init( void);
static void mem_heap_stat_alloc_block( mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block( mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block_split( void);
static void mem_heap_stat_free_block_merge( void);
static void mem_heap_stat_init (void);
static void mem_heap_stat_alloc_block (mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block (mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block_split (void);
static void mem_heap_stat_free_block_merge (void);
#else /* !MEM_STATS */
# define mem_heap_stat_init()
# define mem_heap_stat_alloc_block( v)
# define mem_heap_stat_free_block( v)
# define mem_heap_stat_free_block_split()
# define mem_heap_stat_free_block_merge()
# define mem_heap_stat_init ()
# define mem_heap_stat_alloc_block (v)
# define mem_heap_stat_free_block (v)
# define mem_heap_stat_free_block_split ()
# define mem_heap_stat_free_block_merge ()
#endif /* !MEM_STATS */
/**
@@ -154,23 +154,23 @@ static void mem_heap_stat_free_block_merge( void);
* @return chunks count
*/
static size_t
mem_get_block_chunks_count( const mem_block_header_t *block_header_p) /**< block header */
mem_get_block_chunks_count (const mem_block_header_t *block_header_p) /**< block header */
{
JERRY_ASSERT( block_header_p != NULL );
JERRY_ASSERT(block_header_p != NULL);
const mem_block_header_t *next_block_p = block_header_p->neighbours[ MEM_DIRECTION_NEXT ];
size_t dist_till_block_end;
if ( next_block_p == NULL )
if (next_block_p == NULL)
{
dist_till_block_end = (size_t) ( mem_heap.heap_start + mem_heap.heap_size - (uint8_t*) block_header_p );
dist_till_block_end = (size_t) (mem_heap.heap_start + mem_heap.heap_size - (uint8_t*) block_header_p);
} else
{
dist_till_block_end = (size_t) ( (uint8_t*) next_block_p - (uint8_t*) block_header_p );
dist_till_block_end = (size_t) ((uint8_t*) next_block_p - (uint8_t*) block_header_p);
}
JERRY_ASSERT( dist_till_block_end <= mem_heap.heap_size );
JERRY_ASSERT( dist_till_block_end % MEM_HEAP_CHUNK_SIZE == 0 );
JERRY_ASSERT(dist_till_block_end <= mem_heap.heap_size);
JERRY_ASSERT(dist_till_block_end % MEM_HEAP_CHUNK_SIZE == 0);
return dist_till_block_end / MEM_HEAP_CHUNK_SIZE;
} /* mem_get_block_chunks_count */
@@ -181,9 +181,9 @@ mem_get_block_chunks_count( const mem_block_header_t *block_header_p) /**< block
* @return size of block area that can be used to store data
*/
static size_t
mem_get_block_data_space_size( const mem_block_header_t *block_header_p) /**< block header */
mem_get_block_data_space_size (const mem_block_header_t *block_header_p) /**< block header */
{
return mem_get_block_chunks_count( block_header_p) * MEM_HEAP_CHUNK_SIZE - sizeof (mem_block_header_t);
return mem_get_block_chunks_count (block_header_p) * MEM_HEAP_CHUNK_SIZE - sizeof (mem_block_header_t);
} /* mem_get_block_data_space_size */
/**
@@ -192,30 +192,30 @@ mem_get_block_data_space_size( const mem_block_header_t *block_header_p) /**< bl
* @return chunks count
*/
static size_t
mem_get_block_chunks_count_from_data_size( size_t block_allocated_size) /**< size of block's allocated area */
mem_get_block_chunks_count_from_data_size (size_t block_allocated_size) /**< size of block's allocated area */
{
return JERRY_ALIGNUP( sizeof (mem_block_header_t) + block_allocated_size, MEM_HEAP_CHUNK_SIZE) / MEM_HEAP_CHUNK_SIZE;
return JERRY_ALIGNUP(sizeof (mem_block_header_t) + block_allocated_size, MEM_HEAP_CHUNK_SIZE) / MEM_HEAP_CHUNK_SIZE;
} /* mem_get_block_chunks_count_from_data_size */
/**
* Startup initialization of heap
*/
void
mem_heap_init(uint8_t *heap_start, /**< first address of heap space */
mem_heap_init (uint8_t *heap_start, /**< first address of heap space */
size_t heap_size) /**< heap space size */
{
JERRY_ASSERT( heap_start != NULL );
JERRY_ASSERT( heap_size != 0 );
JERRY_ASSERT( heap_size % MEM_HEAP_CHUNK_SIZE == 0 );
JERRY_ASSERT( (uintptr_t) heap_start % MEM_ALIGNMENT == 0);
JERRY_ASSERT( heap_size <= ( 1u << MEM_HEAP_OFFSET_LOG ) );
JERRY_ASSERT(heap_start != NULL);
JERRY_ASSERT(heap_size != 0);
JERRY_ASSERT(heap_size % MEM_HEAP_CHUNK_SIZE == 0);
JERRY_ASSERT((uintptr_t) heap_start % MEM_ALIGNMENT == 0);
JERRY_ASSERT(heap_size <= (1u << MEM_HEAP_OFFSET_LOG));
mem_heap.heap_start = heap_start;
mem_heap.heap_size = heap_size;
VALGRIND_NOACCESS_SPACE( heap_start, heap_size);
VALGRIND_NOACCESS_SPACE(heap_start, heap_size);
mem_init_block_header(mem_heap.heap_start,
mem_init_block_header (mem_heap.heap_start,
0,
MEM_BLOCK_FREE,
NULL,
@@ -224,30 +224,30 @@ mem_heap_init(uint8_t *heap_start, /**< first address of heap space */
mem_heap.first_block_p = (mem_block_header_t*) mem_heap.heap_start;
mem_heap.last_block_p = mem_heap.first_block_p;
mem_heap_stat_init();
mem_heap_stat_init ();
} /* mem_heap_init */
/**
* Finalize heap
*/
void
mem_heap_finalize(void)
mem_heap_finalize (void)
{
VALGRIND_DEFINED_SPACE( mem_heap.heap_start, mem_heap.heap_size);
VALGRIND_DEFINED_SPACE(mem_heap.heap_start, mem_heap.heap_size);
JERRY_ASSERT( mem_heap.first_block_p == mem_heap.last_block_p );
JERRY_ASSERT( mem_heap.first_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK );
JERRY_ASSERT(mem_heap.first_block_p == mem_heap.last_block_p);
JERRY_ASSERT(mem_heap.first_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK);
VALGRIND_NOACCESS_SPACE( mem_heap.heap_start, mem_heap.heap_size);
VALGRIND_NOACCESS_SPACE(mem_heap.heap_start, mem_heap.heap_size);
__memset( &mem_heap, 0, sizeof(mem_heap));
__memset (&mem_heap, 0, sizeof (mem_heap));
} /* mem_heap_finalize */
/**
* Initialize block header
*/
static void
mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first chunk to use for the block */
mem_init_block_header (uint8_t *first_chunk_p, /**< address of the first chunk to use for the block */
size_t allocated_bytes, /**< size of block's allocated area */
mem_block_state_t block_state, /**< state of the block (allocated or free) */
mem_block_header_t *prev_block_p, /**< previous block */
@@ -255,13 +255,13 @@ mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first
{
mem_block_header_t *block_header_p = (mem_block_header_t*) first_chunk_p;
VALGRIND_UNDEFINED_STRUCT( block_header_p);
VALGRIND_UNDEFINED_STRUCT(block_header_p);
if ( block_state == MEM_BLOCK_FREE )
if (block_state == MEM_BLOCK_FREE)
{
block_header_p->magic_num = MEM_MAGIC_NUM_OF_FREE_BLOCK;
JERRY_ASSERT( allocated_bytes == 0 );
JERRY_ASSERT(allocated_bytes == 0);
} else
{
block_header_p->magic_num = MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK;
@@ -271,9 +271,9 @@ mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first
block_header_p->neighbours[ MEM_DIRECTION_NEXT ] = next_block_p;
block_header_p->allocated_bytes = allocated_bytes;
JERRY_ASSERT( allocated_bytes <= mem_get_block_data_space_size( block_header_p) );
JERRY_ASSERT(allocated_bytes <= mem_get_block_data_space_size (block_header_p));
VALGRIND_NOACCESS_STRUCT( block_header_p);
VALGRIND_NOACCESS_STRUCT(block_header_p);
} /* mem_init_block_header */
/**
@@ -290,15 +290,15 @@ mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first
* NULL - if there is not enough memory.
*/
uint8_t*
mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to allocate in bytes */
mem_heap_alloc_block (size_t size_in_bytes, /**< size of region to allocate in bytes */
mem_heap_alloc_term_t alloc_term) /**< expected allocation term */
{
mem_block_header_t *block_p;
mem_direction_t direction;
mem_check_heap();
mem_check_heap ();
if ( alloc_term == MEM_HEAP_ALLOC_SHORT_TERM )
if (alloc_term == MEM_HEAP_ALLOC_SHORT_TERM)
{
block_p = mem_heap.first_block_p;
direction = MEM_DIRECTION_NEXT;
@@ -309,49 +309,49 @@ mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to all
}
/* searching for appropriate block */
while ( block_p != NULL )
while (block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
if ( block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK )
if (block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK)
{
if ( mem_get_block_data_space_size( block_p) >= size_in_bytes )
if (mem_get_block_data_space_size (block_p) >= size_in_bytes)
{
break;
}
} else
{
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
}
mem_block_header_t *next_block_p = block_p->neighbours[ direction ];
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
block_p = next_block_p;
}
if ( block_p == NULL )
if (block_p == NULL)
{
/* not enough free space */
return NULL;
}
/* appropriate block found, allocating space */
size_t new_block_size_in_chunks = mem_get_block_chunks_count_from_data_size( size_in_bytes);
size_t found_block_size_in_chunks = mem_get_block_chunks_count( block_p);
size_t new_block_size_in_chunks = mem_get_block_chunks_count_from_data_size (size_in_bytes);
size_t found_block_size_in_chunks = mem_get_block_chunks_count (block_p);
JERRY_ASSERT( new_block_size_in_chunks <= found_block_size_in_chunks );
JERRY_ASSERT(new_block_size_in_chunks <= found_block_size_in_chunks);
mem_block_header_t *prev_block_p = block_p->neighbours[ MEM_DIRECTION_PREV ];
mem_block_header_t *next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
if ( new_block_size_in_chunks < found_block_size_in_chunks )
if (new_block_size_in_chunks < found_block_size_in_chunks)
{
mem_heap_stat_free_block_split();
mem_heap_stat_free_block_split ();
uint8_t *new_free_block_first_chunk_p = (uint8_t*) block_p + new_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE;
mem_init_block_header(new_free_block_first_chunk_p,
mem_init_block_header (new_free_block_first_chunk_p,
0,
MEM_BLOCK_FREE,
block_p /* there we will place new allocated block */,
@@ -359,43 +359,43 @@ mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to all
mem_block_header_t *new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p;
if ( next_block_p == NULL )
if (next_block_p == NULL)
{
mem_heap.last_block_p = new_free_block_p;
}
else
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = (mem_block_header_t*) new_free_block_first_chunk_p;
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
next_block_p = new_free_block_p;
}
mem_init_block_header((uint8_t*) block_p,
mem_init_block_header ((uint8_t*) block_p,
size_in_bytes,
MEM_BLOCK_ALLOCATED,
prev_block_p,
next_block_p);
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
mem_heap_stat_alloc_block( block_p);
mem_heap_stat_alloc_block (block_p);
JERRY_ASSERT( mem_get_block_data_space_size( block_p) >= size_in_bytes );
JERRY_ASSERT(mem_get_block_data_space_size (block_p) >= size_in_bytes);
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
mem_check_heap();
mem_check_heap ();
/* return data space beginning address */
uint8_t *data_space_p = (uint8_t*) (block_p + 1);
JERRY_ASSERT( (uintptr_t) data_space_p % MEM_ALIGNMENT == 0);
JERRY_ASSERT((uintptr_t) data_space_p % MEM_ALIGNMENT == 0);
VALGRIND_UNDEFINED_SPACE( data_space_p, size_in_bytes);
VALGRIND_UNDEFINED_SPACE(data_space_p, size_in_bytes);
return data_space_p;
} /* mem_heap_alloc_block */
@@ -404,58 +404,58 @@ mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to all
* Free the memory block.
*/
void
mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of the block */
mem_heap_free_block (uint8_t *ptr) /**< pointer to beginning of data space of the block */
{
/* checking that ptr points to the heap */
JERRY_ASSERT( ptr >= mem_heap.heap_start
&& ptr <= mem_heap.heap_start + mem_heap.heap_size );
JERRY_ASSERT(ptr >= mem_heap.heap_start
&& ptr <= mem_heap.heap_start + mem_heap.heap_size);
mem_check_heap();
mem_check_heap ();
mem_block_header_t *block_p = (mem_block_header_t*) ptr - 1;
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
mem_block_header_t *prev_block_p = block_p->neighbours[ MEM_DIRECTION_PREV ];
mem_block_header_t *next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
mem_heap_stat_free_block( block_p);
mem_heap_stat_free_block (block_p);
VALGRIND_NOACCESS_SPACE( ptr, block_p->allocated_bytes);
VALGRIND_NOACCESS_SPACE(ptr, block_p->allocated_bytes);
/* checking magic nums that are neighbour to data space */
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
if ( next_block_p != NULL )
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
JERRY_ASSERT( next_block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK
|| next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK );
JERRY_ASSERT(next_block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK
|| next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK);
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
block_p->magic_num = MEM_MAGIC_NUM_OF_FREE_BLOCK;
if ( next_block_p != NULL )
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
if (next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK )
if (next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK)
{
/* merge with the next block */
mem_heap_stat_free_block_merge();
mem_heap_stat_free_block_merge ();
mem_block_header_t *next_next_block_p = next_block_p->neighbours[ MEM_DIRECTION_NEXT ];
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
next_block_p = next_next_block_p;
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
block_p->neighbours[ MEM_DIRECTION_NEXT ] = next_block_p;
if ( next_block_p != NULL )
if (next_block_p != NULL)
{
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = block_p;
}
@@ -465,26 +465,26 @@ mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of th
}
}
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
if ( prev_block_p != NULL )
if (prev_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( prev_block_p);
VALGRIND_DEFINED_STRUCT(prev_block_p);
if ( prev_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK )
if (prev_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK)
{
/* merge with the previous block */
mem_heap_stat_free_block_merge();
mem_heap_stat_free_block_merge ();
prev_block_p->neighbours[ MEM_DIRECTION_NEXT ] = next_block_p;
if ( next_block_p != NULL )
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = block_p->neighbours[ MEM_DIRECTION_PREV ];
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
else
{
@@ -492,12 +492,12 @@ mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of th
}
}
VALGRIND_NOACCESS_STRUCT( prev_block_p);
VALGRIND_NOACCESS_STRUCT(prev_block_p);
}
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
mem_check_heap();
mem_check_heap ();
} /* mem_heap_free_block */
/**
@@ -506,10 +506,10 @@ mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of th
* @return recommended allocation size
*/
size_t
mem_heap_recommend_allocation_size( size_t minimum_allocation_size) /**< minimum allocation size */
mem_heap_recommend_allocation_size (size_t minimum_allocation_size) /**< minimum allocation size */
{
size_t minimum_allocation_size_with_block_header = minimum_allocation_size + sizeof (mem_block_header_t);
size_t heap_chunk_aligned_allocation_size = JERRY_ALIGNUP( minimum_allocation_size_with_block_header, MEM_HEAP_CHUNK_SIZE);
size_t heap_chunk_aligned_allocation_size = JERRY_ALIGNUP(minimum_allocation_size_with_block_header, MEM_HEAP_CHUNK_SIZE);
return heap_chunk_aligned_allocation_size - sizeof (mem_block_header_t);
} /* mem_heap_recommend_allocation_size */
@@ -518,59 +518,59 @@ mem_heap_recommend_allocation_size( size_t minimum_allocation_size) /**< minimum
* Print heap
*/
void
mem_heap_print( bool dump_block_headers, /**< print block headers */
mem_heap_print (bool dump_block_headers, /**< print block headers */
bool dump_block_data, /**< print block with data (true)
or print only block header (false) */
bool dump_stats) /**< print heap stats */
{
mem_check_heap();
mem_check_heap ();
JERRY_ASSERT( !dump_block_data || dump_block_headers );
JERRY_ASSERT(!dump_block_data || dump_block_headers);
if ( dump_block_headers )
if (dump_block_headers)
{
__printf("Heap: start=%p size=%lu, first block->%p, last block->%p\n",
__printf ("Heap: start=%p size=%lu, first block->%p, last block->%p\n",
mem_heap.heap_start,
mem_heap.heap_size,
(void*) mem_heap.first_block_p,
(void*) mem_heap.last_block_p);
for ( mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
for (mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
block_p != NULL;
block_p = next_block_p )
block_p = next_block_p)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
__printf("Block (%p): magic num=0x%08x, size in chunks=%lu, previous block->%p next block->%p\n",
__printf ("Block (%p): magic num=0x%08x, size in chunks=%lu, previous block->%p next block->%p\n",
(void*) block_p,
block_p->magic_num,
mem_get_block_chunks_count( block_p),
mem_get_block_chunks_count (block_p),
(void*) block_p->neighbours[ MEM_DIRECTION_PREV ],
(void*) block_p->neighbours[ MEM_DIRECTION_NEXT ]);
if ( dump_block_data )
if (dump_block_data)
{
uint8_t *block_data_p = (uint8_t*) (block_p + 1);
for ( uint32_t offset = 0;
offset < mem_get_block_data_space_size( block_p);
offset++ )
for (uint32_t offset = 0;
offset < mem_get_block_data_space_size (block_p);
offset++)
{
__printf("%02x ", block_data_p[ offset ]);
__printf ("%02x ", block_data_p[ offset ]);
}
__printf("\n");
__printf ("\n");
}
next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
}
}
#ifdef MEM_STATS
if ( dump_stats )
if (dump_stats)
{
__printf("Heap stats:\n");
__printf(" Heap size = %lu bytes\n"
__printf ("Heap stats:\n");
__printf (" Heap size = %lu bytes\n"
" Chunk size = %lu bytes\n"
" Blocks count = %lu\n"
" Allocated blocks count = %lu\n"
@@ -595,80 +595,80 @@ mem_heap_print( bool dump_block_headers, /**< print block headers */
}
#endif /* MEM_STATS */
__printf("\n");
__printf ("\n");
} /* mem_heap_print */
/**
* Check heap consistency
*/
static void
mem_check_heap( void)
mem_check_heap (void)
{
#ifndef JERRY_NDEBUG
JERRY_ASSERT( (uint8_t*) mem_heap.first_block_p == mem_heap.heap_start );
JERRY_ASSERT( mem_heap.heap_size % MEM_HEAP_CHUNK_SIZE == 0 );
JERRY_ASSERT((uint8_t*) mem_heap.first_block_p == mem_heap.heap_start);
JERRY_ASSERT(mem_heap.heap_size % MEM_HEAP_CHUNK_SIZE == 0);
bool is_last_block_was_met = false;
size_t chunk_sizes_sum = 0;
for ( mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
for (mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
block_p != NULL;
block_p = next_block_p )
block_p = next_block_p)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
chunk_sizes_sum += mem_get_block_chunks_count( block_p);
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
chunk_sizes_sum += mem_get_block_chunks_count (block_p);
next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
if ( block_p == mem_heap.last_block_p )
if (block_p == mem_heap.last_block_p)
{
is_last_block_was_met = true;
JERRY_ASSERT( next_block_p == NULL );
JERRY_ASSERT(next_block_p == NULL);
} else
{
JERRY_ASSERT( next_block_p != NULL );
JERRY_ASSERT(next_block_p != NULL);
}
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
}
JERRY_ASSERT( chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size );
JERRY_ASSERT( is_last_block_was_met );
JERRY_ASSERT(chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size);
JERRY_ASSERT(is_last_block_was_met);
bool is_first_block_was_met = false;
chunk_sizes_sum = 0;
for ( mem_block_header_t *block_p = mem_heap.last_block_p, *prev_block_p;
for (mem_block_header_t *block_p = mem_heap.last_block_p, *prev_block_p;
block_p != NULL;
block_p = prev_block_p )
block_p = prev_block_p)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
chunk_sizes_sum += mem_get_block_chunks_count( block_p);
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
chunk_sizes_sum += mem_get_block_chunks_count (block_p);
prev_block_p = block_p->neighbours[ MEM_DIRECTION_PREV ];
if ( block_p == mem_heap.first_block_p )
if (block_p == mem_heap.first_block_p)
{
is_first_block_was_met = true;
JERRY_ASSERT( prev_block_p == NULL );
JERRY_ASSERT(prev_block_p == NULL);
} else
{
JERRY_ASSERT( prev_block_p != NULL );
JERRY_ASSERT(prev_block_p != NULL);
}
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
}
JERRY_ASSERT( chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size );
JERRY_ASSERT( is_first_block_was_met );
JERRY_ASSERT(chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size);
JERRY_ASSERT(is_first_block_was_met);
#endif /* !JERRY_NDEBUG */
} /* mem_check_heap */
@@ -677,7 +677,7 @@ mem_check_heap( void)
* Get heap memory usage statistics
*/
void
mem_heap_get_stats( mem_heap_stats_t *out_heap_stats_p) /**< out: heap stats */
mem_heap_get_stats (mem_heap_stats_t *out_heap_stats_p) /**< out: heap stats */
{
*out_heap_stats_p = mem_heap_stats;
} /* mem_heap_get_stats */
@@ -686,9 +686,9 @@ mem_heap_get_stats( mem_heap_stats_t *out_heap_stats_p) /**< out: heap stats */
 * Initialize heap memory usage statistics account structure
*/
static void
mem_heap_stat_init()
mem_heap_stat_init ()
{
__memset( &mem_heap_stats, 0, sizeof (mem_heap_stats));
__memset (&mem_heap_stats, 0, sizeof (mem_heap_stats));
mem_heap_stats.size = mem_heap.heap_size;
mem_heap_stats.blocks = 1;
@@ -698,11 +698,11 @@ mem_heap_stat_init()
* Account block allocation
*/
static void
mem_heap_stat_alloc_block( mem_block_header_t *block_header_p) /**< allocated block */
mem_heap_stat_alloc_block (mem_block_header_t *block_header_p) /**< allocated block */
{
JERRY_ASSERT( block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
JERRY_ASSERT(block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
const size_t chunks = mem_get_block_chunks_count( block_header_p);
const size_t chunks = mem_get_block_chunks_count (block_header_p);
const size_t bytes = block_header_p->allocated_bytes;
const size_t waste_bytes = chunks * MEM_HEAP_CHUNK_SIZE - bytes;
@@ -711,51 +711,51 @@ mem_heap_stat_alloc_block( mem_block_header_t *block_header_p) /**< allocated bl
mem_heap_stats.allocated_bytes += bytes;
mem_heap_stats.waste_bytes += waste_bytes;
if ( mem_heap_stats.allocated_blocks > mem_heap_stats.peak_allocated_blocks )
if (mem_heap_stats.allocated_blocks > mem_heap_stats.peak_allocated_blocks)
{
mem_heap_stats.peak_allocated_blocks = mem_heap_stats.allocated_blocks;
}
if ( mem_heap_stats.allocated_chunks > mem_heap_stats.peak_allocated_chunks )
if (mem_heap_stats.allocated_chunks > mem_heap_stats.peak_allocated_chunks)
{
mem_heap_stats.peak_allocated_chunks = mem_heap_stats.allocated_chunks;
}
if ( mem_heap_stats.allocated_bytes > mem_heap_stats.peak_allocated_bytes )
if (mem_heap_stats.allocated_bytes > mem_heap_stats.peak_allocated_bytes)
{
mem_heap_stats.peak_allocated_bytes = mem_heap_stats.allocated_bytes;
}
if ( mem_heap_stats.waste_bytes > mem_heap_stats.peak_waste_bytes )
if (mem_heap_stats.waste_bytes > mem_heap_stats.peak_waste_bytes)
{
mem_heap_stats.peak_waste_bytes = mem_heap_stats.waste_bytes;
}
JERRY_ASSERT( mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks );
JERRY_ASSERT( mem_heap_stats.allocated_bytes <= mem_heap_stats.size );
JERRY_ASSERT( mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE );
JERRY_ASSERT(mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks);
JERRY_ASSERT(mem_heap_stats.allocated_bytes <= mem_heap_stats.size);
JERRY_ASSERT(mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE);
} /* mem_heap_stat_alloc_block */
/**
* Account block freeing
*/
static void
mem_heap_stat_free_block( mem_block_header_t *block_header_p) /**< block to be freed */
mem_heap_stat_free_block (mem_block_header_t *block_header_p) /**< block to be freed */
{
JERRY_ASSERT( block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
JERRY_ASSERT(block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
const size_t chunks = mem_get_block_chunks_count( block_header_p);
const size_t chunks = mem_get_block_chunks_count (block_header_p);
const size_t bytes = block_header_p->allocated_bytes;
const size_t waste_bytes = chunks * MEM_HEAP_CHUNK_SIZE - bytes;
JERRY_ASSERT( mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks );
JERRY_ASSERT( mem_heap_stats.allocated_bytes <= mem_heap_stats.size );
JERRY_ASSERT( mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE );
JERRY_ASSERT(mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks);
JERRY_ASSERT(mem_heap_stats.allocated_bytes <= mem_heap_stats.size);
JERRY_ASSERT(mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE);
JERRY_ASSERT( mem_heap_stats.allocated_blocks >= 1 );
JERRY_ASSERT( mem_heap_stats.allocated_chunks >= chunks );
JERRY_ASSERT( mem_heap_stats.allocated_bytes >= bytes );
JERRY_ASSERT( mem_heap_stats.waste_bytes >= waste_bytes );
JERRY_ASSERT(mem_heap_stats.allocated_blocks >= 1);
JERRY_ASSERT(mem_heap_stats.allocated_chunks >= chunks);
JERRY_ASSERT(mem_heap_stats.allocated_bytes >= bytes);
JERRY_ASSERT(mem_heap_stats.waste_bytes >= waste_bytes);
mem_heap_stats.allocated_blocks--;
mem_heap_stats.allocated_chunks -= chunks;
@@ -767,7 +767,7 @@ mem_heap_stat_free_block( mem_block_header_t *block_header_p) /**< block to be f
* Account free block split
*/
static void
mem_heap_stat_free_block_split( void)
mem_heap_stat_free_block_split (void)
{
mem_heap_stats.blocks++;
} /* mem_heap_stat_free_block_split */
@@ -776,7 +776,7 @@ mem_heap_stat_free_block_split( void)
* Account free block merge
*/
static void
mem_heap_stat_free_block_merge( void)
mem_heap_stat_free_block_merge (void)
{
mem_heap_stats.blocks--;
} /* mem_heap_stat_free_block_merge */
+7 -7
View File
@@ -38,12 +38,12 @@ typedef enum {
MEM_HEAP_ALLOC_LONG_TERM /**< allocated region most likely will not be freed soon */
} mem_heap_alloc_term_t;
extern void mem_heap_init(uint8_t *heap_start, size_t heap_size);
extern void mem_heap_finalize(void);
extern uint8_t* mem_heap_alloc_block(size_t size_in_bytes, mem_heap_alloc_term_t alloc_term);
extern void mem_heap_free_block(uint8_t *ptr);
extern size_t mem_heap_recommend_allocation_size(size_t minimum_allocation_size);
extern void mem_heap_print(bool dump_block_headers, bool dump_block_data, bool dump_stats);
extern void mem_heap_init (uint8_t *heap_start, size_t heap_size);
extern void mem_heap_finalize (void);
extern uint8_t* mem_heap_alloc_block (size_t size_in_bytes, mem_heap_alloc_term_t alloc_term);
extern void mem_heap_free_block (uint8_t *ptr);
extern size_t mem_heap_recommend_allocation_size (size_t minimum_allocation_size);
extern void mem_heap_print (bool dump_block_headers, bool dump_block_data, bool dump_stats);
#ifdef MEM_STATS
/**
@@ -67,7 +67,7 @@ typedef struct {
size_t peak_waste_bytes; /**< peak bytes waste */
} mem_heap_stats_t;
extern void mem_heap_get_stats(mem_heap_stats_t *out_heap_stats_p);
extern void mem_heap_get_stats (mem_heap_stats_t *out_heap_stats_p);
#endif /* MEM_STATS */
/**
+35 -35
View File
@@ -31,13 +31,13 @@
#include "mem-allocator.h"
#include "mem-pool.h"
static void mem_check_pool( mem_pool_state_t *pool_p);
static void mem_check_pool (mem_pool_state_t *pool_p);
/**
* Get address of pool chunk with specified index
*/
#define MEM_POOL_CHUNK_ADDRESS( pool_header_p, chunk_index) ( (uint8_t*) ( MEM_POOL_SPACE_START( pool_p) + \
MEM_POOL_CHUNK_SIZE * chunk_index ) )
#define MEM_POOL_CHUNK_ADDRESS(pool_header_p, chunk_index) ((uint8_t*) (MEM_POOL_SPACE_START(pool_p) + \
MEM_POOL_CHUNK_SIZE * chunk_index))
/**
* Initialization of memory pool.
@@ -46,20 +46,20 @@ static void mem_check_pool( mem_pool_state_t *pool_p);
* Part of pool space will be used for bitmap and the rest will store chunks.
*/
void
mem_pool_init(mem_pool_state_t *pool_p, /**< pool */
mem_pool_init (mem_pool_state_t *pool_p, /**< pool */
size_t pool_size) /**< pool size */
{
JERRY_ASSERT( pool_p != NULL );
JERRY_ASSERT( (size_t)MEM_POOL_SPACE_START( pool_p) % MEM_ALIGNMENT == 0);
JERRY_ASSERT(pool_p != NULL);
JERRY_ASSERT((size_t)MEM_POOL_SPACE_START(pool_p) % MEM_ALIGNMENT == 0);
JERRY_STATIC_ASSERT( MEM_POOL_CHUNK_SIZE % MEM_ALIGNMENT == 0 );
JERRY_STATIC_ASSERT( MEM_POOL_MAX_CHUNKS_NUMBER_LOG <= sizeof(mem_pool_chunk_index_t) * JERRY_BITSINBYTE );
JERRY_ASSERT( sizeof(mem_pool_chunk_index_t) <= MEM_POOL_CHUNK_SIZE );
JERRY_STATIC_ASSERT(MEM_POOL_CHUNK_SIZE % MEM_ALIGNMENT == 0);
JERRY_STATIC_ASSERT(MEM_POOL_MAX_CHUNKS_NUMBER_LOG <= sizeof (mem_pool_chunk_index_t) * JERRY_BITSINBYTE);
JERRY_ASSERT(sizeof (mem_pool_chunk_index_t) <= MEM_POOL_CHUNK_SIZE);
const size_t pool_space_size = pool_size - sizeof(mem_pool_state_t);
const size_t pool_space_size = pool_size - sizeof (mem_pool_state_t);
const size_t chunks_number = pool_space_size / MEM_POOL_CHUNK_SIZE;
JERRY_ASSERT( ( (mem_pool_chunk_index_t) chunks_number ) == chunks_number );
JERRY_ASSERT(((mem_pool_chunk_index_t) chunks_number) == chunks_number);
pool_p->chunks_number = (mem_pool_chunk_index_t) chunks_number;
@@ -73,44 +73,44 @@ mem_pool_init(mem_pool_state_t *pool_p, /**< pool */
*/
pool_p->first_free_chunk = 0;
for ( mem_pool_chunk_index_t chunk_index = 0;
for (mem_pool_chunk_index_t chunk_index = 0;
chunk_index < chunks_number;
chunk_index++ )
chunk_index++)
{
mem_pool_chunk_index_t *next_free_chunk_index_p =
(mem_pool_chunk_index_t*) MEM_POOL_CHUNK_ADDRESS( pool_p, chunk_index);
(mem_pool_chunk_index_t*) MEM_POOL_CHUNK_ADDRESS(pool_p, chunk_index);
*next_free_chunk_index_p = (mem_pool_chunk_index_t) (chunk_index + 1u);
}
mem_check_pool( pool_p);
mem_check_pool (pool_p);
} /* mem_pool_init */
/**
* Allocate a chunk in the pool
*/
uint8_t*
mem_pool_alloc_chunk(mem_pool_state_t *pool_p) /**< pool */
mem_pool_alloc_chunk (mem_pool_state_t *pool_p) /**< pool */
{
mem_check_pool( pool_p);
mem_check_pool (pool_p);
if ( unlikely( pool_p->free_chunks_number == 0 ) )
if (unlikely (pool_p->free_chunks_number == 0))
{
JERRY_ASSERT( pool_p->first_free_chunk == pool_p->chunks_number );
JERRY_ASSERT(pool_p->first_free_chunk == pool_p->chunks_number);
return NULL;
}
JERRY_ASSERT( pool_p->first_free_chunk < pool_p->chunks_number );
JERRY_ASSERT(pool_p->first_free_chunk < pool_p->chunks_number);
mem_pool_chunk_index_t chunk_index = pool_p->first_free_chunk;
uint8_t *chunk_p = MEM_POOL_CHUNK_ADDRESS( pool_p, chunk_index);
uint8_t *chunk_p = MEM_POOL_CHUNK_ADDRESS(pool_p, chunk_index);
mem_pool_chunk_index_t *next_free_chunk_index_p = (mem_pool_chunk_index_t*) chunk_p;
pool_p->first_free_chunk = *next_free_chunk_index_p;
pool_p->free_chunks_number--;
mem_check_pool( pool_p);
mem_check_pool (pool_p);
return chunk_p;
} /* mem_pool_alloc_chunk */
@@ -119,16 +119,16 @@ mem_pool_alloc_chunk(mem_pool_state_t *pool_p) /**< pool */
* Free the chunk in the pool
*/
void
mem_pool_free_chunk(mem_pool_state_t *pool_p, /**< pool */
mem_pool_free_chunk (mem_pool_state_t *pool_p, /**< pool */
uint8_t *chunk_p) /**< chunk pointer */
{
JERRY_ASSERT( pool_p->free_chunks_number < pool_p->chunks_number );
JERRY_ASSERT( chunk_p >= MEM_POOL_SPACE_START( pool_p) && chunk_p <= MEM_POOL_SPACE_START( pool_p) + pool_p->chunks_number * MEM_POOL_CHUNK_SIZE );
JERRY_ASSERT( ( (uintptr_t) chunk_p - (uintptr_t) MEM_POOL_SPACE_START( pool_p) ) % MEM_POOL_CHUNK_SIZE == 0 );
JERRY_ASSERT(pool_p->free_chunks_number < pool_p->chunks_number);
JERRY_ASSERT(chunk_p >= MEM_POOL_SPACE_START(pool_p) && chunk_p <= MEM_POOL_SPACE_START(pool_p) + pool_p->chunks_number * MEM_POOL_CHUNK_SIZE);
JERRY_ASSERT(((uintptr_t) chunk_p - (uintptr_t) MEM_POOL_SPACE_START(pool_p)) % MEM_POOL_CHUNK_SIZE == 0);
mem_check_pool( pool_p);
mem_check_pool (pool_p);
const size_t chunk_byte_offset = (size_t) (chunk_p - MEM_POOL_SPACE_START( pool_p));
const size_t chunk_byte_offset = (size_t) (chunk_p - MEM_POOL_SPACE_START(pool_p));
const mem_pool_chunk_index_t chunk_index = (mem_pool_chunk_index_t) (chunk_byte_offset / MEM_POOL_CHUNK_SIZE);
mem_pool_chunk_index_t *next_free_chunk_index_p = (mem_pool_chunk_index_t*) chunk_p;
@@ -138,25 +138,25 @@ mem_pool_free_chunk(mem_pool_state_t *pool_p, /**< pool */
pool_p->first_free_chunk = chunk_index;
pool_p->free_chunks_number++;
mem_check_pool( pool_p);
mem_check_pool (pool_p);
} /* mem_pool_free_chunk */
/**
* Check pool state consistency
*/
static void
mem_check_pool( mem_pool_state_t __unused *pool_p) /**< pool (unused #ifdef JERRY_NDEBUG) */
mem_check_pool (mem_pool_state_t __unused *pool_p) /**< pool (unused #ifdef JERRY_NDEBUG) */
{
#ifndef JERRY_NDEBUG
JERRY_ASSERT( pool_p->chunks_number != 0 );
JERRY_ASSERT( pool_p->free_chunks_number <= pool_p->chunks_number );
JERRY_ASSERT(pool_p->chunks_number != 0);
JERRY_ASSERT(pool_p->free_chunks_number <= pool_p->chunks_number);
size_t met_free_chunks_number = 0;
mem_pool_chunk_index_t chunk_index = pool_p->first_free_chunk;
while ( chunk_index != pool_p->chunks_number )
while (chunk_index != pool_p->chunks_number)
{
uint8_t *chunk_p = MEM_POOL_CHUNK_ADDRESS( pool_p, chunk_index);
uint8_t *chunk_p = MEM_POOL_CHUNK_ADDRESS(pool_p, chunk_index);
mem_pool_chunk_index_t *next_free_chunk_index_p = (mem_pool_chunk_index_t*) chunk_p;
met_free_chunks_number++;
@@ -164,7 +164,7 @@ mem_check_pool( mem_pool_state_t __unused *pool_p) /**< pool (unused #ifdef JERR
chunk_index = *next_free_chunk_index_p;
}
JERRY_ASSERT( met_free_chunks_number == pool_p->free_chunks_number );
JERRY_ASSERT(met_free_chunks_number == pool_p->free_chunks_number);
#endif /* !JERRY_NDEBUG */
} /* mem_check_pool */
+4 -4
View File
@@ -29,7 +29,7 @@
/**
* Get pool's space size
*/
#define MEM_POOL_SPACE_START( pool_header_p) ( (uint8_t*) ( (mem_pool_state_t*) pool_header_p + 1 ) )
#define MEM_POOL_SPACE_START(pool_header_p) ((uint8_t*) ((mem_pool_state_t*) pool_header_p + 1))
/**
* Index of chunk in a pool
@@ -50,9 +50,9 @@ typedef struct mem_pool_state_t {
unsigned int next_pool_cp : MEM_HEAP_OFFSET_LOG; /**< pointer to the next pool with same chunk size */
} mem_pool_state_t;
extern void mem_pool_init(mem_pool_state_t *pool_p, size_t pool_size);
extern uint8_t* mem_pool_alloc_chunk(mem_pool_state_t *pool_p);
extern void mem_pool_free_chunk(mem_pool_state_t *pool_p, uint8_t *chunk_p);
extern void mem_pool_init (mem_pool_state_t *pool_p, size_t pool_size);
extern uint8_t* mem_pool_alloc_chunk (mem_pool_state_t *pool_p);
extern void mem_pool_free_chunk (mem_pool_state_t *pool_p, uint8_t *chunk_p);
/**
* @}
+55 -55
View File
@@ -49,39 +49,39 @@ size_t mem_free_chunks_number;
*/
mem_pools_stats_t mem_pools_stats;
static void mem_pools_stat_init( void);
static void mem_pools_stat_alloc_pool( void);
static void mem_pools_stat_free_pool( void);
static void mem_pools_stat_alloc_chunk(void );
static void mem_pools_stat_free_chunk( void);
static void mem_pools_stat_init (void);
static void mem_pools_stat_alloc_pool (void);
static void mem_pools_stat_free_pool (void);
static void mem_pools_stat_alloc_chunk (void);
static void mem_pools_stat_free_chunk (void);
#else /* !MEM_STATS */
# define mem_pools_stat_init()
# define mem_pools_stat_alloc_pool()
# define mem_pools_stat_free_pool()
# define mem_pools_stat_alloc_chunk()
# define mem_pools_stat_free_chunk()
# define mem_pools_stat_init ()
# define mem_pools_stat_alloc_pool ()
# define mem_pools_stat_free_pool ()
# define mem_pools_stat_alloc_chunk ()
# define mem_pools_stat_free_chunk ()
#endif /* !MEM_STATS */
/**
* Initialize pool manager
*/
void
mem_pools_init( void)
mem_pools_init (void)
{
mem_pools = NULL;
mem_free_chunks_number = 0;
mem_pools_stat_init();
mem_pools_stat_init ();
} /* mem_pools_init */
/**
* Finalize pool manager
*/
void
mem_pools_finalize( void)
mem_pools_finalize (void)
{
JERRY_ASSERT( mem_pools == NULL );
JERRY_ASSERT( mem_free_chunks_number == 0 );
JERRY_ASSERT(mem_pools == NULL);
JERRY_ASSERT(mem_free_chunks_number == 0);
} /* mem_pools_finalize */
/**
@@ -91,23 +91,23 @@ mem_pools_finalize( void)
* or NULL - if not enough memory.
*/
uint8_t*
mem_pools_alloc( void)
mem_pools_alloc (void)
{
/**
* If there are no free chunks, allocate new pool.
*/
if ( mem_free_chunks_number == 0 )
if (mem_free_chunks_number == 0)
{
/**
* Space, at least for header and eight chunks.
*
* TODO: Config.
*/
size_t pool_size = mem_heap_recommend_allocation_size( sizeof(mem_pool_state_t) + 8 * MEM_POOL_CHUNK_SIZE );
size_t pool_size = mem_heap_recommend_allocation_size (sizeof (mem_pool_state_t) + 8 * MEM_POOL_CHUNK_SIZE);
mem_pool_state_t *pool_state = (mem_pool_state_t*) mem_heap_alloc_block( pool_size, MEM_HEAP_ALLOC_LONG_TERM);
mem_pool_state_t *pool_state = (mem_pool_state_t*) mem_heap_alloc_block (pool_size, MEM_HEAP_ALLOC_LONG_TERM);
if ( pool_state == NULL )
if (pool_state == NULL)
{
/**
* Not enough space for new pool.
@@ -115,15 +115,15 @@ mem_pools_alloc( void)
return NULL;
}
mem_pool_init( pool_state, pool_size);
mem_pool_init (pool_state, pool_size);
pool_state->next_pool_cp = ( mem_pools == NULL ) ? MEM_COMPRESSED_POINTER_NULL
: (uint16_t) mem_compress_pointer( mem_pools);
pool_state->next_pool_cp = (mem_pools == NULL) ? MEM_COMPRESSED_POINTER_NULL
: (uint16_t) mem_compress_pointer (mem_pools);
mem_pools = pool_state;
mem_free_chunks_number += pool_state->chunks_number;
mem_pools_stat_alloc_pool();
mem_pools_stat_alloc_pool ();
}
/**
@@ -133,11 +133,11 @@ mem_pools_alloc( void)
*/
mem_pool_state_t *pool_state = mem_pools;
while ( pool_state->first_free_chunk == pool_state->chunks_number )
while (pool_state->first_free_chunk == pool_state->chunks_number)
{
pool_state = mem_decompress_pointer( pool_state->next_pool_cp);
pool_state = mem_decompress_pointer (pool_state->next_pool_cp);
JERRY_ASSERT( pool_state != NULL );
JERRY_ASSERT(pool_state != NULL);
}
/**
@@ -145,64 +145,64 @@ mem_pools_alloc( void)
*/
mem_free_chunks_number--;
mem_pools_stat_alloc_chunk();
mem_pools_stat_alloc_chunk ();
return mem_pool_alloc_chunk( pool_state);
return mem_pool_alloc_chunk (pool_state);
} /* mem_pools_alloc */
/**
* Free the chunk
*/
void
mem_pools_free( uint8_t *chunk_p) /**< pointer to the chunk */
mem_pools_free (uint8_t *chunk_p) /**< pointer to the chunk */
{
mem_pool_state_t *pool_state = mem_pools, *prev_pool_state = NULL;
/**
* Search for the pool containing specified chunk.
*/
while ( !( chunk_p >= MEM_POOL_SPACE_START( pool_state)
&& chunk_p <= MEM_POOL_SPACE_START( pool_state) + pool_state->chunks_number * MEM_POOL_CHUNK_SIZE ) )
while (!(chunk_p >= MEM_POOL_SPACE_START(pool_state)
&& chunk_p <= MEM_POOL_SPACE_START(pool_state) + pool_state->chunks_number * MEM_POOL_CHUNK_SIZE))
{
prev_pool_state = pool_state;
pool_state = mem_decompress_pointer( pool_state->next_pool_cp);
pool_state = mem_decompress_pointer (pool_state->next_pool_cp);
JERRY_ASSERT( pool_state != NULL );
JERRY_ASSERT(pool_state != NULL);
}
/**
* Free the chunk
*/
mem_pool_free_chunk( pool_state, chunk_p);
mem_pool_free_chunk (pool_state, chunk_p);
mem_free_chunks_number++;
mem_pools_stat_free_chunk();
mem_pools_stat_free_chunk ();
/**
* If all chunks of the pool are free, free the pool itself.
*/
if ( pool_state->free_chunks_number == pool_state->chunks_number )
if (pool_state->free_chunks_number == pool_state->chunks_number)
{
if ( prev_pool_state != NULL )
if (prev_pool_state != NULL)
{
prev_pool_state->next_pool_cp = pool_state->next_pool_cp;
} else
{
if ( pool_state->next_pool_cp == MEM_COMPRESSED_POINTER_NULL )
if (pool_state->next_pool_cp == MEM_COMPRESSED_POINTER_NULL)
{
mem_pools = NULL;
}
else
{
mem_pools = mem_decompress_pointer( pool_state->next_pool_cp);
mem_pools = mem_decompress_pointer (pool_state->next_pool_cp);
}
}
mem_free_chunks_number -= pool_state->chunks_number;
mem_heap_free_block( (uint8_t*)pool_state);
mem_heap_free_block ((uint8_t*)pool_state);
mem_pools_stat_free_pool();
mem_pools_stat_free_pool ();
}
} /* mem_pools_free */
@@ -211,9 +211,9 @@ mem_pools_free( uint8_t *chunk_p) /**< pointer to the chunk */
* Get pools memory usage statistics
*/
void
mem_pools_get_stats( mem_pools_stats_t *out_pools_stats_p) /**< out: pools' stats */
mem_pools_get_stats (mem_pools_stats_t *out_pools_stats_p) /**< out: pools' stats */
{
JERRY_ASSERT( out_pools_stats_p != NULL );
JERRY_ASSERT(out_pools_stats_p != NULL);
*out_pools_stats_p = mem_pools_stats;
} /* mem_pools_get_stats */
@@ -222,21 +222,21 @@ mem_pools_get_stats( mem_pools_stats_t *out_pools_stats_p) /**< out: pools' stat
* Initalize pools' memory usage statistics account structure
*/
static void
mem_pools_stat_init( void)
mem_pools_stat_init (void)
{
__memset( &mem_pools_stats, 0, sizeof (mem_pools_stats));
__memset (&mem_pools_stats, 0, sizeof (mem_pools_stats));
} /* mem_pools_stat_init */
/**
* Account allocation of a pool
*/
static void
mem_pools_stat_alloc_pool( void)
mem_pools_stat_alloc_pool (void)
{
mem_pools_stats.pools_count++;
mem_pools_stats.free_chunks = mem_free_chunks_number;
if ( mem_pools_stats.pools_count > mem_pools_stats.peak_pools_count )
if (mem_pools_stats.pools_count > mem_pools_stats.peak_pools_count)
{
mem_pools_stats.peak_pools_count = mem_pools_stats.pools_count;
}
@@ -246,9 +246,9 @@ mem_pools_stat_alloc_pool( void)
* Account freeing of a pool
*/
static void
mem_pools_stat_free_pool( void)
mem_pools_stat_free_pool (void)
{
JERRY_ASSERT( mem_pools_stats.pools_count > 0 );
JERRY_ASSERT(mem_pools_stats.pools_count > 0);
mem_pools_stats.pools_count--;
mem_pools_stats.free_chunks = mem_free_chunks_number;
@@ -258,14 +258,14 @@ mem_pools_stat_free_pool( void)
* Account allocation of chunk in a pool
*/
static void
mem_pools_stat_alloc_chunk(void)
mem_pools_stat_alloc_chunk (void)
{
JERRY_ASSERT( mem_pools_stats.free_chunks > 0 );
JERRY_ASSERT(mem_pools_stats.free_chunks > 0);
mem_pools_stats.allocated_chunks++;
mem_pools_stats.free_chunks--;
if ( mem_pools_stats.allocated_chunks > mem_pools_stats.peak_allocated_chunks )
if (mem_pools_stats.allocated_chunks > mem_pools_stats.peak_allocated_chunks)
{
mem_pools_stats.peak_allocated_chunks = mem_pools_stats.allocated_chunks;
}
@@ -275,9 +275,9 @@ mem_pools_stat_alloc_chunk(void)
* Account freeing of chunk in a pool
*/
static void
mem_pools_stat_free_chunk(void)
mem_pools_stat_free_chunk (void)
{
JERRY_ASSERT( mem_pools_stats.allocated_chunks > 0 );
JERRY_ASSERT(mem_pools_stats.allocated_chunks > 0);
mem_pools_stats.allocated_chunks--;
mem_pools_stats.free_chunks++;
+5 -5
View File
@@ -29,10 +29,10 @@
#include "globals.h"
extern void mem_pools_init(void);
extern void mem_pools_finalize(void);
extern uint8_t* mem_pools_alloc(void);
extern void mem_pools_free(uint8_t *chunk_p);
extern void mem_pools_init (void);
extern void mem_pools_finalize (void);
extern uint8_t* mem_pools_alloc (void);
extern void mem_pools_free (uint8_t *chunk_p);
#ifdef MEM_STATS
/**
@@ -56,7 +56,7 @@ typedef struct
size_t free_chunks;
} mem_pools_stats_t;
extern void mem_pools_get_stats( mem_pools_stats_t *out_pools_stats_p);
extern void mem_pools_get_stats (mem_pools_stats_t *out_pools_stats_p);
#endif /* MEM_STATS */
#endif /* JERRY_MEM_POOLMAN_H */