Style fixes in liballocator, libecmaobjects, libecmaoperations: space between function name and opening parenthesis, no space after opening parenthesis/before closing parenthesis.

This commit is contained in:
Ruben Ayrapetyan
2014-08-11 19:27:07 +04:00
parent e53be2b441
commit b02eefd4ae
16 changed files with 761 additions and 761 deletions
+194 -194
View File
@@ -36,19 +36,19 @@
#ifndef JERRY_NVALGRIND
# include "memcheck.h"
# define VALGRIND_NOACCESS_STRUCT( s) (void)VALGRIND_MAKE_MEM_NOACCESS( ( s ), sizeof( *( s ) ) )
# define VALGRIND_UNDEFINED_STRUCT( s) (void)VALGRIND_MAKE_MEM_UNDEFINED( ( s ), sizeof( *( s ) ) )
# define VALGRIND_DEFINED_STRUCT( s) (void)VALGRIND_MAKE_MEM_DEFINED( ( s ), sizeof( *( s ) ) )
# define VALGRIND_NOACCESS_SPACE( p, s) (void)VALGRIND_MAKE_MEM_NOACCESS( ( p ), ( s ) )
# define VALGRIND_UNDEFINED_SPACE( p, s) (void)VALGRIND_MAKE_MEM_UNDEFINED( ( p ), ( s ) )
# define VALGRIND_DEFINED_SPACE( p, s) (void)VALGRIND_MAKE_MEM_DEFINED( ( p ), ( s ) )
# define VALGRIND_NOACCESS_STRUCT(s) (void)VALGRIND_MAKE_MEM_NOACCESS((s), sizeof (*(s)))
# define VALGRIND_UNDEFINED_STRUCT(s) (void)VALGRIND_MAKE_MEM_UNDEFINED((s), sizeof (*(s)))
# define VALGRIND_DEFINED_STRUCT(s) (void)VALGRIND_MAKE_MEM_DEFINED((s), sizeof (*(s)))
# define VALGRIND_NOACCESS_SPACE(p, s) (void)VALGRIND_MAKE_MEM_NOACCESS((p), (s))
# define VALGRIND_UNDEFINED_SPACE(p, s) (void)VALGRIND_MAKE_MEM_UNDEFINED((p), (s))
# define VALGRIND_DEFINED_SPACE(p, s) (void)VALGRIND_MAKE_MEM_DEFINED((p), (s))
#else /* !JERRY_NVALGRIND */
# define VALGRIND_NOACCESS_STRUCT( s)
# define VALGRIND_UNDEFINED_STRUCT( s)
# define VALGRIND_DEFINED_STRUCT( s)
# define VALGRIND_NOACCESS_SPACE( p, s)
# define VALGRIND_UNDEFINED_SPACE( p, s)
# define VALGRIND_DEFINED_SPACE( p, s)
# define VALGRIND_NOACCESS_STRUCT(s)
# define VALGRIND_UNDEFINED_STRUCT(s)
# define VALGRIND_DEFINED_STRUCT(s)
# define VALGRIND_NOACCESS_SPACE(p, s)
# define VALGRIND_UNDEFINED_SPACE(p, s)
# define VALGRIND_DEFINED_SPACE(p, s)
#endif /* !JERRY_NVALGRIND */
/**
@@ -95,12 +95,12 @@ typedef struct mem_block_header_t
/**
* Chunk should have enough space for block header
*/
JERRY_STATIC_ASSERT( MEM_HEAP_CHUNK_SIZE >= sizeof (mem_block_header_t) );
JERRY_STATIC_ASSERT(MEM_HEAP_CHUNK_SIZE >= sizeof (mem_block_header_t));
/**
* Chunk size should satisfy the required alignment value
*/
JERRY_STATIC_ASSERT( MEM_HEAP_CHUNK_SIZE % MEM_ALIGNMENT == 0 );
JERRY_STATIC_ASSERT(MEM_HEAP_CHUNK_SIZE % MEM_ALIGNMENT == 0);
/**
* Description of heap state
@@ -118,16 +118,16 @@ typedef struct
*/
mem_heap_state_t mem_heap;
static size_t mem_get_block_chunks_count( const mem_block_header_t *block_header_p);
static size_t mem_get_block_data_space_size( const mem_block_header_t *block_header_p);
static size_t mem_get_block_chunks_count_from_data_size( size_t block_allocated_size);
static size_t mem_get_block_chunks_count (const mem_block_header_t *block_header_p);
static size_t mem_get_block_data_space_size (const mem_block_header_t *block_header_p);
static size_t mem_get_block_chunks_count_from_data_size (size_t block_allocated_size);
static void mem_init_block_header( uint8_t *first_chunk_p,
static void mem_init_block_header (uint8_t *first_chunk_p,
size_t size_in_chunks,
mem_block_state_t block_state,
mem_block_header_t *prev_block_p,
mem_block_header_t *next_block_p);
static void mem_check_heap( void);
static void mem_check_heap (void);
#ifdef MEM_STATS
/**
@@ -135,17 +135,17 @@ static void mem_check_heap( void);
*/
static mem_heap_stats_t mem_heap_stats;
static void mem_heap_stat_init( void);
static void mem_heap_stat_alloc_block( mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block( mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block_split( void);
static void mem_heap_stat_free_block_merge( void);
static void mem_heap_stat_init (void);
static void mem_heap_stat_alloc_block (mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block (mem_block_header_t *block_header_p);
static void mem_heap_stat_free_block_split (void);
static void mem_heap_stat_free_block_merge (void);
#else /* !MEM_STATS */
# define mem_heap_stat_init()
# define mem_heap_stat_alloc_block( v)
# define mem_heap_stat_free_block( v)
# define mem_heap_stat_free_block_split()
# define mem_heap_stat_free_block_merge()
# define mem_heap_stat_init ()
# define mem_heap_stat_alloc_block (v)
# define mem_heap_stat_free_block (v)
# define mem_heap_stat_free_block_split ()
# define mem_heap_stat_free_block_merge ()
#endif /* !MEM_STATS */
/**
@@ -154,23 +154,23 @@ static void mem_heap_stat_free_block_merge( void);
* @return chunks count
*/
static size_t
mem_get_block_chunks_count( const mem_block_header_t *block_header_p) /**< block header */
mem_get_block_chunks_count (const mem_block_header_t *block_header_p) /**< block header */
{
JERRY_ASSERT( block_header_p != NULL );
JERRY_ASSERT(block_header_p != NULL);
const mem_block_header_t *next_block_p = block_header_p->neighbours[ MEM_DIRECTION_NEXT ];
size_t dist_till_block_end;
if ( next_block_p == NULL )
if (next_block_p == NULL)
{
dist_till_block_end = (size_t) ( mem_heap.heap_start + mem_heap.heap_size - (uint8_t*) block_header_p );
dist_till_block_end = (size_t) (mem_heap.heap_start + mem_heap.heap_size - (uint8_t*) block_header_p);
} else
{
dist_till_block_end = (size_t) ( (uint8_t*) next_block_p - (uint8_t*) block_header_p );
dist_till_block_end = (size_t) ((uint8_t*) next_block_p - (uint8_t*) block_header_p);
}
JERRY_ASSERT( dist_till_block_end <= mem_heap.heap_size );
JERRY_ASSERT( dist_till_block_end % MEM_HEAP_CHUNK_SIZE == 0 );
JERRY_ASSERT(dist_till_block_end <= mem_heap.heap_size);
JERRY_ASSERT(dist_till_block_end % MEM_HEAP_CHUNK_SIZE == 0);
return dist_till_block_end / MEM_HEAP_CHUNK_SIZE;
} /* mem_get_block_chunks_count */
@@ -181,9 +181,9 @@ mem_get_block_chunks_count( const mem_block_header_t *block_header_p) /**< block
* @return size of block area that can be used to store data
*/
static size_t
mem_get_block_data_space_size( const mem_block_header_t *block_header_p) /**< block header */
mem_get_block_data_space_size (const mem_block_header_t *block_header_p) /**< block header */
{
return mem_get_block_chunks_count( block_header_p) * MEM_HEAP_CHUNK_SIZE - sizeof (mem_block_header_t);
return mem_get_block_chunks_count (block_header_p) * MEM_HEAP_CHUNK_SIZE - sizeof (mem_block_header_t);
} /* mem_get_block_data_space_size */
/**
@@ -192,30 +192,30 @@ mem_get_block_data_space_size( const mem_block_header_t *block_header_p) /**< bl
* @return chunks count
*/
static size_t
mem_get_block_chunks_count_from_data_size( size_t block_allocated_size) /**< size of block's allocated area */
mem_get_block_chunks_count_from_data_size (size_t block_allocated_size) /**< size of block's allocated area */
{
return JERRY_ALIGNUP( sizeof (mem_block_header_t) + block_allocated_size, MEM_HEAP_CHUNK_SIZE) / MEM_HEAP_CHUNK_SIZE;
return JERRY_ALIGNUP(sizeof (mem_block_header_t) + block_allocated_size, MEM_HEAP_CHUNK_SIZE) / MEM_HEAP_CHUNK_SIZE;
} /* mem_get_block_chunks_count_from_data_size */
/**
* Startup initialization of heap
*/
void
mem_heap_init(uint8_t *heap_start, /**< first address of heap space */
mem_heap_init (uint8_t *heap_start, /**< first address of heap space */
size_t heap_size) /**< heap space size */
{
JERRY_ASSERT( heap_start != NULL );
JERRY_ASSERT( heap_size != 0 );
JERRY_ASSERT( heap_size % MEM_HEAP_CHUNK_SIZE == 0 );
JERRY_ASSERT( (uintptr_t) heap_start % MEM_ALIGNMENT == 0);
JERRY_ASSERT( heap_size <= ( 1u << MEM_HEAP_OFFSET_LOG ) );
JERRY_ASSERT(heap_start != NULL);
JERRY_ASSERT(heap_size != 0);
JERRY_ASSERT(heap_size % MEM_HEAP_CHUNK_SIZE == 0);
JERRY_ASSERT((uintptr_t) heap_start % MEM_ALIGNMENT == 0);
JERRY_ASSERT(heap_size <= (1u << MEM_HEAP_OFFSET_LOG));
mem_heap.heap_start = heap_start;
mem_heap.heap_size = heap_size;
VALGRIND_NOACCESS_SPACE( heap_start, heap_size);
VALGRIND_NOACCESS_SPACE(heap_start, heap_size);
mem_init_block_header(mem_heap.heap_start,
mem_init_block_header (mem_heap.heap_start,
0,
MEM_BLOCK_FREE,
NULL,
@@ -224,30 +224,30 @@ mem_heap_init(uint8_t *heap_start, /**< first address of heap space */
mem_heap.first_block_p = (mem_block_header_t*) mem_heap.heap_start;
mem_heap.last_block_p = mem_heap.first_block_p;
mem_heap_stat_init();
mem_heap_stat_init ();
} /* mem_heap_init */
/**
* Finalize heap
*/
void
mem_heap_finalize(void)
mem_heap_finalize (void)
{
VALGRIND_DEFINED_SPACE( mem_heap.heap_start, mem_heap.heap_size);
VALGRIND_DEFINED_SPACE(mem_heap.heap_start, mem_heap.heap_size);
JERRY_ASSERT( mem_heap.first_block_p == mem_heap.last_block_p );
JERRY_ASSERT( mem_heap.first_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK );
JERRY_ASSERT(mem_heap.first_block_p == mem_heap.last_block_p);
JERRY_ASSERT(mem_heap.first_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK);
VALGRIND_NOACCESS_SPACE( mem_heap.heap_start, mem_heap.heap_size);
VALGRIND_NOACCESS_SPACE(mem_heap.heap_start, mem_heap.heap_size);
__memset( &mem_heap, 0, sizeof(mem_heap));
__memset (&mem_heap, 0, sizeof (mem_heap));
} /* mem_heap_finalize */
/**
* Initialize block header
*/
static void
mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first chunk to use for the block */
mem_init_block_header (uint8_t *first_chunk_p, /**< address of the first chunk to use for the block */
size_t allocated_bytes, /**< size of block's allocated area */
mem_block_state_t block_state, /**< state of the block (allocated or free) */
mem_block_header_t *prev_block_p, /**< previous block */
@@ -255,13 +255,13 @@ mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first
{
mem_block_header_t *block_header_p = (mem_block_header_t*) first_chunk_p;
VALGRIND_UNDEFINED_STRUCT( block_header_p);
VALGRIND_UNDEFINED_STRUCT(block_header_p);
if ( block_state == MEM_BLOCK_FREE )
if (block_state == MEM_BLOCK_FREE)
{
block_header_p->magic_num = MEM_MAGIC_NUM_OF_FREE_BLOCK;
JERRY_ASSERT( allocated_bytes == 0 );
JERRY_ASSERT(allocated_bytes == 0);
} else
{
block_header_p->magic_num = MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK;
@@ -271,9 +271,9 @@ mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first
block_header_p->neighbours[ MEM_DIRECTION_NEXT ] = next_block_p;
block_header_p->allocated_bytes = allocated_bytes;
JERRY_ASSERT( allocated_bytes <= mem_get_block_data_space_size( block_header_p) );
JERRY_ASSERT(allocated_bytes <= mem_get_block_data_space_size (block_header_p));
VALGRIND_NOACCESS_STRUCT( block_header_p);
VALGRIND_NOACCESS_STRUCT(block_header_p);
} /* mem_init_block_header */
/**
@@ -290,15 +290,15 @@ mem_init_block_header( uint8_t *first_chunk_p, /**< address of the first
* NULL - if there is not enough memory.
*/
uint8_t*
mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to allocate in bytes */
mem_heap_alloc_block (size_t size_in_bytes, /**< size of region to allocate in bytes */
mem_heap_alloc_term_t alloc_term) /**< expected allocation term */
{
mem_block_header_t *block_p;
mem_direction_t direction;
mem_check_heap();
mem_check_heap ();
if ( alloc_term == MEM_HEAP_ALLOC_SHORT_TERM )
if (alloc_term == MEM_HEAP_ALLOC_SHORT_TERM)
{
block_p = mem_heap.first_block_p;
direction = MEM_DIRECTION_NEXT;
@@ -309,49 +309,49 @@ mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to all
}
/* searching for appropriate block */
while ( block_p != NULL )
while (block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
if ( block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK )
if (block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK)
{
if ( mem_get_block_data_space_size( block_p) >= size_in_bytes )
if (mem_get_block_data_space_size (block_p) >= size_in_bytes)
{
break;
}
} else
{
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
}
mem_block_header_t *next_block_p = block_p->neighbours[ direction ];
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
block_p = next_block_p;
}
if ( block_p == NULL )
if (block_p == NULL)
{
/* not enough free space */
return NULL;
}
/* appropriate block found, allocating space */
size_t new_block_size_in_chunks = mem_get_block_chunks_count_from_data_size( size_in_bytes);
size_t found_block_size_in_chunks = mem_get_block_chunks_count( block_p);
size_t new_block_size_in_chunks = mem_get_block_chunks_count_from_data_size (size_in_bytes);
size_t found_block_size_in_chunks = mem_get_block_chunks_count (block_p);
JERRY_ASSERT( new_block_size_in_chunks <= found_block_size_in_chunks );
JERRY_ASSERT(new_block_size_in_chunks <= found_block_size_in_chunks);
mem_block_header_t *prev_block_p = block_p->neighbours[ MEM_DIRECTION_PREV ];
mem_block_header_t *next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
if ( new_block_size_in_chunks < found_block_size_in_chunks )
if (new_block_size_in_chunks < found_block_size_in_chunks)
{
mem_heap_stat_free_block_split();
mem_heap_stat_free_block_split ();
uint8_t *new_free_block_first_chunk_p = (uint8_t*) block_p + new_block_size_in_chunks * MEM_HEAP_CHUNK_SIZE;
mem_init_block_header(new_free_block_first_chunk_p,
mem_init_block_header (new_free_block_first_chunk_p,
0,
MEM_BLOCK_FREE,
block_p /* there we will place new allocated block */,
@@ -359,43 +359,43 @@ mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to all
mem_block_header_t *new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p;
if ( next_block_p == NULL )
if (next_block_p == NULL)
{
mem_heap.last_block_p = new_free_block_p;
}
else
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = (mem_block_header_t*) new_free_block_first_chunk_p;
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
next_block_p = new_free_block_p;
}
mem_init_block_header((uint8_t*) block_p,
mem_init_block_header ((uint8_t*) block_p,
size_in_bytes,
MEM_BLOCK_ALLOCATED,
prev_block_p,
next_block_p);
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
mem_heap_stat_alloc_block( block_p);
mem_heap_stat_alloc_block (block_p);
JERRY_ASSERT( mem_get_block_data_space_size( block_p) >= size_in_bytes );
JERRY_ASSERT(mem_get_block_data_space_size (block_p) >= size_in_bytes);
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
mem_check_heap();
mem_check_heap ();
/* return data space beginning address */
uint8_t *data_space_p = (uint8_t*) (block_p + 1);
JERRY_ASSERT( (uintptr_t) data_space_p % MEM_ALIGNMENT == 0);
JERRY_ASSERT((uintptr_t) data_space_p % MEM_ALIGNMENT == 0);
VALGRIND_UNDEFINED_SPACE( data_space_p, size_in_bytes);
VALGRIND_UNDEFINED_SPACE(data_space_p, size_in_bytes);
return data_space_p;
} /* mem_heap_alloc_block */
@@ -404,58 +404,58 @@ mem_heap_alloc_block( size_t size_in_bytes, /**< size of region to all
* Free the memory block.
*/
void
mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of the block */
mem_heap_free_block (uint8_t *ptr) /**< pointer to beginning of data space of the block */
{
/* checking that ptr points to the heap */
JERRY_ASSERT( ptr >= mem_heap.heap_start
&& ptr <= mem_heap.heap_start + mem_heap.heap_size );
JERRY_ASSERT(ptr >= mem_heap.heap_start
&& ptr <= mem_heap.heap_start + mem_heap.heap_size);
mem_check_heap();
mem_check_heap ();
mem_block_header_t *block_p = (mem_block_header_t*) ptr - 1;
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
mem_block_header_t *prev_block_p = block_p->neighbours[ MEM_DIRECTION_PREV ];
mem_block_header_t *next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
mem_heap_stat_free_block( block_p);
mem_heap_stat_free_block (block_p);
VALGRIND_NOACCESS_SPACE( ptr, block_p->allocated_bytes);
VALGRIND_NOACCESS_SPACE(ptr, block_p->allocated_bytes);
/* checking magic nums that are neighbour to data space */
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
if ( next_block_p != NULL )
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
JERRY_ASSERT( next_block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK
|| next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK );
JERRY_ASSERT(next_block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK
|| next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK);
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
block_p->magic_num = MEM_MAGIC_NUM_OF_FREE_BLOCK;
if ( next_block_p != NULL )
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
if (next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK )
if (next_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK)
{
/* merge with the next block */
mem_heap_stat_free_block_merge();
mem_heap_stat_free_block_merge ();
mem_block_header_t *next_next_block_p = next_block_p->neighbours[ MEM_DIRECTION_NEXT ];
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
next_block_p = next_next_block_p;
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
block_p->neighbours[ MEM_DIRECTION_NEXT ] = next_block_p;
if ( next_block_p != NULL )
if (next_block_p != NULL)
{
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = block_p;
}
@@ -465,26 +465,26 @@ mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of th
}
}
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
if ( prev_block_p != NULL )
if (prev_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( prev_block_p);
VALGRIND_DEFINED_STRUCT(prev_block_p);
if ( prev_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK )
if (prev_block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK)
{
/* merge with the previous block */
mem_heap_stat_free_block_merge();
mem_heap_stat_free_block_merge ();
prev_block_p->neighbours[ MEM_DIRECTION_NEXT ] = next_block_p;
if ( next_block_p != NULL )
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT( next_block_p);
VALGRIND_DEFINED_STRUCT(next_block_p);
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = block_p->neighbours[ MEM_DIRECTION_PREV ];
VALGRIND_NOACCESS_STRUCT( next_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
else
{
@@ -492,12 +492,12 @@ mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of th
}
}
VALGRIND_NOACCESS_STRUCT( prev_block_p);
VALGRIND_NOACCESS_STRUCT(prev_block_p);
}
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
mem_check_heap();
mem_check_heap ();
} /* mem_heap_free_block */
/**
@@ -506,10 +506,10 @@ mem_heap_free_block( uint8_t *ptr) /**< pointer to beginning of data space of th
* @return recommended allocation size
*/
size_t
mem_heap_recommend_allocation_size( size_t minimum_allocation_size) /**< minimum allocation size */
mem_heap_recommend_allocation_size (size_t minimum_allocation_size) /**< minimum allocation size */
{
size_t minimum_allocation_size_with_block_header = minimum_allocation_size + sizeof (mem_block_header_t);
size_t heap_chunk_aligned_allocation_size = JERRY_ALIGNUP( minimum_allocation_size_with_block_header, MEM_HEAP_CHUNK_SIZE);
size_t heap_chunk_aligned_allocation_size = JERRY_ALIGNUP(minimum_allocation_size_with_block_header, MEM_HEAP_CHUNK_SIZE);
return heap_chunk_aligned_allocation_size - sizeof (mem_block_header_t);
} /* mem_heap_recommend_allocation_size */
@@ -518,59 +518,59 @@ mem_heap_recommend_allocation_size( size_t minimum_allocation_size) /**< minimum
* Print heap
*/
void
mem_heap_print( bool dump_block_headers, /**< print block headers */
mem_heap_print (bool dump_block_headers, /**< print block headers */
bool dump_block_data, /**< print block with data (true)
or print only block header (false) */
bool dump_stats) /**< print heap stats */
{
mem_check_heap();
mem_check_heap ();
JERRY_ASSERT( !dump_block_data || dump_block_headers );
JERRY_ASSERT(!dump_block_data || dump_block_headers);
if ( dump_block_headers )
if (dump_block_headers)
{
__printf("Heap: start=%p size=%lu, first block->%p, last block->%p\n",
__printf ("Heap: start=%p size=%lu, first block->%p, last block->%p\n",
mem_heap.heap_start,
mem_heap.heap_size,
(void*) mem_heap.first_block_p,
(void*) mem_heap.last_block_p);
for ( mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
for (mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
block_p != NULL;
block_p = next_block_p )
block_p = next_block_p)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
__printf("Block (%p): magic num=0x%08x, size in chunks=%lu, previous block->%p next block->%p\n",
__printf ("Block (%p): magic num=0x%08x, size in chunks=%lu, previous block->%p next block->%p\n",
(void*) block_p,
block_p->magic_num,
mem_get_block_chunks_count( block_p),
mem_get_block_chunks_count (block_p),
(void*) block_p->neighbours[ MEM_DIRECTION_PREV ],
(void*) block_p->neighbours[ MEM_DIRECTION_NEXT ]);
if ( dump_block_data )
if (dump_block_data)
{
uint8_t *block_data_p = (uint8_t*) (block_p + 1);
for ( uint32_t offset = 0;
offset < mem_get_block_data_space_size( block_p);
offset++ )
for (uint32_t offset = 0;
offset < mem_get_block_data_space_size (block_p);
offset++)
{
__printf("%02x ", block_data_p[ offset ]);
__printf ("%02x ", block_data_p[ offset ]);
}
__printf("\n");
__printf ("\n");
}
next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
}
}
#ifdef MEM_STATS
if ( dump_stats )
if (dump_stats)
{
__printf("Heap stats:\n");
__printf(" Heap size = %lu bytes\n"
__printf ("Heap stats:\n");
__printf (" Heap size = %lu bytes\n"
" Chunk size = %lu bytes\n"
" Blocks count = %lu\n"
" Allocated blocks count = %lu\n"
@@ -595,80 +595,80 @@ mem_heap_print( bool dump_block_headers, /**< print block headers */
}
#endif /* MEM_STATS */
__printf("\n");
__printf ("\n");
} /* mem_heap_print */
/**
* Check heap consistency
*/
static void
mem_check_heap( void)
mem_check_heap (void)
{
#ifndef JERRY_NDEBUG
JERRY_ASSERT( (uint8_t*) mem_heap.first_block_p == mem_heap.heap_start );
JERRY_ASSERT( mem_heap.heap_size % MEM_HEAP_CHUNK_SIZE == 0 );
JERRY_ASSERT((uint8_t*) mem_heap.first_block_p == mem_heap.heap_start);
JERRY_ASSERT(mem_heap.heap_size % MEM_HEAP_CHUNK_SIZE == 0);
bool is_last_block_was_met = false;
size_t chunk_sizes_sum = 0;
for ( mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
for (mem_block_header_t *block_p = mem_heap.first_block_p, *next_block_p;
block_p != NULL;
block_p = next_block_p )
block_p = next_block_p)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
chunk_sizes_sum += mem_get_block_chunks_count( block_p);
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
chunk_sizes_sum += mem_get_block_chunks_count (block_p);
next_block_p = block_p->neighbours[ MEM_DIRECTION_NEXT ];
if ( block_p == mem_heap.last_block_p )
if (block_p == mem_heap.last_block_p)
{
is_last_block_was_met = true;
JERRY_ASSERT( next_block_p == NULL );
JERRY_ASSERT(next_block_p == NULL);
} else
{
JERRY_ASSERT( next_block_p != NULL );
JERRY_ASSERT(next_block_p != NULL);
}
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
}
JERRY_ASSERT( chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size );
JERRY_ASSERT( is_last_block_was_met );
JERRY_ASSERT(chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size);
JERRY_ASSERT(is_last_block_was_met);
bool is_first_block_was_met = false;
chunk_sizes_sum = 0;
for ( mem_block_header_t *block_p = mem_heap.last_block_p, *prev_block_p;
for (mem_block_header_t *block_p = mem_heap.last_block_p, *prev_block_p;
block_p != NULL;
block_p = prev_block_p )
block_p = prev_block_p)
{
VALGRIND_DEFINED_STRUCT( block_p);
VALGRIND_DEFINED_STRUCT(block_p);
JERRY_ASSERT( block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
chunk_sizes_sum += mem_get_block_chunks_count( block_p);
JERRY_ASSERT(block_p->magic_num == MEM_MAGIC_NUM_OF_FREE_BLOCK
|| block_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
chunk_sizes_sum += mem_get_block_chunks_count (block_p);
prev_block_p = block_p->neighbours[ MEM_DIRECTION_PREV ];
if ( block_p == mem_heap.first_block_p )
if (block_p == mem_heap.first_block_p)
{
is_first_block_was_met = true;
JERRY_ASSERT( prev_block_p == NULL );
JERRY_ASSERT(prev_block_p == NULL);
} else
{
JERRY_ASSERT( prev_block_p != NULL );
JERRY_ASSERT(prev_block_p != NULL);
}
VALGRIND_NOACCESS_STRUCT( block_p);
VALGRIND_NOACCESS_STRUCT(block_p);
}
JERRY_ASSERT( chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size );
JERRY_ASSERT( is_first_block_was_met );
JERRY_ASSERT(chunk_sizes_sum * MEM_HEAP_CHUNK_SIZE == mem_heap.heap_size);
JERRY_ASSERT(is_first_block_was_met);
#endif /* !JERRY_NDEBUG */
} /* mem_check_heap */
@@ -677,7 +677,7 @@ mem_check_heap( void)
* Get heap memory usage statistics
*/
void
mem_heap_get_stats( mem_heap_stats_t *out_heap_stats_p) /**< out: heap stats */
mem_heap_get_stats (mem_heap_stats_t *out_heap_stats_p) /**< out: heap stats */
{
*out_heap_stats_p = mem_heap_stats;
} /* mem_heap_get_stats */
@@ -686,9 +686,9 @@ mem_heap_get_stats( mem_heap_stats_t *out_heap_stats_p) /**< out: heap stats */
* Initalize heap memory usage statistics account structure
*/
static void
mem_heap_stat_init()
mem_heap_stat_init ()
{
__memset( &mem_heap_stats, 0, sizeof (mem_heap_stats));
__memset (&mem_heap_stats, 0, sizeof (mem_heap_stats));
mem_heap_stats.size = mem_heap.heap_size;
mem_heap_stats.blocks = 1;
@@ -698,11 +698,11 @@ mem_heap_stat_init()
* Account block allocation
*/
static void
mem_heap_stat_alloc_block( mem_block_header_t *block_header_p) /**< allocated block */
mem_heap_stat_alloc_block (mem_block_header_t *block_header_p) /**< allocated block */
{
JERRY_ASSERT( block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
JERRY_ASSERT(block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
const size_t chunks = mem_get_block_chunks_count( block_header_p);
const size_t chunks = mem_get_block_chunks_count (block_header_p);
const size_t bytes = block_header_p->allocated_bytes;
const size_t waste_bytes = chunks * MEM_HEAP_CHUNK_SIZE - bytes;
@@ -711,51 +711,51 @@ mem_heap_stat_alloc_block( mem_block_header_t *block_header_p) /**< allocated bl
mem_heap_stats.allocated_bytes += bytes;
mem_heap_stats.waste_bytes += waste_bytes;
if ( mem_heap_stats.allocated_blocks > mem_heap_stats.peak_allocated_blocks )
if (mem_heap_stats.allocated_blocks > mem_heap_stats.peak_allocated_blocks)
{
mem_heap_stats.peak_allocated_blocks = mem_heap_stats.allocated_blocks;
}
if ( mem_heap_stats.allocated_chunks > mem_heap_stats.peak_allocated_chunks )
if (mem_heap_stats.allocated_chunks > mem_heap_stats.peak_allocated_chunks)
{
mem_heap_stats.peak_allocated_chunks = mem_heap_stats.allocated_chunks;
}
if ( mem_heap_stats.allocated_bytes > mem_heap_stats.peak_allocated_bytes )
if (mem_heap_stats.allocated_bytes > mem_heap_stats.peak_allocated_bytes)
{
mem_heap_stats.peak_allocated_bytes = mem_heap_stats.allocated_bytes;
}
if ( mem_heap_stats.waste_bytes > mem_heap_stats.peak_waste_bytes )
if (mem_heap_stats.waste_bytes > mem_heap_stats.peak_waste_bytes)
{
mem_heap_stats.peak_waste_bytes = mem_heap_stats.waste_bytes;
}
JERRY_ASSERT( mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks );
JERRY_ASSERT( mem_heap_stats.allocated_bytes <= mem_heap_stats.size );
JERRY_ASSERT( mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE );
JERRY_ASSERT(mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks);
JERRY_ASSERT(mem_heap_stats.allocated_bytes <= mem_heap_stats.size);
JERRY_ASSERT(mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE);
} /* mem_heap_stat_alloc_block */
/**
* Account block freeing
*/
static void
mem_heap_stat_free_block( mem_block_header_t *block_header_p) /**< block to be freed */
mem_heap_stat_free_block (mem_block_header_t *block_header_p) /**< block to be freed */
{
JERRY_ASSERT( block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK );
JERRY_ASSERT(block_header_p->magic_num == MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK);
const size_t chunks = mem_get_block_chunks_count( block_header_p);
const size_t chunks = mem_get_block_chunks_count (block_header_p);
const size_t bytes = block_header_p->allocated_bytes;
const size_t waste_bytes = chunks * MEM_HEAP_CHUNK_SIZE - bytes;
JERRY_ASSERT( mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks );
JERRY_ASSERT( mem_heap_stats.allocated_bytes <= mem_heap_stats.size );
JERRY_ASSERT( mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE );
JERRY_ASSERT(mem_heap_stats.allocated_blocks <= mem_heap_stats.blocks);
JERRY_ASSERT(mem_heap_stats.allocated_bytes <= mem_heap_stats.size);
JERRY_ASSERT(mem_heap_stats.allocated_chunks <= mem_heap_stats.size / MEM_HEAP_CHUNK_SIZE);
JERRY_ASSERT( mem_heap_stats.allocated_blocks >= 1 );
JERRY_ASSERT( mem_heap_stats.allocated_chunks >= chunks );
JERRY_ASSERT( mem_heap_stats.allocated_bytes >= bytes );
JERRY_ASSERT( mem_heap_stats.waste_bytes >= waste_bytes );
JERRY_ASSERT(mem_heap_stats.allocated_blocks >= 1);
JERRY_ASSERT(mem_heap_stats.allocated_chunks >= chunks);
JERRY_ASSERT(mem_heap_stats.allocated_bytes >= bytes);
JERRY_ASSERT(mem_heap_stats.waste_bytes >= waste_bytes);
mem_heap_stats.allocated_blocks--;
mem_heap_stats.allocated_chunks -= chunks;
@@ -767,7 +767,7 @@ mem_heap_stat_free_block( mem_block_header_t *block_header_p) /**< block to be f
* Account free block split
*/
static void
mem_heap_stat_free_block_split( void)
mem_heap_stat_free_block_split (void)
{
mem_heap_stats.blocks++;
} /* mem_heap_stat_free_block_split */
@@ -776,7 +776,7 @@ mem_heap_stat_free_block_split( void)
* Account free block merge
*/
static void
mem_heap_stat_free_block_merge( void)
mem_heap_stat_free_block_merge (void)
{
mem_heap_stats.blocks--;
} /* mem_heap_stat_free_block_merge */