Increasing heap size to 256K.

This commit is contained in:
Ruben Ayrapetyan
2015-02-19 17:18:37 +03:00
parent 4e93ef421d
commit d4af5702a4
7 changed files with 91 additions and 48 deletions
+2 -2
View File
@@ -52,7 +52,7 @@
* Size of heap
*/
#ifndef CONFIG_MEM_HEAP_AREA_SIZE
# define CONFIG_MEM_HEAP_AREA_SIZE (64 * 1024)
# define CONFIG_MEM_HEAP_AREA_SIZE (256 * 1024)
#endif /* !CONFIG_MEM_HEAP_AREA_SIZE */
/**
@@ -65,7 +65,7 @@
*
* On the other hand, value 2 ^ CONFIG_MEM_HEAP_OFFSET_LOG should not be less than CONFIG_MEM_HEAP_AREA_SIZE.
*/
#define CONFIG_MEM_HEAP_OFFSET_LOG (16)
#define CONFIG_MEM_HEAP_OFFSET_LOG (18)
/**
* Number of lower bits in key of literal hash table.
-5
View File
@@ -27,11 +27,6 @@
#include "mem-allocator-internal.h"
/**
* Check that the heap area is less than or equal to 64K.
*/
JERRY_STATIC_ASSERT(MEM_HEAP_AREA_SIZE <= 64 * 1024);
/**
* Area for heap
*/
+10
View File
@@ -43,6 +43,16 @@
*/
#define MEM_COMPRESSED_POINTER_WIDTH (MEM_HEAP_OFFSET_LOG - MEM_ALIGNMENT_LOG)
/**
* Compressed pointer value mask
*/
#define MEM_COMPRESSED_POINTER_MASK ((1ull << MEM_COMPRESSED_POINTER_WIDTH) - 1)
/**
* Heap offset value mask
*/
#define MEM_HEAP_OFFSET_MASK ((1ull << MEM_HEAP_OFFSET_LOG) - 1)
/**
* Severity of a 'try give memory back' request
*
+2 -2
View File
@@ -1,4 +1,4 @@
/* Copyright 2014 Samsung Electronics Co., Ltd.
/* Copyright 2014-2015 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -46,6 +46,6 @@
/**
* Logarithm of required alignment for allocated units/blocks
*/
#define MEM_ALIGNMENT_LOG 2
#define MEM_ALIGNMENT_LOG 3
#endif /* MEM_CONFIG_H */
+72 -34
View File
@@ -88,7 +88,13 @@ typedef enum
/**
* Offset in the heap
*/
#if MEM_HEAP_OFFSET_LOG <= 16
typedef uint16_t mem_heap_offset_t;
#elif MEM_HEAP_OFFSET_LOG <= 32
typedef uint32_t mem_heap_offset_t;
#else /* MEM_HEAP_OFFSET_LOG > 32 */
# error "MEM_HEAP_OFFSET_LOG > 32 is not supported"
#endif /* MEM_HEAP_OFFSET_LOG > 32 */
JERRY_STATIC_ASSERT (sizeof (mem_heap_offset_t) * JERRY_BITSINBYTE >= MEM_HEAP_OFFSET_LOG);
/**
@@ -96,19 +102,23 @@ JERRY_STATIC_ASSERT (sizeof (mem_heap_offset_t) * JERRY_BITSINBYTE >= MEM_HEAP_O
*/
typedef struct __attribute__ ((aligned (MEM_ALIGNMENT))) mem_block_header_t
{
mem_heap_offset_t allocated_bytes : MEM_HEAP_OFFSET_LOG; /**< allocated area size - for allocated blocks;
0 - for free blocks */
mem_heap_offset_t prev_p : MEM_HEAP_OFFSET_LOG; /**< previous block's offset;
* 0 - if the block is the first block */
mem_heap_offset_t next_p : MEM_HEAP_OFFSET_LOG; /**< next block's offset;
* 0 - if the block is the last block */
uint16_t magic_num; /**< magic number (mem_magic_num_of_block_t):
MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK for allocated block
or MEM_MAGIC_NUM_OF_FREE_BLOCK for free block */
mem_heap_offset_t allocated_bytes; /**< allocated area size - for allocated blocks;
0 - for free blocks */
mem_heap_offset_t neighbours[ MEM_DIRECTION_COUNT ]; /**< neighbour blocks' offsets;
0 - if the block is last in specified direction */
* MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK for allocated block
* or MEM_MAGIC_NUM_OF_FREE_BLOCK for free block */
} mem_block_header_t;
#if MEM_HEAP_OFFSET_LOG <= 16
/**
* Check that block header's size is not more than 8 bytes
*/
JERRY_STATIC_ASSERT (sizeof (mem_block_header_t) <= sizeof (uint64_t));
#endif /* MEM_HEAP_OFFSET_LOG <= 16 */
/**
* Chunk should have enough space for block header
@@ -186,7 +196,7 @@ mem_get_blocks_distance (const mem_block_header_t* block_p, /**< block to measur
{
JERRY_ASSERT (another_block_p >= block_p);
ssize_t distance = ((uint8_t*) another_block_p - (uint8_t*)block_p);
size_t distance = (size_t) ((uint8_t*) another_block_p - (uint8_t*)block_p);
JERRY_ASSERT (distance == (mem_heap_offset_t) distance);
@@ -198,10 +208,10 @@ mem_get_blocks_distance (const mem_block_header_t* block_p, /**< block to measur
*
* Note:
* If second_block_p is next neighbour of first_block_p,
* then first_block_p->neighbours[next] = ret_val
* second_block_p->neighbours[prev] = ret_val
* then first_block_p->next_p = ret_val
* second_block_p->prev_p = ret_val
*
* @return offset value for neighbours field
* @return offset value for prev_p / next_p field
*/
static mem_heap_offset_t
mem_get_block_neighbour_field (const mem_block_header_t* first_block_p, /**< first of the blocks
@@ -225,6 +235,42 @@ mem_get_block_neighbour_field (const mem_block_header_t* first_block_p, /**< fir
}
} /* mem_get_block_neighbour_field */
/**
 * Record the previous-neighbour link of a block
 */
static void
mem_set_block_prev (mem_block_header_t *block_p, /**< block to set previous for */
                    mem_block_header_t *prev_block_p) /**< previous block (or NULL) */
{
  /* Mask so the assignment fits the width of the prev_p field. */
  mem_heap_offset_t prev_offset = mem_get_block_neighbour_field (prev_block_p, block_p);

  block_p->prev_p = prev_offset & MEM_HEAP_OFFSET_MASK;
} /* mem_set_block_prev */
/**
 * Record the next-neighbour link of a block
 */
static void
mem_set_block_next (mem_block_header_t *block_p, /**< block to set next for */
                    mem_block_header_t *next_block_p) /**< next block or NULL */
{
  /* Mask so the assignment fits the width of the next_p field. */
  mem_heap_offset_t next_offset = mem_get_block_neighbour_field (block_p, next_block_p);

  block_p->next_p = next_offset & MEM_HEAP_OFFSET_MASK;
} /* mem_set_block_next */
/**
 * Record the number of allocated bytes in a block's header
 */
static void
mem_set_block_allocated_bytes (mem_block_header_t *block_p, /**< block to set allocated bytes field for */
                               size_t allocated_bytes) /**< number of bytes allocated in the block */
{
  const size_t masked_bytes = allocated_bytes & MEM_HEAP_OFFSET_MASK;

  block_p->allocated_bytes = masked_bytes;

  /* The masked store must not have lost information,
   * i.e. the value fits into the allocated_bytes field. */
  JERRY_ASSERT (block_p->allocated_bytes == allocated_bytes);
} /* mem_set_block_allocated_bytes */
/**
* Get block located at specified offset from specified block.
*
@@ -258,7 +304,7 @@ static mem_block_header_t*
mem_get_next_block_by_direction (const mem_block_header_t* block_p, /**< block */
mem_direction_t dir) /**< direction */
{
mem_heap_offset_t offset = block_p->neighbours[dir];
mem_heap_offset_t offset = (dir == MEM_DIRECTION_NEXT ? block_p->next_p : block_p->prev_p);
if (offset != 0)
{
return mem_get_block_by_offset (block_p,
@@ -392,11 +438,9 @@ mem_init_block_header (uint8_t *first_chunk_p, /**< address of the first
block_header_p->magic_num = MEM_MAGIC_NUM_OF_ALLOCATED_BLOCK;
}
block_header_p->neighbours[ MEM_DIRECTION_PREV ] = mem_get_block_neighbour_field (prev_block_p, block_header_p);
block_header_p->neighbours[ MEM_DIRECTION_NEXT ] = mem_get_block_neighbour_field (block_header_p, next_block_p);
JERRY_ASSERT (allocated_bytes == (mem_heap_offset_t) allocated_bytes);
block_header_p->allocated_bytes = (mem_heap_offset_t) allocated_bytes;
mem_set_block_prev (block_header_p, prev_block_p);
mem_set_block_next (block_header_p, next_block_p);
mem_set_block_allocated_bytes (block_header_p, allocated_bytes);
JERRY_ASSERT(allocated_bytes <= mem_get_block_data_space_size (block_header_p));
@@ -487,8 +531,7 @@ void* mem_heap_alloc_block_internal (size_t size_in_bytes, /**< size
VALGRIND_DEFINED_STRUCT(prev_block_p);
prev_block_p->neighbours[ MEM_DIRECTION_NEXT ] = mem_get_block_neighbour_field (prev_block_p,
block_p);
mem_set_block_next (prev_block_p, block_p);
VALGRIND_NOACCESS_STRUCT(prev_block_p);
@@ -500,8 +543,7 @@ void* mem_heap_alloc_block_internal (size_t size_in_bytes, /**< size
{
VALGRIND_DEFINED_STRUCT(next_block_p);
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = mem_get_block_neighbour_field (block_p,
next_block_p);
mem_set_block_prev (next_block_p, block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
@@ -525,9 +567,8 @@ void* mem_heap_alloc_block_internal (size_t size_in_bytes, /**< size
{
VALGRIND_DEFINED_STRUCT(next_block_p);
const mem_block_header_t* new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p;
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = mem_get_block_neighbour_field (new_free_block_p,
next_block_p);
mem_block_header_t* new_free_block_p = (mem_block_header_t*) new_free_block_first_chunk_p;
mem_set_block_prev (next_block_p, new_free_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
@@ -702,15 +743,12 @@ mem_heap_try_resize_block (void *ptr, /**< pointer to beginning of data space of
new_next_of_current_block_p = block_after_next_p;
}
block_p->neighbours[ MEM_DIRECTION_NEXT ] = mem_get_block_neighbour_field (block_p,
new_next_of_current_block_p);
mem_set_block_next (block_p, new_next_of_current_block_p);
if (block_after_next_p != NULL)
{
VALGRIND_DEFINED_STRUCT (block_after_next_p);
mem_heap_offset_t offset = mem_get_block_neighbour_field (new_prev_of_block_after_next_p,
block_after_next_p);
block_after_next_p->neighbours[ MEM_DIRECTION_PREV ] = offset;
mem_set_block_prev (block_after_next_p, new_prev_of_block_after_next_p);
VALGRIND_NOACCESS_STRUCT (block_after_next_p);
}
@@ -738,7 +776,7 @@ mem_heap_try_resize_block (void *ptr, /**< pointer to beginning of data space of
VALGRIND_UNDEFINED_SPACE (uint8_ptr + block_p->allocated_bytes, size_in_bytes - block_p->allocated_bytes);
}
block_p->allocated_bytes = (mem_heap_offset_t) size_in_bytes;
mem_set_block_allocated_bytes (block_p, size_in_bytes);
}
MEM_HEAP_STAT_ALLOC_BLOCK (block_p);
@@ -806,10 +844,10 @@ mem_heap_free_block (void *ptr) /**< pointer to beginning of data space of the b
VALGRIND_DEFINED_STRUCT(next_block_p);
block_p->neighbours[ MEM_DIRECTION_NEXT ] = mem_get_block_neighbour_field (block_p, next_block_p);
mem_set_block_next (block_p, next_block_p);
if (next_block_p != NULL)
{
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = mem_get_block_neighbour_field (block_p, next_block_p);
mem_set_block_prev (next_block_p, block_p);
}
else
{
@@ -829,13 +867,13 @@ mem_heap_free_block (void *ptr) /**< pointer to beginning of data space of the b
/* merge with the previous block */
MEM_HEAP_STAT_FREE_BLOCK_MERGE ();
prev_block_p->neighbours[ MEM_DIRECTION_NEXT ] = mem_get_block_neighbour_field (prev_block_p, next_block_p);
mem_set_block_next (prev_block_p, next_block_p);
if (next_block_p != NULL)
{
VALGRIND_DEFINED_STRUCT(next_block_p);
const mem_block_header_t* prev_block_p = mem_get_next_block_by_direction (block_p, MEM_DIRECTION_PREV);
next_block_p->neighbours[ MEM_DIRECTION_PREV ] = mem_get_block_neighbour_field (prev_block_p, next_block_p);
mem_block_header_t* prev_block_p = mem_get_next_block_by_direction (block_p, MEM_DIRECTION_PREV);
mem_set_block_prev (next_block_p, prev_block_p);
VALGRIND_NOACCESS_STRUCT(next_block_p);
}
+1 -1
View File
@@ -65,7 +65,7 @@ typedef struct __attribute__ ((aligned (MEM_ALIGNMENT))) mem_pool_state_t
unsigned int free_chunks_number : MEM_POOL_MAX_CHUNKS_NUMBER_LOG;
/** Pointer to the next pool with same chunk size */
unsigned int next_pool_cp : MEM_HEAP_OFFSET_LOG;
unsigned int next_pool_cp : MEM_COMPRESSED_POINTER_WIDTH;
} mem_pool_state_t;
extern void mem_pool_init (mem_pool_state_t *pool_p, size_t pool_size);
+4 -4
View File
@@ -116,7 +116,7 @@ mem_pools_alloc_longpath (void)
}
else
{
pool_state->next_pool_cp = (uint16_t) mem_compress_pointer (mem_pools);
pool_state->next_pool_cp = mem_compress_pointer (mem_pools) & MEM_COMPRESSED_POINTER_MASK;
}
mem_pools = pool_state;
@@ -145,7 +145,7 @@ mem_pools_alloc_longpath (void)
JERRY_ASSERT (prev_pool_state_p != NULL && pool_state != mem_pools);
prev_pool_state_p->next_pool_cp = pool_state->next_pool_cp;
pool_state->next_pool_cp = (uint16_t) mem_compress_pointer (mem_pools);
pool_state->next_pool_cp = mem_compress_pointer (mem_pools) & MEM_COMPRESSED_POINTER_MASK;
mem_pools = pool_state;
}
@@ -231,7 +231,7 @@ mem_pools_free (uint8_t *chunk_p) /**< pointer to the chunk */
mem_free_chunks_number -= MEM_POOL_CHUNKS_NUMBER;
mem_heap_free_block ((uint8_t*)pool_state);
mem_heap_free_block ((uint8_t*) pool_state);
MEM_POOLS_STAT_FREE_POOL ();
}
@@ -240,7 +240,7 @@ mem_pools_free (uint8_t *chunk_p) /**< pointer to the chunk */
JERRY_ASSERT (prev_pool_state_p != NULL);
prev_pool_state_p->next_pool_cp = pool_state->next_pool_cp;
pool_state->next_pool_cp = (uint16_t) mem_compress_pointer (mem_pools);
pool_state->next_pool_cp = mem_compress_pointer (mem_pools) & MEM_COMPRESSED_POINTER_MASK;
mem_pools = pool_state;
}
} /* mem_pools_free */