Rewrite serializer and lexer: adjust numeric-literal ids; dump strings, numbers and opcode rewrites

This commit is contained in:
Ilmir Usmanov
2014-07-23 17:36:11 +04:00
parent 1b2a52da51
commit f264d24b5a
7 changed files with 117 additions and 39 deletions
+29 -8
View File
@@ -88,7 +88,7 @@ num_and_token;
#define MAX_NAMES 100
#define MAX_NUMS 25
static string_and_token seen_names[MAX_NAMES];
static string_and_token seen_names[MAX_NUMS];
static uint8_t seen_names_count = 0;
static num_and_token seen_nums[MAX_NAMES] =
@@ -254,17 +254,21 @@ uint8_t
/* Fill STRINGS (when non-NULL) with the name strings the lexer has seen
   so far, one pointer per entry, and return how many there are.
   Passing NULL queries the count without copying, so callers can size
   the array first.
   NOTE(review): the rendered diff interleaved the old and new loop here,
   leaving a duplicated `int i` declaration and a second copy of the loop;
   this is the single, well-formed version.  */
lexer_get_strings (const char **strings)
{
  if (strings)
    {
      int i;
      for (i = 0; i < seen_names_count; i++)
        {
          strings[i] = seen_names[i].str;
        }
    }

  return seen_names_count;
}
/* Number of ids the lexer has reserved in total: every recorded name
   plus every recorded numeric literal, truncated to uint8_t.  */
uint8_t
lexer_get_reserved_ids_count (void)
{
  uint8_t total = (uint8_t) (seen_names_count + seen_nums_count);
  return total;
}
const char *
lexer_get_string_by_id (uint8_t id)
{
@@ -278,14 +282,31 @@ lexer_get_nums (int *nums)
{
int i;
if (!nums)
return seen_nums_count;
for (i = 0; i < seen_nums_count; i++)
{
nums[i] = seen_nums[i].num;
}
return seen_nums_count;
}
/* Shift every numeric-literal uid upward by seen_names_count, so name ids
   occupy [0, seen_names_count) and numeric ids follow them.  The uids of
   the "true"/"false" keyword tokens receive the same shift — presumably
   they share the numeric id space; verify against the serializer.
   NOTE(review): assumes __strncmp returns 0 on a match, like strncmp.  */
void
lexer_adjust_num_ids (void)
{
  size_t idx;
  const size_t keywords_num = sizeof (keyword_tokens) / sizeof (string_and_token);

  for (idx = 0; idx < seen_nums_count; idx++)
    {
      seen_nums[idx].tok.data.uid = (uint8_t) (seen_nums[idx].tok.data.uid + seen_names_count);
    }

  for (idx = 0; idx < keywords_num; idx++)
    {
      if (__strncmp ("true", keyword_tokens[idx].str, 4) != 0
          && __strncmp ("false", keyword_tokens[idx].str, 5) != 0)
        {
          continue;
        }

      keyword_tokens[idx].tok.data.uid = (uint8_t) (keyword_tokens[idx].tok.data.uid + seen_names_count);
    }
}
static void
new_token (void)
{
+2
View File
@@ -156,7 +156,9 @@ token lexer_next_token (void);
void lexer_save_token (token);
void lexer_dump_buffer_state (void);
uint8_t lexer_get_strings (const char **);
uint8_t lexer_get_reserved_ids_count (void);
const char *lexer_get_string_by_id (uint8_t);
uint8_t lexer_get_nums (int *);
void lexer_adjust_num_ids (void);
#endif
+8 -7
View File
@@ -130,19 +130,19 @@ insert_semicolon (void)
do { skip_newlines (); ID = parse_##TYPE (); } while (0)
/* NOTE(review): the rendered diff interleaved old (serializer_dump_data /
   serializer_rewrite_data) and new (serializer_dump_opcode /
   serializer_rewrite_opcode) bodies, defining each macro twice.  Only the
   new forms are kept, matching the commit's serializer rewrite.  */

/* Emit a zero-operand opcode through the serializer and advance the
   opcode counter.  */
#define DUMP_VOID_OPCODE(GETOP) \
  do { opcode=getop_##GETOP (); serializer_dump_opcode (&opcode); opcode_counter++; } while (0)
/* Build an opcode from the given operands, emit it, advance the counter.  */
#define DUMP_OPCODE(GETOP, ...) \
  do { opcode=getop_##GETOP (__VA_ARGS__); serializer_dump_opcode (&opcode); opcode_counter++; } while (0)
/* Overwrite the already-emitted opcode at absolute position OC with a
   freshly built one (offset is relative to the current counter).  */
#define REWRITE_OPCODE(OC, GETOP, ...) \
  do { opcode=getop_##GETOP (__VA_ARGS__); serializer_rewrite_opcode ((int8_t) (OC - opcode_counter), &opcode); } while (0)
/* Allocate a fresh temporary, emit an assignment loading the small-integer
   constant 0 into it, and return the temporary's index.
   NOTE(review): the rendered diff interleaved the old (OPCODE_ARG_TYPE_NUMBER)
   and new (OPCODE_ARG_TYPE_SMALLINT) DUMP_OPCODE lines, emitting twice; only
   the new form is kept.  */
static T_IDX
integer_zero (void)
{
  T_IDX lhs = next_temp_name ();
  DUMP_OPCODE (assignment, lhs, OPCODE_ARG_TYPE_SMALLINT, 0);
  return lhs;
}
@@ -150,7 +150,7 @@ static T_IDX
integer_one (void)
{
T_IDX lhs = next_temp_name ();
DUMP_OPCODE (assignment, lhs, OPCODE_ARG_TYPE_NUMBER, 1);
DUMP_OPCODE (assignment, lhs, OPCODE_ARG_TYPE_SMALLINT, 1);
return lhs;
}
@@ -167,7 +167,7 @@ dump_saved_opcodes (void)
{
uint8_t i;
for (i = 0; i < current_opcode_in_buffer; i++)
serializer_dump_data (&opcodes_buffer[i], sizeof (OPCODE));
serializer_dump_opcode (&opcodes_buffer[i]);
current_opcode_in_buffer = 0;
}
@@ -1805,12 +1805,13 @@ parser_parse_program (void)
skip_newlines ();
JERRY_ASSERT (tok.type == TOK_EOF);
DUMP_OPCODE (exitval, 0);
}
void
parser_init (void)
{
temp_name = min_temp_name = lexer_get_strings (NULL);
temp_name = min_temp_name = lexer_get_reserved_ids_count ();
#ifdef __HOST
debug_file = __fopen ("parser.log", "w");
#endif