Add support for floating-point literals

This commit is contained in:
Ilmir Usmanov
2014-08-12 22:27:12 +04:00
parent eee1daec91
commit 0a68baa479
10 changed files with 144 additions and 81 deletions
+71 -26
View File
@@ -14,19 +14,23 @@
*/
#include "mem-allocator.h"
#include "globals.h"
#include "jerry-libc.h"
#include "lexer.h"
#include "parser.h"
static token saved_token;
static token empty_token = { .type = TOK_EMPTY, .data.uid = 0 };
static token empty_token =
{
.type =
TOK_EMPTY,
.data.uid = 0
};
static bool allow_dump_lines = false;
static size_t buffer_size = 0;
typedef struct
{
int num;
ecma_number_t num;
token tok;
}
num_and_token;
@@ -35,12 +39,8 @@ num_and_token;
static uint8_t seen_names_count = 0;
static num_and_token seen_nums[MAX_NUMS] =
{
[0] = { .num = 0, .tok = { .type = TOK_INT, .data.uid = 0 } },
[1] = { .num = 1, .tok = { .type = TOK_INT, .data.uid = 1 } }
};
static uint8_t seen_nums_count = 2;
static num_and_token seen_nums[MAX_NUMS];
static uint8_t seen_nums_count = 0;
static bool
is_empty (token tok)
@@ -574,12 +574,13 @@ add_token_to_seen_names (token_type tt, const char *string)
}
static token
convert_seen_num_to_token (int num)
convert_seen_num_to_token (ecma_number_t num)
{
size_t i;
for (i = 0; i < seen_nums_count; i++)
{
// token must be exactly the same as seen
if (seen_nums[i].num == num)
{
return seen_nums[i].tok;
@@ -636,7 +637,7 @@ lexer_get_string_by_id (uint8_t id)
}
uint8_t
lexer_get_nums (int32_t *nums)
lexer_get_nums (ecma_number_t *nums)
{
int i;
@@ -868,7 +869,16 @@ parse_number (void)
token_start = NULL;
known_token = convert_seen_num_to_token (res);
if (res <= 255)
{
return (token)
{
.type = TOK_SMALL_INT,
.data.uid = (uint8_t) res
};
}
known_token = convert_seen_num_to_token ((ecma_number_t) res);
if (!is_empty (known_token))
{
return known_token;
@@ -876,12 +886,16 @@ parse_number (void)
known_token = (token)
{
.type = TOK_INT, .data.uid = seen_nums_count
.type = TOK_NUMBER,
.data.uid = seen_nums_count
};
add_num_to_seen_tokens ((num_and_token)
{
.num = res, .tok = known_token
});
add_num_to_seen_tokens (
(num_and_token)
{
.num = (ecma_number_t) res,
.tok = known_token
}
);
return known_token;
}
@@ -948,10 +962,28 @@ parse_number (void)
if (is_fp || is_exp)
{
float res = __strtof (token_start, NULL);
ecma_number_t res = __strtof (token_start, NULL);
token_start = NULL;
JERRY_UNIMPLEMENTED_REF_UNUSED_VARS (res);
return empty_token;
known_token = convert_seen_num_to_token (res);
if (!is_empty (known_token))
{
return known_token;
}
known_token = (token)
{
.type = TOK_NUMBER,
.data.uid = seen_nums_count
};
add_num_to_seen_tokens (
(num_and_token)
{
.num = res,
.tok = known_token
}
);
return known_token;
}
tok_length = (size_t) (buffer - token_start);;
@@ -962,7 +994,16 @@ parse_number (void)
token_start = NULL;
known_token = convert_seen_num_to_token (res);
if (res <= 255)
{
return (token)
{
.type = TOK_SMALL_INT,
.data.uid = (uint8_t) res
};
}
known_token = convert_seen_num_to_token ((ecma_number_t) res);
if (!is_empty (known_token))
{
return known_token;
@@ -970,12 +1011,16 @@ parse_number (void)
known_token = (token)
{
.type = TOK_INT, .data.uid = seen_nums_count
.type = TOK_NUMBER,
.data.uid = seen_nums_count
};
add_num_to_seen_tokens ((num_and_token)
{
.num = res, .tok = known_token
});
add_num_to_seen_tokens (
(num_and_token)
{
.num = (ecma_number_t) res,
.tok = known_token
}
);
return known_token;
}
+6 -5
View File
@@ -17,7 +17,7 @@
#define LEXER_H
#include "globals.h"
#include "ecma-globals.h"
/* Keywords. */
typedef uint8_t keyword;
@@ -65,8 +65,8 @@ typedef uint8_t token_type;
#define TOK_EOF 0 // End of file
#define TOK_NAME 1 // Identifier
#define TOK_KEYWORD 2 // Keyword
#define TOK_INT 3
#define TOK_FLOAT 4
#define TOK_SMALL_INT 3
#define TOK_NUMBER 4
#define TOK_NULL 5
#define TOK_BOOL 6
@@ -144,7 +144,8 @@ typedef struct
uint8_t uid;
}
data;
} __packed
}
__packed
token;
void lexer_init (const char *, size_t, bool);
@@ -156,7 +157,7 @@ void lexer_dump_buffer_state (void);
uint8_t lexer_get_strings (const char **);
uint8_t lexer_get_reserved_ids_count (void);
const char *lexer_get_string_by_id (uint8_t);
uint8_t lexer_get_nums (int32_t *);
uint8_t lexer_get_nums (ecma_number_t *);
void lexer_adjust_num_ids (void);
#endif
+53 -36
View File
@@ -38,7 +38,8 @@ typedef struct
uint8_t size;
uint8_t head;
opcode_counter_t *oc_stack;
} __packed
}
__packed
rewritable_opcode;
#define NESTING_ITERATIONAL 1
@@ -160,7 +161,7 @@ is_keyword (keyword kw)
}
static void
current_token_must_be(token_type tt)
current_token_must_be (token_type tt)
{
if (tok.type != tt)
{
@@ -296,10 +297,10 @@ integer_zero (void)
}
static T_IDX
integer_one (void)
boolean_true (void)
{
T_IDX lhs = next_temp_name ();
DUMP_OPCODE_3 (assignment, lhs, OPCODE_ARG_TYPE_SMALLINT, 1);
DUMP_OPCODE_3 (assignment, lhs, OPCODE_ARG_TYPE_SIMPLE, ECMA_SIMPLE_VALUE_TRUE);
return lhs;
}
@@ -416,13 +417,20 @@ dump_intrinsic (T_IDX obj, T_IDX args[3])
static T_IDX
parse_property_name (void)
{
T_IDX lhs;
switch (tok.type)
{
case TOK_NAME:
case TOK_STRING:
case TOK_INT:
case TOK_NUMBER:
return tok.data.uid;
case TOK_SMALL_INT:
lhs = next_temp_name ();
DUMP_OPCODE_3 (assignment, lhs, OPCODE_ARG_TYPE_SMALLINT, tok.data.uid);
return lhs;
default:
JERRY_UNREACHABLE ();
}
@@ -845,7 +853,7 @@ parse_function_expression (void)
T_IDX name, lhs;
opcode_counter_t jmp_oc;
assert_keyword (KW_FUNCTION);
assert_keyword (KW_FUNCTION);
skip_newlines ();
if (tok.type == TOK_NAME)
@@ -915,11 +923,16 @@ parse_literal (void)
tok.data.uid ? ECMA_SIMPLE_VALUE_TRUE : ECMA_SIMPLE_VALUE_FALSE);
return lhs;
case TOK_INT:
case TOK_NUMBER:
lhs = next_temp_name ();
DUMP_OPCODE_3 (assignment, lhs, OPCODE_ARG_TYPE_NUMBER, tok.data.uid);
return lhs;
case TOK_SMALL_INT:
lhs = next_temp_name ();
DUMP_OPCODE_3 (assignment, lhs, OPCODE_ARG_TYPE_SMALLINT, tok.data.uid);
return lhs;
case TOK_STRING:
lhs = next_temp_name ();
DUMP_OPCODE_3 (assignment, lhs, OPCODE_ARG_TYPE_STRING, tok.data.uid);
@@ -949,34 +962,38 @@ parse_primary_expression (void)
DUMP_OPCODE_1 (this, lhs);
return lhs;
}
else if (tok.type == TOK_NAME)
switch (tok.type)
{
return tok.data.uid;
case TOK_NAME:
return tok.data.uid;
case TOK_NULL:
case TOK_BOOL:
case TOK_SMALL_INT:
case TOK_NUMBER:
case TOK_STRING:
return parse_literal ();
case TOK_OPEN_SQUARE:
return parse_array_literal ();
case TOK_OPEN_BRACE:
return parse_object_literal ();
case TOK_OPEN_PAREN:
skip_newlines ();
if (tok.type != TOK_CLOSE_PAREN)
{
lhs = parse_expression ();
token_after_newlines_must_be (TOK_CLOSE_PAREN);
return lhs;
}
// FALLTHRU
default:
JERRY_UNREACHABLE ();
}
else if (tok.type == TOK_NULL || tok.type == TOK_BOOL
|| tok.type == TOK_INT || tok.type == TOK_STRING)
{
return parse_literal ();
}
else if (tok.type == TOK_OPEN_SQUARE)
{
return parse_array_literal ();
}
else if (tok.type == TOK_OPEN_BRACE)
{
return parse_object_literal ();
}
else if (tok.type == TOK_OPEN_PAREN)
{
skip_newlines ();
if (tok.type != TOK_CLOSE_PAREN)
{
lhs = parse_expression ();
token_after_newlines_must_be (TOK_CLOSE_PAREN);
return lhs;
}
}
JERRY_UNREACHABLE ();
}
/* member_expression
@@ -1677,7 +1694,7 @@ parse_for_or_for_in_statement (void)
T_IDX stop;
opcode_counter_t cond_oc, body_oc, step_oc, end_oc;
assert_keyword (KW_FOR);
assert_keyword (KW_FOR);
token_after_newlines_must_be (TOK_OPEN_PAREN);
skip_newlines ();
@@ -1771,7 +1788,7 @@ plain_for:
}
else
{
stop = integer_one ();
stop = boolean_true ();
}
end_oc = opcode_counter;
@@ -2239,5 +2256,5 @@ parser_fatal (jerry_status_t code)
__printf ("FATAL: %d\n", code);
lexer_dump_buffer_state ();
jerry_exit( code);
jerry_exit (code);
}