Fix problems arising from incorrect use of various size types
E.g.,
* `ssize_t` was used where `lit_utf8_size_t` or `jerry_api_size_t` would have been correct,
* `lit_utf8_size_t` was used where `ecma_length_t` would have been correct.

Note, the patch also includes internal and public API changes:
* `ecma_string_to_utf8_string` no longer returns a negative value if the output buffer is not large enough to contain the string; the buffer is expected to be large enough. (`ecma_string_get_size` can be used to retrieve the required size.)
* `jerry_api_string_to_char_buffer` adopts the same logic (and `jerry_api_get_string_size` can be used to determine the required size of the buffer).

Related issue: #942

JerryScript-DCO-1.0-Signed-off-by: Akos Kiss akiss@inf.u-szeged.hu
This commit is contained in:
+5
-8
@@ -192,17 +192,14 @@ print_value (const jerry_api_value_t * value_p)
|
||||
// String value
|
||||
case JERRY_API_DATA_TYPE_STRING:
|
||||
{
|
||||
ssize_t neg_req_sz, sz;
|
||||
jerry_api_size_t req_sz, sz;
|
||||
// determining required buffer size
|
||||
neg_req_sz = jerry_api_string_to_char_buffer (value_p->v_string,
|
||||
NULL,
|
||||
0);
|
||||
assert (neg_req_sz < 0);
|
||||
char * str_buf_p = (char*) malloc (-neg_req_sz);
|
||||
req_sz = jerry_api_get_string_size (value_p->v_string);
|
||||
char * str_buf_p = (char*) malloc (req_sz);
|
||||
sz = jerry_api_string_to_char_buffer (value_p->v_string,
|
||||
str_buf_p,
|
||||
-neg_req_sz);
|
||||
assert (sz == -neg_req_sz);
|
||||
req_sz);
|
||||
assert (sz == req_sz);
|
||||
|
||||
printf ("%s", str_buf_p);
|
||||
|
||||
|
||||
Reference in New Issue
Block a user