Move the literal conversion logic from the tokenizer to the parser

gingerBill
2021-08-02 00:53:45 +01:00
parent 97be36d18a
commit 7f3d4cb504
4 changed files with 44 additions and 44 deletions
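
In outline: the tokenizer used to unquote string and rune literals itself (and track the resulting heap allocations), while after this commit it only records the raw, still-quoted source text; the parser now performs the unquoting and constant conversion when it builds Ast_BasicLit nodes, via the new exact_value_from_token below. A minimal standalone sketch of that split, using hypothetical simplified types (Tok, Exact, lex_string_literal, exact_value_from_tok) rather than the compiler's own Token/ExactValue/String:

    #include <cassert>
    #include <string>
    #include <variant>

    // Hypothetical stand-ins for the compiler's Token/ExactValue types.
    enum class TokKind { String, Integer };
    struct Tok { TokKind kind; std::string text; }; // raw source slice, quotes included

    using Exact = std::variant<std::monostate, std::string, long long>;

    // Tokenizer side after this commit: just record the raw, still-quoted text.
    Tok lex_string_literal(const std::string &src) {
        return Tok{TokKind::String, src}; // no unquoting here any more
    }

    // Parser side after this commit: unquote and convert when the AST node is built.
    Exact exact_value_from_tok(const Tok &t) {
        switch (t.kind) {
        case TokKind::String:
            // crude stand-in for unquote_string: strip the surrounding quotes
            return t.text.substr(1, t.text.size() - 2);
        case TokKind::Integer:
            return std::stoll(t.text);
        }
        return {};
    }

    int main() {
        Tok t = lex_string_literal("\"file\"");
        Exact v = exact_value_from_tok(t);
        assert(std::get<std::string>(v) == "file");
    }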

View File

@@ -2521,7 +2521,7 @@ DECL_ATTRIBUTE_PROC(foreign_block_decl_attribute) {
         if (ev.kind == ExactValue_String) {
             auto cc = string_to_calling_convention(ev.value_string);
             if (cc == ProcCC_Invalid) {
-                error(elem, "Unknown procedure calling convention: '%.*s'\n", LIT(ev.value_string));
+                error(elem, "Unknown procedure calling convention: '%.*s'", LIT(ev.value_string));
             } else {
                 c->foreign_context.default_cc = cc;
             }
@@ -2533,7 +2533,7 @@ DECL_ATTRIBUTE_PROC(foreign_block_decl_attribute) {
         if (ev.kind == ExactValue_String) {
             String link_prefix = ev.value_string;
             if (!is_foreign_name_valid(link_prefix)) {
-                error(elem, "Invalid link prefix: '%.*s'\n", LIT(link_prefix));
+                error(elem, "Invalid link prefix: '%.*s'", LIT(link_prefix));
             } else {
                 c->foreign_context.link_prefix = link_prefix;
             }
@@ -3138,7 +3138,10 @@ void check_collect_value_decl(CheckerContext *c, Ast *decl) {
     bool success = false;
     if (value != nullptr) {
         if (value->kind == Ast_BasicLit && value->BasicLit.token.kind == Token_String) {
-            String v = value->BasicLit.token.string;
+            String v = {};
+            if (value->tav.value.kind == ExactValue_String) {
+                v = value->tav.value.value_string;
+            }
             if (v == "file") {
                 kind = EntityVisiblity_PrivateToFile;
                 success = true;
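
Because the token text now keeps its quotes, check_collect_value_decl reads the attribute value from the constant the parser stored in value->tav.value, guarding on its kind, rather than from the raw token string. A small sketch of that guarded-read pattern, with invented ValueKind/ConstValue stand-ins for the checker's ExactValue:

    #include <cassert>
    #include <string>

    // Invented stand-ins for the checker's tagged constant value.
    enum class ValueKind { Invalid, String, Integer };
    struct ConstValue {
        ValueKind   kind = ValueKind::Invalid;
        std::string value_string; // only meaningful when kind == String
    };

    // Mirrors the guarded read above: default to an empty string unless the
    // constant really is a string, instead of trusting raw token text.
    std::string private_scope_from(const ConstValue &v) {
        std::string s;
        if (v.kind == ValueKind::String) {
            s = v.value_string;
        }
        return s;
    }

    int main() {
        assert(private_scope_from({ValueKind::String, "file"}) == "file");
        assert(private_scope_from({ValueKind::Integer, "42"}).empty());
    }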

View File

@@ -297,13 +297,13 @@ ExactValue exact_value_float_from_string(String string) {
 }
-ExactValue exact_value_from_basic_literal(Token token) {
-    switch (token.kind) {
-    case Token_String: return exact_value_string(token.string);
-    case Token_Integer: return exact_value_integer_from_string(token.string);
-    case Token_Float: return exact_value_float_from_string(token.string);
+ExactValue exact_value_from_basic_literal(TokenKind kind, String const &string) {
+    switch (kind) {
+    case Token_String: return exact_value_string(string);
+    case Token_Integer: return exact_value_integer_from_string(string);
+    case Token_Float: return exact_value_float_from_string(string);
     case Token_Imag: {
-        String str = token.string;
+        String str = string;
         Rune last_rune = cast(Rune)str[str.len-1];
         str.len--; // Ignore the 'i|j|k'
         f64 imag = float_from_string(str);
@@ -317,7 +317,7 @@ ExactValue exact_value_from_basic_literal(Token token) {
     }
     case Token_Rune: {
         Rune r = GB_RUNE_INVALID;
-        utf8_decode(token.string.text, token.string.len, &r);
+        utf8_decode(string.text, string.len, &r);
         return exact_value_i64(r);
     }
     default:
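
The new signature decouples constant construction from the Token struct: callers pass the token kind plus an already-unquoted string. The Token_Imag branch above is a small self-contained algorithm: drop the trailing 'i', 'j', or 'k' and parse the remainder as an ordinary float. A standalone illustration (parse_imag_literal is a hypothetical stand-in, not the compiler's API):

    #include <cassert>
    #include <cmath>
    #include <string>

    // Hypothetical standalone equivalent of the Token_Imag branch: an imaginary
    // literal such as "2.5i" is handled by dropping the trailing 'i'/'j'/'k'
    // and reading the rest as an ordinary float.
    double parse_imag_literal(std::string s) {
        char last = s.back();
        assert(last == 'i' || last == 'j' || last == 'k');
        s.pop_back();        // ignore the 'i|j|k'
        return std::stod(s); // imaginary component as a double
    }

    int main() {
        assert(std::fabs(parse_imag_literal("2.5i") - 2.5) < 1e-12);
        assert(std::fabs(parse_imag_literal("7j") - 7.0) < 1e-12);
    }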

View File

@@ -583,12 +583,38 @@ Ast *ast_undef(AstFile *f, Token token) {
     return result;
 }
+ExactValue exact_value_from_token(AstFile *f, Token const &token) {
+    String s = token.string;
+    switch (token.kind) {
+    case Token_Rune:
+        if (!unquote_string(ast_allocator(f), &s, 0)) {
+            syntax_error(token, "Invalid rune literal");
+        }
+        break;
+    case Token_String:
+        if (!unquote_string(ast_allocator(f), &s, 0, s.text[0] == '`')) {
+            syntax_error(token, "Invalid string literal");
+        }
+        break;
+    }
+    return exact_value_from_basic_literal(token.kind, s);
+}
+
+String string_value_from_token(AstFile *f, Token const &token) {
+    ExactValue value = exact_value_from_token(f, token);
+    String str = {};
+    if (value.kind == ExactValue_String) {
+        str = value.value_string;
+    }
+    return str;
+}
+
 Ast *ast_basic_lit(AstFile *f, Token basic_lit) {
     Ast *result = alloc_ast_node(f, Ast_BasicLit);
     result->BasicLit.token = basic_lit;
     result->tav.mode = Addressing_Constant;
-    result->tav.value = exact_value_from_basic_literal(basic_lit);
+    result->tav.value = exact_value_from_token(f, basic_lit);
     return result;
 }
@@ -3227,9 +3253,9 @@ Ast *parse_proc_type(AstFile *f, Token proc_token) {
     ProcCallingConvention cc = ProcCC_Invalid;
     if (f->curr_token.kind == Token_String) {
         Token token = expect_token(f, Token_String);
-        auto c = string_to_calling_convention(token.string);
+        auto c = string_to_calling_convention(string_value_from_token(f, token));
         if (c == ProcCC_Invalid) {
-            syntax_error(token, "Unknown procedure calling convention: '%.*s'\n", LIT(token.string));
+            syntax_error(token, "Unknown procedure calling convention: '%.*s'", LIT(token.string));
         } else {
             cc = c;
         }
@@ -5114,7 +5140,7 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *>
     } else if (node->kind == Ast_ImportDecl) {
         ast_node(id, ImportDecl, node);
-        String original_string = string_trim_whitespace(id->relpath.string);
+        String original_string = string_trim_whitespace(string_value_from_token(f, id->relpath));
         String import_path = {};
         bool ok = determine_path_from_string(&p->file_decl_mutex, node, base_dir, original_string, &import_path);
         if (!ok) {
@@ -5134,7 +5160,7 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *>
     auto fullpaths = array_make<String>(permanent_allocator(), 0, fl->filepaths.count);
     for_array(fp_idx, fl->filepaths) {
-        String file_str = fl->filepaths[fp_idx].string;
+        String file_str = string_trim_whitespace(string_value_from_token(f, fl->filepaths[fp_idx]));
         String fullpath = file_str;
         if (allow_check_foreign_filepath()) {
             String foreign_path = {};
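
The parse_proc_type and parse_setup_file_decls changes follow from the same move: the token's string now still carries its surrounding quotes, so anything compared against plain names (calling conventions, import paths, foreign file paths) has to go through string_value_from_token first. A hypothetical sketch of the mismatch being avoided (calling_convention_from and unquoted are invented stand-ins):

    #include <cassert>
    #include <string>

    // Invented stand-in for string_to_calling_convention: match plain names only.
    int calling_convention_from(const std::string &name) {
        if (name == "c")           return 1;
        if (name == "contextless") return 2;
        return 0; // invalid
    }

    // Invented stand-in for string_value_from_token: strip the quotes that the
    // tokenizer now leaves in place.
    std::string unquoted(const std::string &raw) {
        return (raw.size() >= 2) ? raw.substr(1, raw.size() - 2) : raw;
    }

    int main() {
        std::string raw = "\"c\"";                           // token text, quotes included
        assert(calling_convention_from(raw) == 0);           // raw text no longer matches
        assert(calling_convention_from(unquoted(raw)) == 1); // unquoted value does
    }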

View File

@@ -717,7 +717,6 @@ struct Tokenizer {
     i32 line_count;
     i32 error_count;
-    Array<String> allocated_strings;
     TokenizerFlags flags;
     bool insert_semicolon;
@@ -806,12 +805,6 @@ void init_tokenizer_with_file_contents(Tokenizer *t, String const &fullpath, gbF
     if (t->curr_rune == GB_RUNE_BOM) {
         advance_to_next_rune(t); // Ignore BOM at file beginning
     }
-    if (t->allocated_strings.count != 0) {
-        array_clear(&t->allocated_strings);
-    } else {
-        array_init(&t->allocated_strings, heap_allocator());
-    }
 }
 TokenizerInitError init_tokenizer(Tokenizer *t, String const &fullpath, TokenizerFlags flags = TokenizerFlag_None) {
@@ -857,10 +850,6 @@ gb_inline void destroy_tokenizer(Tokenizer *t) {
     if (t->start != nullptr) {
         gb_free(heap_allocator(), t->start);
     }
-    for_array(i, t->allocated_strings) {
-        gb_free(heap_allocator(), t->allocated_strings[i].text);
-    }
-    array_free(&t->allocated_strings);
 }
 gb_inline i32 digit_value(Rune r) {
@@ -1237,15 +1226,6 @@ void tokenizer_get_token(Tokenizer *t, Token *token, int repeat=0) {
             tokenizer_err(t, "Invalid rune literal");
         }
         token->string.len = t->curr - token->string.text;
-        success = unquote_string(heap_allocator(), &token->string, 0);
-        if (success > 0) {
-            if (success == 2) {
-                array_add(&t->allocated_strings, token->string);
-            }
-        } else {
-            tokenizer_err(t, "Invalid rune literal");
-        }
         goto semicolon_check;
     } break;
@@ -1288,15 +1268,6 @@ void tokenizer_get_token(Tokenizer *t, Token *token, int repeat=0) {
             }
         }
         token->string.len = t->curr - token->string.text;
-        success = unquote_string(heap_allocator(), &token->string, 0, has_carriage_return);
-        if (success > 0) {
-            if (success == 2) {
-                array_add(&t->allocated_strings, token->string);
-            }
-        } else {
-            tokenizer_err(t, "Invalid string literal");
-        }
         goto semicolon_check;
     } break;
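
With unquoting gone from the tokenizer, every token string is just a view into the source buffer, so the allocated_strings bookkeeping removed above has nothing left to track; any allocation for an unquoted value now happens on the parser side (via ast_allocator(f)). A minimal sketch of that ownership split, using invented types rather than the compiler's gb allocator API:

    #include <cassert>
    #include <string>
    #include <string_view>
    #include <vector>

    // Tokenizer output after this commit: a view into the source buffer, never a copy.
    struct RawToken { std::string_view text; };

    std::vector<RawToken> lex(std::string_view src) {
        // trivial stand-in lexer: one token spanning the whole input
        return {RawToken{src}};
    }

    // Parser side: the only place an unquoted copy is materialized.
    std::string parse_string_value(const RawToken &t) {
        std::string_view s = t.text;
        if (s.size() >= 2 && s.front() == '"' && s.back() == '"') {
            s.remove_prefix(1);
            s.remove_suffix(1);
        }
        return std::string(s); // allocation owned by the parser/AST side
    }

    int main() {
        std::string source = "\"hellope\"";
        std::vector<RawToken> tokens = lex(source);
        assert(parse_string_value(tokens[0]) == "hellope");
    }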