Improvements to -strict-style and trailing commas

Repository: https://github.com/odin-lang/Odin.git (mirror)

src/parser.cpp | 158
@@ -1,5 +1,8 @@
 #include "parser_pos.cpp"
 
+// #undef at the bottom of this file
+#define ALLOW_NEWLINE build_context.strict_style
+
 Token token_end_of_line(AstFile *f, Token tok) {
 	u8 const *start = f->tokenizer.start + tok.pos.offset;
 	u8 const *s = start;
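
The new ALLOW_NEWLINE macro gives the newline-sensitivity policy a single definition point: every site that previously assigned build_context.strict_style directly (and the one that hard-coded true) now reads the macro, and the #undef at the end of the file keeps it from leaking. A rough standalone sketch of the pattern, using simplified stand-in types rather than the compiler's own:

#include <cstdio>

// Simplified stand-ins for the compiler's build context and parser state.
struct BuildContext { bool strict_style; };
static BuildContext build_context = {true};

// Same idea as the commit: one macro, one place to change the policy.
#define ALLOW_NEWLINE build_context.strict_style

struct AstFile { int expr_level; bool allow_newline; };

// Mirrors what parse_call_expr / parse_field_list now do on entry.
static void enter_list(AstFile *f) {
	f->expr_level    = 0;
	f->allow_newline = ALLOW_NEWLINE;
}

int main() {
	AstFile f = {};
	enter_list(&f);
	std::printf("allow_newline = %d\n", f.allow_newline);
	return 0;
}

#undef ALLOW_NEWLINE
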
@@ -1278,15 +1281,10 @@ void consume_comment_groups(AstFile *f, Token prev) {
 	}
 }
 
-bool ignore_newlines(AstFile *f) {
-	if (f->allow_newline) {
-		return f->expr_level > 0;
-	}
-	return f->expr_level >= 0;
+gb_inline bool ignore_newlines(AstFile *f) {
+	return f->expr_level > 0;
 }
-
-
 
 Token advance_token(AstFile *f) {
 	f->lead_comment = nullptr;
 	f->line_comment = nullptr;
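
ignore_newlines loses its allow_newline special case: newlines are now skipped only inside a bracketed expression (expr_level > 0), never at statement level. A small self-contained comparison of the old and new predicates (stand-in AstFile, same logic as the two versions above):

#include <cassert>

struct AstFile { bool allow_newline; int expr_level; };

// Old behaviour: with allow_newline off, a newline at expr_level == 0
// (i.e. at statement level) was still ignored.
static bool ignore_newlines_old(AstFile *f) {
	if (f->allow_newline) {
		return f->expr_level > 0;
	}
	return f->expr_level >= 0;
}

// New behaviour: only ignore newlines nested inside an expression.
static bool ignore_newlines_new(AstFile *f) {
	return f->expr_level > 0;
}

int main() {
	AstFile f = {false, 0};
	assert(ignore_newlines_old(&f) == true);
	assert(ignore_newlines_new(&f) == false); // newline now ends the statement here
	return 0;
}
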
@@ -1462,7 +1460,7 @@ Token expect_closing_brace_of_field_list(AstFile *f) {
 		return token;
 	}
 	bool ok = true;
-	if (!build_context.strict_style) {
+	if (!f->allow_newline) {
 		ok = !skip_possible_newline(f);
 	}
 	if (ok && allow_token(f, Token_Semicolon)) {
@@ -1539,13 +1537,15 @@ void fix_advance_to_next_stmt(AstFile *f) {
 	}
 }
 
-Token expect_closing(AstFile *f, TokenKind kind, String context) {
+Token expect_closing(AstFile *f, TokenKind kind, String const &context) {
 	if (f->curr_token.kind != kind &&
 	    f->curr_token.kind == Token_Semicolon &&
 	    (f->curr_token.string == "\n" || f->curr_token.kind == Token_EOF)) {
-		Token tok = f->prev_token;
-		tok.pos.column += cast(i32)tok.string.len;
-		syntax_error(tok, "Missing ',' before newline in %.*s", LIT(context));
+		if (f->allow_newline) {
+			Token tok = f->prev_token;
+			tok.pos.column += cast(i32)tok.string.len;
+			syntax_error(tok, "Missing ',' before newline in %.*s", LIT(context));
+		}
 		advance_token(f);
 	}
 	return expect_token(f, kind);
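
expect_closing now reports "Missing ',' before newline" only when the file is currently treating newlines as separators (f->allow_newline, i.e. the strict-style paths); the stray newline token is consumed either way so parsing can recover. A reduced sketch of that control flow, with toy types in place of the real Token/AstFile machinery:

#include <cstdio>
#include <string>

// Toy stand-ins: curr holds the next token's text, allow_newline mirrors
// the parser flag set from ALLOW_NEWLINE at the call sites.
struct ToyParser { bool allow_newline; std::string curr; };

static void expect_closing_sketch(ToyParser *p, std::string const &context) {
	if (p->curr == "\n") {                 // newline where ',' or the closer was expected
		if (p->allow_newline) {            // diagnose only on the strict-style path
			std::printf("Missing ',' before newline in %s\n", context.c_str());
		}
		p->curr = ")";                     // consume it and continue (recovery)
	}
}

int main() {
	ToyParser strict = {true,  "\n"};
	ToyParser loose  = {false, "\n"};
	expect_closing_sketch(&strict, "argument list"); // prints the diagnostic
	expect_closing_sketch(&loose,  "argument list"); // recovers silently
	return 0;
}
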
@@ -1625,6 +1625,7 @@ Ast * parse_proc_type(AstFile *f, Token proc_token);
 Array<Ast *> parse_stmt_list(AstFile *f);
 Ast * parse_stmt(AstFile *f);
 Ast * parse_body(AstFile *f);
+Ast * parse_do_body(AstFile *f, Token const &token, char const *msg);
 Ast * parse_block_stmt(AstFile *f, b32 is_when);
 
 
@@ -1708,7 +1709,7 @@ Array<Ast *> parse_element_list(AstFile *f) {
 
 		array_add(&elems, elem);
 
-		if (!allow_token(f, Token_Comma)) {
+		if (!allow_field_separator(f)) {
 			break;
 		}
 	}
@@ -1744,7 +1745,7 @@ Array<Ast *> parse_enum_field_list(AstFile *f) {
 		Ast *elem = ast_enum_field_value(f, name, value, docs, comment);
 		array_add(&elems, elem);
 
-		if (!allow_token(f, Token_Comma)) {
+		if (!allow_field_separator(f)) {
 			break;
 		}
 
@@ -1785,14 +1786,14 @@ Ast *parse_value(AstFile *f) {
 Ast *parse_type_or_ident(AstFile *f);
 
 
-void check_proc_add_tag(AstFile *f, Ast *tag_expr, u64 *tags, ProcTag tag, String tag_name) {
+void check_proc_add_tag(AstFile *f, Ast *tag_expr, u64 *tags, ProcTag tag, String const &tag_name) {
 	if (*tags & tag) {
 		syntax_error(tag_expr, "Procedure tag already used: %.*s", LIT(tag_name));
 	}
 	*tags |= tag;
 }
 
-bool is_foreign_name_valid(String name) {
+bool is_foreign_name_valid(String const &name) {
 	if (name.len == 0) {
 		return false;
 	}
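
Several helpers in this commit, such as is_foreign_name_valid above, switch from taking String by value to String const &. The compiler's String is a small pointer-plus-length struct, so the change is about consistency and avoiding a needless copy; call sites are untouched. A minimal illustration with a stand-in String type:

#include <cstddef>
#include <cstdio>

// Stand-in for the compiler's String (roughly a pointer + length pair).
struct String { char const *text; size_t len; };

// Before: bool is_foreign_name_valid(String name);
// After (as in this commit): take the argument by const reference.
static bool is_foreign_name_valid(String const &name) {
	return name.len != 0;   // reduced body: an empty name is invalid
}

int main() {
	String name = {"glfw", 4};
	std::printf("valid = %d\n", is_foreign_name_valid(name));
	return 0;
}
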
@@ -1888,7 +1889,7 @@ Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKi
 Ast *parse_unary_expr(AstFile *f, bool lhs);
 
 
-Ast *convert_stmt_to_expr(AstFile *f, Ast *statement, String kind) {
+Ast *convert_stmt_to_expr(AstFile *f, Ast *statement, String const &kind) {
 	if (statement == nullptr) {
 		return nullptr;
 	}
@@ -2077,7 +2078,7 @@ Array<Ast *> parse_union_variant_list(AstFile *f) {
 		if (type->kind != Ast_BadExpr) {
 			array_add(&variants, type);
 		}
-		if (!allow_token(f, Token_Comma)) {
+		if (!allow_field_separator(f)) {
 			break;
 		}
 	}
@@ -2235,7 +2236,7 @@ Ast *parse_operand(AstFile *f, bool lhs) {
 			Ast *elem = parse_expr(f, false);
 			array_add(&args, elem);
 
-			if (!allow_token(f, Token_Comma)) {
+			if (!allow_field_separator(f)) {
 				break;
 			}
 		}
@@ -2742,7 +2743,7 @@ Ast *parse_call_expr(AstFile *f, Ast *operand) {
 	isize prev_expr_level = f->expr_level;
 	bool prev_allow_newline = f->allow_newline;
 	f->expr_level = 0;
-	f->allow_newline = build_context.strict_style;
+	f->allow_newline = ALLOW_NEWLINE;
 
 	open_paren = expect_token(f, Token_OpenParen);
 
@@ -2776,7 +2777,7 @@ Ast *parse_call_expr(AstFile *f, Ast *operand) {
 		}
 		array_add(&args, arg);
 
-		if (!allow_token(f, Token_Comma)) {
+		if (!allow_field_separator(f)) {
 			break;
 		}
 	}
@@ -3125,7 +3126,7 @@ Ast *parse_expr(AstFile *f, bool lhs) {
 
 Array<Ast *> parse_expr_list(AstFile *f, bool lhs) {
 	bool allow_newline = f->allow_newline;
-	f->allow_newline = true;
+	f->allow_newline = ALLOW_NEWLINE;
 
 	auto list = array_make<Ast *>(heap_allocator());
 	for (;;) {
@@ -3445,7 +3446,7 @@ Ast *parse_results(AstFile *f, bool *diverging) {
 }
 
 
-ProcCallingConvention string_to_calling_convention(String s) {
+ProcCallingConvention string_to_calling_convention(String const &s) {
 	if (s == "odin") return ProcCC_Odin;
 	if (s == "contextless") return ProcCC_Contextless;
 	if (s == "cdecl") return ProcCC_CDecl;
@@ -3726,12 +3727,12 @@ Array<Ast *> convert_to_ident_list(AstFile *f, Array<AstAndFlags> list, bool ign
 }
 
 
-bool parse_expect_field_separator(AstFile *f, Ast *param) {
+bool allow_field_separator(AstFile *f) {
 	Token token = f->curr_token;
 	if (allow_token(f, Token_Comma)) {
 		return true;
 	}
-	if (token.kind == Token_Semicolon) {
+	if (ALLOW_NEWLINE && token.kind == Token_Semicolon) {
 		String p = token_to_string(token);
 		syntax_error(token_end_of_line(f, f->prev_token), "Expected a comma, got a %.*s", LIT(p));
 		advance_token(f);
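
parse_expect_field_separator becomes allow_field_separator (it no longer needs the Ast *param argument), and its semicolon branch is now gated on ALLOW_NEWLINE, so an implicit-semicolon/newline between fields is only diagnosed as "Expected a comma, got a ..." under -strict-style. A self-contained sketch of that accept-or-diagnose shape; the post-diagnosis return value is an assumption here, since the rest of the function falls outside this hunk:

#include <cstdio>

static bool strict_style = true;           // stand-in for build_context.strict_style
#define ALLOW_NEWLINE strict_style

enum TokenKind { Token_Comma, Token_Semicolon, Token_Other };
struct ToyFile { TokenKind curr; };

static bool allow_field_separator_sketch(ToyFile *f) {
	if (f->curr == Token_Comma) {          // a comma always separates fields
		f->curr = Token_Other;
		return true;
	}
	if (ALLOW_NEWLINE && f->curr == Token_Semicolon) {
		std::printf("Expected a comma, got a ';'\n");
		f->curr = Token_Other;             // consume it and keep going
		return true;                       // assumption: still treated as a separator
	}
	return false;
}

#undef ALLOW_NEWLINE

int main() {
	ToyFile f = {Token_Semicolon};
	allow_field_separator_sketch(&f);      // diagnoses under strict style
	return 0;
}
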
@@ -3785,7 +3786,7 @@ bool check_procedure_name_list(Array<Ast *> const &names) {
 Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKind follow, bool allow_default_parameters, bool allow_typeid_token) {
 	bool prev_allow_newline = f->allow_newline;
 	defer (f->allow_newline = prev_allow_newline);
-	f->allow_newline = build_context.strict_style;
+	f->allow_newline = ALLOW_NEWLINE;
 
 	Token start_token = f->curr_token;
 
@@ -3816,7 +3817,7 @@ Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKi
 			}
 			AstAndFlags naf = {param, flags};
 			array_add(&list, naf);
-			if (!allow_token(f, Token_Comma)) {
+			if (!allow_field_separator(f)) {
 				break;
 			}
 		}
@@ -3886,7 +3887,7 @@ Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKi
 			}
 		}
 
-		parse_expect_field_separator(f, type);
+		allow_field_separator(f);
 		Ast *param = ast_field(f, names, type, default_value, set_flags, tag, docs, f->line_comment);
 		array_add(&params, param);
 
@@ -3948,7 +3949,7 @@ Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKi
 		}
 
 
-		bool ok = parse_expect_field_separator(f, param);
+		bool ok = allow_field_separator(f);
 		Ast *param = ast_field(f, names, type, default_value, set_flags, tag, docs, f->line_comment);
 		array_add(&params, param);
 
@@ -4005,17 +4006,41 @@ Ast *parse_body(AstFile *f) {
 	Array<Ast *> stmts = {};
 	Token open, close;
 	isize prev_expr_level = f->expr_level;
+	bool prev_allow_newline = f->allow_newline;
 
 	// NOTE(bill): The body may be within an expression so reset to zero
 	f->expr_level = 0;
+	f->allow_newline = false;
 	open = expect_token(f, Token_OpenBrace);
 	stmts = parse_stmt_list(f);
 	close = expect_token(f, Token_CloseBrace);
 	f->expr_level = prev_expr_level;
+	f->allow_newline = prev_allow_newline;
 
 	return ast_block_stmt(f, stmts, open, close);
 }
 
+Ast *parse_do_body(AstFile *f, Token const &token, char const *msg) {
+	Token open, close;
+	isize prev_expr_level = f->expr_level;
+	bool prev_allow_newline = f->allow_newline;
+
+	// NOTE(bill): The body may be within an expression so reset to zero
+	f->expr_level = 0;
+	f->allow_newline = false;
+
+	Ast *body = convert_stmt_to_body(f, parse_stmt(f));
+	if (build_context.disallow_do) {
+		syntax_error(body, "'do' has been disallowed");
+	} else if (token.pos.file_id != 0 && !ast_on_same_line(token, body)) {
+		syntax_error(body, "The body of a 'do' must be on the same line as %s", msg);
+	}
+	f->expr_level = prev_expr_level;
+	f->allow_newline = prev_allow_newline;
+
+	return body;
+}
+
 bool parse_control_statement_semicolon_separator(AstFile *f) {
 	Token tok = peek_token(f);
 	if (tok.kind != Token_OpenBrace) {
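
parse_do_body centralises the 'do'-body handling that was previously copy-pasted into the if/when/for parsers: it parses the statement, applies the disallow_do and same-line checks once, and uses the same save/reset/restore discipline for expr_level and allow_newline that parse_body now follows. A compact sketch of that save-and-restore pattern with stand-in state:

#include <cassert>

struct ToyFile { int expr_level; bool allow_newline; };

// Parse a body with expression level and newline handling reset, then
// restore the caller's state (the shape shared by parse_body and the
// new parse_do_body).
template <typename ParseFn>
static void with_body_state(ToyFile *f, ParseFn parse) {
	int  prev_expr_level    = f->expr_level;
	bool prev_allow_newline = f->allow_newline;

	f->expr_level    = 0;     // the body may sit inside an expression
	f->allow_newline = false; // newlines are statement terminators again

	parse(f);

	f->expr_level    = prev_expr_level;
	f->allow_newline = prev_allow_newline;
}

int main() {
	ToyFile f = {3, true};
	with_body_state(&f, [](ToyFile *inner) {
		assert(inner->expr_level == 0 && !inner->allow_newline);
	});
	assert(f.expr_level == 3 && f.allow_newline); // caller state restored
	return 0;
}
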
@@ -4065,12 +4090,7 @@ Ast *parse_if_stmt(AstFile *f) {
 	}
 
 	if (allow_token(f, Token_do)) {
-		body = convert_stmt_to_body(f, parse_stmt(f));
-		if (build_context.disallow_do) {
-			syntax_error(body, "'do' has been disallowed");
-		} else if (!ast_on_same_line(cond, body)) {
-			syntax_error(body, "The body of a 'do' must be on the same line as if condition");
-		}
+		body = parse_do_body(f, cond ? ast_token(cond) : token, "the if statement");
 	} else {
 		body = parse_block_stmt(f, false);
 	}
@@ -4085,15 +4105,10 @@ Ast *parse_if_stmt(AstFile *f) {
 		case Token_OpenBrace:
 			else_stmt = parse_block_stmt(f, false);
 			break;
-		case Token_do: {
+		case Token_do:
 			expect_token(f, Token_do);
-			else_stmt = convert_stmt_to_body(f, parse_stmt(f));
-			if (build_context.disallow_do) {
-				syntax_error(else_stmt, "'do' has been disallowed");
-			} else if (!ast_on_same_line(else_token, else_stmt)) {
-				syntax_error(else_stmt, "The body of a 'do' must be on the same line as 'else'");
-			}
-		} break;
+			else_stmt = parse_do_body(f, else_token, "'else'");
+			break;
 		default:
 			syntax_error(f->curr_token, "Expected if statement block statement");
 			else_stmt = ast_bad_stmt(f, f->curr_token, f->tokens[f->curr_token_index+1]);
@@ -4122,12 +4137,7 @@ Ast *parse_when_stmt(AstFile *f) {
 	}
 
 	if (allow_token(f, Token_do)) {
-		body = convert_stmt_to_body(f, parse_stmt(f));
-		if (build_context.disallow_do) {
-			syntax_error(body, "'do' has been disallowed");
-		} else if (!ast_on_same_line(cond, body)) {
-			syntax_error(body, "The body of a 'do' must be on the same line as when statement");
-		}
+		body = parse_do_body(f, cond ? ast_token(cond) : token, "then when statement");
 	} else {
 		body = parse_block_stmt(f, true);
 	}
@@ -4144,12 +4154,7 @@ Ast *parse_when_stmt(AstFile *f) {
 			break;
 		case Token_do: {
 			expect_token(f, Token_do);
-			else_stmt = convert_stmt_to_body(f, parse_stmt(f));
-			if (build_context.disallow_do) {
-				syntax_error(else_stmt, "'do' has been disallowed");
-			} else if (!ast_on_same_line(else_token, else_stmt)) {
-				syntax_error(else_stmt, "The body of a 'do' must be on the same line as 'else'");
-			}
+			else_stmt = parse_do_body(f, else_token, "'else'");
 		} break;
 		default:
 			syntax_error(f->curr_token, "Expected when statement block statement");
@@ -4219,12 +4224,7 @@ Ast *parse_for_stmt(AstFile *f) {
 	f->allow_range = prev_allow_range;
 
 	if (allow_token(f, Token_do)) {
-		body = convert_stmt_to_body(f, parse_stmt(f));
-		if (build_context.disallow_do) {
-			syntax_error(body, "'do' has been disallowed");
-		} else if (!ast_on_same_line(token, body)) {
-			syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
-		}
+		body = parse_do_body(f, token, "the for statement");
 	} else {
 		body = parse_block_stmt(f, false);
 	}
@@ -4265,12 +4265,7 @@ Ast *parse_for_stmt(AstFile *f) {
 
 
 	if (allow_token(f, Token_do)) {
-		body = convert_stmt_to_body(f, parse_stmt(f));
-		if (build_context.disallow_do) {
-			syntax_error(body, "'do' has been disallowed");
-		} else if (!ast_on_same_line(token, body)) {
-			syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
-		}
+		body = parse_do_body(f, token, "the for statement");
 	} else {
 		body = parse_block_stmt(f, false);
 	}
@@ -4469,7 +4464,7 @@ Ast *parse_foreign_decl(AstFile *f) {
 			Token path = expect_token(f, Token_String);
 			array_add(&filepaths, path);
 
-			if (!allow_token(f, Token_Comma)) {
+			if (!allow_field_separator(f)) {
 				break;
 			}
 		}
@@ -4526,7 +4521,7 @@ Ast *parse_attribute(AstFile *f, Token token, TokenKind open_kind, TokenKind clo
 
 			array_add(&elems, elem);
 
-			if (!allow_token(f, Token_Comma)) {
+			if (!allow_field_separator(f)) {
 				break;
 			}
 		}
@@ -4591,12 +4586,7 @@ Ast *parse_unrolled_for_loop(AstFile *f, Token unroll_token) {
 	f->allow_range = prev_allow_range;
 
 	if (allow_token(f, Token_do)) {
-		body = convert_stmt_to_body(f, parse_stmt(f));
-		if (build_context.disallow_do) {
-			syntax_error(body, "'do' has been disallowed");
-		} else if (!ast_on_same_line(for_token, body)) {
-			syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
-		}
+		body = parse_do_body(f, for_token, "the for statement");
 	} else {
 		body = parse_block_stmt(f, false);
 	}
@@ -4780,7 +4770,7 @@ Ast *parse_stmt(AstFile *f) {
 		return parse_block_stmt(f, true);
 	case Token_do: {
 		expect_token(f, Token_do);
-		Ast *stmt = convert_stmt_to_body(f, parse_stmt(f));
+		Ast *stmt = parse_do_body(f, {}, "the for statement");
 		if (build_context.disallow_do) {
 			syntax_error(stmt, "'do' has been disallowed");
 		}
@@ -4819,7 +4809,7 @@ Array<Ast *> parse_stmt_list(AstFile *f) {
 }
 
 
-ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
+ParseFileError init_ast_file(AstFile *f, String const &fullpath, TokenPos *err_pos) {
 	GB_ASSERT(f != nullptr);
 	f->fullpath = string_trim_whitespace(fullpath); // Just in case
 	set_file_path_string(f->id, fullpath);
@@ -5114,7 +5104,7 @@ gb_global Rune illegal_import_runes[] = {
 	'|', ',', '<', '>', '?',
 };
 
-bool is_import_path_valid(String path) {
+bool is_import_path_valid(String const &path) {
 	if (path.len > 0) {
 		u8 *start = path.text;
 		u8 *end = path.text + path.len;
@@ -5146,7 +5136,7 @@ bool is_import_path_valid(String path) {
 	return false;
 }
 
-bool is_build_flag_path_valid(String path) {
+bool is_build_flag_path_valid(String const &path) {
 	if (path.len > 0) {
 		u8 *start = path.text;
 		u8 *end = path.text + path.len;
@@ -5312,9 +5302,9 @@ bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node, String bas
 
 
 
-void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls);
+void parse_setup_file_decls(Parser *p, AstFile *f, String const &base_dir, Slice<Ast *> &decls);
 
-void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstWhenStmt *ws) {
+void parse_setup_file_when_stmt(Parser *p, AstFile *f, String const &base_dir, AstWhenStmt *ws) {
 	if (ws->body != nullptr) {
 		auto stmts = ws->body->BlockStmt.stmts;
 		parse_setup_file_decls(p, f, base_dir, stmts);
@@ -5333,7 +5323,7 @@ void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstWhenS
 	}
 }
 
-void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls) {
+void parse_setup_file_decls(Parser *p, AstFile *f, String const &base_dir, Slice<Ast *> &decls) {
 	for_array(i, decls) {
 		Ast *node = decls[i];
 		if (!is_ast_decl(node) &&
@@ -5833,3 +5823,5 @@ ParseFileError parse_packages(Parser *p, String init_filename) {
 }
 
 
+
+#undef ALLOW_NEWLINE