diff --git a/src/check_expr.cpp b/src/check_expr.cpp
index db8aef9f9..13b066835 100644
--- a/src/check_expr.cpp
+++ b/src/check_expr.cpp
@@ -1886,11 +1886,6 @@ Type *check_get_params(Checker *c, Scope *scope, AstNode *_params, bool *is_vari
 	}
 
-	if (operands != nullptr) {
-		GB_ASSERT_MSG(operands->count >= min_variable_count, "%td vs %td", operands->count, variable_count);
-	}
-
-
 	bool is_variadic = false;
 	bool is_c_vararg = false;
 	Array<Entity *> variables = {};
 
@@ -1949,13 +1944,13 @@ Type *check_get_params(Checker *c, Scope *scope, AstNode *_params, bool *is_vari
 			if (specialization == t_invalid){
 				specialization = nullptr;
 			}
-			if (specialization) {
-				if (!is_type_polymorphic(specialization)) {
-					gbString str = type_to_string(specialization);
-					error(tt->specialization, "Type specialization requires a polymorphic type, got %s", str);
-					gb_string_free(str);
-				}
-			}
+			// if (specialization) {
+			// 	if (!is_type_polymorphic(specialization)) {
+			// 		gbString str = type_to_string(specialization);
+			// 		error(tt->specialization, "Type specialization requires a polymorphic type, got %s", str);
+			// 		gb_string_free(str);
+			// 	}
+			// }
 
 			if (operands != nullptr) {
 				detemine_type_from_operand = true;
diff --git a/src/checker.cpp b/src/checker.cpp
index ed9da4fee..8f4b2f766 100644
--- a/src/checker.cpp
+++ b/src/checker.cpp
@@ -1074,11 +1074,11 @@ bool add_entity(Checker *c, Scope *scope, AstNode *identifier, Entity *entity) {
 	String name = entity->token.string;
 	if (!is_blank_ident(name)) {
 		Entity *ie = scope_insert_entity(scope, entity);
-		if (ie) {
+		if (ie != nullptr) {
 			TokenPos pos = ie->token.pos;
 			Entity *up = ie->using_parent;
 			if (up != nullptr) {
-				if (token_pos_eq(pos, up->token.pos)) {
+				if (pos == up->token.pos) {
 					// NOTE(bill): Error should have been handled already
 					return false;
 				}
@@ -1089,7 +1089,7 @@ bool add_entity(Checker *c, Scope *scope, AstNode *identifier, Entity *entity) {
 				          LIT(up->token.pos.file), up->token.pos.line, up->token.pos.column);
 				return false;
 			} else {
-				if (token_pos_eq(pos, entity->token.pos)) {
+				if (pos == entity->token.pos) {
 					// NOTE(bill): Error should have been handled already
 					return false;
 				}
diff --git a/src/parser.cpp b/src/parser.cpp
index d2c2d9991..80cf538c4 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -20,8 +20,8 @@ struct CommentGroup {
 };
 
 
-enum ImportedFileKind
-{ ImportedFile_Normal,
+enum ImportedFileKind {
+	ImportedFile_Normal,
 	ImportedFile_Shared,
 	ImportedFile_Init,
 };
@@ -1749,7 +1749,6 @@ bool is_blank_ident(AstNode *node) {
 
 // NOTE(bill): Go to next statement to prevent numerous error messages popping up
 void fix_advance_to_next_stmt(AstFile *f) {
-#if 1
 	for (;;) {
 		Token t = f->curr_token;
 		switch (t.kind) {
@@ -1762,6 +1761,7 @@ void fix_advance_to_next_stmt(AstFile *f) {
 		case Token_foreign_system_library:
 
 		case Token_if:
+		case Token_for:
 		case Token_when:
 		case Token_return:
 		case Token_match:
@@ -1780,12 +1780,12 @@
 		case Token_Hash:
 		{
-			if (token_pos_eq(t.pos, f->fix_prev_pos) &&
+			if (t.pos == f->fix_prev_pos &&
 			    f->fix_count < PARSER_MAX_FIX_COUNT) {
 				f->fix_count++;
 				return;
 			}
-			if (token_pos_cmp(f->fix_prev_pos, t.pos) < 0) {
+			if (f->fix_prev_pos < t.pos) {
 				f->fix_prev_pos = t.pos;
 				f->fix_count = 0; // NOTE(bill): Reset
 				return;
 			}
@@ -1795,7 +1795,6 @@
 		}
 		advance_token(f);
 	}
-#endif
 }
 
 Token expect_closing(AstFile *f, TokenKind kind, String context) {
@@ -4586,7 +4585,7 @@ AstNode *parse_stmt(AstFile *f) {
 		}
 		if (tag == "include") {
-			syntax_error(token, "#include is not a valid import declaration kind. Use import_load instead");
+			syntax_error(token, "#include is not a valid import declaration kind. Did you mean `import`?");
 			s = ast_bad_stmt(f, token, f->curr_token);
 		} else {
 			syntax_error(token, "Unknown tag directive used: `%.*s`", LIT(tag));
 		}
@@ -4641,48 +4640,48 @@ ParseFileError init_ast_file(AstFile *f, String fullpath) {
 		return ParseFile_WrongExtension;
 	}
 	TokenizerInitError err = init_tokenizer(&f->tokenizer, fullpath);
-	if (err == TokenizerInit_None) {
-		isize file_size = f->tokenizer.end - f->tokenizer.start;
-		isize init_token_cap = cast(isize)gb_max(next_pow2(cast(i64)(file_size/2ll)), 16);
-		array_init(&f->tokens, heap_allocator(), gb_max(init_token_cap, 16));
-
-		for (;;) {
-			Token token = tokenizer_get_token(&f->tokenizer);
-			if (token.kind == Token_Invalid) {
-				return ParseFile_InvalidToken;
-			}
-			array_add(&f->tokens, token);
-
-			if (token.kind == Token_EOF) {
-				break;
-			}
+	if (err != TokenizerInit_None) {
+		switch (err) {
+		case TokenizerInit_NotExists:
+			return ParseFile_NotFound;
+		case TokenizerInit_Permission:
+			return ParseFile_Permission;
+		case TokenizerInit_Empty:
+			return ParseFile_EmptyFile;
 		}
-		f->curr_token_index = 0;
-		f->prev_token = f->tokens[f->curr_token_index];
-		f->curr_token = f->tokens[f->curr_token_index];
-
-		// NOTE(bill): Is this big enough or too small?
-		isize arena_size = gb_size_of(AstNode);
-		arena_size *= 2*f->tokens.count;
-		gb_arena_init_from_allocator(&f->arena, heap_allocator(), arena_size);
-		array_init(&f->comments, heap_allocator());
-
-		f->curr_proc = nullptr;
-
-		return ParseFile_None;
+		return ParseFile_InvalidFile;
 	}
-	switch (err) {
-	case TokenizerInit_NotExists:
-		return ParseFile_NotFound;
-	case TokenizerInit_Permission:
-		return ParseFile_Permission;
-	case TokenizerInit_Empty:
-		return ParseFile_EmptyFile;
+	isize file_size = f->tokenizer.end - f->tokenizer.start;
+	isize init_token_cap = cast(isize)gb_max(next_pow2(cast(i64)(file_size/2ll)), 16);
+	array_init(&f->tokens, heap_allocator(), gb_max(init_token_cap, 16));
+
+	for (;;) {
+		Token token = tokenizer_get_token(&f->tokenizer);
+		if (token.kind == Token_Invalid) {
+			return ParseFile_InvalidToken;
+		}
+		array_add(&f->tokens, token);
+
+		if (token.kind == Token_EOF) {
+			break;
+		}
 	}
-	return ParseFile_InvalidFile;
+	f->curr_token_index = 0;
+	f->prev_token = f->tokens[f->curr_token_index];
+	f->curr_token = f->tokens[f->curr_token_index];
+
+	// NOTE(bill): Is this big enough or too small?
+	isize arena_size = gb_size_of(AstNode);
+	arena_size *= 2*f->tokens.count;
+	gb_arena_init_from_allocator(&f->arena, heap_allocator(), arena_size);
+	array_init(&f->comments, heap_allocator());
+
+	f->curr_proc = nullptr;
+
+	return ParseFile_None;
 }
 
 void destroy_ast_file(AstFile *f) {
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 0b88bae5b..c9926ee45 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -154,7 +154,7 @@ TokenPos token_pos(String file, isize line, isize column) {
 	return pos;
 }
 
-i32 token_pos_cmp(TokenPos a, TokenPos b) {
+i32 token_pos_cmp(TokenPos const &a, TokenPos const &b) {
 	if (a.line == b.line) {
 		if (a.column == b.column) {
 			isize min_len = gb_min(a.file.len, b.file.len);
@@ -162,13 +162,15 @@
 		}
 		return (a.column < b.column) ? -1 : +1;
 	}
-
 	return (a.line < b.line) ? -1 : +1;
 }
 
-bool token_pos_eq(TokenPos a, TokenPos b) {
-	return token_pos_cmp(a, b) == 0;
-}
+bool operator==(TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) == 0; }
+bool operator!=(TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) != 0; }
+bool operator< (TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) <  0; }
+bool operator<=(TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) <= 0; }
+bool operator> (TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) >  0; }
+bool operator>=(TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) >= 0; }
 
 struct Token {
 	TokenKind kind;
@@ -202,7 +204,7 @@ void warning_va(Token token, char *fmt, va_list va) {
 	gb_mutex_lock(&global_error_collector.mutex);
 	global_error_collector.warning_count++;
 	// NOTE(bill): Duplicate error, skip it
-	if (!token_pos_eq(global_error_collector.prev, token.pos)) {
+	if (global_error_collector.prev != token.pos) {
 		global_error_collector.prev = token.pos;
 		gb_printf_err("%.*s(%td:%td) Warning: %s\n",
 		              LIT(token.pos.file), token.pos.line, token.pos.column,
@@ -216,7 +218,7 @@ void error_va(Token token, char *fmt, va_list va) {
 	gb_mutex_lock(&global_error_collector.mutex);
 	global_error_collector.count++;
 	// NOTE(bill): Duplicate error, skip it
-	if (!token_pos_eq(global_error_collector.prev, token.pos)) {
+	if (global_error_collector.prev != token.pos) {
 		global_error_collector.prev = token.pos;
 		gb_printf_err("%.*s(%td:%td) %s\n",
 		              LIT(token.pos.file), token.pos.line, token.pos.column,
@@ -232,7 +234,7 @@ void syntax_error_va(Token token, char *fmt, va_list va) {
 	gb_mutex_lock(&global_error_collector.mutex);
 	global_error_collector.count++;
 	// NOTE(bill): Duplicate error, skip it
-	if (!token_pos_eq(global_error_collector.prev, token.pos)) {
+	if (global_error_collector.prev != token.pos) {
 		global_error_collector.prev = token.pos;
 		gb_printf_err("%.*s(%td:%td) Syntax Error: %s\n",
 		              LIT(token.pos.file), token.pos.line, token.pos.column,
@@ -248,7 +250,7 @@ void syntax_warning_va(Token token, char *fmt, va_list va) {
 	gb_mutex_lock(&global_error_collector.mutex);
 	global_error_collector.warning_count++;
 	// NOTE(bill): Duplicate error, skip it
-	if (!token_pos_eq(global_error_collector.prev, token.pos)) {
+	if (global_error_collector.prev != token.pos) {
 		global_error_collector.prev = token.pos;
 		gb_printf_err("%.*s(%td:%td) Syntax Warning: %s\n",
 		              LIT(token.pos.file), token.pos.line, token.pos.column,
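
Note on the TokenPos change: the tokenizer.cpp hunks replace every token_pos_eq/token_pos_cmp call site with ordinary comparison operators over TokenPos. The following is a minimal standalone sketch of that pattern, not the compiler's actual code: it substitutes std::string for the compiler's gb-style String and trims TokenPos to the fields the comparison uses, purely so the snippet compiles on its own.

// Sketch only: std::string stands in for the compiler's String type,
// and TokenPos is trimmed to the fields token_pos_cmp actually reads.
#include <cstdio>
#include <string>

struct TokenPos {
	std::string file;
	long        line;
	long        column;
};

// Same ordering as the patched token_pos_cmp: line, then column, then file name.
int token_pos_cmp(TokenPos const &a, TokenPos const &b) {
	if (a.line == b.line) {
		if (a.column == b.column) {
			return a.file.compare(b.file);
		}
		return (a.column < b.column) ? -1 : +1;
	}
	return (a.line < b.line) ? -1 : +1;
}

bool operator==(TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) == 0; }
bool operator!=(TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) != 0; }
bool operator< (TokenPos const &a, TokenPos const &b) { return token_pos_cmp(a, b) <  0; }

int main(void) {
	TokenPos prev = {"main.odin", 3, 1};
	TokenPos curr = {"main.odin", 4, 9};

	// Call sites now read like the patched ones, e.g.
	// `if (global_error_collector.prev != token.pos)` in error_va and
	// `if (f->fix_prev_pos < t.pos)` in fix_advance_to_next_stmt.
	if (prev != curr) { std::printf("positions differ\n"); }
	if (prev < curr)  { std::printf("prev sorts first\n"); }
	return 0;
}

Routing all six operators through the single token_pos_cmp keeps the ordering defined in one place, and `prev != token.pos` avoids the easy-to-misread negated helper call `!token_pos_eq(...)` at every duplicate-error check.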