Merge branch 'master' into parser-experiments

Authored by gingerBill on 2020-12-06 00:49:48 +00:00; committed by GitHub.
86 changed files with 14,317 additions and 5,037 deletions


@@ -108,8 +108,25 @@ Token ast_token(Ast *node) {
return empty_token;
}
isize ast_node_size(AstKind kind) {
return align_formula_isize(gb_size_of(AstCommonStuff) + ast_variant_sizes[kind], gb_align_of(void *));
}
// NOTE(bill): And this below is why I/we need a new language! Discriminated unions are a pain in C/C++
Ast *alloc_ast_node(AstFile *f, AstKind kind) {
gbAllocator a = ast_allocator(f);
isize size = ast_node_size(kind);
Ast *node = cast(Ast *)gb_alloc(a, size);
node->kind = kind;
node->file = f;
return node;
}
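// Editor's sketch (hypothetical, standalone; not part of this commit): why the
// variant-sized allocation introduced above saves memory. A plain tagged union
// always occupies sizeof(largest variant); allocating only the shared header
// plus the active variant's payload pays just for what the node actually uses,
// which is also why clone_ast further down must memmove ast_node_size(node->kind)
// bytes rather than gb_size_of(Ast).
#include <cstddef>
#include <cstdint>
#include <cstdlib>

struct NodeCommon   { uint32_t kind; };        // stands in for AstCommonStuff
struct SmallVariant { int32_t  value; };       // e.g. an identifier node
struct HugeVariant  { char payload[256]; };    // e.g. a proc-literal node

static const std::size_t variant_sizes[] = { sizeof(SmallVariant), sizeof(HugeVariant) };

static std::size_t node_size(uint32_t kind) {  // analogous to ast_node_size()
    std::size_t align = alignof(void *);
    std::size_t size  = sizeof(NodeCommon) + variant_sizes[kind];
    return (size + align - 1) & ~(align - 1);  // round up, like align_formula_isize()
}

static NodeCommon *alloc_node(uint32_t kind) { // analogous to alloc_ast_node()
    NodeCommon *n = static_cast<NodeCommon *>(std::calloc(1, node_size(kind)));
    n->kind = kind;
    return n;                                  // a small node costs ~8 bytes, not 256+
}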
Ast *clone_ast(Ast *node);
Array<Ast *> clone_ast_array(Array<Ast *> array) {
Array<Ast *> clone_ast_array(Array<Ast *> const &array) {
Array<Ast *> result = {};
if (array.count > 0) {
result = array_make<Ast *>(ast_allocator(nullptr), array.count);
@@ -119,13 +136,23 @@ Array<Ast *> clone_ast_array(Array<Ast *> array) {
}
return result;
}
Slice<Ast *> clone_ast_array(Slice<Ast *> const &array) {
Slice<Ast *> result = {};
if (array.count > 0) {
result = slice_clone(permanent_allocator(), array);
for_array(i, array) {
result[i] = clone_ast(array[i]);
}
}
return result;
}
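// Editor's sketch (hypothetical, standalone; the commit's real slice_from_array
// presumably allocates from the permanent/ast allocator rather than malloc):
// the recurring slice_from_array(...) calls below freeze a growable Array,
// built while parsing, into an exact-size Slice stored on the AST node, so
// nodes stop carrying unused capacity and an allocator field.
#include <cstddef>
#include <cstdlib>
#include <cstring>

template <typename T> struct ArraySketch { T *data; std::ptrdiff_t count; std::ptrdiff_t capacity; };
template <typename T> struct SliceSketch { T *data; std::ptrdiff_t count; };

template <typename T>
SliceSketch<T> slice_from_array_sketch(ArraySketch<T> const &a) {
    SliceSketch<T> s = {};
    if (a.count > 0) {
        s.data  = static_cast<T *>(std::malloc(sizeof(T) * a.count));
        std::memcpy(s.data, a.data, sizeof(T) * a.count);  // copy only the populated prefix
        s.count = a.count;
    }
    return s;
}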
Ast *clone_ast(Ast *node) {
if (node == nullptr) {
return nullptr;
}
Ast *n = alloc_ast_node(node->file, node->kind);
gb_memmove(n, node, gb_size_of(Ast));
gb_memmove(n, node, ast_node_size(node->kind));
switch (n->kind) {
default: GB_PANIC("Unhandled Ast %.*s", LIT(ast_strings[n->kind])); break;
@@ -463,23 +490,6 @@ bool ast_node_expect(Ast *node, AstKind kind) {
return true;
}
gb_global gbAtomic64 total_allocated_node_memory = {0};
gb_global gbAtomic64 total_subtype_node_memory_test = {0};
// NOTE(bill): And this below is why I/we need a new language! Discriminated unions are a pain in C/C++
Ast *alloc_ast_node(AstFile *f, AstKind kind) {
gbAllocator a = ast_allocator(f);
gb_atomic64_fetch_add(&total_allocated_node_memory, cast(i64)(gb_size_of(Ast)));
gb_atomic64_fetch_add(&total_subtype_node_memory_test, cast(i64)(gb_size_of(AstCommonStuff) + ast_variant_sizes[kind]));
Ast *node = gb_alloc_item(a, Ast);
node->kind = kind;
node->file = f;
return node;
}
Ast *ast_bad_expr(AstFile *f, Token begin, Token end) {
Ast *result = alloc_ast_node(f, Ast_BadExpr);
result->BadExpr.begin = begin;
@@ -537,10 +547,10 @@ Ast *ast_paren_expr(AstFile *f, Ast *expr, Token open, Token close) {
return result;
}
Ast *ast_call_expr(AstFile *f, Ast *proc, Array<Ast *> args, Token open, Token close, Token ellipsis) {
Ast *ast_call_expr(AstFile *f, Ast *proc, Array<Ast *> const &args, Token open, Token close, Token ellipsis) {
Ast *result = alloc_ast_node(f, Ast_CallExpr);
result->CallExpr.proc = proc;
result->CallExpr.args = args;
result->CallExpr.args = slice_from_array(args);
result->CallExpr.open = open;
result->CallExpr.close = close;
result->CallExpr.ellipsis = ellipsis;
@@ -624,7 +634,8 @@ Ast *ast_undef(AstFile *f, Token token) {
Ast *ast_basic_lit(AstFile *f, Token basic_lit) {
Ast *result = alloc_ast_node(f, Ast_BasicLit);
result->BasicLit.token = basic_lit;
result->BasicLit.value = exact_value_from_basic_literal(basic_lit);
result->tav.mode = Addressing_Constant;
result->tav.value = exact_value_from_basic_literal(basic_lit);
return result;
}
@@ -643,12 +654,12 @@ Ast *ast_ellipsis(AstFile *f, Token token, Ast *expr) {
}
Ast *ast_proc_group(AstFile *f, Token token, Token open, Token close, Array<Ast *> args) {
Ast *ast_proc_group(AstFile *f, Token token, Token open, Token close, Array<Ast *> const &args) {
Ast *result = alloc_ast_node(f, Ast_ProcGroup);
result->ProcGroup.token = token;
result->ProcGroup.open = open;
result->ProcGroup.close = close;
result->ProcGroup.args = args;
result->ProcGroup.args = slice_from_array(args);
return result;
}
@@ -658,7 +669,7 @@ Ast *ast_proc_lit(AstFile *f, Ast *type, Ast *body, u64 tags, Token where_token,
result->ProcLit.body = body;
result->ProcLit.tags = tags;
result->ProcLit.where_token = where_token;
result->ProcLit.where_clauses = where_clauses;
result->ProcLit.where_clauses = slice_from_array(where_clauses);
return result;
}
@@ -670,10 +681,10 @@ Ast *ast_field_value(AstFile *f, Ast *field, Ast *value, Token eq) {
return result;
}
Ast *ast_compound_lit(AstFile *f, Ast *type, Array<Ast *> elems, Token open, Token close) {
Ast *ast_compound_lit(AstFile *f, Ast *type, Array<Ast *> const &elems, Token open, Token close) {
Ast *result = alloc_ast_node(f, Ast_CompoundLit);
result->CompoundLit.type = type;
result->CompoundLit.elems = elems;
result->CompoundLit.elems = slice_from_array(elems);
result->CompoundLit.open = open;
result->CompoundLit.close = close;
return result;
@@ -736,7 +747,7 @@ Ast *ast_inline_asm_expr(AstFile *f, Token token, Token open, Token close,
result->InlineAsmExpr.token = token;
result->InlineAsmExpr.open = open;
result->InlineAsmExpr.close = close;
result->InlineAsmExpr.param_types = param_types;
result->InlineAsmExpr.param_types = slice_from_array(param_types);
result->InlineAsmExpr.return_type = return_type;
result->InlineAsmExpr.asm_string = asm_string;
result->InlineAsmExpr.constraints_string = constraints_string;
@@ -768,18 +779,18 @@ Ast *ast_expr_stmt(AstFile *f, Ast *expr) {
return result;
}
Ast *ast_assign_stmt(AstFile *f, Token op, Array<Ast *> lhs, Array<Ast *> rhs) {
Ast *ast_assign_stmt(AstFile *f, Token op, Array<Ast *> const &lhs, Array<Ast *> const &rhs) {
Ast *result = alloc_ast_node(f, Ast_AssignStmt);
result->AssignStmt.op = op;
result->AssignStmt.lhs = lhs;
result->AssignStmt.rhs = rhs;
result->AssignStmt.lhs = slice_from_array(lhs);
result->AssignStmt.rhs = slice_from_array(rhs);
return result;
}
Ast *ast_block_stmt(AstFile *f, Array<Ast *> stmts, Token open, Token close) {
Ast *ast_block_stmt(AstFile *f, Array<Ast *> const &stmts, Token open, Token close) {
Ast *result = alloc_ast_node(f, Ast_BlockStmt);
result->BlockStmt.stmts = stmts;
result->BlockStmt.stmts = slice_from_array(stmts);
result->BlockStmt.open = open;
result->BlockStmt.close = close;
return result;
@@ -805,10 +816,10 @@ Ast *ast_when_stmt(AstFile *f, Token token, Ast *cond, Ast *body, Ast *else_stmt
}
Ast *ast_return_stmt(AstFile *f, Token token, Array<Ast *> results) {
Ast *ast_return_stmt(AstFile *f, Token token, Array<Ast *> const &results) {
Ast *result = alloc_ast_node(f, Ast_ReturnStmt);
result->ReturnStmt.token = token;
result->ReturnStmt.results = results;
result->ReturnStmt.results = slice_from_array(results);
return result;
}
@@ -866,11 +877,11 @@ Ast *ast_type_switch_stmt(AstFile *f, Token token, Ast *tag, Ast *body) {
return result;
}
Ast *ast_case_clause(AstFile *f, Token token, Array<Ast *> list, Array<Ast *> stmts) {
Ast *ast_case_clause(AstFile *f, Token token, Array<Ast *> const &list, Array<Ast *> const &stmts) {
Ast *result = alloc_ast_node(f, Ast_CaseClause);
result->CaseClause.token = token;
result->CaseClause.list = list;
result->CaseClause.stmts = stmts;
result->CaseClause.list = slice_from_array(list);
result->CaseClause.stmts = slice_from_array(stmts);
return result;
}
@@ -889,10 +900,10 @@ Ast *ast_branch_stmt(AstFile *f, Token token, Ast *label) {
return result;
}
Ast *ast_using_stmt(AstFile *f, Token token, Array<Ast *> list) {
Ast *ast_using_stmt(AstFile *f, Token token, Array<Ast *> const &list) {
Ast *result = alloc_ast_node(f, Ast_UsingStmt);
result->UsingStmt.token = token;
result->UsingStmt.list = list;
result->UsingStmt.list = slice_from_array(list);
return result;
}
@@ -905,10 +916,10 @@ Ast *ast_bad_decl(AstFile *f, Token begin, Token end) {
return result;
}
Ast *ast_field(AstFile *f, Array<Ast *> names, Ast *type, Ast *default_value, u32 flags, Token tag,
Ast *ast_field(AstFile *f, Array<Ast *> const &names, Ast *type, Ast *default_value, u32 flags, Token tag,
CommentGroup *docs, CommentGroup *comment) {
Ast *result = alloc_ast_node(f, Ast_Field);
result->Field.names = names;
result->Field.names = slice_from_array(names);
result->Field.type = type;
result->Field.default_value = default_value;
result->Field.flags = flags;
@@ -918,10 +929,10 @@ Ast *ast_field(AstFile *f, Array<Ast *> names, Ast *type, Ast *default_value, u3
return result;
}
Ast *ast_field_list(AstFile *f, Token token, Array<Ast *> list) {
Ast *ast_field_list(AstFile *f, Token token, Array<Ast *> const &list) {
Ast *result = alloc_ast_node(f, Ast_FieldList);
result->FieldList.token = token;
result->FieldList.list = list;
result->FieldList.list = slice_from_array(list);
return result;
}
@@ -1002,7 +1013,7 @@ Ast *ast_dynamic_array_type(AstFile *f, Token token, Ast *elem) {
return result;
}
Ast *ast_struct_type(AstFile *f, Token token, Array<Ast *> fields, isize field_count,
Ast *ast_struct_type(AstFile *f, Token token, Slice<Ast *> fields, isize field_count,
Ast *polymorphic_params, bool is_packed, bool is_raw_union,
Ast *align,
Token where_token, Array<Ast *> const &where_clauses) {
@@ -1015,38 +1026,38 @@ Ast *ast_struct_type(AstFile *f, Token token, Array<Ast *> fields, isize field_c
result->StructType.is_raw_union = is_raw_union;
result->StructType.align = align;
result->StructType.where_token = where_token;
result->StructType.where_clauses = where_clauses;
result->StructType.where_clauses = slice_from_array(where_clauses);
return result;
}
Ast *ast_union_type(AstFile *f, Token token, Array<Ast *> variants, Ast *polymorphic_params, Ast *align, bool no_nil, bool maybe,
Ast *ast_union_type(AstFile *f, Token token, Array<Ast *> const &variants, Ast *polymorphic_params, Ast *align, bool no_nil, bool maybe,
Token where_token, Array<Ast *> const &where_clauses) {
Ast *result = alloc_ast_node(f, Ast_UnionType);
result->UnionType.token = token;
result->UnionType.variants = variants;
result->UnionType.variants = slice_from_array(variants);
result->UnionType.polymorphic_params = polymorphic_params;
result->UnionType.align = align;
result->UnionType.no_nil = no_nil;
result->UnionType.maybe = maybe;
result->UnionType.maybe = maybe;
result->UnionType.where_token = where_token;
result->UnionType.where_clauses = where_clauses;
result->UnionType.where_clauses = slice_from_array(where_clauses);
return result;
}
Ast *ast_enum_type(AstFile *f, Token token, Ast *base_type, Array<Ast *> fields) {
Ast *ast_enum_type(AstFile *f, Token token, Ast *base_type, Array<Ast *> const &fields) {
Ast *result = alloc_ast_node(f, Ast_EnumType);
result->EnumType.token = token;
result->EnumType.base_type = base_type;
result->EnumType.fields = fields;
result->EnumType.fields = slice_from_array(fields);
return result;
}
Ast *ast_bit_field_type(AstFile *f, Token token, Array<Ast *> fields, Ast *align) {
Ast *ast_bit_field_type(AstFile *f, Token token, Array<Ast *> const &fields, Ast *align) {
Ast *result = alloc_ast_node(f, Ast_BitFieldType);
result->BitFieldType.token = token;
result->BitFieldType.fields = fields;
result->BitFieldType.fields = slice_from_array(fields);
result->BitFieldType.align = align;
return result;
}
@@ -1069,7 +1080,7 @@ Ast *ast_map_type(AstFile *f, Token token, Ast *key, Ast *value) {
Ast *ast_foreign_block_decl(AstFile *f, Token token, Ast *foreign_library, Ast *body,
CommentGroup *docs) {
CommentGroup *docs) {
Ast *result = alloc_ast_node(f, Ast_ForeignBlockDecl);
result->ForeignBlockDecl.token = token;
result->ForeignBlockDecl.foreign_library = foreign_library;
@@ -1087,12 +1098,12 @@ Ast *ast_label_decl(AstFile *f, Token token, Ast *name) {
return result;
}
Ast *ast_value_decl(AstFile *f, Array<Ast *> names, Ast *type, Array<Ast *> values, bool is_mutable,
CommentGroup *docs, CommentGroup *comment) {
Ast *ast_value_decl(AstFile *f, Array<Ast *> const &names, Ast *type, Array<Ast *> const &values, bool is_mutable,
CommentGroup *docs, CommentGroup *comment) {
Ast *result = alloc_ast_node(f, Ast_ValueDecl);
result->ValueDecl.names = names;
result->ValueDecl.names = slice_from_array(names);
result->ValueDecl.type = type;
result->ValueDecl.values = values;
result->ValueDecl.values = slice_from_array(values);
result->ValueDecl.is_mutable = is_mutable;
result->ValueDecl.docs = docs;
result->ValueDecl.comment = comment;
@@ -1111,7 +1122,7 @@ Ast *ast_package_decl(AstFile *f, Token token, Token name, CommentGroup *docs, C
}
Ast *ast_import_decl(AstFile *f, Token token, bool is_using, Token relpath, Token import_name,
CommentGroup *docs, CommentGroup *comment) {
CommentGroup *docs, CommentGroup *comment) {
Ast *result = alloc_ast_node(f, Ast_ImportDecl);
result->ImportDecl.token = token;
result->ImportDecl.is_using = is_using;
@@ -1123,10 +1134,10 @@ Ast *ast_import_decl(AstFile *f, Token token, bool is_using, Token relpath, Toke
}
Ast *ast_foreign_import_decl(AstFile *f, Token token, Array<Token> filepaths, Token library_name,
CommentGroup *docs, CommentGroup *comment) {
CommentGroup *docs, CommentGroup *comment) {
Ast *result = alloc_ast_node(f, Ast_ForeignImportDecl);
result->ForeignImportDecl.token = token;
result->ForeignImportDecl.filepaths = filepaths;
result->ForeignImportDecl.filepaths = slice_from_array(filepaths);
result->ForeignImportDecl.library_name = library_name;
result->ForeignImportDecl.docs = docs;
result->ForeignImportDecl.comment = comment;
@@ -1136,11 +1147,11 @@ Ast *ast_foreign_import_decl(AstFile *f, Token token, Array<Token> filepaths, To
}
Ast *ast_attribute(AstFile *f, Token token, Token open, Token close, Array<Ast *> elems) {
Ast *ast_attribute(AstFile *f, Token token, Token open, Token close, Array<Ast *> const &elems) {
Ast *result = alloc_ast_node(f, Ast_Attribute);
result->Attribute.token = token;
result->Attribute.open = open;
result->Attribute.elems = elems;
result->Attribute.elems = slice_from_array(elems);
result->Attribute.close = close;
return result;
}
@@ -1182,6 +1193,12 @@ CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_line_) {
Array<Token> list = {};
list.allocator = heap_allocator();
isize end_line = f->curr_token.pos.line;
if (f->curr_token_index == 1 &&
f->prev_token.kind == Token_Comment &&
f->prev_token.pos.line+1 == f->curr_token.pos.line) {
// NOTE(bill): Special logic for the first comment in the file
array_add(&list, f->prev_token);
}
while (f->curr_token.kind == Token_Comment &&
f->curr_token.pos.line <= end_line+n) {
array_add(&list, consume_comment(f, &end_line));
@@ -1192,7 +1209,7 @@ CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_line_) {
CommentGroup *comments = nullptr;
if (list.count > 0) {
comments = gb_alloc_item(heap_allocator(), CommentGroup);
comments->list = list;
comments->list = slice_from_array(list);
array_add(&f->comments, comments);
}
return comments;
@@ -1894,12 +1911,18 @@ Ast *parse_operand(AstFile *f, bool lhs) {
case Token_opaque: {
Token token = expect_token(f, Token_opaque);
warning(token, "opaque is deprecated, please use #opaque");
Ast *type = parse_type(f);
return ast_opaque_type(f, token, type);
}
case Token_Hash: {
Token token = expect_token(f, Token_Hash);
if (allow_token(f, Token_opaque)) {
Ast *type = parse_type(f);
return ast_opaque_type(f, token, type);
}
Token name = expect_token(f, Token_Ident);
if (name.string == "type") {
return ast_helper_type(f, token, parse_type(f));
@@ -2201,7 +2224,7 @@ Ast *parse_operand(AstFile *f, bool lhs) {
if (allow_token(f, Token_OpenParen)) {
isize param_count = 0;
polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, false, true);
polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, true, true);
if (param_count == 0) {
syntax_error(polymorphic_params, "Expected at least 1 polymorphic parameter");
polymorphic_params = nullptr;
@@ -2262,7 +2285,7 @@ Ast *parse_operand(AstFile *f, bool lhs) {
Ast *fields = parse_struct_field_list(f, &name_count);
Token close = expect_token(f, Token_CloseBrace);
Array<Ast *> decls = {};
Slice<Ast *> decls = {};
if (fields != nullptr) {
GB_ASSERT(fields->kind == Ast_FieldList);
decls = fields->FieldList.list;
@@ -2284,7 +2307,7 @@ Ast *parse_operand(AstFile *f, bool lhs) {
if (allow_token(f, Token_OpenParen)) {
isize param_count = 0;
polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, false, true);
polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, true, true);
if (param_count == 0) {
syntax_error(polymorphic_params, "Expected at least 1 polymorphic parameter");
polymorphic_params = nullptr;
@@ -2586,7 +2609,15 @@ Ast *parse_call_expr(AstFile *f, Ast *operand) {
f->expr_level--;
close_paren = expect_closing(f, Token_CloseParen, str_lit("argument list"));
return ast_call_expr(f, operand, args, open_paren, close_paren, ellipsis);
Ast *call = ast_call_expr(f, operand, args, open_paren, close_paren, ellipsis);
Ast *o = unparen_expr(operand);
if (o->kind == Ast_SelectorExpr && o->SelectorExpr.token.kind == Token_ArrowRight) {
return ast_selector_call_expr(f, o->SelectorExpr.token, o, call);
}
return call;
}
Ast *parse_atom_expr(AstFile *f, Ast *operand, bool lhs) {
@@ -2638,11 +2669,10 @@ Ast *parse_atom_expr(AstFile *f, Ast *operand, bool lhs) {
case Token_ArrowRight: {
Token token = advance_token(f);
// syntax_error(token, "Selector expressions use '.' rather than '->'");
Ast *sel = ast_selector_expr(f, token, operand, parse_ident(f));
Ast *call = parse_call_expr(f, sel);
operand = ast_selector_call_expr(f, token, sel, call);
operand = ast_selector_expr(f, token, operand, parse_ident(f));
// Ast *call = parse_call_expr(f, sel);
// operand = ast_selector_call_expr(f, token, sel, call);
break;
}
@@ -3317,11 +3347,10 @@ FieldPrefixKind is_token_field_prefix(AstFile *f) {
return FieldPrefix_no_alias;
} else if (f->curr_token.string == "c_vararg") {
return FieldPrefix_c_var_arg;
} else if (f->curr_token.string == "const") {
return FieldPrefix_const;
}
break;
case Token_const:
return FieldPrefix_const;
}
return FieldPrefix_Unknown;
}
@@ -4723,18 +4752,14 @@ void parser_add_foreign_file_to_process(Parser *p, AstPackage *pkg, AstForeignFi
// NOTE(bill): Returns true if it's added
bool try_add_import_path(Parser *p, String const &path, String const &rel_path, TokenPos pos, PackageKind kind = Package_Normal) {
if (build_context.generate_docs) {
return false;
}
AstPackage *try_add_import_path(Parser *p, String const &path, String const &rel_path, TokenPos pos, PackageKind kind = Package_Normal) {
String const FILE_EXT = str_lit(".odin");
gb_mutex_lock(&p->file_add_mutex);
defer (gb_mutex_unlock(&p->file_add_mutex));
if (string_set_exists(&p->imported_files, path)) {
return false;
return nullptr;
}
string_set_add(&p->imported_files, path);
@@ -4757,7 +4782,7 @@ bool try_add_import_path(Parser *p, String const &path, String const &rel_path,
pkg->is_single_file = true;
parser_add_file_to_process(p, pkg, fi, pos);
parser_add_package(p, pkg);
return true;
return pkg;
}
@@ -4773,22 +4798,22 @@ bool try_add_import_path(Parser *p, String const &path, String const &rel_path,
switch (rd_err) {
case ReadDirectory_InvalidPath:
syntax_error(pos, "Invalid path: %.*s", LIT(rel_path));
return false;
return nullptr;
case ReadDirectory_NotExists:
syntax_error(pos, "Path does not exist: %.*s", LIT(rel_path));
return false;
return nullptr;
case ReadDirectory_Permission:
syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
return false;
return nullptr;
case ReadDirectory_NotDir:
syntax_error(pos, "Expected a directory for a package, got a file: %.*s", LIT(rel_path));
return false;
return nullptr;
case ReadDirectory_Empty:
syntax_error(pos, "Empty directory: %.*s", LIT(rel_path));
return false;
return nullptr;
case ReadDirectory_Unknown:
syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
return false;
return nullptr;
}
for_array(list_index, list) {
@@ -4810,7 +4835,7 @@ bool try_add_import_path(Parser *p, String const &path, String const &rel_path,
parser_add_package(p, pkg);
return true;
return pkg;
}
gb_global Rune illegal_import_runes[] = {
@@ -4829,7 +4854,7 @@ bool is_import_path_valid(String path) {
u8 *curr = start;
while (curr < end) {
isize width = 1;
Rune r = curr[0];
Rune r = *curr;
if (r >= 0x80) {
width = gb_utf8_decode(curr, end-curr, &r);
if (r == GB_RUNE_INVALID && width == 1) {
@@ -4854,6 +4879,45 @@ bool is_import_path_valid(String path) {
return false;
}
bool is_build_flag_path_valid(String path) {
if (path.len > 0) {
u8 *start = path.text;
u8 *end = path.text + path.len;
u8 *curr = start;
isize index = 0;
while (curr < end) {
isize width = 1;
Rune r = *curr;
if (r >= 0x80) {
width = gb_utf8_decode(curr, end-curr, &r);
if (r == GB_RUNE_INVALID && width == 1) {
return false;
}
else if (r == GB_RUNE_BOM && curr-start > 0) {
return false;
}
}
for (isize i = 0; i < gb_count_of(illegal_import_runes); i++) {
#if defined(GB_SYSTEM_WINDOWS)
if (r == '\\') {
break;
}
#endif
if (r == illegal_import_runes[i]) {
return false;
}
}
curr += width;
index += 1;
}
return true;
}
return false;
}
bool is_package_name_reserved(String const &name) {
if (name == "builtin") {
@@ -4974,7 +5038,7 @@ bool determine_path_from_string(gbMutex *file_mutex, Ast *node, String base_dir,
void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array<Ast *> &decls);
void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls);
void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstWhenStmt *ws) {
if (ws->body != nullptr) {
@@ -4995,7 +5059,7 @@ void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstWhenS
}
}
void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array<Ast *> &decls) {
void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls) {
for_array(i, decls) {
Ast *node = decls[i];
if (!is_ast_decl(node) &&
@@ -5034,8 +5098,7 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array<Ast *>
} else if (node->kind == Ast_ForeignImportDecl) {
ast_node(fl, ForeignImportDecl, node);
fl->fullpaths.allocator = heap_allocator();
array_reserve(&fl->fullpaths, fl->filepaths.count);
auto fullpaths = array_make<String>(permanent_allocator(), 0, fl->filepaths.count);
for_array(fp_idx, fl->filepaths) {
String file_str = fl->filepaths[fp_idx].string;
@@ -5049,14 +5112,17 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array<Ast *>
}
fullpath = foreign_path;
}
array_add(&fl->fullpaths, fullpath);
array_add(&fullpaths, fullpath);
}
if (fl->fullpaths.count == 0) {
if (fullpaths.count == 0) {
syntax_error(decls[i], "No foreign paths found");
decls[i] = ast_bad_decl(f, fl->filepaths[0], fl->filepaths[fl->filepaths.count-1]);
goto end;
}
fl->fullpaths = slice_from_array(fullpaths);
} else if (node->kind == Ast_WhenStmt) {
ast_node(ws, WhenStmt, node);
parse_setup_file_when_stmt(p, f, base_dir, ws);
@@ -5218,12 +5284,12 @@ bool parse_file(Parser *p, AstFile *f) {
f->pkg_decl = pd;
if (f->error_count == 0) {
f->decls = array_make<Ast *>(heap_allocator());
auto decls = array_make<Ast *>(heap_allocator());
while (f->curr_token.kind != Token_EOF) {
Ast *stmt = parse_stmt(f);
if (stmt && stmt->kind != Ast_EmptyStmt) {
array_add(&f->decls, stmt);
array_add(&decls, stmt);
if (stmt->kind == Ast_ExprStmt &&
stmt->ExprStmt.expr != nullptr &&
stmt->ExprStmt.expr->kind == Ast_ProcLit) {
@@ -5232,6 +5298,8 @@ bool parse_file(Parser *p, AstFile *f) {
}
}
f->decls = slice_from_array(decls);
parse_setup_file_decls(p, f, base_dir, f->decls);
}
@@ -5325,7 +5393,7 @@ ParseFileError parse_packages(Parser *p, String init_filename) {
}
TokenPos init_pos = {};
if (!build_context.generate_docs) {
{
String s = get_fullpath_core(heap_allocator(), str_lit("runtime"));
try_add_import_path(p, s, s, init_pos, Package_Runtime);
}
@@ -5333,6 +5401,22 @@ ParseFileError parse_packages(Parser *p, String init_filename) {
try_add_import_path(p, init_fullpath, init_fullpath, init_pos, Package_Init);
p->init_fullpath = init_fullpath;
for_array(i, build_context.extra_packages) {
String path = build_context.extra_packages[i];
String fullpath = path_to_full_path(heap_allocator(), path); // LEAK?
if (!path_is_directory(fullpath)) {
String const ext = str_lit(".odin");
if (!string_ends_with(fullpath, ext)) {
error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(fullpath));
return ParseFile_WrongExtension;
}
}
AstPackage *pkg = try_add_import_path(p, fullpath, fullpath, init_pos, Package_Normal);
if (pkg) {
pkg->is_extra = true;
}
}
thread_pool_start(&parser_thread_pool);
thread_pool_wait_to_process(&parser_thread_pool);