Delay importing entities till all other entities are collected

Ginger Bill
2016-11-30 20:46:00 +00:00
parent ab2ca7cf59
commit be8b9bda2f
4 changed files with 141 additions and 293 deletions
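The change turns import resolution into a two-phase process: while collecting top-level entities, each #import/#load declaration is only recorded as a DelayedImport (parent scope plus the declaration node) in c->delayed_imports, and a new pass, check_import_entities, resolves the whole queue once every file's entities have been collected, so imported scopes are always fully populated before their entities are copied or aliased. Below is a minimal, self-contained sketch of that queue-then-resolve pattern; the struct and function names echo the diff (DelayedImport, check_import_entities), but everything here is a simplified stand-in for illustration, not the compiler's actual code.

/*
 * Sketch of the queue-then-resolve pattern this commit introduces in
 * checker.c.  All types here are simplified stand-ins, not the
 * compiler's real structures.
 */
#include <stdio.h>

typedef struct Decl {
	const char *name;
	int         is_import; /* non-zero if this is an #import/#load */
} Decl;

typedef struct DelayedImport {
	const char *file;        /* stand-in for the parent (file) scope  */
	const char *import_path; /* stand-in for the import declaration   */
} DelayedImport;

#define MAX_DELAYED 64
static DelayedImport delayed_imports[MAX_DELAYED];
static int delayed_import_count = 0;

/* Phase 1: collect top-level entities; imports are only queued. */
static void collect_entities(const char *file, const Decl *decls, int count) {
	for (int i = 0; i < count; i++) {
		if (decls[i].is_import) {
			if (delayed_import_count < MAX_DELAYED) {
				DelayedImport di = {file, decls[i].name};
				delayed_imports[delayed_import_count++] = di;
			}
		} else {
			printf("collect %-10s (in %s)\n", decls[i].name, file);
		}
	}
}

/* Phase 2: resolve every queued import only after all files have been
 * collected, so the imported scopes are guaranteed to be complete. */
static void check_import_entities(void) {
	for (int i = 0; i < delayed_import_count; i++) {
		printf("import  %-10s (into %s)\n",
		       delayed_imports[i].import_path, delayed_imports[i].file);
	}
}

int main(void) {
	Decl a[] = {{"fmt.odin", 1}, {"main", 0}};
	Decl b[] = {{"utf8.odin", 1}, {"helper", 0}};

	collect_entities("a.odin", a, 2);
	collect_entities("b.odin", b, 2);
	check_import_entities(); /* runs once, after all entities exist */
	return 0;
}

The same ordering is visible at the end of the checker.c diff: check_global_collect_entities runs once per file, and check_import_entities is then called a single time from check_parsed_files before the remaining entity checks.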

View File

@@ -2,16 +2,12 @@
#import "utf8.odin"
main :: proc() {
MAX :: 64
buf: [MAX]rune
backing: [MAX]byte
offset: int
when MAX > 0 {
msg := "Hello"
}
MAX :: 64
msg := "Hello"
count := utf8.rune_count(msg)
assert(count <= MAX)

View File

@@ -1,175 +1,3 @@
when ODIN_OS == "windows" {
#import "win32.odin"
#load "os_windows.odin"
}
#import "fmt.odin"
File_Time :: type u64
File :: struct {
Handle :: raw_union {
p: rawptr
i: int
}
handle: Handle
last_write_time: File_Time
}
open :: proc(name: string) -> (File, bool) {
using win32
buf: [300]byte
copy(buf[:], name as []byte)
f: File
f.handle.p = CreateFileA(^buf[0], FILE_GENERIC_READ, FILE_SHARE_READ, nil, OPEN_EXISTING, 0, nil) as rawptr
success := f.handle.p != INVALID_HANDLE_VALUE
f.last_write_time = last_write_time(^f)
return f, success
}
create :: proc(name: string) -> (File, bool) {
using win32
buf: [300]byte
copy(buf[:], name as []byte)
f: File
f.handle.p = CreateFileA(^buf[0], FILE_GENERIC_WRITE, FILE_SHARE_READ, nil, CREATE_ALWAYS, 0, nil) as rawptr
success := f.handle.p != INVALID_HANDLE_VALUE
f.last_write_time = last_write_time(^f)
return f, success
}
close :: proc(using f: ^File) {
win32.CloseHandle(handle.p as win32.HANDLE)
}
write :: proc(using f: ^File, buf: []byte) -> bool {
bytes_written: i32
return win32.WriteFile(handle.p as win32.HANDLE, buf.data, buf.count as i32, ^bytes_written, nil) != 0
}
file_has_changed :: proc(f: ^File) -> bool {
last_write_time := last_write_time(f)
if f.last_write_time != last_write_time {
f.last_write_time = last_write_time
return true
}
return false
}
last_write_time :: proc(f: ^File) -> File_Time {
file_info: win32.BY_HANDLE_FILE_INFORMATION
win32.GetFileInformationByHandle(f.handle.p as win32.HANDLE, ^file_info)
l := file_info.last_write_time.low_date_time as File_Time
h := file_info.last_write_time.high_date_time as File_Time
return l | h << 32
}
last_write_time_by_name :: proc(name: string) -> File_Time {
last_write_time: win32.FILETIME
data: win32.WIN32_FILE_ATTRIBUTE_DATA
buf: [1024]byte
path := buf[:0]
fmt.bprint(^path, name, "\x00")
if win32.GetFileAttributesExA(path.data, win32.GetFileExInfoStandard, ^data) != 0 {
last_write_time = data.last_write_time
}
l := last_write_time.low_date_time as File_Time
h := last_write_time.high_date_time as File_Time
return l | h << 32
}
File_Standard :: type enum {
INPUT,
OUTPUT,
ERROR,
}
// NOTE(bill): Uses startup to initialize it
__std_files := [File_Standard.count]File{
{handle = win32.GetStdHandle(win32.STD_INPUT_HANDLE) transmute File.Handle },
{handle = win32.GetStdHandle(win32.STD_OUTPUT_HANDLE) transmute File.Handle },
{handle = win32.GetStdHandle(win32.STD_ERROR_HANDLE) transmute File.Handle },
}
stdin := ^__std_files[File_Standard.INPUT]
stdout := ^__std_files[File_Standard.OUTPUT]
stderr := ^__std_files[File_Standard.ERROR]
read_entire_file :: proc(name: string) -> ([]byte, bool) {
buf: [300]byte
copy(buf[:], name as []byte)
f, file_ok := open(name)
if !file_ok {
return nil, false
}
defer close(^f)
length: i64
file_size_ok := win32.GetFileSizeEx(f.handle.p as win32.HANDLE, ^length) != 0
if !file_size_ok {
return nil, false
}
data := new_slice(u8, length)
if data.data == nil {
return nil, false
}
single_read_length: i32
total_read: i64
for total_read < length {
remaining := length - total_read
to_read: u32
MAX :: 1<<32-1
if remaining <= MAX {
to_read = remaining as u32
} else {
to_read = MAX
}
win32.ReadFile(f.handle.p as win32.HANDLE, ^data[total_read], to_read, ^single_read_length, nil)
if single_read_length <= 0 {
free(data.data)
return nil, false
}
total_read += single_read_length as i64
}
return data, true
}
heap_alloc :: proc(size: int) -> rawptr {
return win32.HeapAlloc(win32.GetProcessHeap(), win32.HEAP_ZERO_MEMORY, size)
}
heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr {
return win32.HeapReAlloc(win32.GetProcessHeap(), win32.HEAP_ZERO_MEMORY, ptr, new_size)
}
heap_free :: proc(ptr: rawptr) {
win32.HeapFree(win32.GetProcessHeap(), 0, ptr)
}
exit :: proc(code: int) {
win32.ExitProcess(code as u32)
}
current_thread_id :: proc() -> int {
GetCurrentThreadId :: proc() -> u32 #foreign #dll_import
return GetCurrentThreadId() as int
}

View File

@@ -223,21 +223,26 @@ typedef struct CheckerContext {
#define MAP_NAME MapExprInfo
#include "../map.c"
typedef struct DelayedImport {
Scope * parent;
AstNodeImportDecl *decl;
} DelayedImport;
// NOTE(bill): Symbol tables
typedef struct CheckerInfo {
MapTypeAndValue types; // Key: AstNode * | Expression -> Type (and value)
MapEntity definitions; // Key: AstNode * | Identifier -> Entity
MapEntity uses; // Key: AstNode * | Identifier -> Entity
MapScope scopes; // Key: AstNode * | Node -> Scope
MapExprInfo untyped; // Key: AstNode * | Expression -> ExprInfo
MapDeclInfo entities; // Key: Entity *
MapEntity foreign_procs; // Key: String
MapAstFile files; // Key: String (full path)
MapIsize type_info_map; // Key: Type *
isize type_info_count;
Entity * implicit_values[ImplicitValue_Count];
Array(String) foreign_libraries; // For the linker
MapTypeAndValue types; // Key: AstNode * | Expression -> Type (and value)
MapEntity definitions; // Key: AstNode * | Identifier -> Entity
MapEntity uses; // Key: AstNode * | Identifier -> Entity
MapScope scopes; // Key: AstNode * | Node -> Scope
MapExprInfo untyped; // Key: AstNode * | Expression -> ExprInfo
MapDeclInfo entities; // Key: Entity *
MapEntity foreign_procs; // Key: String
MapAstFile files; // Key: String (full path)
MapIsize type_info_map; // Key: Type *
isize type_info_count;
Entity * implicit_values[ImplicitValue_Count];
Array(String) foreign_libraries; // For the linker
} CheckerInfo;
typedef struct Checker {
@@ -248,6 +253,8 @@ typedef struct Checker {
BaseTypeSizes sizes;
Scope * global_scope;
Array(ProcedureInfo) procs; // NOTE(bill): Procedures to check
Array(DelayedImport) delayed_imports;
gbArena arena;
gbArena tmp_arena;
@@ -608,6 +615,7 @@ void init_checker(Checker *c, Parser *parser, BaseTypeSizes sizes) {
array_init(&c->proc_stack, a);
array_init(&c->procs, a);
array_init(&c->delayed_imports, a);
// NOTE(bill): Is this big enough or too small?
isize item_size = gb_max3(gb_size_of(Entity), gb_size_of(Type), gb_size_of(Scope));
@@ -633,6 +641,7 @@ void destroy_checker(Checker *c) {
destroy_scope(c->global_scope);
array_free(&c->proc_stack);
array_free(&c->procs);
array_free(&c->delayed_imports);
gb_arena_free(&c->arena);
}
@@ -1117,106 +1126,17 @@ void check_global_collect_entities(Checker *c, Scope *parent_scope, AstNodeArray
switch (decl->kind) {
case_ast_node(bd, BadDecl, decl);
case_end;
case_ast_node(ws, WhenStmt, decl);
// Will be handled later
case_end;
case_ast_node(id, ImportDecl, decl);
if (!parent_scope->is_file) {
// NOTE(bill): _Should_ be caught by the parser
// TODO(bill): Better error handling if it isn't
continue;
}
HashKey key = hash_string(id->fullpath);
Scope **found = map_scope_get(file_scopes, key);
if (found == NULL) {
for_array(scope_index, file_scopes->entries) {
Scope *scope = file_scopes->entries.e[scope_index].value;
gb_printf_err("%.*s\n", LIT(scope->file->tokenizer.fullpath));
}
gb_printf_err("%.*s(%td:%td)\n", LIT(id->token.pos.file), id->token.pos.line, id->token.pos.column);
GB_PANIC("Unable to find scope for file: %.*s", LIT(id->fullpath));
}
Scope *scope = *found;
if (scope->is_global) {
error(id->token, "Importing a #shared_global_scope is disallowed and unnecessary");
continue;
}
bool previously_added = false;
for_array(import_index, parent_scope->imported) {
Scope *prev = parent_scope->imported.e[import_index];
if (prev == scope) {
previously_added = true;
break;
}
}
if (!previously_added) {
array_add(&parent_scope->imported, scope);
} else {
warning(id->token, "Multiple #import of the same file within this scope");
}
if (str_eq(id->import_name.string, str_lit("."))) {
// NOTE(bill): Add imported entities to this file's scope
for_array(elem_index, scope->elements.entries) {
Entity *e = scope->elements.entries.e[elem_index].value;
if (e->scope == parent_scope) {
continue;
}
// NOTE(bill): Do not add other imported entities
add_entity(c, parent_scope, NULL, e);
if (!id->is_load) { // `#import`ed entities don't get exported
HashKey key = hash_string(e->token.string);
map_entity_set(&parent_scope->implicit, key, e);
}
}
} else {
String import_name = id->import_name.string;
if (import_name.len == 0) {
// NOTE(bill): use file name (without extension) as the identifier
// If it is a valid identifier
String filename = id->fullpath;
isize slash = 0;
isize dot = 0;
for (isize i = filename.len-1; i >= 0; i--) {
u8 c = filename.text[i];
if (c == '/' || c == '\\') {
break;
}
slash = i;
}
filename.text += slash;
filename.len -= slash;
dot = filename.len;
while (dot --> 0) {
u8 c = filename.text[dot];
if (c == '.') {
break;
}
}
filename.len = dot;
if (is_string_an_identifier(filename)) {
import_name = filename;
} else {
error_node(decl,
"File name, %.*s, cannot be as an import name as it is not a valid identifier",
LIT(filename));
}
}
if (import_name.len > 0) {
id->import_name.string = import_name;
Entity *e = make_entity_import_name(c->allocator, parent_scope, id->import_name, t_invalid,
id->fullpath, id->import_name.string,
scope);
add_entity(c, parent_scope, NULL, e);
}
}
DelayedImport di = {parent_scope, id};
array_add(&c->delayed_imports, di);
case_end;
case_ast_node(fl, ForeignLibrary, decl);
if (!parent_scope->is_file) {
@@ -1244,9 +1164,6 @@ void check_global_collect_entities(Checker *c, Scope *parent_scope, AstNodeArray
try_add_foreign_library_path(c, file_str);
case_end;
case_ast_node(ws, WhenStmt, decl);
// Will be handled later
case_end;
case_ast_node(cd, ConstDecl, decl);
for_array(i, cd->values) {
AstNode *name = cd->names.e[i];
@@ -1347,6 +1264,110 @@ void check_global_collect_entities(Checker *c, Scope *parent_scope, AstNodeArray
}
}
void check_import_entities(Checker *c, MapScope *file_scopes) {
for_array(i, c->delayed_imports) {
AstNodeImportDecl *id = c->delayed_imports.e[i].decl;
Scope *parent_scope = c->delayed_imports.e[i].parent;
HashKey key = hash_string(id->fullpath);
Scope **found = map_scope_get(file_scopes, key);
if (found == NULL) {
for_array(scope_index, file_scopes->entries) {
Scope *scope = file_scopes->entries.e[scope_index].value;
gb_printf_err("%.*s\n", LIT(scope->file->tokenizer.fullpath));
}
gb_printf_err("%.*s(%td:%td)\n", LIT(id->token.pos.file), id->token.pos.line, id->token.pos.column);
GB_PANIC("Unable to find scope for file: %.*s", LIT(id->fullpath));
}
Scope *scope = *found;
if (scope->is_global) {
error(id->token, "Importing a #shared_global_scope is disallowed and unnecessary");
continue;
}
bool previously_added = false;
for_array(import_index, parent_scope->imported) {
Scope *prev = parent_scope->imported.e[import_index];
if (prev == scope) {
previously_added = true;
break;
}
}
if (!previously_added) {
array_add(&parent_scope->imported, scope);
} else {
warning(id->token, "Multiple #import of the same file within this scope");
}
if (str_eq(id->import_name.string, str_lit("."))) {
// NOTE(bill): Add imported entities to this file's scope
for_array(elem_index, scope->elements.entries) {
Entity *e = scope->elements.entries.e[elem_index].value;
if (e->scope == parent_scope) {
continue;
}
// NOTE(bill): Do not add other imported entities
add_entity(c, parent_scope, NULL, e);
if (!id->is_load) { // `#import`ed entities don't get exported
HashKey key = hash_string(e->token.string);
map_entity_set(&parent_scope->implicit, key, e);
}
}
} else {
String import_name = id->import_name.string;
if (import_name.len == 0) {
// NOTE(bill): use file name (without extension) as the identifier
// If it is a valid identifier
String filename = id->fullpath;
isize slash = 0;
isize dot = 0;
for (isize i = filename.len-1; i >= 0; i--) {
u8 c = filename.text[i];
if (c == '/' || c == '\\') {
break;
}
slash = i;
}
filename.text += slash;
filename.len -= slash;
dot = filename.len;
while (dot --> 0) {
u8 c = filename.text[dot];
if (c == '.') {
break;
}
}
filename.len = dot;
if (is_string_an_identifier(filename)) {
import_name = filename;
} else {
error(id->token,
"File name, %.*s, cannot be as an import name as it is not a valid identifier",
LIT(filename));
}
}
if (import_name.len > 0) {
id->import_name.string = import_name;
Entity *e = make_entity_import_name(c->allocator, parent_scope, id->import_name, t_invalid,
id->fullpath, id->import_name.string,
scope);
add_entity(c, parent_scope, NULL, e);
}
}
}
}
void check_parsed_files(Checker *c) {
MapScope file_scopes; // Key: String (fullpath)
@@ -1384,6 +1405,8 @@ void check_parsed_files(Checker *c) {
check_global_collect_entities(c, f->scope, f->decls, &file_scopes);
}
check_import_entities(c, &file_scopes);
check_global_entities_by_kind(c, Entity_TypeName);
init_preload_types(c);
add_implicit_value(c, ImplicitValue_context, str_lit("context"), str_lit("__context"), t_context);

View File

@@ -2367,8 +2367,7 @@ AstNode *parse_decl(AstFile *f, AstNodeArray names) {
}
return make_type_decl(f, token, names.e[0], parse_type(f));
} else if (f->curr_token.kind == Token_proc &&
is_mutable == false) {
} else if (f->curr_token.kind == Token_proc && is_mutable == false) {
// NOTE(bill): Procedure declarations
Token proc_token = f->curr_token;
AstNode *name = names.e[0];
@@ -2904,7 +2903,7 @@ AstNode *parse_stmt(AstFile *f) {
}
return make_import_decl(f, s->TagStmt.token, file_path, import_name, os, arch, false);
} else if (str_eq(tag, str_lit("include"))) {
} else if (str_eq(tag, str_lit("load"))) {
String os = {0};
String arch = {0};
// TODO(bill): better error messages
@@ -2912,10 +2911,12 @@ AstNode *parse_stmt(AstFile *f) {
Token import_name = file_path;
import_name.string = str_lit(".");
if (f->curr_proc == NULL) {
return make_import_decl(f, s->TagStmt.token, file_path, import_name, os, arch, true);
}
syntax_error(token, "You cannot use #include within a procedure. This must be done at the file scope");
syntax_error(token, "You cannot use #load within a procedure. This must be done at the file scope");
return make_bad_decl(f, token, file_path);
} else {
@@ -3195,7 +3196,7 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, AstNodeArray
node->kind != AstNode_BadStmt &&
node->kind != AstNode_EmptyStmt) {
// NOTE(bill): Sanity check
syntax_error_node(node, "Only declarations are allowed at file scope");
syntax_error_node(node, "Only declarations are allowed at file scope %.*s", LIT(ast_node_strings[node->kind]));
} else if (node->kind == AstNode_WhenStmt) {
parse_setup_file_when_stmt(p, f, base_dir, &node->WhenStmt);
} else if (node->kind == AstNode_ImportDecl) {