Add odin/parser/parse_files.odin

This commit is contained in:
gingerBill
2020-12-13 17:09:41 +00:00
parent f64584b92a
commit cffbd2d276
6 changed files with 200 additions and 47 deletions

View File

@@ -35,11 +35,6 @@ Node_State_Flag :: enum {
}
Node_State_Flags :: distinct bit_set[Node_State_Flag];
Comment_Group :: struct {
list: []tokenizer.Token,
}
Node :: struct {
pos: tokenizer.Pos,
end: tokenizer.Pos,
@@ -47,6 +42,54 @@ Node :: struct {
derived: any,
}
// Comment_Group is a run of consecutive comment tokens with no
// non-comment tokens between them. It embeds Node so it carries
// source positions like any other AST node.
Comment_Group :: struct {
	using node: Node,
	list: []tokenizer.Token, // the comment tokens, in source order
}
// Package_Kind distinguishes ordinary packages from compiler-special ones.
Package_Kind :: enum {
	Normal,  // a regular user package
	Runtime, // the core runtime package (gets special name rules in the parser)
	Init,    // NOTE(review): presumably the entry-point/initial package — confirm
}
// Package is the AST root for a single package: a directory of source
// files sharing one package name. It embeds Node so generic node code
// (e.g. the walker) can treat it uniformly.
Package :: struct {
	using node: Node,
	kind: Package_Kind,
	id: int,
	name: string,            // package name taken from the files' package declarations
	fullpath: string,        // absolute path of the package directory
	files: map[string]^File, // keyed by each file's absolute path
	user_data: rawptr,       // opaque slot for tools; the parser itself does not use it
}
// File is the AST root for a single source file within a Package.
// collect_package fills in pkg/src/fullpath; the parser fills in the rest.
File :: struct {
	using node: Node,
	id: int,
	pkg: ^Package,    // owning package (back-pointer)
	fullpath: string, // absolute path of this file
	src: []byte,      // raw source bytes the file was parsed from
	docs: ^Comment_Group, // doc comment group preceding the package declaration, if any
	pkg_decl: ^Package_Decl,
	pkg_token: tokenizer.Token,
	pkg_name: string,
	decls: [dynamic]^Stmt,          // top-level declarations in source order
	imports: [dynamic]^Import_Decl, // import declarations collected during parsing
	directive_count: int,
	comments: [dynamic]^Comment_Group, // every comment group seen in the file
	syntax_warning_count: int,
	syntax_error_count: int,
}
// Base Types
Expr :: struct {
using expr_base: Node,

View File

@@ -67,6 +67,11 @@ clone_node :: proc(node: ^Node) -> ^Node {
align = ti.align;
}
switch in node.derived {
case Package, File:
panic("Cannot clone this node type");
}
res := cast(^Node)mem.alloc(size, align);
src: rawptr = node;
if node.derived != nil {

View File

@@ -1,40 +0,0 @@
package odin_ast
import "core:odin/tokenizer"
Package_Kind :: enum {
Normal,
Runtime,
Init,
}
Package :: struct {
kind: Package_Kind,
id: int,
name: string,
fullpath: string,
files: []^File,
user_data: rawptr,
}
File :: struct {
id: int,
pkg: ^Package,
fullpath: string,
src: []byte,
pkg_decl: ^Package_Decl,
pkg_token: tokenizer.Token,
pkg_name: string,
decls: [dynamic]^Stmt,
imports: [dynamic]^Import_Decl,
directive_count: int,
comments: [dynamic]^Comment_Group,
syntax_warning_count: int,
syntax_error_count: int,
}

View File

@@ -59,6 +59,18 @@ walk :: proc(v: ^Visitor, node: ^Node) {
}
switch n in &node.derived {
case File:
if n.docs != nil {
walk(v, n.docs);
}
walk_stmt_list(v, n.decls[:]);
case Package:
for _, f in n.files {
walk(v, f);
}
case Comment_Group:
// empty
case Bad_Expr:
case Ident:
case Implicit:
@@ -252,29 +264,59 @@ walk :: proc(v: ^Visitor, node: ^Node) {
case Bad_Decl:
case Value_Decl:
if n.docs != nil {
walk(v, n.docs);
}
walk_attribute_list(v, n.attributes[:]);
walk_expr_list(v, n.names);
if n.type != nil {
walk(v, n.type);
}
walk_expr_list(v, n.values);
if n.comment != nil {
walk(v, n.comment);
}
case Package_Decl:
if n.docs != nil {
walk(v, n.docs);
}
if n.comment != nil {
walk(v, n.comment);
}
case Import_Decl:
if n.docs != nil {
walk(v, n.docs);
}
if n.comment != nil {
walk(v, n.comment);
}
case Foreign_Block_Decl:
if n.docs != nil {
walk(v, n.docs);
}
walk_attribute_list(v, n.attributes[:]);
if n.foreign_library != nil {
walk(v, n.foreign_library);
}
walk(v, n.body);
case Foreign_Import_Decl:
if n.docs != nil {
walk(v, n.docs);
}
walk_attribute_list(v, n.attributes[:]);
walk(v, n.name);
if n.comment != nil {
walk(v, n.comment);
}
case Proc_Group:
walk_expr_list(v, n.args);
case Attribute:
walk_expr_list(v, n.elems);
case Field:
if n.docs != nil {
walk(v, n.docs);
}
walk_expr_list(v, n.names);
if n.type != nil {
walk(v, n.type);
@@ -282,6 +324,9 @@ walk :: proc(v: ^Visitor, node: ^Node) {
if n.default_value != nil {
walk(v, n.default_value);
}
if n.comment != nil {
walk(v, n.comment);
}
case Field_List:
for x in n.list {
walk(v, x);

View File

@@ -0,0 +1,89 @@
package odin_parser
import "core:odin/tokenizer"
import "core:odin/ast"
import "core:path/filepath"
import "core:fmt"
import "core:os"
import "core:slice"
// collect_package gathers every *.odin file directly inside `path` into a
// freshly allocated ast.Package. It only reads the sources into memory and
// records them in pkg.files (keyed by absolute path); no parsing happens here.
// On any failure (unresolvable path, glob error, unreadable file) it returns
// early with success == false; note pkg may already be non-nil and partially
// populated in that case.
collect_package :: proc(path: string) -> (pkg: ^ast.Package, success: bool) {
	NO_POS :: tokenizer.Pos{};
	// Resolve the package directory to an absolute path first so that
	// pkg.fullpath and the per-file keys are canonical.
	pkg_path, ok := filepath.abs(path);
	if !ok {
		return;
	}
	path_pattern := fmt.tprintf("%s/*.odin", pkg_path);
	matches, err := filepath.glob(path_pattern);
	// NOTE(review): only the slice itself is freed here; the individual match
	// strings look like they are retained (or leaked) — confirm glob's
	// ownership rules in core:path/filepath.
	defer delete(matches);
	if err != nil {
		return;
	}
	pkg = ast.new(ast.Package, NO_POS, NO_POS);
	pkg.fullpath = pkg_path;
	for match in matches {
		src: []byte;
		fullpath, ok := filepath.abs(match);
		if !ok {
			return; // success stays false; files collected so far are kept in pkg
		}
		src, ok = os.read_entire_file(fullpath);
		if !ok {
			delete(fullpath);
			return;
		}
		// File nodes start with zero positions; parsing fills them in later.
		file := ast.new(ast.File, NO_POS, NO_POS);
		file.pkg = pkg;
		file.src = src;
		file.fullpath = fullpath;
		pkg.files[fullpath] = file;
	}
	success = true;
	return;
}
// parse_package parses every file previously collected into pkg, in a
// deterministic (path-sorted) order so the package name is always taken from
// the same file and diagnostics are stable between runs. When p == nil a
// default-initialized parser is used. Returns true only if every file parsed
// successfully.
parse_package :: proc(pkg: ^ast.Package, p: ^Parser = nil) -> bool {
	p := p;
	if p == nil {
		p = &Parser{};
		p^ = default_parser();
	}

	ok := true;

	// Copy the map values into a slice so they can be given a stable order;
	// map iteration order is unspecified.
	files := make([]^ast.File, len(pkg.files), context.temp_allocator);
	i := 0;
	for _, file in pkg.files {
		files[i] = file;
		i += 1;
	}
	// Sort by path, not by raw pointer value: sorting the pointers themselves
	// would produce a nondeterministic order that varies per run.
	slice.sort_by(files, proc(a, b: ^ast.File) -> bool {
		return a.fullpath < b.fullpath;
	});

	for file in files {
		if !parse_file(p, file) {
			ok = false;
		}
		// A file that failed to parse may have no package declaration;
		// guard before dereferencing it.
		if file.pkg_decl == nil {
			continue;
		}
		if pkg.name == "" {
			pkg.name = file.pkg_decl.name;
		} else if pkg.name != file.pkg_decl.name {
			error(p, file.pkg_decl.pos, "different package name, expected '%s', got '%s'", pkg.name, file.pkg_decl.name);
		}
	}
	return ok;
}
// parse_package_from_path collects all source files under `path` and then
// parses them as one package. `ok` is true only when both collection and
// parsing succeed; `pkg` may still be non-nil on a collection failure.
parse_package_from_path :: proc(path: string, p: ^Parser = nil) -> (pkg: ^ast.Package, ok: bool) {
	pkg, ok = collect_package(path);
	if ok {
		ok = parse_package(pkg, p);
	}
	return;
}

View File

@@ -107,6 +107,14 @@ default_parser :: proc() -> Parser {
};
}
// is_package_name_reserved reports whether name is one of the package names
// reserved by the compiler and therefore unavailable to user code.
is_package_name_reserved :: proc(name: string) -> bool {
	return name == "builtin" || name == "intrinsics";
}
parse_file :: proc(p: ^Parser, file: ^ast.File) -> bool {
zero_parser: {
p.prev_tok = {};
@@ -139,8 +147,11 @@ parse_file :: proc(p: ^Parser, file: ^ast.File) -> bool {
pkg_name := expect_token_after(p, .Ident, "package");
if pkg_name.kind == .Ident {
if is_blank_ident(pkg_name) {
switch name := pkg_name.text; {
case is_blank_ident(name):
error(p, pkg_name.pos, "invalid package name '_'");
case is_package_name_reserved(name), file.pkg.kind != .Runtime && name == "runtime":
error(p, pkg_name.pos, "use of reserved package name '%s'", name);
}
}
p.file.pkg_name = pkg_name.text;
@@ -276,7 +287,7 @@ consume_comment_group :: proc(p: ^Parser, n: int) -> (comments: ^ast.Comment_Gro
}
if len(list) > 0 {
comments = new(ast.Comment_Group);
comments = ast.new(ast.Comment_Group, list[0].pos, end_pos(list[len(list)-1]));
comments.list = list[:];
append(&p.file.comments, comments);
}