Mirror of https://github.com/odin-lang/Odin.git, synced 2026-01-06 21:17:40 +00:00
Begin generic declarations for lists of specifications
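The diff below begins moving the standard library and compiler over to grouped declarations: instead of one `const` or `var` per line, a single keyword can now carry a parenthesised list of specifications. A rough sketch of the two forms, using hypothetical names and the `var`/`const` syntax of this revision of the language:

const FOO = 1;   // old form: one specification per declaration
const BAR = 2;

const (          // new form: one declaration carrying a list of specifications
	FOO = 1;
	BAR = 2;
);

var (
	x, y  int;        // a specification may name several identifiers
	scale f32 = 2.0;  // ...and may combine a type with initial values
);

Each specification ends with a semicolon, and constant specifications must still be given a value, as the parser and checker changes below enforce.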
@@ -1,21 +1,22 @@
const TAU = 6.28318530717958647692528676655900576;
const PI = 3.14159265358979323846264338327950288;
const ONE_OVER_TAU = 0.636619772367581343075535053490057448;
const ONE_OVER_PI = 0.159154943091895335768883763372514362;
const (
	TAU = 6.28318530717958647692528676655900576;
	PI = 3.14159265358979323846264338327950288;
	ONE_OVER_TAU = 0.636619772367581343075535053490057448;
	ONE_OVER_PI = 0.159154943091895335768883763372514362;

const E = 2.71828182845904523536;
const SQRT_TWO = 1.41421356237309504880168872420969808;
const SQRT_THREE = 1.73205080756887729352744634150587236;
const SQRT_FIVE = 2.23606797749978969640917366873127623;
	E = 2.71828182845904523536;
	SQRT_TWO = 1.41421356237309504880168872420969808;
	SQRT_THREE = 1.73205080756887729352744634150587236;
	SQRT_FIVE = 2.23606797749978969640917366873127623;

const LOG_TWO = 0.693147180559945309417232121458176568;
const LOG_TEN = 2.30258509299404568401799145468436421;
	LOG_TWO = 0.693147180559945309417232121458176568;
	LOG_TEN = 2.30258509299404568401799145468436421;

const EPSILON = 1.19209290e-7;

const τ = TAU;
const π = PI;
	EPSILON = 1.19209290e-7;

	τ = TAU;
	π = PI;
);

type Vec2 [vector 2]f32;
type Vec3 [vector 3]f32;

@@ -41,64 +41,65 @@ proc GetProcAddress(name string) -> proc() {
|
||||
return res;
|
||||
}
|
||||
|
||||
var (
|
||||
GenBuffers proc(count i32, buffers ^u32);
|
||||
GenVertexArrays proc(count i32, buffers ^u32);
|
||||
GenSamplers proc(count i32, buffers ^u32);
|
||||
BindBuffer proc(target i32, buffer u32);
|
||||
BindVertexArray proc(buffer u32);
|
||||
BindSampler proc(position i32, sampler u32);
|
||||
BufferData proc(target i32, size int, data rawptr, usage i32);
|
||||
BufferSubData proc(target i32, offset, size int, data rawptr);
|
||||
|
||||
var GenBuffers proc(count i32, buffers ^u32);
|
||||
var GenVertexArrays proc(count i32, buffers ^u32);
|
||||
var GenSamplers proc(count i32, buffers ^u32);
|
||||
var BindBuffer proc(target i32, buffer u32);
|
||||
var BindVertexArray proc(buffer u32);
|
||||
var BindSampler proc(position i32, sampler u32);
|
||||
var BufferData proc(target i32, size int, data rawptr, usage i32);
|
||||
var BufferSubData proc(target i32, offset, size int, data rawptr);
|
||||
DrawArrays proc(mode, first i32, count u32);
|
||||
DrawElements proc(mode i32, count u32, type_ i32, indices rawptr);
|
||||
|
||||
var DrawArrays proc(mode, first i32, count u32);
|
||||
var DrawElements proc(mode i32, count u32, type_ i32, indices rawptr);
|
||||
MapBuffer proc(target, access i32) -> rawptr;
|
||||
UnmapBuffer proc(target i32);
|
||||
|
||||
var MapBuffer proc(target, access i32) -> rawptr;
|
||||
var UnmapBuffer proc(target i32);
|
||||
VertexAttribPointer proc(index u32, size, type_ i32, normalized i32, stride u32, pointer rawptr);
|
||||
EnableVertexAttribArray proc(index u32);
|
||||
|
||||
var VertexAttribPointer proc(index u32, size, type_ i32, normalized i32, stride u32, pointer rawptr);
|
||||
var EnableVertexAttribArray proc(index u32);
|
||||
|
||||
var CreateShader proc(shader_type i32) -> u32;
|
||||
var ShaderSource proc(shader u32, count u32, str ^^byte, length ^i32);
|
||||
var CompileShader proc(shader u32);
|
||||
var CreateProgram proc() -> u32;
|
||||
var AttachShader proc(program, shader u32);
|
||||
var DetachShader proc(program, shader u32);
|
||||
var DeleteShader proc(shader u32);
|
||||
var LinkProgram proc(program u32);
|
||||
var UseProgram proc(program u32);
|
||||
var DeleteProgram proc(program u32);
|
||||
CreateShader proc(shader_type i32) -> u32;
|
||||
ShaderSource proc(shader u32, count u32, str ^^byte, length ^i32);
|
||||
CompileShader proc(shader u32);
|
||||
CreateProgram proc() -> u32;
|
||||
AttachShader proc(program, shader u32);
|
||||
DetachShader proc(program, shader u32);
|
||||
DeleteShader proc(shader u32);
|
||||
LinkProgram proc(program u32);
|
||||
UseProgram proc(program u32);
|
||||
DeleteProgram proc(program u32);
|
||||
|
||||
|
||||
var GetShaderiv proc(shader u32, pname i32, params ^i32);
|
||||
var GetProgramiv proc(program u32, pname i32, params ^i32);
|
||||
var GetShaderInfoLog proc(shader u32, max_length u32, length ^u32, info_long ^byte);
|
||||
var GetProgramInfoLog proc(program u32, max_length u32, length ^u32, info_long ^byte);
|
||||
GetShaderiv proc(shader u32, pname i32, params ^i32);
|
||||
GetProgramiv proc(program u32, pname i32, params ^i32);
|
||||
GetShaderInfoLog proc(shader u32, max_length u32, length ^u32, info_long ^byte);
|
||||
GetProgramInfoLog proc(program u32, max_length u32, length ^u32, info_long ^byte);
|
||||
|
||||
var ActiveTexture proc(texture i32);
|
||||
var GenerateMipmap proc(target i32);
|
||||
ActiveTexture proc(texture i32);
|
||||
GenerateMipmap proc(target i32);
|
||||
|
||||
var SamplerParameteri proc(sampler u32, pname i32, param i32);
|
||||
var SamplerParameterf proc(sampler u32, pname i32, param f32);
|
||||
var SamplerParameteriv proc(sampler u32, pname i32, params ^i32);
|
||||
var SamplerParameterfv proc(sampler u32, pname i32, params ^f32);
|
||||
var SamplerParameterIiv proc(sampler u32, pname i32, params ^i32);
|
||||
var SamplerParameterIuiv proc(sampler u32, pname i32, params ^u32);
|
||||
SamplerParameteri proc(sampler u32, pname i32, param i32);
|
||||
SamplerParameterf proc(sampler u32, pname i32, param f32);
|
||||
SamplerParameteriv proc(sampler u32, pname i32, params ^i32);
|
||||
SamplerParameterfv proc(sampler u32, pname i32, params ^f32);
|
||||
SamplerParameterIiv proc(sampler u32, pname i32, params ^i32);
|
||||
SamplerParameterIuiv proc(sampler u32, pname i32, params ^u32);
|
||||
|
||||
|
||||
var Uniform1i proc(loc i32, v0 i32);
|
||||
var Uniform2i proc(loc i32, v0, v1 i32);
|
||||
var Uniform3i proc(loc i32, v0, v1, v2 i32);
|
||||
var Uniform4i proc(loc i32, v0, v1, v2, v3 i32);
|
||||
var Uniform1f proc(loc i32, v0 f32);
|
||||
var Uniform2f proc(loc i32, v0, v1 f32);
|
||||
var Uniform3f proc(loc i32, v0, v1, v2 f32);
|
||||
var Uniform4f proc(loc i32, v0, v1, v2, v3 f32);
|
||||
var UniformMatrix4fv proc(loc i32, count u32, transpose i32, value ^f32);
|
||||
Uniform1i proc(loc i32, v0 i32);
|
||||
Uniform2i proc(loc i32, v0, v1 i32);
|
||||
Uniform3i proc(loc i32, v0, v1, v2 i32);
|
||||
Uniform4i proc(loc i32, v0, v1, v2, v3 i32);
|
||||
Uniform1f proc(loc i32, v0 f32);
|
||||
Uniform2f proc(loc i32, v0, v1 f32);
|
||||
Uniform3f proc(loc i32, v0, v1, v2 f32);
|
||||
Uniform4f proc(loc i32, v0, v1, v2, v3 f32);
|
||||
UniformMatrix4fv proc(loc i32, count u32, transpose i32, value ^f32);
|
||||
|
||||
var GetUniformLocation proc(program u32, name ^byte) -> i32;
|
||||
GetUniformLocation proc(program u32, name ^byte) -> i32;
|
||||
);
|
||||
|
||||
proc init() {
|
||||
proc set_proc_address(p rawptr, name string) #inline { (p as ^proc())^ = GetProcAddress(name); }
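The same grouping applies to variables: the standalone `var Name proc(...)` pointer declarations in this file collapse into one `var ( ... )` block whose specifications are collected together, while init() keeps loading each pointer through set_proc_address as before. A trimmed sketch of the resulting shape (only a few of the pointers repeated here):

var (
	GenBuffers proc(count i32, buffers ^u32);
	BindBuffer proc(target i32, buffer u32);
	BufferData proc(target i32, size int, data rawptr, usage i32);
);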
File diff suppressed because it is too large
@@ -1,14 +1,14 @@
const RUNE_ERROR = '\ufffd';
const RUNE_SELF = 0x80;
const RUNE_BOM = 0xfeff;
const RUNE_EOF = ~(0 as rune);
const MAX_RUNE = '\U0010ffff';
const UTF_MAX = 4;

const SURROGATE_MIN = 0xd800;
const SURROGATE_MAX = 0xdfff;
const (
	RUNE_ERROR = '\ufffd';
	RUNE_SELF = 0x80;
	RUNE_BOM = 0xfeff;
	RUNE_EOF = ~(0 as rune);
	MAX_RUNE = '\U0010ffff';
	UTF_MAX = 4;

	SURROGATE_MIN = 0xd800;
	SURROGATE_MAX = 0xdfff;
);

type Accept_Range struct {
	lo, hi u8;

@@ -1106,6 +1106,94 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
|
||||
switch (decl->kind) {
|
||||
case_ast_node(bd, BadDecl, decl);
|
||||
case_end;
|
||||
case_ast_node(gd, GenericDecl, decl);
|
||||
if (!parent_scope->is_file) {
|
||||
// NOTE(bill): Within a procedure, variables must be in order
|
||||
continue;
|
||||
}
|
||||
|
||||
for_array(spec_index, gd->specs) {
|
||||
AstNode *spec = gd->specs.e[spec_index];
|
||||
switch (spec->kind) {
|
||||
case_ast_node(vs, ValueSpec, spec);
|
||||
switch (vs->keyword) {
|
||||
case Token_var: {
|
||||
// NOTE(bill): You need to store the entity information here unlike a constant declaration
|
||||
isize entity_count = vs->names.count;
|
||||
isize entity_index = 0;
|
||||
Entity **entities = gb_alloc_array(c->allocator, Entity *, entity_count);
|
||||
DeclInfo *di = NULL;
|
||||
if (vs->values.count > 0) {
|
||||
di = make_declaration_info(heap_allocator(), parent_scope);
|
||||
di->entities = entities;
|
||||
di->entity_count = entity_count;
|
||||
di->type_expr = vs->type;
|
||||
di->init_expr = vs->values.e[0];
|
||||
}
|
||||
|
||||
for_array(i, vs->names) {
|
||||
AstNode *name = vs->names.e[i];
|
||||
AstNode *value = NULL;
|
||||
if (i < vs->values.count) {
|
||||
value = vs->values.e[i];
|
||||
}
|
||||
if (name->kind != AstNode_Ident) {
|
||||
error_node(name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[name->kind]));
|
||||
continue;
|
||||
}
|
||||
Entity *e = make_entity_variable(c->allocator, parent_scope, name->Ident, NULL);
|
||||
e->identifier = name;
|
||||
entities[entity_index++] = e;
|
||||
|
||||
DeclInfo *d = di;
|
||||
if (d == NULL) {
|
||||
AstNode *init_expr = value;
|
||||
d = make_declaration_info(heap_allocator(), e->scope);
|
||||
d->type_expr = vs->type;
|
||||
d->init_expr = init_expr;
|
||||
d->var_decl_tags = gd->tags;
|
||||
}
|
||||
|
||||
add_entity_and_decl_info(c, name, e, d);
|
||||
}
|
||||
} break;
|
||||
|
||||
case Token_const: {
|
||||
for_array(i, vs->values) {
|
||||
AstNode *name = vs->names.e[i];
|
||||
AstNode *value = unparen_expr(vs->values.e[i]);
|
||||
if (name->kind != AstNode_Ident) {
|
||||
error_node(name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[name->kind]));
|
||||
continue;
|
||||
}
|
||||
|
||||
ExactValue v = {ExactValue_Invalid};
|
||||
Entity *e = make_entity_constant(c->allocator, parent_scope, name->Ident, NULL, v);
|
||||
e->identifier = name;
|
||||
DeclInfo *di = make_declaration_info(c->allocator, e->scope);
|
||||
di->type_expr = vs->type;
|
||||
di->init_expr = value;
|
||||
add_entity_and_decl_info(c, name, e, di);
|
||||
}
|
||||
|
||||
isize lhs_count = vs->names.count;
|
||||
isize rhs_count = vs->values.count;
|
||||
|
||||
if (rhs_count == 0 && vs->type == NULL) {
|
||||
error_node(decl, "Missing type or initial expression");
|
||||
} else if (lhs_count < rhs_count) {
|
||||
error_node(decl, "Extra initial expression");
|
||||
}
|
||||
} break;
|
||||
}
|
||||
case_end;
|
||||
|
||||
default:
|
||||
error(ast_node_token(spec), "Invalid specification in declaration: `%.*s`", LIT(ast_node_strings[spec->kind]));
|
||||
break;
|
||||
}
|
||||
}
|
||||
case_end;
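Roughly, the two messages above correspond to constant declarations shaped like the following (hypothetical names; the parser may already reject these earlier, and the checker keeps the same guards as a backstop):

const x;               // no type and no initial expression
const a, b = 1, 2, 3;  // more initial expressions than names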
case_ast_node(id, ImportDecl, decl);
|
||||
if (!parent_scope->is_file) {
|
||||
// NOTE(bill): _Should_ be caught by the parser
|
||||
@@ -1428,7 +1516,10 @@ void check_parsed_files(Checker *c) {
|
||||
|
||||
ImplicitValueInfo *ivi = &implicit_value_infos[i];
|
||||
Entity *backing = scope_lookup_entity(e->scope, ivi->backing_name);
|
||||
GB_ASSERT(backing != NULL);
|
||||
// GB_ASSERT(backing != NULL);
|
||||
if (backing == NULL) {
|
||||
gb_exit(1);
|
||||
}
|
||||
e->ImplicitValue.backing = backing;
|
||||
}
|
||||
|
||||
|
||||
@@ -80,6 +80,89 @@ void check_local_collect_entities(Checker *c, AstNodeArray nodes, DelayedEntitie
|
||||
case_ast_node(ws, WhenStmt, node);
|
||||
// Will be handled later
|
||||
case_end;
|
||||
case_ast_node(gd, GenericDecl, node);
|
||||
for_array(spec_index, gd->specs) {
|
||||
AstNode *spec = gd->specs.e[spec_index];
|
||||
switch (spec->kind) {
|
||||
case_ast_node(vs, ValueSpec, spec);
|
||||
switch (vs->keyword) {
|
||||
case Token_var:
|
||||
break;
|
||||
|
||||
case Token_const: {
|
||||
gbTempArenaMemory tmp = gb_temp_arena_memory_begin(&c->tmp_arena);
|
||||
|
||||
isize entity_count = vs->names.count;
|
||||
isize entity_index = 0;
|
||||
Entity **entities = gb_alloc_array(c->tmp_allocator, Entity *, entity_count);
|
||||
|
||||
for_array(i, vs->values) {
|
||||
AstNode *name = vs->names.e[i];
|
||||
AstNode *value = unparen_expr(vs->values.e[i]);
|
||||
if (name->kind != AstNode_Ident) {
|
||||
error_node(name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[name->kind]));
|
||||
entities[entity_index++] = NULL;
|
||||
continue;
|
||||
}
|
||||
|
||||
ExactValue v = {ExactValue_Invalid};
|
||||
Entity *e = make_entity_constant(c->allocator, c->context.scope, name->Ident, NULL, v);
|
||||
e->identifier = name;
|
||||
entities[entity_index++] = e;
|
||||
DeclInfo *d = make_declaration_info(c->allocator, e->scope);
|
||||
d->type_expr = vs->type;
|
||||
d->init_expr = value;
|
||||
add_entity_and_decl_info(c, name, e, d);
|
||||
|
||||
DelayedEntity delay = {name, e, d};
|
||||
array_add(delayed_entities, delay);
|
||||
}
|
||||
|
||||
isize lhs_count = vs->names.count;
|
||||
isize rhs_count = vs->values.count;
|
||||
|
||||
// TODO(bill): Better error messages or is this good enough?
|
||||
if (rhs_count == 0 && vs->type == NULL) {
|
||||
error_node(node, "Missing type or initial expression");
|
||||
} else if (lhs_count < rhs_count) {
|
||||
error_node(node, "Extra initial expression");
|
||||
}
|
||||
|
||||
if (dof != NULL) {
|
||||
// NOTE(bill): Within a record
|
||||
for_array(i, vs->names) {
|
||||
Entity *e = entities[i];
|
||||
if (e == NULL) {
|
||||
continue;
|
||||
}
|
||||
AstNode *name = vs->names.e[i];
|
||||
if (name->kind != AstNode_Ident) {
|
||||
continue;
|
||||
}
|
||||
Token name_token = name->Ident;
|
||||
if (str_eq(name_token.string, str_lit("_"))) {
|
||||
dof->other_fields[dof->other_field_index++] = e;
|
||||
} else {
|
||||
HashKey key = hash_string(name_token.string);
|
||||
if (map_entity_get(dof->entity_map, key) != NULL) {
|
||||
// TODO(bill): Scope checking already checks the declaration
|
||||
error(name_token, "`%.*s` is already declared in this record", LIT(name_token.string));
|
||||
} else {
|
||||
map_entity_set(dof->entity_map, key, e);
|
||||
dof->other_fields[dof->other_field_index++] = e;
|
||||
}
|
||||
add_entity(c, c->context.scope, name, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gb_temp_arena_memory_end(tmp);
|
||||
} break;
|
||||
}
|
||||
case_end;
|
||||
}
|
||||
}
|
||||
case_end;
|
||||
case_ast_node(cd, ConstDecl, node);
|
||||
gbTempArenaMemory tmp = gb_temp_arena_memory_begin(&c->tmp_arena);
|
||||
|
||||
|
||||
@@ -1074,6 +1074,90 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
|
||||
check_var_decl_node(c, node);
|
||||
case_end;
|
||||
|
||||
case_ast_node(gd, GenericDecl, node);
|
||||
for_array(spec_index, gd->specs) {
|
||||
AstNode *spec = gd->specs.e[spec_index];
|
||||
switch (spec->kind) {
|
||||
case_ast_node(vs, ValueSpec, spec);
|
||||
switch (vs->keyword) {
|
||||
case Token_var: {
|
||||
isize entity_count = vs->names.count;
|
||||
isize entity_index = 0;
|
||||
Entity **entities = gb_alloc_array(c->allocator, Entity *, entity_count);
|
||||
|
||||
for_array(i, vs->names) {
|
||||
AstNode *name = vs->names.e[i];
|
||||
Entity *entity = NULL;
|
||||
if (name->kind == AstNode_Ident) {
|
||||
Token token = name->Ident;
|
||||
String str = token.string;
|
||||
Entity *found = NULL;
|
||||
// NOTE(bill): Ignore assignments to `_`
|
||||
if (str_ne(str, str_lit("_"))) {
|
||||
found = current_scope_lookup_entity(c->context.scope, str);
|
||||
}
|
||||
if (found == NULL) {
|
||||
entity = make_entity_variable(c->allocator, c->context.scope, token, NULL);
|
||||
add_entity_definition(&c->info, name, entity);
|
||||
} else {
|
||||
TokenPos pos = found->token.pos;
|
||||
error(token,
|
||||
"Redeclaration of `%.*s` in this scope\n"
|
||||
"\tat %.*s(%td:%td)",
|
||||
LIT(str), LIT(pos.file), pos.line, pos.column);
|
||||
entity = found;
|
||||
}
|
||||
} else {
|
||||
error_node(name, "A variable declaration must be an identifier");
|
||||
}
|
||||
if (entity == NULL) {
|
||||
entity = make_entity_dummy_variable(c->allocator, c->global_scope, ast_node_token(name));
|
||||
}
|
||||
entities[entity_index++] = entity;
|
||||
}
|
||||
|
||||
Type *init_type = NULL;
|
||||
if (vs->type) {
|
||||
init_type = check_type_extra(c, vs->type, NULL);
|
||||
if (init_type == NULL) {
|
||||
init_type = t_invalid;
|
||||
}
|
||||
}
|
||||
|
||||
for (isize i = 0; i < entity_count; i++) {
|
||||
Entity *e = entities[i];
|
||||
GB_ASSERT(e != NULL);
|
||||
if (e->flags & EntityFlag_Visited) {
|
||||
e->type = t_invalid;
|
||||
continue;
|
||||
}
|
||||
e->flags |= EntityFlag_Visited;
|
||||
|
||||
if (e->type == NULL)
|
||||
e->type = init_type;
|
||||
}
|
||||
|
||||
check_init_variables(c, entities, entity_count, vs->values, str_lit("variable declaration"));
|
||||
|
||||
for_array(i, vs->names) {
|
||||
if (entities[i] != NULL) {
|
||||
add_entity(c, c->context.scope, vs->names.e[i], entities[i]);
|
||||
}
|
||||
}
|
||||
} break;
|
||||
|
||||
case Token_const:
|
||||
break;
|
||||
}
|
||||
case_end;
|
||||
|
||||
default:
|
||||
error(ast_node_token(spec), "Invalid specification in declaration: `%.*s`", LIT(ast_node_strings[spec->kind]));
|
||||
break;
|
||||
}
|
||||
}
|
||||
case_end;
|
||||
|
||||
case_ast_node(cd, ConstDecl, node);
|
||||
// NOTE(bill): Handled elsewhere
|
||||
case_end;
|
||||
|
||||
479 src/parser.c
@@ -224,37 +224,53 @@ AST_NODE_KIND(_ComplexStmtBegin, "", i32) \
|
||||
\
|
||||
AST_NODE_KIND(_ComplexStmtEnd, "", i32) \
|
||||
AST_NODE_KIND(_StmtEnd, "", i32) \
|
||||
AST_NODE_KIND(_SpecBegin, "", i32) \
|
||||
AST_NODE_KIND(ValueSpec, "value specification", struct { \
|
||||
TokenKind keyword; \
|
||||
AstNodeArray names; \
|
||||
AstNode * type; \
|
||||
AstNodeArray values; \
|
||||
}) \
|
||||
AST_NODE_KIND(_SpecEnd, "", i32) \
|
||||
AST_NODE_KIND(_DeclBegin, "", i32) \
|
||||
AST_NODE_KIND(BadDecl, "bad declaration", struct { Token begin, end; }) \
|
||||
AST_NODE_KIND(BadDecl, "bad declaration", struct { Token begin, end; }) \
|
||||
AST_NODE_KIND(GenericDecl, "generic declaration", struct { \
|
||||
Token token; \
|
||||
Token open, close; \
|
||||
AstNodeArray specs; \
|
||||
u64 tags; \
|
||||
bool is_using; \
|
||||
}) \
|
||||
AST_NODE_KIND(VarDecl, "variable declaration", struct { \
|
||||
u64 tags; \
|
||||
bool is_using; \
|
||||
AstNodeArray names; \
|
||||
AstNode * type; \
|
||||
AstNodeArray values; \
|
||||
AstNode * note; \
|
||||
u64 tags; \
|
||||
bool is_using; \
|
||||
AstNodeArray names; \
|
||||
AstNode * type; \
|
||||
AstNodeArray values; \
|
||||
AstNode * note; \
|
||||
}) \
|
||||
AST_NODE_KIND(ConstDecl, "constant declaration", struct { \
|
||||
u64 tags; \
|
||||
AstNodeArray names; \
|
||||
AstNode * type; \
|
||||
AstNodeArray values; \
|
||||
AstNode * note; \
|
||||
}) \
|
||||
AST_NODE_KIND(ProcDecl, "procedure declaration", struct { \
|
||||
AstNode *name; \
|
||||
AstNode *type; \
|
||||
AstNode *body; \
|
||||
u64 tags; \
|
||||
String foreign_name; \
|
||||
String link_name; \
|
||||
AstNode *note; \
|
||||
u64 tags; \
|
||||
AstNodeArray names; \
|
||||
AstNode * type; \
|
||||
AstNodeArray values; \
|
||||
AstNode * note; \
|
||||
}) \
|
||||
AST_NODE_KIND(TypeDecl, "type declaration", struct { \
|
||||
Token token; \
|
||||
AstNode *name, *type; \
|
||||
Token token; \
|
||||
AstNode *name; \
|
||||
AstNode *type; \
|
||||
AstNode *note; \
|
||||
}) \
|
||||
AST_NODE_KIND(ProcDecl, "procedure declaration", struct { \
|
||||
AstNode *name; \
|
||||
AstNode *type; \
|
||||
AstNode *body; \
|
||||
u64 tags; \
|
||||
String foreign_name; \
|
||||
String link_name; \
|
||||
AstNode *note; \
|
||||
}) \
|
||||
AST_NODE_KIND(ImportDecl, "import declaration", struct { \
|
||||
Token token, relpath; \
|
||||
String fullpath; \
|
||||
@@ -459,20 +475,27 @@ Token ast_node_token(AstNode *node) {
|
||||
return node->PushAllocator.token;
|
||||
case AstNode_PushContext:
|
||||
return node->PushContext.token;
|
||||
|
||||
case AstNode_BadDecl:
|
||||
return node->BadDecl.begin;
|
||||
case AstNode_GenericDecl:
|
||||
return node->GenericDecl.token;
|
||||
case AstNode_VarDecl:
|
||||
return ast_node_token(node->VarDecl.names.e[0]);
|
||||
case AstNode_ConstDecl:
|
||||
return ast_node_token(node->ConstDecl.names.e[0]);
|
||||
case AstNode_ProcDecl:
|
||||
return node->ProcDecl.name->Ident;
|
||||
return ast_node_token(node->ProcDecl.name);
|
||||
case AstNode_TypeDecl:
|
||||
return node->TypeDecl.token;
|
||||
return ast_node_token(node->TypeDecl.name);
|
||||
case AstNode_ImportDecl:
|
||||
return node->ImportDecl.token;
|
||||
case AstNode_ForeignLibrary:
|
||||
return node->ForeignLibrary.token;
|
||||
|
||||
case AstNode_ValueSpec:
|
||||
return ast_node_token(node->ValueSpec.names.e[0]);
|
||||
|
||||
case AstNode_Parameter: {
|
||||
if (node->Parameter.names.count > 0) {
|
||||
return ast_node_token(node->Parameter.names.e[0]);
|
||||
@@ -1033,6 +1056,29 @@ AstNode *make_foreign_library(AstFile *f, Token token, Token filepath, AstNode *
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
AstNode *make_generic_decl(AstFile *f, Token token, Token open, Token close, AstNodeArray specs, u64 tags, bool is_using) {
|
||||
AstNode *result = make_node(f, AstNode_GenericDecl);
|
||||
result->GenericDecl.token = token;
|
||||
result->GenericDecl.open = open;
|
||||
result->GenericDecl.close = close;
|
||||
result->GenericDecl.specs = specs;
|
||||
result->GenericDecl.tags = tags;
|
||||
result->GenericDecl.is_using = is_using;
|
||||
return result;
|
||||
}
|
||||
|
||||
AstNode *make_value_spec(AstFile *f, TokenKind keyword, AstNodeArray names, AstNode *type, AstNodeArray values) {
|
||||
AstNode *result = make_node(f, AstNode_ValueSpec);
|
||||
result->ValueSpec.keyword = keyword;
|
||||
result->ValueSpec.names = names;
|
||||
result->ValueSpec.type = type;
|
||||
result->ValueSpec.values = values;
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
bool next_token(AstFile *f) {
|
||||
if (f->curr_token_index+1 < f->tokens.count) {
|
||||
if (f->curr_token.kind != Token_Comment) {
|
||||
@@ -1054,16 +1100,9 @@ Token expect_token(AstFile *f, TokenKind kind) {
|
||||
Token prev = f->curr_token;
|
||||
if (prev.kind != kind) {
|
||||
String p = token_strings[prev.kind];
|
||||
if (prev.kind == Token_Semicolon &&
|
||||
str_eq(prev.string, str_lit("\n"))) {
|
||||
syntax_error(f->curr_token, "Expected `%.*s`, got newline",
|
||||
LIT(token_strings[kind]),
|
||||
LIT(p));
|
||||
} else {
|
||||
syntax_error(f->curr_token, "Expected `%.*s`, got `%.*s`",
|
||||
LIT(token_strings[kind]),
|
||||
LIT(token_strings[prev.kind]));
|
||||
}
|
||||
syntax_error(f->curr_token, "Expected `%.*s`, got `%.*s`",
|
||||
LIT(token_strings[kind]),
|
||||
LIT(token_strings[prev.kind]));
|
||||
}
|
||||
next_token(f);
|
||||
return prev;
|
||||
@@ -1073,10 +1112,6 @@ Token expect_token_after(AstFile *f, TokenKind kind, char *msg) {
|
||||
Token prev = f->curr_token;
|
||||
if (prev.kind != kind) {
|
||||
String p = token_strings[prev.kind];
|
||||
if (prev.kind == Token_Semicolon &&
|
||||
str_eq(prev.string, str_lit("\n"))) {
|
||||
p = str_lit("newline");
|
||||
}
|
||||
syntax_error(f->curr_token, "Expected `%.*s` after %s, got `%.*s`",
|
||||
LIT(token_strings[kind]),
|
||||
msg,
|
||||
@@ -1136,6 +1171,11 @@ void fix_advance_to_next_stmt(AstFile *f) {
|
||||
case Token_Semicolon:
|
||||
return;
|
||||
|
||||
case Token_var:
|
||||
case Token_const:
|
||||
case Token_type:
|
||||
case Token_proc:
|
||||
|
||||
case Token_if:
|
||||
case Token_when:
|
||||
case Token_return:
|
||||
@@ -1204,8 +1244,9 @@ void expect_semicolon(AstFile *f, AstNode *s) {
|
||||
break;
|
||||
}
|
||||
|
||||
syntax_error(prev_token, "Expected `;` after %.*s, got %.*s",
|
||||
LIT(ast_node_strings[s->kind]), LIT(token_strings[prev_token.kind]));
|
||||
syntax_error(prev_token, "Expected `;` after %.*s, got %.*s %d %d",
|
||||
LIT(ast_node_strings[s->kind]), LIT(token_strings[prev_token.kind]),
|
||||
Token_Semicolon, prev_token.kind);
|
||||
} else {
|
||||
syntax_error(prev_token, "Expected `;`");
|
||||
}
|
||||
@@ -1831,6 +1872,21 @@ AstNodeArray parse_rhs_expr_list(AstFile *f) {
|
||||
return parse_expr_list(f, false);
|
||||
}
|
||||
|
||||
AstNodeArray parse_identfier_list(AstFile *f) {
|
||||
AstNodeArray list = make_ast_node_array(f);
|
||||
|
||||
do {
|
||||
array_add(&list, parse_identifier(f));
|
||||
if (f->curr_token.kind != Token_Comma ||
|
||||
f->curr_token.kind == Token_EOF) {
|
||||
break;
|
||||
}
|
||||
next_token(f);
|
||||
} while (true);
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
void parse_check_name_list_for_reserves(AstFile *f, AstNodeArray names) {
|
||||
for_array(i, names) {
|
||||
AstNode *name = names.e[i];
|
||||
@@ -1845,13 +1901,126 @@ void parse_check_name_list_for_reserves(AstFile *f, AstNodeArray names) {
|
||||
}
|
||||
}
|
||||
|
||||
AstNode *parse_value_decl(AstFile *f);
|
||||
AstNode *parse_type_attempt(AstFile *f) {
|
||||
AstNode *type = parse_identifier_or_type(f);
|
||||
if (type != NULL) {
|
||||
// TODO(bill): Handle?
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
AstNode *parse_type(AstFile *f) {
|
||||
AstNode *type = parse_type_attempt(f);
|
||||
if (type == NULL) {
|
||||
Token token = f->curr_token;
|
||||
syntax_error(token, "Expected a type");
|
||||
next_token(f);
|
||||
return make_bad_expr(f, token, f->curr_token);
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
|
||||
#define PARSE_SPEC_PROC(name) AstNode *(name)(AstFile *f, TokenKind keyword)
|
||||
typedef PARSE_SPEC_PROC(*ParserSpecProc);
|
||||
|
||||
|
||||
AstNode *parse_generic_decl(AstFile *f, TokenKind keyword, ParserSpecProc spec_proc) {
|
||||
Token token = expect_token(f, keyword);
|
||||
Token open = {0}, close = {0};
|
||||
AstNodeArray specs = {0};
|
||||
if (f->curr_token.kind == Token_OpenParen) {
|
||||
open = expect_token(f, Token_OpenParen);
|
||||
array_init(&specs, heap_allocator());
|
||||
|
||||
while (f->curr_token.kind != Token_CloseParen &&
|
||||
f->curr_token.kind != Token_EOF) {
|
||||
AstNode *spec = spec_proc(f, keyword);
|
||||
array_add(&specs, spec);
|
||||
expect_semicolon(f, spec);
|
||||
}
|
||||
|
||||
close = expect_token(f, Token_CloseParen);
|
||||
} else {
|
||||
array_init_reserve(&specs, heap_allocator(), 1);
|
||||
array_add(&specs, spec_proc(f, keyword));
|
||||
}
|
||||
|
||||
return make_generic_decl(f, token, open, close, specs, 0, false);
|
||||
}
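parse_generic_decl therefore accepts either a single specification directly after the keyword or a parenthesised, semicolon-separated list, so both of the following forms yield one GenericDecl node (hypothetical names):

var single int;

var (
	a, b int;
	c    f32 = 2.5;
);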
PARSE_SPEC_PROC(parse_value_spec) {
|
||||
AstNodeArray names = parse_identfier_list(f);
|
||||
parse_check_name_list_for_reserves(f, names);
|
||||
AstNode *type = parse_type_attempt(f);
|
||||
AstNodeArray values = {0};
|
||||
|
||||
if (allow_token(f, Token_Eq)) {
|
||||
values = parse_rhs_expr_list(f);
|
||||
}
|
||||
|
||||
if (values.count > names.count) {
|
||||
syntax_error(f->curr_token, "Too many values on the right hand side of the declaration");
|
||||
}
|
||||
|
||||
if (keyword == Token_const) {
|
||||
if (values.count < names.count) {
|
||||
syntax_error(f->curr_token, "All constant declarations must be defined");
|
||||
} else if (values.count == 0) {
|
||||
syntax_error(f->curr_token, "Expected an expression for this declaration");
|
||||
}
|
||||
}
|
||||
|
||||
if (type == NULL && values.count == 0 && names.count > 0) {
|
||||
syntax_error(f->curr_token, "Missing type or initialization");
|
||||
return make_bad_decl(f, f->curr_token, f->curr_token);
|
||||
}
|
||||
|
||||
// TODO(bill): Fix this so it does not require it
|
||||
if (values.e == NULL) {
|
||||
values = make_ast_node_array(f);
|
||||
}
|
||||
|
||||
return make_value_spec(f, keyword, names, type, values);
|
||||
}
|
||||
|
||||
|
||||
AstNode *parse_type_decl(AstFile *f) {
|
||||
Token token = expect_token(f, Token_type);
|
||||
AstNode *name = parse_identifier(f);
|
||||
AstNode *type = parse_type(f);
|
||||
return make_type_decl(f, token, name, type);
|
||||
}
|
||||
|
||||
AstNode *parse_proc_decl(AstFile *f);
|
||||
|
||||
AstNode *parse_decl(AstFile *f) {
|
||||
switch (f->curr_token.kind) {
|
||||
case Token_var:
|
||||
case Token_const:
|
||||
return parse_generic_decl(f, f->curr_token.kind, parse_value_spec);
|
||||
|
||||
case Token_type:
|
||||
return parse_type_decl(f);
|
||||
|
||||
case Token_proc:
|
||||
return parse_proc_decl(f);
|
||||
|
||||
default: {
|
||||
Token token = f->curr_token;
|
||||
syntax_error(token, "Expected a declaration");
|
||||
fix_advance_to_next_stmt(f);
|
||||
return make_bad_decl(f, token, f->curr_token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
AstNode *parse_simple_stmt(AstFile *f) {
|
||||
switch (f->curr_token.kind) {
|
||||
case Token_var:
|
||||
case Token_const:
|
||||
return parse_value_decl(f);
|
||||
return parse_decl(f);
|
||||
}
|
||||
|
||||
isize lhs_count = 0, rhs_count = 0;
|
||||
@@ -1888,85 +2057,6 @@ AstNode *parse_simple_stmt(AstFile *f) {
|
||||
}
|
||||
return make_assign_stmt(f, token, lhs, rhs);
|
||||
} break;
|
||||
|
||||
// case Token_Colon: { // Declare
|
||||
// AstNodeArray names = lhs;
|
||||
// parse_check_name_list_for_reserves(f, names);
|
||||
|
||||
// Token colon = expect_token(f, Token_Colon);
|
||||
// AstNode *type = parse_identifier_or_type(f);
|
||||
// AstNodeArray values = {0};
|
||||
|
||||
// if (allow_token(f, Token_Eq)) {
|
||||
// values = parse_rhs_expr_list(f);
|
||||
// if (values.count > names.count) {
|
||||
// syntax_error(f->curr_token, "Too many values on the right hand side of the declaration");
|
||||
// } else if (values.count == 0) {
|
||||
// syntax_error(f->curr_token, "Expected an expression for this declaration");
|
||||
// }
|
||||
// if (type == NULL && values.count == 0) {
|
||||
// syntax_error(f->curr_token, "Missing variable type or initialization");
|
||||
// return make_bad_decl(f, f->curr_token, f->curr_token);
|
||||
// }
|
||||
// }
|
||||
|
||||
// if (values.e == NULL) {
|
||||
// values = make_ast_node_array(f);
|
||||
// }
|
||||
|
||||
// return make_var_decl(f, names, type, values);
|
||||
// } break;
|
||||
|
||||
// case Token_ColonColon: {
|
||||
// AstNodeArray names = lhs;
|
||||
// parse_check_name_list_for_reserves(f, names);
|
||||
|
||||
// Token colon_colon = expect_token(f, Token_ColonColon);
|
||||
|
||||
// // if (f->curr_token.kind == Token_type ||
|
||||
// // f->curr_token.kind == Token_struct ||
|
||||
// // f->curr_token.kind == Token_enum ||
|
||||
// // f->curr_token.kind == Token_union ||
|
||||
// // f->curr_token.kind == Token_raw_union) {
|
||||
// // // if (f->curr_token.kind == Token_type) {
|
||||
// // Token token = f->curr_token;
|
||||
// // if (token.kind == Token_type) {
|
||||
// // next_token(f);
|
||||
// // }
|
||||
// // if (names.count != 1) {
|
||||
// // syntax_error_node(names.e[0], "You can only declare one type at a time");
|
||||
// // return make_bad_decl(f, names.e[0]->Ident, token);
|
||||
// // }
|
||||
|
||||
// // return make_type_decl(f, token, names.e[0], parse_type(f));
|
||||
// // } else if (f->curr_token.kind == Token_proc) {
|
||||
// // // NOTE(bill): Procedure declarations
|
||||
// // Token proc_token = f->curr_token;
|
||||
// // AstNode *name = names.e[0];
|
||||
// // if (names.count != 1) {
|
||||
// // syntax_error(proc_token, "You can only declare one procedure at a time");
|
||||
// // return make_bad_decl(f, name->Ident, proc_token);
|
||||
// // }
|
||||
|
||||
// // return parse_proc_decl(f, proc_token, name);
|
||||
// // }
|
||||
|
||||
// AstNodeArray values = parse_rhs_expr_list(f);
|
||||
// if (values.count > names.count) {
|
||||
// syntax_error(f->curr_token, "Too many values on the right hand side of the declaration");
|
||||
// } else if (values.count < names.count) {
|
||||
// syntax_error(f->curr_token, "All constant declarations must be defined");
|
||||
// } else if (values.count == 0) {
|
||||
// syntax_error(f->curr_token, "Expected an expression for this declaration");
|
||||
// }
|
||||
|
||||
// if (values.count == 0 && names.count > 0) {
|
||||
// syntax_error(f->curr_token, "Missing constant value");
|
||||
// return make_bad_decl(f, f->curr_token, f->curr_token);
|
||||
// }
|
||||
|
||||
// return make_const_decl(f, names, NULL, values);
|
||||
// } break;
|
||||
}
|
||||
|
||||
if (lhs_count > 1) {
|
||||
@@ -2013,41 +2103,6 @@ AstNode *convert_stmt_to_expr(AstFile *f, AstNode *statement, String kind) {
|
||||
return make_bad_expr(f, f->curr_token, f->tokens.e[f->curr_token_index+1]);
|
||||
}
|
||||
|
||||
AstNodeArray parse_identfier_list(AstFile *f) {
|
||||
AstNodeArray list = make_ast_node_array(f);
|
||||
|
||||
do {
|
||||
array_add(&list, parse_identifier(f));
|
||||
if (f->curr_token.kind != Token_Comma ||
|
||||
f->curr_token.kind == Token_EOF) {
|
||||
break;
|
||||
}
|
||||
next_token(f);
|
||||
} while (true);
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
|
||||
|
||||
AstNode *parse_type_attempt(AstFile *f) {
|
||||
AstNode *type = parse_identifier_or_type(f);
|
||||
if (type != NULL) {
|
||||
// TODO(bill): Handle?
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
AstNode *parse_type(AstFile *f) {
|
||||
AstNode *type = parse_type_attempt(f);
|
||||
if (type == NULL) {
|
||||
Token token = f->curr_token;
|
||||
syntax_error(token, "Expected a type");
|
||||
next_token(f);
|
||||
return make_bad_expr(f, token, f->curr_token);
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
|
||||
void parse_proc_signature(AstFile *f, AstNodeArray *params, AstNodeArray *results);
|
||||
@@ -2691,75 +2746,15 @@ AstNode *parse_asm_stmt(AstFile *f) {
|
||||
|
||||
}
|
||||
|
||||
AstNode *parse_type_decl(AstFile *f) {
|
||||
Token token = expect_token(f, Token_type);
|
||||
AstNode *name = parse_identifier(f);
|
||||
AstNode *type = parse_type(f);
|
||||
return make_type_decl(f, token, name, type);
|
||||
}
|
||||
|
||||
AstNode *parse_value_decl(AstFile *f) {
|
||||
Token token = f->curr_token;
|
||||
switch (token.kind) {
|
||||
case Token_var:
|
||||
case Token_const:
|
||||
next_token(f);
|
||||
break;
|
||||
default:
|
||||
next_token(f);
|
||||
syntax_error(token, "Expected a variable or constant declaration");
|
||||
fix_advance_to_next_stmt(f);
|
||||
return make_bad_decl(f, token, f->curr_token);
|
||||
}
|
||||
|
||||
AstNodeArray names = parse_identfier_list(f);
|
||||
parse_check_name_list_for_reserves(f, names);
|
||||
AstNode *type = parse_type_attempt(f);
|
||||
AstNodeArray values = {0};
|
||||
|
||||
if (allow_token(f, Token_Eq)) {
|
||||
values = parse_rhs_expr_list(f);
|
||||
}
|
||||
|
||||
if (values.count > names.count) {
|
||||
syntax_error(f->curr_token, "Too many values on the right hand side of the declaration");
|
||||
} else if (token.kind == Token_const) {
|
||||
if (values.count < names.count) {
|
||||
syntax_error(f->curr_token, "All constant declarations must be defined");
|
||||
} else if (values.count == 0) {
|
||||
syntax_error(f->curr_token, "Expected an expression for this declaration");
|
||||
}
|
||||
}
|
||||
|
||||
if (type == NULL && values.count == 0 && names.count > 0) {
|
||||
syntax_error(f->curr_token, "Missing type or initialization");
|
||||
return make_bad_decl(f, f->curr_token, f->curr_token);
|
||||
}
|
||||
|
||||
// TODO(bill): Fix this so it does not require it
|
||||
if (values.e == NULL) {
|
||||
values = make_ast_node_array(f);
|
||||
}
|
||||
|
||||
|
||||
AstNode *decl = NULL;
|
||||
|
||||
switch (token.kind) {
|
||||
case Token_var:
|
||||
decl = make_var_decl(f, names, type, values);
|
||||
break;
|
||||
case Token_const:
|
||||
decl = make_const_decl(f, names, type, values);
|
||||
break;
|
||||
}
|
||||
return decl;
|
||||
}
|
||||
|
||||
AstNode *parse_stmt(AstFile *f) {
|
||||
AstNode *s = NULL;
|
||||
Token token = f->curr_token;
|
||||
switch (token.kind) {
|
||||
// Operands
|
||||
case Token_var:
|
||||
case Token_const:
|
||||
|
||||
case Token_Ident:
|
||||
case Token_Integer:
|
||||
case Token_Float:
|
||||
@@ -2775,6 +2770,13 @@ AstNode *parse_stmt(AstFile *f) {
|
||||
expect_semicolon(f, s);
|
||||
return s;
|
||||
|
||||
case Token_proc:
|
||||
return parse_proc_decl(f);
|
||||
case Token_type:
|
||||
s = parse_type_decl(f);
|
||||
expect_semicolon(f, s);
|
||||
return s;
|
||||
|
||||
// TODO(bill): other keywords
|
||||
case Token_if: return parse_if_stmt(f);
|
||||
case Token_when: return parse_when_stmt(f);
|
||||
@@ -2792,25 +2794,9 @@ AstNode *parse_stmt(AstFile *f) {
|
||||
expect_semicolon(f, s);
|
||||
return s;
|
||||
|
||||
case Token_proc:
|
||||
return parse_proc_decl(f);
|
||||
case Token_type:
|
||||
s = parse_type_decl(f);
|
||||
expect_semicolon(f, s);
|
||||
return s;
|
||||
case Token_var:
|
||||
case Token_const:
|
||||
s = parse_value_decl(f);
|
||||
expect_semicolon(f, s);
|
||||
return s;
|
||||
|
||||
|
||||
case Token_using: {
|
||||
AstNode *node = NULL;
|
||||
|
||||
next_token(f);
|
||||
node = parse_stmt(f);
|
||||
|
||||
AstNode *node = parse_stmt(f);
|
||||
bool valid = false;
|
||||
|
||||
switch (node->kind) {
|
||||
@@ -2833,7 +2819,6 @@ AstNode *parse_stmt(AstFile *f) {
|
||||
return make_bad_stmt(f, token, f->curr_token);
|
||||
}
|
||||
|
||||
|
||||
return make_using_stmt(f, token, node);
|
||||
} break;
|
||||
|
||||
@@ -2961,17 +2946,23 @@ AstNode *parse_stmt(AstFile *f) {
|
||||
expect_semicolon(f, s);
|
||||
return s;
|
||||
} else if (str_eq(tag, str_lit("thread_local"))) {
|
||||
AstNode *var_decl = parse_simple_stmt(f);
|
||||
if (var_decl->kind != AstNode_VarDecl) {
|
||||
AstNode *decl = parse_simple_stmt(f);
|
||||
if (decl->kind != AstNode_VarDecl &&
|
||||
(decl->kind == AstNode_GenericDecl &&
|
||||
decl->GenericDecl.token.kind != Token_var)) {
|
||||
syntax_error(token, "#thread_local may only be applied to variable declarations");
|
||||
return make_bad_decl(f, token, ast_node_token(var_decl));
|
||||
return make_bad_decl(f, token, ast_node_token(decl));
|
||||
}
|
||||
if (f->curr_proc != NULL) {
|
||||
syntax_error(token, "#thread_local is only allowed at the file scope");
|
||||
return make_bad_decl(f, token, ast_node_token(var_decl));
|
||||
return make_bad_decl(f, token, ast_node_token(decl));
|
||||
}
|
||||
var_decl->VarDecl.tags |= VarDeclTag_thread_local;
|
||||
return var_decl;
|
||||
if (decl->kind == AstNode_VarDecl) {
|
||||
decl->VarDecl.tags |= VarDeclTag_thread_local;
|
||||
} else if (decl->kind == AstNode_GenericDecl) {
|
||||
decl->GenericDecl.tags |= VarDeclTag_thread_local;
|
||||
}
|
||||
return decl;
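With this change #thread_local may tag either the old VarDecl node or a var GenericDecl, so at file scope both spellings of a declaration such as this hypothetical counter are accepted:

#thread_local var counter int;

#thread_local var (
	counter int;
);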
} else if (str_eq(tag, str_lit("bounds_check"))) {
|
||||
s = parse_stmt(f);
|
||||
s->stmt_state_flags |= StmtStateFlag_bounds_check;
|
||||
|
||||
72 src/ssa.c
@@ -3849,7 +3849,8 @@ void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
|
||||
|
||||
case_ast_node(us, UsingStmt, node);
|
||||
AstNode *decl = unparen_expr(us->node);
|
||||
if (decl->kind == AstNode_VarDecl) {
|
||||
if (decl->kind == AstNode_VarDecl ||
    decl->kind == AstNode_GenericDecl) {
|
||||
ssa_build_stmt(proc, decl);
|
||||
}
|
||||
case_end;
|
||||
@@ -3858,6 +3859,75 @@ void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
|
||||
ssa_build_when_stmt(proc, ws);
|
||||
case_end;
|
||||
|
||||
|
||||
case_ast_node(gd, GenericDecl, node);
|
||||
for_array(spec_index, gd->specs) {
|
||||
AstNode *spec = gd->specs.e[spec_index];
|
||||
switch (spec->kind) {
|
||||
case_ast_node(vs, ValueSpec, spec);
|
||||
switch (vs->keyword) {
|
||||
case Token_const:
|
||||
break;
|
||||
case Token_var: {
|
||||
ssaModule *m = proc->module;
|
||||
gbTempArenaMemory tmp = gb_temp_arena_memory_begin(&m->tmp_arena);
|
||||
|
||||
if (vs->values.count == 0) { // declared and zero-initialized
|
||||
for_array(i, vs->names) {
|
||||
AstNode *name = vs->names.e[i];
|
||||
if (!ssa_is_blank_ident(name)) {
|
||||
ssa_add_local_for_identifier(proc, name, true);
|
||||
}
|
||||
}
|
||||
} else { // Tuple(s)
|
||||
Array(ssaAddr) lvals;
|
||||
ssaValueArray inits;
|
||||
array_init_reserve(&lvals, m->tmp_allocator, vs->names.count);
|
||||
array_init_reserve(&inits, m->tmp_allocator, vs->names.count);
|
||||
|
||||
for_array(i, vs->names) {
|
||||
AstNode *name = vs->names.e[i];
|
||||
ssaAddr lval = ssa_make_addr(NULL, NULL);
|
||||
if (!ssa_is_blank_ident(name)) {
|
||||
ssa_add_local_for_identifier(proc, name, false);
|
||||
lval = ssa_build_addr(proc, name);
|
||||
}
|
||||
|
||||
array_add(&lvals, lval);
|
||||
}
|
||||
|
||||
for_array(i, vs->values) {
|
||||
ssaValue *init = ssa_build_expr(proc, vs->values.e[i]);
|
||||
Type *t = ssa_type(init);
|
||||
if (t->kind == Type_Tuple) {
|
||||
for (isize i = 0; i < t->Tuple.variable_count; i++) {
|
||||
Entity *e = t->Tuple.variables[i];
|
||||
ssaValue *v = ssa_emit_struct_ev(proc, init, i);
|
||||
array_add(&inits, v);
|
||||
}
|
||||
} else {
|
||||
array_add(&inits, init);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for_array(i, inits) {
|
||||
if (lvals.e[i].addr == NULL) {
|
||||
continue;
|
||||
}
|
||||
ssaValue *v = ssa_emit_conv(proc, inits.e[i], ssa_addr_type(lvals.e[i]));
|
||||
ssa_addr_store(proc, lvals.e[i], v);
|
||||
}
|
||||
}
|
||||
|
||||
gb_temp_arena_memory_end(tmp);
|
||||
} break;
|
||||
}
|
||||
case_end;
|
||||
}
|
||||
}
|
||||
case_end;
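The tuple branch above lets a grouped variable specification be initialised from a single call that returns multiple values; a sketch of the declarations it lowers (two_values is a hypothetical procedure returning a tuple):

var (
	a, b int;              // no initial values: declared and zero-initialized
	x, y = two_values();   // one init expression whose tuple result is spread across x and y
);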
|
||||
case_ast_node(vd, VarDecl, node);
|
||||
ssaModule *m = proc->module;
|
||||
gbTempArenaMemory tmp = gb_temp_arena_memory_begin(&m->tmp_arena);
|
||||
|
||||