Merge branch 'master' into windows-llvm-11.1.0
@@ -66,7 +66,8 @@ OS :: enum u8 {
 	_Unknown = 14,
 	Unknown = 255,
 }
-OS_Name :: #partial [OS]string{
+OS_Name :: #sparse[OS]string{
+	._Unknown = "",
 	.FAT = "FAT",
 	.Amiga = "Amiga",
 	.VMS = "VMS/OpenVMS",
@@ -172,7 +172,7 @@ Error :: enum int {
 	Unimplemented = 127,
 }
 
-Error_String :: #partial [Error]string{
+Error_String :: #sparse[Error]string{
 	.Okay = "Okay",
 	.Out_Of_Memory = "Out of memory",
 	.Invalid_Pointer = "Invalid pointer",
@@ -182,6 +182,7 @@ Error_String :: #partial [Error]string{
 	.Max_Iterations_Reached = "Max iterations reached",
 	.Buffer_Overflow = "Buffer overflow",
 	.Integer_Overflow = "Integer overflow",
+	.Integer_Underflow = "Integer underflow",
 
 	.Division_by_Zero = "Division by zero",
 	.Math_Domain_Error = "Math domain error",
@@ -206,11 +206,19 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
 		}
 	}
 
-	aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int) -> ([]byte, mem.Allocator_Error) {
+	aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int) -> (new_memory: []byte, err: mem.Allocator_Error) {
 		if p == nil {
 			return nil, nil
 		}
-		return aligned_alloc(new_size, new_alignment, p)
+
+		new_memory = aligned_alloc(new_size, new_alignment, p) or_return
+
+		// NOTE: heap_resize does not zero the new memory, so we do it
+		if new_size > old_size {
+			new_region := mem.raw_data(new_memory[old_size:])
+			mem.zero(new_region, new_size - old_size)
+		}
+		return
 	}
 
 	switch mode {
@@ -553,6 +553,8 @@ heap_alloc :: proc(size: int) -> rawptr {
 	return _unix_calloc(1, size)
 }
 heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr {
+	// NOTE: _unix_realloc doesn't guarantee new memory will be zeroed on
+	// POSIX platforms. Ensure your caller takes this into account.
 	return _unix_realloc(ptr, new_size)
 }
 heap_free :: proc(ptr: rawptr) {
@@ -378,6 +378,8 @@ heap_alloc :: proc(size: int) -> rawptr {
 }
 
 heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr {
+	// NOTE: _unix_realloc doesn't guarantee new memory will be zeroed on
+	// POSIX platforms. Ensure your caller takes this into account.
 	return _unix_realloc(ptr, c.size_t(new_size));
 }
 
@@ -727,6 +727,8 @@ heap_alloc :: proc(size: int) -> rawptr {
 }
 
 heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr {
+	// NOTE: _unix_realloc doesn't guarantee new memory will be zeroed on
+	// POSIX platforms. Ensure your caller takes this into account.
 	return _unix_realloc(ptr, c.size_t(new_size))
 }
 
@@ -472,6 +472,9 @@ write_type_writer :: proc(w: io.Writer, ti: ^Type_Info, n_written: ^int = nil) -
 		write_type(w, info.elem, &n) or_return
 
 	case Type_Info_Enumerated_Array:
+		if info.is_sparse {
+			io.write_string(w, "#sparse", &n) or_return
+		}
 		io.write_string(w, "[", &n) or_return
 		write_type(w, info.index, &n) or_return
 		io.write_string(w, "]", &n) or_return
@@ -95,6 +95,7 @@ Type_Info_Enumerated_Array :: struct {
 	count: int,
 	min_value: Type_Info_Enum_Value,
 	max_value: Type_Info_Enum_Value,
+	is_sparse: bool,
 }
 Type_Info_Dynamic_Array :: struct {elem: ^Type_Info, elem_size: int}
 Type_Info_Slice :: struct {elem: ^Type_Info, elem_size: int}
@@ -260,6 +260,9 @@ print_type :: proc "contextless" (ti: ^Type_Info) {
 		print_type(info.elem)
 
 	case Type_Info_Enumerated_Array:
+		if info.is_sparse {
+			print_string("#sparse")
+		}
 		print_byte('[')
 		print_type(info.index)
 		print_byte(']')
@@ -1921,14 +1921,14 @@ constant_literal_expressions :: proc() {
 
 	fmt.println("-------")
 
-	Partial_Baz :: enum{A=5, B, C, D=16}
-	#assert(len(Partial_Baz) < len(#partial [Partial_Baz]int))
-	PARTIAL_ENUM_ARRAY_CONST :: #partial [Partial_Baz]int{.A ..= .C = 1, .D = 16}
+	Sparse_Baz :: enum{A=5, B, C, D=16}
+	#assert(len(Sparse_Baz) < len(#sparse[Sparse_Baz]int))
+	SPARSE_ENUM_ARRAY_CONST :: #sparse[Sparse_Baz]int{.A ..= .C = 1, .D = 16}
 
-	fmt.println(PARTIAL_ENUM_ARRAY_CONST[.A])
-	fmt.println(PARTIAL_ENUM_ARRAY_CONST[.B])
-	fmt.println(PARTIAL_ENUM_ARRAY_CONST[.C])
-	fmt.println(PARTIAL_ENUM_ARRAY_CONST[.D])
+	fmt.println(SPARSE_ENUM_ARRAY_CONST[.A])
+	fmt.println(SPARSE_ENUM_ARRAY_CONST[.B])
+	fmt.println(SPARSE_ENUM_ARRAY_CONST[.C])
+	fmt.println(SPARSE_ENUM_ARRAY_CONST[.D])
 
 	fmt.println("-------")
 
@@ -6956,6 +6956,100 @@ void check_matrix_index_expr(CheckerContext *c, Operand *o, Ast *node, Type *typ
 }
 
 
+struct TypeAndToken {
+	Type *type;
+	Token token;
+};
+
+void add_constant_switch_case(CheckerContext *ctx, PtrMap<uintptr, TypeAndToken> *seen, Operand operand, bool use_expr = true) {
+	if (operand.mode != Addressing_Constant) {
+		return;
+	}
+	if (operand.value.kind == ExactValue_Invalid) {
+		return;
+	}
+
+	uintptr key = hash_exact_value(operand.value);
+	TypeAndToken *found = map_get(seen, key);
+	if (found != nullptr) {
+		isize count = multi_map_count(seen, key);
+		TypeAndToken *taps = gb_alloc_array(temporary_allocator(), TypeAndToken, count);
+
+		multi_map_get_all(seen, key, taps);
+		for (isize i = 0; i < count; i++) {
+			TypeAndToken tap = taps[i];
+			if (!are_types_identical(operand.type, tap.type)) {
+				continue;
+			}
+
+			TokenPos pos = tap.token.pos;
+			if (use_expr) {
+				gbString expr_str = expr_to_string(operand.expr);
+				error(operand.expr,
+				      "Duplicate case '%s'\n"
+				      "\tprevious case at %s",
+				      expr_str,
+				      token_pos_to_string(pos));
+				gb_string_free(expr_str);
+			} else {
+				error(operand.expr, "Duplicate case found with previous case at %s", token_pos_to_string(pos));
+			}
+			return;
+		}
+	}
+
+	TypeAndToken tap = {operand.type, ast_token(operand.expr)};
+	multi_map_insert(seen, key, tap);
+}
+
+typedef PtrMap<uintptr, TypeAndToken> SeenMap;
+
+void add_to_seen_map(CheckerContext *ctx, SeenMap *seen, TokenKind upper_op, Operand const &x, Operand const &lhs, Operand const &rhs) {
+	if (is_type_enum(x.type)) {
+		// TODO(bill): Fix this logic so it's fast!!!
+
+		i64 v0 = exact_value_to_i64(lhs.value);
+		i64 v1 = exact_value_to_i64(rhs.value);
+		Operand v = {};
+		v.mode = Addressing_Constant;
+		v.type = x.type;
+		v.expr = x.expr;
+
+		Type *bt = base_type(x.type);
+		GB_ASSERT(bt->kind == Type_Enum);
+		for (i64 vi = v0; vi <= v1; vi++) {
+			if (upper_op != Token_LtEq && vi == v1) {
+				break;
+			}
+
+			bool found = false;
+			for_array(j, bt->Enum.fields) {
+				Entity *f = bt->Enum.fields[j];
+				GB_ASSERT(f->kind == Entity_Constant);
+
+				i64 fv = exact_value_to_i64(f->Constant.value);
+				if (fv == vi) {
+					found = true;
+					break;
+				}
+			}
+			if (found) {
+				v.value = exact_value_i64(vi);
+				add_constant_switch_case(ctx, seen, v);
+			}
+		}
+	} else {
+		add_constant_switch_case(ctx, seen, lhs);
+		if (upper_op == Token_LtEq) {
+			add_constant_switch_case(ctx, seen, rhs);
+		}
+	}
+}
+void add_to_seen_map(CheckerContext *ctx, SeenMap *seen, Operand const &x) {
+	add_constant_switch_case(ctx, seen, x);
+}
+
+
 ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) {
 	u32 prev_state_flags = c->state_flags;
 	defer (c->state_flags = prev_state_flags);
@@ -7863,6 +7957,11 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type
 		if (bet == t_invalid) {
 			break;
 		}
+		bool is_partial = cl->tag && (cl->tag->BasicDirective.name.string == "partial");
+
+		SeenMap seen = {}; // NOTE(bill): Multimap, Key: ExactValue
+		map_init(&seen, heap_allocator());
+		defer (map_destroy(&seen));
 
 		if (cl->elems.count > 0 && cl->elems[0]->kind == Ast_FieldValue) {
 			RangeCache rc = range_cache_make(heap_allocator());
@@ -7936,6 +8035,12 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type
 				check_assignment(c, &operand, elem_type, context_name);
 
 				is_constant = is_constant && operand.mode == Addressing_Constant;
+
+				TokenKind upper_op = Token_LtEq;
+				if (op.kind == Token_RangeHalf) {
+					upper_op = Token_Lt;
+				}
+				add_to_seen_map(c, &seen, upper_op, x, x, y);
 			} else {
 				Operand op_index = {};
 				check_expr_with_type_hint(c, &op_index, fv->field, index_type);
@@ -7971,6 +8076,8 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type
 				check_assignment(c, &operand, elem_type, context_name);
 
 				is_constant = is_constant && operand.mode == Addressing_Constant;
+
+				add_to_seen_map(c, &seen, op_index);
 			}
 		}
 
@@ -8006,11 +8113,53 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type
 			}
 		}
 
+		bool was_error = false;
 		if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) {
 			if (0 < max && max < t->EnumeratedArray.count) {
 				error(node, "Expected %lld values for this enumerated array literal, got %lld", cast(long long)t->EnumeratedArray.count, cast(long long)max);
+				was_error = true;
 			} else {
 				error(node, "Enumerated array literals must only have 'field = value' elements, bare elements are not allowed");
+				was_error = true;
 			}
 		}
+
+		// NOTE(bill): Check for missing cases when `#partial literal` is not present
+		if (cl->elems.count > 0 && !was_error && !is_partial) {
+			Type *et = base_type(index_type);
+			GB_ASSERT(et->kind == Type_Enum);
+			auto fields = et->Enum.fields;
+
+			auto unhandled = array_make<Entity *>(temporary_allocator(), 0, fields.count);
+
+			for_array(i, fields) {
+				Entity *f = fields[i];
+				if (f->kind != Entity_Constant) {
+					continue;
+				}
+				ExactValue v = f->Constant.value;
+				auto found = map_get(&seen, hash_exact_value(v));
+				if (!found) {
+					array_add(&unhandled, f);
+				}
+			}
+
+			if (unhandled.count > 0) {
+				begin_error_block();
+				defer (end_error_block());
+
+				if (unhandled.count == 1) {
+					error_no_newline(node, "Unhandled enumerated array case: %.*s", LIT(unhandled[0]->token.string));
+				} else {
+					error_no_newline(node, "Unhandled enumerated array cases: ");
+					for_array(i, unhandled) {
+						Entity *f = unhandled[i];
+						error_line("\t%.*s\n", LIT(f->token.string));
+					}
+				}
+				error_line("\n");
+
+				error_line("\tSuggestion: Was '#partial %s {...}' wanted?\n", type_to_string(index_type));
+			}
+		}
 
@@ -697,54 +697,6 @@ bool check_using_stmt_entity(CheckerContext *ctx, AstUsingStmt *us, Ast *expr, b
 	return true;
 }
 
-
-struct TypeAndToken {
-	Type *type;
-	Token token;
-};
-
-
-void add_constant_switch_case(CheckerContext *ctx, PtrMap<uintptr, TypeAndToken> *seen, Operand operand, bool use_expr = true) {
-	if (operand.mode != Addressing_Constant) {
-		return;
-	}
-	if (operand.value.kind == ExactValue_Invalid) {
-		return;
-	}
-
-	uintptr key = hash_exact_value(operand.value);
-	TypeAndToken *found = map_get(seen, key);
-	if (found != nullptr) {
-		isize count = multi_map_count(seen, key);
-		TypeAndToken *taps = gb_alloc_array(temporary_allocator(), TypeAndToken, count);
-
-		multi_map_get_all(seen, key, taps);
-		for (isize i = 0; i < count; i++) {
-			TypeAndToken tap = taps[i];
-			if (!are_types_identical(operand.type, tap.type)) {
-				continue;
-			}
-
-			TokenPos pos = tap.token.pos;
-			if (use_expr) {
-				gbString expr_str = expr_to_string(operand.expr);
-				error(operand.expr,
-				      "Duplicate case '%s'\n"
-				      "\tprevious case at %s",
-				      expr_str,
-				      token_pos_to_string(pos));
-				gb_string_free(expr_str);
-			} else {
-				error(operand.expr, "Duplicate case found with previous case at %s", token_pos_to_string(pos));
-			}
-			return;
-		}
-	}
-
-	TypeAndToken tap = {operand.type, ast_token(operand.expr)};
-	multi_map_insert(seen, key, tap);
-}
-
 void check_inline_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) {
 	ast_node(irs, UnrollRangeStmt, node);
 	check_open_scope(ctx, node);
@@ -1009,9 +961,9 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) {
 
 				TokenKind upper_op = Token_Invalid;
 				switch (be->op.kind) {
-				case Token_Ellipsis: upper_op = Token_GtEq; break;
-				case Token_RangeFull: upper_op = Token_GtEq; break;
-				case Token_RangeHalf: upper_op = Token_Gt; break;
+				case Token_Ellipsis: upper_op = Token_LtEq; break;
+				case Token_RangeFull: upper_op = Token_LtEq; break;
+				case Token_RangeHalf: upper_op = Token_Lt; break;
 				default: GB_PANIC("Invalid range operator"); break;
 				}
 
@@ -1032,45 +984,7 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) {
 				Operand b1 = rhs;
 				check_comparison(ctx, &a1, &b1, Token_LtEq);
 
-				if (is_type_enum(x.type)) {
-					// TODO(bill): Fix this logic so it's fast!!!
-
-					i64 v0 = exact_value_to_i64(lhs.value);
-					i64 v1 = exact_value_to_i64(rhs.value);
-					Operand v = {};
-					v.mode = Addressing_Constant;
-					v.type = x.type;
-					v.expr = x.expr;
-
-					Type *bt = base_type(x.type);
-					GB_ASSERT(bt->kind == Type_Enum);
-					for (i64 vi = v0; vi <= v1; vi++) {
-						if (upper_op != Token_GtEq && vi == v1) {
-							break;
-						}
-
-						bool found = false;
-						for_array(j, bt->Enum.fields) {
-							Entity *f = bt->Enum.fields[j];
-							GB_ASSERT(f->kind == Entity_Constant);
-
-							i64 fv = exact_value_to_i64(f->Constant.value);
-							if (fv == vi) {
-								found = true;
-								break;
-							}
-						}
-						if (found) {
-							v.value = exact_value_i64(vi);
-							add_constant_switch_case(ctx, &seen, v);
-						}
-					}
-				} else {
-					add_constant_switch_case(ctx, &seen, lhs);
-					if (upper_op == Token_GtEq) {
-						add_constant_switch_case(ctx, &seen, rhs);
-					}
-				}
+				add_to_seen_map(ctx, &seen, upper_op, x, lhs, rhs);
 
 				if (is_type_string(x.type)) {
 					// NOTE(bill): Force dependency for strings here
@@ -1115,7 +1029,7 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) {
 					continue;
 				}
 				update_untyped_expr_type(ctx, z.expr, x.type, !is_type_untyped(x.type));
-				add_constant_switch_case(ctx, &seen, y);
+				add_to_seen_map(ctx, &seen, y);
 			}
 		}
 	}
@@ -2713,29 +2713,30 @@ bool check_type_internal(CheckerContext *ctx, Ast *e, Type **type, Type *named_t
 
 			Type *t = alloc_type_enumerated_array(elem, index, bt->Enum.min_value, bt->Enum.max_value, Token_Invalid);
 
-			bool is_partial = false;
+			bool is_sparse = false;
 			if (at->tag != nullptr) {
 				GB_ASSERT(at->tag->kind == Ast_BasicDirective);
 				String name = at->tag->BasicDirective.name.string;
-				if (name == "partial") {
-					is_partial = true;
+				if (name == "sparse") {
+					is_sparse = true;
 				} else {
 					error(at->tag, "Invalid tag applied to an enumerated array, got #%.*s", LIT(name));
 				}
 			}
 
-			if (!is_partial && t->EnumeratedArray.count > bt->Enum.fields.count) {
+			if (!is_sparse && t->EnumeratedArray.count > bt->Enum.fields.count) {
 				error(e, "Non-contiguous enumeration used as an index in an enumerated array");
 				long long ea_count = cast(long long)t->EnumeratedArray.count;
 				long long enum_count = cast(long long)bt->Enum.fields.count;
 				error_line("\tenumerated array length: %lld\n", ea_count);
 				error_line("\tenum field count: %lld\n", enum_count);
-				error_line("\tSuggestion: prepend #partial to the enumerated array to allow for non-named elements\n");
+				error_line("\tSuggestion: prepend #sparse to the enumerated array to allow for non-contiguous elements\n");
 				if (2*enum_count < ea_count) {
 					error_line("\tWarning: the number of named elements is much smaller than the length of the array, are you sure this is what you want?\n");
-					error_line("\t this warning will be removed if #partial is applied\n");
+					error_line("\t this warning will be removed if #sparse is applied\n");
 				}
 			}
+			t->EnumeratedArray.is_sparse = is_sparse;
 
 			*type = t;
 
@@ -454,7 +454,7 @@ void lb_setup_type_info_data(lbProcedure *p) { // NOTE(bill): Setup type_info da
 	case Type_EnumeratedArray: {
 		tag = lb_const_ptr_cast(m, variant_ptr, t_type_info_enumerated_array_ptr);
 
-		LLVMValueRef vals[6] = {
+		LLVMValueRef vals[7] = {
 			lb_get_type_info_ptr(m, t->EnumeratedArray.elem).value,
 			lb_get_type_info_ptr(m, t->EnumeratedArray.index).value,
 			lb_const_int(m, t_int, type_size_of(t->EnumeratedArray.elem)).value,
@@ -463,6 +463,8 @@ void lb_setup_type_info_data(lbProcedure *p) { // NOTE(bill): Setup type_info da
 			// Unions
 			LLVMConstNull(lb_type(m, t_type_info_enum_value)),
 			LLVMConstNull(lb_type(m, t_type_info_enum_value)),
+
+			lb_const_bool(m, t_bool, t->EnumeratedArray.is_sparse).value,
 		};
 
 		lbValue res = {};
@@ -2135,6 +2135,22 @@ Ast *parse_operand(AstFile *f, bool lhs) {
 			}
 			return original_type;
 		} else if (name.string == "partial") {
+			Ast *tag = ast_basic_directive(f, token, name);
+			Ast *original_expr = parse_expr(f, lhs);
+			Ast *expr = unparen_expr(original_expr);
+			switch (expr->kind) {
+			case Ast_ArrayType:
+				syntax_error(expr, "#partial has been replaced with #sparse for non-contiguous enumerated array types");
+				break;
+			case Ast_CompoundLit:
+				expr->CompoundLit.tag = tag;
+				break;
+			default:
+				syntax_error(expr, "Expected a compound literal after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[expr->kind]));
+				break;
+			}
+			return original_expr;
+		} else if (name.string == "sparse") {
 			Ast *tag = ast_basic_directive(f, token, name);
 			Ast *original_type = parse_type(f);
 			Ast *type = unparen_expr(original_type);
@@ -350,6 +350,7 @@ char const *inline_asm_dialect_strings[InlineAsmDialect_COUNT] = {
 		Slice<Ast *> elems; \
 		Token open, close; \
 		i64 max_count; \
+		Ast *tag; \
 	}) \
 	AST_KIND(_ExprBegin, "", bool) \
 	AST_KIND(BadExpr, "bad expression", struct { Token begin, end; }) \
@@ -221,6 +221,7 @@ struct TypeProc {
 		ExactValue *max_value; \
 		i64 count; \
 		TokenKind op; \
+		bool is_sparse; \
 	}) \
 	TYPE_KIND(Slice, struct { Type *elem; }) \
 	TYPE_KIND(DynamicArray, struct { Type *elem; }) \
@@ -3830,6 +3831,9 @@ gbString write_type_to_string(gbString str, Type *type) {
 		break;
 
 	case Type_EnumeratedArray:
+		if (type->EnumeratedArray.is_sparse) {
+			str = gb_string_appendc(str, "#sparse");
+		}
 		str = gb_string_append_rune(str, '[');
 		str = write_type_to_string(str, type->EnumeratedArray.index);
 		str = gb_string_append_rune(str, ']');
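For context, a minimal usage sketch of the #sparse directive this change introduces (hypothetical names, not taken from the commit): an enum with non-contiguous values may index an enumerated array only when the array type is tagged #sparse, and every enum field must still be covered in the literal unless it is tagged #partial.

package sparse_example

import "core:fmt"

// Hypothetical non-contiguous enum: without #sparse, using it as an
// enumerated array index is rejected with "Non-contiguous enumeration
// used as an index in an enumerated array".
Day :: enum {
	Mon = 1,
	Fri = 5,
	Sun = 7,
}

// All fields are assigned, so no "Unhandled enumerated array case" error.
Day_Name :: #sparse[Day]string{
	.Mon = "Monday",
	.Fri = "Friday",
	.Sun = "Sunday",
}

main :: proc() {
	fmt.println(Day_Name[.Fri]) // prints "Friday"
}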