Remove dead code for the "fixed" map idea

Ginger Bill
2017-07-29 14:43:42 +01:00
parent 7bd62481ad
commit ca36fabfc0
7 changed files with 56 additions and 105 deletions

View File

@@ -98,7 +98,6 @@ TypeInfo :: struct #ordered {
key: ^TypeInfo;
value: ^TypeInfo;
generated_struct: ^TypeInfo;
count: int; // == 0 if dynamic
};
BitField :: struct #ordered {
names: []string;
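With the fixed-count idea dropped, the Map record in TypeInfo keeps only the key, value, and generated-struct pointers. A minimal sketch of the resulting shape, based on the hunk above (only the count field is removed; the enclosing record is unchanged):

    Map :: struct #ordered {
        key:              ^TypeInfo; // type info for the map's key type
        value:            ^TypeInfo; // type info for the map's value type
        generated_struct: ^TypeInfo; // info for the compiler-generated backing struct
    };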

View File

@@ -2733,7 +2733,7 @@ Entity *check_ident(Checker *c, Operand *o, AstNode *n, Type *named_type, Type *
return e;
}
i64 check_array_or_map_count(Checker *c, AstNode *e, bool is_map) {
i64 check_array_count(Checker *c, AstNode *e) {
if (e == nullptr) {
return 0;
}
@@ -2746,11 +2746,7 @@ i64 check_array_or_map_count(Checker *c, AstNode *e, bool is_map) {
check_expr(c, &o, e);
if (o.mode != Addressing_Constant) {
if (o.mode != Addressing_Invalid) {
if (is_map) {
error(e, "Fixed map count must be a constant");
} else {
error(e, "Array count must be a constant");
}
error(e, "Array count must be a constant");
}
return 0;
}
@@ -2758,26 +2754,15 @@ i64 check_array_or_map_count(Checker *c, AstNode *e, bool is_map) {
if (is_type_untyped(type) || is_type_integer(type)) {
if (o.value.kind == ExactValue_Integer) {
i64 count = i128_to_i64(o.value.value_integer);
if (is_map) {
if (count > 0) {
return count;
}
error(e, "Invalid fixed map count");
} else {
if (count >= 0) {
return count;
}
error(e, "Invalid negative array count %lld", cast(long long)count);
if (count >= 0) {
return count;
}
error(e, "Invalid negative array count %lld", cast(long long)count);
return 0;
}
}
if (is_map) {
error(e, "Fixed map count must be an integer");
} else {
error(e, "Array count must be an integer");
}
error(e, "Array count must be an integer");
return 0;
}
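With the is_map parameter gone, check_array_count only ever emits the array diagnostics. The language-level rules for array counts are unchanged; a small illustrative sketch:

    ok:      [4]int;    // constant, non-negative count: accepted
    also_ok: [0]int;    // zero is a valid array count
    // n := 4;
    // bad:   [n]int;   // "Array count must be a constant"
    // worse: [-1]int;  // "Invalid negative array count -1"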
@@ -2801,10 +2786,10 @@ void generate_map_entry_type(gbAllocator a, Type *type) {
/*
struct {
hash: Map_Key,
next: int,
key: Key_Type,
value: Value_Type,
hash: __MapKey;
next: int;
key: Key;
value: Value;
}
*/
AstNode *dummy_node = gb_alloc_item(a, AstNode);
@@ -2819,6 +2804,7 @@ void generate_map_entry_type(gbAllocator a, Type *type) {
array_add(&fields, make_entity_field(a, s, make_token_ident(str_lit("value")), type->Map.value, false, 2));
entry_type->Struct.is_ordered = true;
entry_type->Struct.fields = fields;
entry_type->Struct.fields_in_src_order = fields;
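Spelled out in Odin, the per-entry record that generate_map_entry_type builds for a map[Key]Value looks roughly like this (names follow the comment in the hunk; Key and Value stand for the concrete key and value types):

    Entry :: struct #ordered {
        hash:  __MapKey; // hashed key used for lookups
        next:  int;      // index-chaining field used by the runtime map code
        key:   Key;
        value: Value;
    };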
@@ -2828,8 +2814,8 @@ void generate_map_entry_type(gbAllocator a, Type *type) {
void generate_map_internal_types(gbAllocator a, Type *type) {
GB_ASSERT(type->kind == Type_Map);
if (type->Map.generated_struct_type != nullptr) return;
generate_map_entry_type(a, type);
if (type->Map.generated_struct_type != nullptr) return;
Type *key = type->Map.key;
Type *value = type->Map.value;
GB_ASSERT(key != nullptr);
@@ -2840,7 +2826,7 @@ void generate_map_internal_types(gbAllocator a, Type *type) {
/*
struct {
hashes: [dynamic]int;
entries; [dynamic]EntryType;
entries: [dynamic]EntryType;
}
*/
AstNode *dummy_node = gb_alloc_item(a, AstNode);
@@ -2856,7 +2842,7 @@ void generate_map_internal_types(gbAllocator a, Type *type) {
array_add(&fields, make_entity_field(a, s, make_token_ident(str_lit("hashes")), hashes_type, false, 0));
array_add(&fields, make_entity_field(a, s, make_token_ident(str_lit("entries")), entries_type, false, 1));
generated_struct_type->Struct.is_ordered = true;
generated_struct_type->Struct.fields = fields;
generated_struct_type->Struct.fields_in_src_order = fields;
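The full backing struct that stands in for the map at the IR level is then just a pair of dynamic arrays, as the comment above states. A sketch, with Entry as generated above and the struct name chosen purely for illustration:

    Generated_Map :: struct #ordered {
        hashes:  [dynamic]int;   // field 0 in ir_emit_struct_ep/ev below
        entries: [dynamic]Entry; // field 1
    };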
@@ -2869,7 +2855,6 @@ void check_map_type(Checker *c, Type *type, AstNode *node) {
GB_ASSERT(type->kind == Type_Map);
ast_node(mt, MapType, node);
i64 count = check_array_or_map_count(c, mt->count, true);
Type *key = check_type(c, mt->key);
Type *value = check_type(c, mt->value);
@@ -2883,12 +2868,6 @@ void check_map_type(Checker *c, Type *type, AstNode *node) {
}
}
if (count > 0) {
count = 0;
error(node, "Fixed map types are not yet implemented");
}
type->Map.count = count;
type->Map.key = key;
type->Map.value = value;
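With the count gone and the "Fixed map types are not yet implemented" error removed, every map type a program can declare is the dynamic kind. For example:

    scores: map[string]int; // always a dynamic, growable map now
    ids:    map[int]string; // the key type still has to satisfy is_type_valid_for_keys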
@@ -3026,7 +3005,7 @@ bool check_type_internal(Checker *c, AstNode *e, Type **type, Type *named_type)
case_ast_node(at, ArrayType, e);
if (at->count != nullptr) {
Type *elem = check_type(c, at->elem, nullptr);
i64 count = check_array_or_map_count(c, at->count, false);
i64 count = check_array_count(c, at->count);
if (count < 0) {
error(at->count, "... can only be used in conjuction with compound literals");
count = 0;
@@ -3050,7 +3029,7 @@ bool check_type_internal(Checker *c, AstNode *e, Type **type, Type *named_type)
case_ast_node(vt, VectorType, e);
Type *elem = check_type(c, vt->elem);
Type *be = base_type(elem);
i64 count = check_array_or_map_count(c, vt->count, false);
i64 count = check_array_count(c, vt->count);
if (is_type_vector(be) || (!is_type_boolean(be) && !is_type_numeric(be) && be->kind != Type_Generic)) {
gbString err_str = type_to_string(elem);
error(vt->elem, "Vector element type must be numerical or a boolean, got `%s`", err_str);
@@ -5048,7 +5027,7 @@ bool check_builtin_procedure(Checker *c, Operand *operand, AstNode *call, i32 id
if (is_type_slice(type)) {
min_args = 2;
max_args = 3;
} else if (is_type_dynamic_map(type)) {
} else if (is_type_map(type)) {
min_args = 1;
max_args = 2;
} else if (is_type_dynamic_array(type)) {

View File

@@ -2411,7 +2411,8 @@ irValue *ir_emit_struct_ep(irProcedure *proc, irValue *s, i32 index) {
case 2: result_type = t_int_ptr; break;
case 3: result_type = t_allocator_ptr; break;
}
} else if (is_type_dynamic_map(t)) {
} else if (is_type_map(t)) {
generate_map_internal_types(a, t);
Type *gst = t->Map.generated_struct_type;
switch (index) {
case 0: result_type = make_type_pointer(a, gst->Struct.fields[0]->type); break;
@@ -2471,7 +2472,8 @@ irValue *ir_emit_struct_ev(irProcedure *proc, irValue *s, i32 index) {
case 2: result_type = t_int; break;
case 3: result_type = t_allocator; break;
}
} else if (is_type_dynamic_map(t)) {
} else if (is_type_map(t)) {
generate_map_internal_types(a, t);
Type *gst = t->Map.generated_struct_type;
switch (index) {
case 0: result_type = gst->Struct.fields[0]->type; break;
@@ -3888,7 +3890,7 @@ irValue *ir_build_builtin_proc(irProcedure *proc, AstNode *expr, TypeAndValue tv
return ir_slice_count(proc, v);
} else if (is_type_dynamic_array(t)) {
return ir_dynamic_array_count(proc, v);
} else if (is_type_dynamic_map(t)) {
} else if (is_type_map(t)) {
ir_emit_comment(proc, str_lit("len: map"));
irValue *entries = ir_emit_struct_ev(proc, v, 1);
return ir_dynamic_array_count(proc, entries);
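For len on a map, the IR now only requires is_type_map and reads the count of the generated entries dynamic array (struct field 1 in the sketch above). Usage is unchanged; a minimal sketch:

    m: map[string]int;
    // ... insertions elided ...
    n := len(m); // lowered to the count of the generated entries array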
@@ -3999,7 +4001,7 @@ irValue *ir_build_builtin_proc(irProcedure *proc, AstNode *expr, TypeAndValue tv
irValue *slice = ir_add_local_generated(proc, type);
ir_fill_slice(proc, slice, ptr, count, capacity);
return ir_emit_load(proc, slice);
} else if (is_type_dynamic_map(type)) {
} else if (is_type_map(type)) {
irValue *int_16 = ir_const_int(a, 16);
irValue *cap = int_16;
if (ce->args.count == 2) {
@@ -4068,7 +4070,7 @@ irValue *ir_build_builtin_proc(irProcedure *proc, AstNode *expr, TypeAndValue tv
args[0] = da_allocator;
args[1] = ptr;
return ir_emit_global_call(proc, "free_ptr_with_allocator", args, 2);
} else if (is_type_dynamic_map(type)) {
} else if (is_type_map(type)) {
irValue *map = ir_build_expr(proc, node);
irValue *map_ptr = ir_address_from_load_or_generate_local(proc, map);
@@ -4148,7 +4150,7 @@ irValue *ir_build_builtin_proc(irProcedure *proc, AstNode *expr, TypeAndValue tv
args[2] = elem_align;
args[3] = capacity;
return ir_emit_global_call(proc, "__dynamic_array_reserve", args, 4);
} else if (is_type_dynamic_map(type)) {
} else if (is_type_map(type)) {
irValue **args = gb_alloc_array(a, irValue *, 2);
args[0] = ir_gen_map_header(proc, ptr, type);
args[1] = capacity;
@@ -4171,7 +4173,7 @@ irValue *ir_build_builtin_proc(irProcedure *proc, AstNode *expr, TypeAndValue tv
if (is_type_dynamic_array(t)) {
irValue *count_ptr = ir_emit_struct_ep(proc, ptr, 1);
ir_emit_store(proc, count_ptr, v_zero);
} else if (is_type_dynamic_map(t)) {
} else if (is_type_map(t)) {
irValue *ha = ir_emit_struct_ep(proc, ptr, 0);
irValue *ea = ir_emit_struct_ep(proc, ptr, 1);
ir_emit_store(proc, ir_emit_struct_ep(proc, ha, 1), v_zero);
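The same relaxation from is_type_dynamic_map to is_type_map applies to the other map builtins touched here: the free path, reserve, and clear. A usage sketch; the exact call forms (map by value versus by pointer) are not visible in these hunks and are an assumption:

    m: map[string]int;
    reserve(m, 64); // grows capacity through the generated map header
    // ... use the map ...
    clear(m);       // zeroes the counts of both hashes and entries, per the stores above
    free(m);        // releases the map's backing storage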
@@ -4301,7 +4303,7 @@ irValue *ir_build_builtin_proc(irProcedure *proc, AstNode *expr, TypeAndValue tv
irValue *map = ir_build_expr(proc, ce->args[0]);
irValue *key = ir_build_expr(proc, ce->args[1]);
Type *map_type = ir_type(map);
GB_ASSERT(is_type_dynamic_map(map_type));
GB_ASSERT(is_type_map(map_type));
Type *key_type = base_type(map_type)->Map.key;
irValue *addr = ir_address_from_load_or_generate_local(proc, map);
@@ -8220,6 +8222,7 @@ void ir_gen_tree(irGen *s) {
case Type_Map: {
ir_emit_comment(proc, str_lit("TypeInfoMap"));
tag = ir_emit_conv(proc, variant_ptr, t_type_info_map_ptr);
generate_map_internal_types(a, t);
irValue *key = ir_emit_struct_ep(proc, tag, 0);
irValue *value = ir_emit_struct_ep(proc, tag, 1);
@@ -8229,15 +8232,14 @@ void ir_gen_tree(irGen *s) {
ir_emit_store(proc, key, ir_get_type_info_ptr(proc, t->Map.key));
ir_emit_store(proc, value, ir_get_type_info_ptr(proc, t->Map.value));
ir_emit_store(proc, generated_struct, ir_get_type_info_ptr(proc, t->Map.generated_struct_type));
ir_emit_store(proc, count, ir_const_int(a, t->Map.count));
} break;
case Type_BitField: {
ir_emit_comment(proc, str_lit("TypeInfoBitField"));
tag = ir_emit_conv(proc, variant_ptr, t_type_info_map_ptr);
// names: []string,
// bits: []u32,
// offsets: []u32,
// names: []string;
// bits: []u32;
// offsets: []u32;
isize count = t->BitField.field_count;
if (count > 0) {
Entity **fields = t->BitField.fields;

View File

@@ -371,6 +371,7 @@ void ir_print_type(irFileBuffer *f, irModule *m, Type *t) {
} return;
case Type_Map: {
generate_map_internal_types(m->allocator, t);
GB_ASSERT(t->Map.generated_struct_type != nullptr);
ir_print_type(f, m, t->Map.generated_struct_type);
} break;

View File

@@ -1486,10 +1486,9 @@ AstNode *ast_bit_field_type(AstFile *f, Token token, Array<AstNode *> fields, As
return result;
}
AstNode *ast_map_type(AstFile *f, Token token, AstNode *count, AstNode *key, AstNode *value) {
AstNode *ast_map_type(AstFile *f, Token token, AstNode *key, AstNode *value) {
AstNode *result = make_ast_node(f, AstNode_MapType);
result->MapType.token = token;
result->MapType.count = count;
result->MapType.key = key;
result->MapType.value = value;
return result;
@@ -2399,29 +2398,25 @@ AstNode *parse_operand(AstFile *f, bool lhs) {
case Token_map: {
Token token = expect_token(f, Token_map);
AstNode *count = nullptr;
AstNode *key = nullptr;
AstNode *value = nullptr;
Token open, close;
Token open = expect_token_after(f, Token_OpenBracket, "map");
key = parse_expr(f, true);
if (allow_token(f, Token_Comma)) {
count = key;
key = parse_type(f);
}
Token close = expect_token(f, Token_CloseBracket);
open = expect_token_after(f, Token_OpenBracket, "map");
key = parse_expr(f, true);
close = expect_token(f, Token_CloseBracket);
value = parse_type(f);
return ast_map_type(f, token, count, key, value);
return ast_map_type(f, token, key, value);
} break;
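On the parser side the optional leading count (and its comma) disappears from the map grammar, so the only form that still parses is map[Key]Value:

    // fixed: map[16, string]int; // old grammar: optional count before the key; no longer parsed
    m: map[string]int;            // the only form accepted after this commit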
case Token_struct: {
Token token = expect_token(f, Token_struct);
Token token = expect_token(f, Token_struct);
AstNode *polymorphic_params = nullptr;
bool is_packed = false;
bool is_ordered = false;
bool is_raw_union = false;
AstNode *align = nullptr;
bool is_packed = false;
bool is_ordered = false;
bool is_raw_union = false;
AstNode *align = nullptr;
if (allow_token(f, Token_OpenParen)) {
isize param_count = 0;

View File

@@ -839,7 +839,7 @@ ssaValue *ssa_emit_ptr_index(ssaProc *p, ssaValue *s, i64 index) {
case 2: result_type = t_int_ptr; break;
case 3: result_type = t_allocator_ptr; break;
}
} else if (is_type_dynamic_map(t)) {
} else if (is_type_map(t)) {
Type *gst = t->Map.generated_struct_type;
switch (index) {
case 0: result_type = make_type_pointer(a, gst->Struct.fields[0]->type); break;
@@ -899,7 +899,7 @@ ssaValue *ssa_emit_value_index(ssaProc *p, ssaValue *s, i64 index) {
case 2: result_type = t_int; break;
case 3: result_type = t_allocator; break;
}
} else if (is_type_dynamic_map(t)) {
} else if (is_type_map(t)) {
Type *gst = t->Map.generated_struct_type;
switch (index) {
case 0: result_type = gst->Struct.fields[0]->type; break;

View File

@@ -150,7 +150,6 @@ struct TypeStruct {
ProcCallingConvention calling_convention; \
}) \
TYPE_KIND(Map, struct { \
i64 count; /* 0 if dynamic */ \
Type * key; \
Type * value; \
Type * entry_type; \
@@ -386,10 +385,10 @@ gb_global Type *t_map_header = nullptr;
i64 type_size_of (gbAllocator allocator, Type *t);
i64 type_align_of (gbAllocator allocator, Type *t);
i64 type_offset_of (gbAllocator allocator, Type *t, i32 index);
gbString type_to_string (Type *type);
i64 type_size_of (gbAllocator allocator, Type *t);
i64 type_align_of (gbAllocator allocator, Type *t);
i64 type_offset_of (gbAllocator allocator, Type *t, i32 index);
gbString type_to_string (Type *type);
void generate_map_internal_types(gbAllocator a, Type *type);
@@ -567,7 +566,6 @@ Type *make_type_map(gbAllocator a, i64 count, Type *key, Type *value) {
if (key != nullptr) {
GB_ASSERT(is_type_valid_for_keys(key));
}
t->Map.count = count;
t->Map.key = key;
t->Map.value = value;
return t;
@@ -852,14 +850,6 @@ bool is_type_map(Type *t) {
return t->kind == Type_Map;
}
bool is_type_fixed_map(Type *t) {
t = base_type(t);
return t->kind == Type_Map && t->Map.count > 0;
}
bool is_type_dynamic_map(Type *t) {
t = base_type(t);
return t->kind == Type_Map && t->Map.count == 0;
}
@@ -1207,8 +1197,7 @@ bool are_types_identical(Type *x, Type *y) {
case Type_Map:
if (y->kind == Type_Map) {
return x->Map.count == y->Map.count &&
are_types_identical(x->Map.key, y->Map.key) &&
return are_types_identical(x->Map.key, y->Map.key) &&
are_types_identical(x->Map.value, y->Map.value);
}
break;
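Type identity for maps accordingly reduces to key and value identity; the removed count comparison had nothing left to distinguish. For example:

    x: map[string]int;
    y: map[string]int;
    z: map[string]f64;
    // x and y have identical types (same key and value); z's type differs in its value type.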
@@ -1824,14 +1813,9 @@ i64 type_align_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
return max;
} break;
case Type_Map: {
if (t->Map.count == 0) { // Dynamic
// return build_context.word_size;
generate_map_internal_types(allocator, t);
return type_align_of_internal(allocator, t->Map.generated_struct_type, path);
}
GB_PANIC("TODO(bill): Fixed map alignment");
} break;
case Type_Map:
generate_map_internal_types(allocator, t);
return type_align_of_internal(allocator, t->Map.generated_struct_type, path);
case Type_Enum:
return type_align_of_internal(allocator, t->Enum.base_type, path);
@@ -2053,15 +2037,9 @@ i64 type_size_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
// data + len + cap + allocator(procedure+data)
return 3*build_context.word_size + 2*build_context.word_size;
case Type_Map: {
if (t->Map.count == 0) { // Dynamic
// i64 da = 3*build_context.word_size + 2*build_context.word_size;
// return 2 * da;
generate_map_internal_types(allocator, t);
return type_size_of_internal(allocator, t->Map.generated_struct_type, path);
}
GB_PANIC("TODO(bill): Fixed map size");
}
case Type_Map:
generate_map_internal_types(allocator, t);
return type_size_of_internal(allocator, t->Map.generated_struct_type, path);
case Type_Tuple: {
i64 count, align, size;
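With the fixed branch gone, a map's size and alignment are simply those of its generated struct, i.e. two dynamic arrays. Using the layout stated above (a dynamic array is data + len + cap + a two-word allocator, five words in total) and assuming a 64-bit target, the arithmetic works out as:

    WORD               :: 8;                    // word size assumed for a 64-bit target
    DYNAMIC_ARRAY_SIZE :: 3*WORD + 2*WORD;      // 40 bytes: data, len, cap, allocator (proc + data)
    MAP_SIZE           :: 2*DYNAMIC_ARRAY_SIZE; // 80 bytes: hashes + entries, no padding needed
    // type_align_of(map[K]V) likewise becomes the word size, via the generated struct.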
@@ -2350,9 +2328,6 @@ gbString write_type_to_string(gbString str, Type *type) {
case Type_Map: {
str = gb_string_appendc(str, "map[");
if (type->Map.count > 0) {
str = gb_string_append_fmt(str, "%d, ", cast(int)type->Map.count);
}
str = write_type_to_string(str, type->Map.key);
str = gb_string_append_rune(str, ']');
str = write_type_to_string(str, type->Map.value);