General improvements

This commit is contained in:
gingerBill
2026-03-17 12:55:25 +00:00
parent e18b15e8f0
commit 46936e0e52
6 changed files with 106 additions and 39 deletions

View File

@@ -7822,11 +7822,14 @@ gb_internal CallArgumentError check_polymorphic_record_type(CheckerContext *c, O
{
// NOTE(bill, 2019-10-26): Allow a cycle in the parameters but not in the fields themselves
auto prev_type_path = c->type_path;
c->type_path = new_checker_type_path();
defer ({
destroy_checker_type_path(c->type_path);
c->type_path = prev_type_path;
});
TEMPORARY_ALLOCATOR_GUARD();
c->type_path = new_checker_type_path(temporary_allocator());
defer (c->type_path = prev_type_path);
if (is_call_expr_field_value(ce)) {
named_fields = true;
operands = array_make<Operand>(temporary_allocator(), ce->args.count);

View File

@@ -1857,6 +1857,11 @@ gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_para
bool is_c_vararg = false;
auto variables = array_make<Entity *>(permanent_allocator(), 0, variable_count);
i32 field_group_index = -1;
Entity *entities_to_use = permanent_alloc_array<Entity>(variable_count);
isize entities_to_use_index = 0;
for_array(i, params) {
Ast *param = params[i];
if (param->kind != Ast_Field) {
@@ -2096,7 +2101,12 @@ gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_para
p->flags &= ~FieldFlag_no_capture;
}
param = alloc_entity_type_name(scope, name->Ident.token, type, EntityState_Resolved);
param = &entities_to_use[entities_to_use_index++];
INTERNAL_ENTITY_INIT(param, Entity_TypeName, scope, name->Ident.token, type);
param->state = EntityState_Resolved;
param->interned_name.store(name->Ident.interned);
param->interned_name_hash.store(name->Ident.hash);
param->TypeName.is_type_alias = true;
} else {
ExactValue poly_const = {};
@@ -2250,10 +2260,36 @@ gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_para
// failed
}
param = alloc_entity_const_param(scope, name->Ident.token, type, poly_const, is_type_polymorphic(type));
// param = alloc_entity_const_param(scope, name->Ident.token, type, poly_const, is_type_polymorphic(type));
param = &entities_to_use[entities_to_use_index++];
INTERNAL_ENTITY_INIT(param, Entity_Constant, scope, name->Ident.token, type);
param->flags |= EntityFlag_Used|EntityFlag_Param;
if (is_type_polymorphic(type)) {
param->flags |= EntityFlag_PolyConst;
}
param->Constant.value = poly_const;
param->interned_name.store(name->Ident.interned);
param->interned_name_hash.store(name->Ident.hash);
param->Constant.field_group_index = field_group_index;
} else {
param = alloc_entity_param(scope, name->Ident.token, type, is_using, true);
// param = alloc_entity_param(scope, name->Ident.token, type, is_using, true);
param = &entities_to_use[entities_to_use_index++];
INTERNAL_ENTITY_INIT(param, Entity_Variable, scope, name->Ident.token, type);
param->state = EntityState_Resolved;
param->flags |= EntityFlag_Used|EntityFlag_Param|EntityFlag_Value;
if (is_using) {
param->flags |= EntityFlag_Using;
}
param->interned_name.store(name->Ident.interned);
param->interned_name_hash.store(name->Ident.hash);
param->Variable.param_value = param_value;
param->Variable.field_group_index = field_group_index;
param->Variable.type_expr = type_expr;
@@ -3627,7 +3663,8 @@ gb_internal bool check_type_internal(CheckerContext *ctx, Ast *e, Type **type, T
CheckerContext c = *ctx;
TEMPORARY_ALLOCATOR_GUARD();
c.type_path = new_checker_type_path(temporary_allocator());
c.type_path = new_checker_type_path();
defer (destroy_checker_type_path(c.type_path));
Type *elem = t_invalid;
Operand o = {};
@@ -3900,8 +3937,8 @@ gb_internal bool check_type_internal(CheckerContext *ctx, Ast *e, Type **type, T
gb_internal Type *check_type(CheckerContext *ctx, Ast *e) {
CheckerContext c = *ctx;
TEMPORARY_ALLOCATOR_GUARD();
c.type_path = new_checker_type_path(temporary_allocator());
c.type_path = new_checker_type_path();
defer (destroy_checker_type_path(c.type_path));
return check_type_expr(&c, e, nullptr);
}

View File

@@ -1550,12 +1550,12 @@ gb_internal void init_checker_context(CheckerContext *ctx, Checker *c) {
ctx->scope = builtin_pkg->scope;
ctx->pkg = builtin_pkg;
ctx->type_path = new_checker_type_path(heap_allocator());
ctx->type_path = new_checker_type_path();
ctx->type_level = 0;
}
gb_internal void destroy_checker_context(CheckerContext *ctx) {
destroy_checker_type_path(ctx->type_path, heap_allocator());
destroy_checker_type_path(ctx->type_path);
}
gb_internal bool add_curr_ast_file(CheckerContext *ctx, AstFile *file) {
@@ -3284,18 +3284,18 @@ gb_internal Type *find_type_in_pkg(CheckerInfo *info, String const &pkg, String
gb_internal gb_thread_local std::atomic<AtomicFreelist<CheckerTypePath> *> checker_type_path_free_list;
gb_internal CheckerTypePath *new_checker_type_path(gbAllocator allocator) {
gb_internal CheckerTypePath *new_checker_type_path() {
// TODO(bill): Cache to reuse `CheckerTypePath`
auto *tp = atomic_freelist_get(checker_type_path_free_list);
if (tp == nullptr) {
tp = permanent_alloc_item<AtomicFreelist<CheckerTypePath> >();
array_init(&tp->value, allocator, 0, 16);
array_init(&tp->value, permanent_allocator(), 0, 16);
}
return &tp->value;
}
gb_internal void destroy_checker_type_path(CheckerTypePath *path, gbAllocator allocator) {
gb_internal void destroy_checker_type_path(CheckerTypePath *path) {
auto *tp = cast(AtomicFreelist<CheckerTypePath> *)path;
array_clear(&tp->value);

View File

@@ -909,8 +909,8 @@ gb_internal void check_collect_entities(CheckerContext *c, Slice<Ast *> const &n
gb_internal void check_collect_entities_from_when_stmt(CheckerContext *c, AstWhenStmt *ws);
gb_internal void check_delayed_file_import_entity(CheckerContext *c, Ast *decl);
gb_internal CheckerTypePath *new_checker_type_path(gbAllocator allocator);
gb_internal void destroy_checker_type_path(CheckerTypePath *tp, gbAllocator allocator);
gb_internal CheckerTypePath *new_checker_type_path();
gb_internal void destroy_checker_type_path(CheckerTypePath *tp);
gb_internal void check_type_path_push(CheckerContext *c, Entity *e);
gb_internal Entity *check_type_path_pop (CheckerContext *c);

View File

@@ -412,10 +412,9 @@ gb_internal Entity *alloc_entity_type_name(Scope *scope, Token token, Type *type
}
gb_internal Entity *alloc_entity_param(Scope *scope, Token token, Type *type, bool is_using, bool is_value) {
Entity *entity = alloc_entity_variable(scope, token, type);
Entity *entity = alloc_entity_variable(scope, token, type, EntityState_Resolved);
entity->flags |= EntityFlag_Used;
entity->flags |= EntityFlag_Param;
entity->state = EntityState_Resolved;
if (is_using) entity->flags |= EntityFlag_Using;
if (is_value) entity->flags |= EntityFlag_Value;
return entity;

View File

@@ -1,11 +1,14 @@
enum {PTR_SET_INLINE_CAP = 16};
template <typename T>
struct PtrSet {
static_assert(TypeIsPointer<T>::value || TypeIsPtrSizedInteger<T>::value, "PtrSet::T must be a pointer");
static constexpr uintptr TOMBSTONE = ~(uintptr)(0ull);
T * keys;
usize count;
usize capacity;
T * keys;
u32 count;
u32 capacity;
T inline_keys[PTR_SET_INLINE_CAP];
};
template <typename T> gb_internal void ptr_set_init (PtrSet<T> *s, isize capacity = 16);
@@ -27,17 +30,23 @@ template <typename T>
gb_internal void ptr_set_init(PtrSet<T> *s, isize capacity) {
GB_ASSERT(s->keys == nullptr);
if (capacity != 0) {
capacity = next_pow2_isize(gb_max(16, capacity));
s->keys = gb_alloc_array(ptr_set_allocator(), T, capacity);
capacity = next_pow2_isize(gb_max(PTR_SET_INLINE_CAP, capacity));
if (capacity > PTR_SET_INLINE_CAP) {
s->keys = gb_alloc_array(ptr_set_allocator(), T, capacity);
} else {
s->keys = s->inline_keys;
}
// This memory will be zeroed, no need to explicitly zero it
}
s->count = 0;
s->capacity = capacity;
s->capacity = cast(u32)capacity;
}
template <typename T>
gb_internal void ptr_set_destroy(PtrSet<T> *s) {
gb_free(ptr_set_allocator(), s->keys);
if (s->keys != s->inline_keys) {
gb_free(ptr_set_allocator(), s->keys);
}
s->keys = nullptr;
s->count = 0;
s->capacity = 0;
@@ -47,16 +56,10 @@ template <typename T>
gb_internal isize ptr_set__find(PtrSet<T> *s, T ptr) {
GB_ASSERT(ptr != 0);
if (s->count != 0) {
#if 0
for (usize i = 0; i < s->capacity; i++) {
if (s->keys[i] == ptr) {
return i;
}
}
#else
u32 hash = ptr_map_hash_key(ptr);
usize mask = s->capacity-1;
usize hash_index = cast(usize)hash & mask;
for (usize i = 0; i < s->capacity; i++) {
T key = s->keys[hash_index];
if (key == ptr) {
@@ -66,14 +69,14 @@ gb_internal isize ptr_set__find(PtrSet<T> *s, T ptr) {
}
hash_index = (hash_index+1)&mask;
}
#endif
}
return -1;
}
template <typename T>
gb_internal bool ptr_set__full(PtrSet<T> *s) {
return 0.75f * s->capacity <= s->count;
usize grow_at = s->capacity - (s->capacity>>2);
return s->count >= grow_at;
}
template <typename T>
@@ -187,11 +190,36 @@ gb_internal T ptr_set_add(PtrSet<T> *s, T ptr) {
template <typename T>
gb_internal void ptr_set_remove(PtrSet<T> *s, T ptr) {
isize index = ptr_set__find(s, ptr);
if (index >= 0) {
GB_ASSERT(s->count > 0);
s->keys[index] = (T)PtrSet<T>::TOMBSTONE;
s->count--;
if (index < 0) {
return;
}
#if 0
u32 mask = s->capacity-1;
u32 i = cast(u32)index;
s->count -= 1;
for (;;) {
u32 next = (i + 1) & mask;
T key = s->keys[next];
if (key == 0) {
break;
}
u32 natural = ptr_map_hash_key(key) & mask;
if (((next - natural) & mask) == 0) {
break;
}
s->keys[i] = key;
i = next;
}
s->keys[i] = 0;
#else
GB_ASSERT(s->count > 0);
s->keys[index] = (T)PtrSet<T>::TOMBSTONE;
s->count--;
#endif
}
template <typename T>