`for &e, i in array` and `for k, &v in map` (alternative to passing the iterable by pointer)

This commit is contained in:
gingerBill
2023-06-26 15:20:40 +01:00
parent 19ea090633
commit ea00619c3b
4 changed files with 94 additions and 39 deletions

View File

@@ -2273,7 +2273,15 @@ gb_internal void check_unary_expr(CheckerContext *c, Operand *o, Token op, Ast *
defer (end_error_block());
error(op, "Cannot take the pointer address of '%s'", str);
if (e != nullptr && (e->flags & EntityFlag_ForValue) != 0) {
error_line("\tSuggestion: Did you want to pass the iterable value to the for statement by pointer to get addressable semantics?\n");
Type *parent_type = type_deref(e->Variable.for_loop_parent_type);
if (parent_type != nullptr && is_type_string(parent_type)) {
error_line("\tSuggestion: Iterating over a string produces an intermediate 'rune' value which cannot be addressed.\n");
} else if (parent_type != nullptr && is_type_tuple(parent_type)) {
error_line("\tSuggestion: Iterating over a procedure does not produce values which are addressable.\n");
} else {
error_line("\tSuggestion: Did you want to pass the iterable value to the for statement by pointer to get addressable semantics?\n");
}
}
if (e != nullptr && (e->flags & EntityFlag_SwitchValue) != 0) {
error_line("\tSuggestion: Did you want to pass the value to the switch statement by pointer to get addressable semantics?\n");

View File

@@ -1469,12 +1469,15 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
Ast *expr = unparen_expr(rs->expr);
bool is_possibly_addressable = true;
isize max_val_count = 2;
if (is_ast_range(expr)) {
ast_node(ie, BinaryExpr, expr);
Operand x = {};
Operand y = {};
is_possibly_addressable = false;
bool ok = check_range(ctx, expr, true, &x, &y, nullptr);
if (!ok) {
goto skip_expr_range_stmt;
@@ -1497,6 +1500,8 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
gb_string_free(t);
goto skip_expr_range_stmt;
} else {
is_possibly_addressable = false;
if (is_reverse) {
error(node, "#reverse for is not supported for enum types");
}
@@ -1510,7 +1515,8 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
Type *t = base_type(type_deref(operand.type));
switch (t->kind) {
case Type_Basic:
if (is_type_string(t) && t->Basic.kind != Basic_cstring) {
if (t->Basic.kind == Basic_string) {
is_possibly_addressable = false;
array_add(&vals, t_rune);
array_add(&vals, t_int);
if (is_reverse) {
@@ -1529,6 +1535,7 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
case Type_Array:
if (is_ptr) use_by_reference_for_value = true;
if (!is_ptr) is_possibly_addressable = operand.mode == Addressing_Variable;
array_add(&vals, t->Array.elem);
array_add(&vals, t_int);
break;
@@ -1575,6 +1582,8 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
array_add(&vals, e->type);
}
is_possibly_addressable = false;
if (rs->vals.count > 1 && rs->vals[1] != nullptr && count < 3) {
gbString s = type_to_string(t);
error(operand.expr, "Expected a 3-valued expression on the rhs, got (%s)", s);
@@ -1644,8 +1653,13 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
}
Ast * name = lhs[i];
Type *type = rhs[i];
Entity *entity = nullptr;
bool is_addressed = false;
if (name->kind == Ast_UnaryExpr && name->UnaryExpr.op.kind == Token_And) {
is_addressed = true;
name = name->UnaryExpr.expr;
}
if (name->kind == Ast_Ident) {
Token token = name->Ident.token;
String str = token.string;
@@ -1659,7 +1673,16 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
entity->flags |= EntityFlag_ForValue;
entity->flags |= EntityFlag_Value;
entity->identifier = name;
if (i == addressable_index && use_by_reference_for_value) {
entity->Variable.for_loop_parent_type = type_of_expr(expr);
if (is_addressed) {
if (is_possibly_addressable && i == addressable_index) {
entity->flags &= ~EntityFlag_Value;
} else {
char const *idx_name = is_map ? "key" : "index";
error(token, "The %s variable '%.*s' cannot be made addressable", idx_name, LIT(str));
}
} else if (i == addressable_index && use_by_reference_for_value) {
entity->flags &= ~EntityFlag_Value;
}
if (is_soa) {
@@ -1678,7 +1701,9 @@ gb_internal void check_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags)
entity = found;
}
} else {
error(name, "A variable declaration must be an identifier");
gbString s = expr_to_string(lhs[i]);
error(name, "A variable declaration must be an identifier, got %s", s);
gb_string_free(s);
}
if (entity == nullptr) {

View File

@@ -209,6 +209,8 @@ struct Entity {
ParameterValue param_value;
Type *for_loop_parent_type;
String thread_local_model;
Entity * foreign_library;
Ast * foreign_library_ident;

View File

@@ -619,6 +619,18 @@ gb_internal void lb_build_range_string(lbProcedure *p, lbValue expr, Type *val_t
}
// Strips a leading '&' (address-of) prefix from a for-loop variable node,
// e.g. the `&v` in `for k, &v in map`, yielding the underlying identifier.
// A nullptr input is passed through unchanged.
gb_internal Ast *lb_strip_and_prefix(Ast *ident) {
if (ident != nullptr) {
if (ident->kind == Ast_UnaryExpr && ident->UnaryExpr.op.kind == Token_And) {
// `&ident` form: unwrap to the inner expression node.
ident = ident->UnaryExpr.expr;
}
// After stripping, the node must be a plain identifier; the checker
// rejects anything else before codegen reaches this point.
GB_ASSERT(ident->kind == Ast_Ident);
}
return ident;
}
gb_internal void lb_build_range_interval(lbProcedure *p, AstBinaryExpr *node,
AstRangeStmt *rs, Scope *scope) {
bool ADD_EXTRA_WRAPPING_CHECK = true;
@@ -627,13 +639,15 @@ gb_internal void lb_build_range_interval(lbProcedure *p, AstBinaryExpr *node,
lb_open_scope(p, scope);
Ast *val0 = rs->vals.count > 0 ? lb_strip_and_prefix(rs->vals[0]) : nullptr;
Ast *val1 = rs->vals.count > 1 ? lb_strip_and_prefix(rs->vals[1]) : nullptr;
Type *val0_type = nullptr;
Type *val1_type = nullptr;
if (rs->vals.count > 0 && rs->vals[0] != nullptr && !is_blank_ident(rs->vals[0])) {
val0_type = type_of_expr(rs->vals[0]);
if (val0 != nullptr && !is_blank_ident(val0)) {
val0_type = type_of_expr(val0);
}
if (rs->vals.count > 1 && rs->vals[1] != nullptr && !is_blank_ident(rs->vals[1])) {
val1_type = type_of_expr(rs->vals[1]);
if (val1 != nullptr && !is_blank_ident(val1)) {
val1_type = type_of_expr(val1);
}
TokenKind op = Token_Lt;
@@ -649,7 +663,7 @@ gb_internal void lb_build_range_interval(lbProcedure *p, AstBinaryExpr *node,
lbAddr value;
if (val0_type != nullptr) {
Entity *e = entity_of_node(rs->vals[0]);
Entity *e = entity_of_node(val0);
value = lb_add_local(p, val0_type, e, false);
} else {
value = lb_add_local_generated(p, lower.type, false);
@@ -658,7 +672,7 @@ gb_internal void lb_build_range_interval(lbProcedure *p, AstBinaryExpr *node,
lbAddr index;
if (val1_type != nullptr) {
Entity *e = entity_of_node(rs->vals[1]);
Entity *e = entity_of_node(val1);
index = lb_add_local(p, val1_type, e, false);
} else {
index = lb_add_local_generated(p, t_int, false);
@@ -680,8 +694,8 @@ gb_internal void lb_build_range_interval(lbProcedure *p, AstBinaryExpr *node,
lbValue val = lb_addr_load(p, value);
lbValue idx = lb_addr_load(p, index);
if (val0_type) lb_store_range_stmt_val(p, rs->vals[0], val);
if (val1_type) lb_store_range_stmt_val(p, rs->vals[1], idx);
if (val0_type) lb_store_range_stmt_val(p, val0, val);
if (val1_type) lb_store_range_stmt_val(p, val1, idx);
{
// NOTE: this check block will most likely be optimized out, and is here
@@ -815,12 +829,14 @@ gb_internal void lb_build_range_stmt_struct_soa(lbProcedure *p, AstRangeStmt *rs
lb_open_scope(p, scope);
Ast *val0 = rs->vals.count > 0 ? lb_strip_and_prefix(rs->vals[0]) : nullptr;
Ast *val1 = rs->vals.count > 1 ? lb_strip_and_prefix(rs->vals[1]) : nullptr;
Type *val_types[2] = {};
if (rs->vals.count > 0 && rs->vals[0] != nullptr && !is_blank_ident(rs->vals[0])) {
val_types[0] = type_of_expr(rs->vals[0]);
if (val0 != nullptr && !is_blank_ident(val0)) {
val_types[0] = type_of_expr(val0);
}
if (rs->vals.count > 1 && rs->vals[1] != nullptr && !is_blank_ident(rs->vals[1])) {
val_types[1] = type_of_expr(rs->vals[1]);
if (val1 != nullptr && !is_blank_ident(val1)) {
val_types[1] = type_of_expr(val1);
}
@@ -901,14 +917,14 @@ gb_internal void lb_build_range_stmt_struct_soa(lbProcedure *p, AstRangeStmt *rs
if (val_types[0]) {
Entity *e = entity_of_node(rs->vals[0]);
Entity *e = entity_of_node(val0);
if (e != nullptr) {
lbAddr soa_val = lb_addr_soa_variable(array.addr, lb_addr_load(p, index), nullptr);
map_set(&p->module->soa_values, e, soa_val);
}
}
if (val_types[1]) {
lb_store_range_stmt_val(p, rs->vals[1], lb_addr_load(p, index));
lb_store_range_stmt_val(p, val1, lb_addr_load(p, index));
}
@@ -942,13 +958,15 @@ gb_internal void lb_build_range_stmt(lbProcedure *p, AstRangeStmt *rs, Scope *sc
lb_open_scope(p, scope);
Ast *val0 = rs->vals.count > 0 ? lb_strip_and_prefix(rs->vals[0]) : nullptr;
Ast *val1 = rs->vals.count > 1 ? lb_strip_and_prefix(rs->vals[1]) : nullptr;
Type *val0_type = nullptr;
Type *val1_type = nullptr;
if (rs->vals.count > 0 && rs->vals[0] != nullptr && !is_blank_ident(rs->vals[0])) {
val0_type = type_of_expr(rs->vals[0]);
if (val0 != nullptr && !is_blank_ident(val0)) {
val0_type = type_of_expr(val0);
}
if (rs->vals.count > 1 && rs->vals[1] != nullptr && !is_blank_ident(rs->vals[1])) {
val1_type = type_of_expr(rs->vals[1]);
if (val1 != nullptr && !is_blank_ident(val1)) {
val1_type = type_of_expr(val1);
}
lbValue val = {};
@@ -1042,11 +1060,11 @@ gb_internal void lb_build_range_stmt(lbProcedure *p, AstRangeStmt *rs, Scope *sc
if (is_map) {
if (val0_type) lb_store_range_stmt_val(p, rs->vals[0], key);
if (val1_type) lb_store_range_stmt_val(p, rs->vals[1], val);
if (val0_type) lb_store_range_stmt_val(p, val0, key);
if (val1_type) lb_store_range_stmt_val(p, val1, val);
} else {
if (val0_type) lb_store_range_stmt_val(p, rs->vals[0], val);
if (val1_type) lb_store_range_stmt_val(p, rs->vals[1], key);
if (val0_type) lb_store_range_stmt_val(p, val0, val);
if (val1_type) lb_store_range_stmt_val(p, val1, key);
}
lb_push_target_list(p, rs->label, done, loop, nullptr);
@@ -1064,21 +1082,23 @@ gb_internal void lb_build_unroll_range_stmt(lbProcedure *p, AstUnrollRangeStmt *
lb_open_scope(p, scope); // Open scope here
Ast *val0 = lb_strip_and_prefix(rs->val0);
Ast *val1 = lb_strip_and_prefix(rs->val1);
Type *val0_type = nullptr;
Type *val1_type = nullptr;
if (rs->val0 != nullptr && !is_blank_ident(rs->val0)) {
val0_type = type_of_expr(rs->val0);
if (val0 != nullptr && !is_blank_ident(val0)) {
val0_type = type_of_expr(val0);
}
if (rs->val1 != nullptr && !is_blank_ident(rs->val1)) {
val1_type = type_of_expr(rs->val1);
if (val1 != nullptr && !is_blank_ident(val1)) {
val1_type = type_of_expr(val1);
}
if (val0_type != nullptr) {
Entity *e = entity_of_node(rs->val0);
Entity *e = entity_of_node(val0);
lb_add_local(p, e->type, e, true);
}
if (val1_type != nullptr) {
Entity *e = entity_of_node(rs->val1);
Entity *e = entity_of_node(val1);
lb_add_local(p, e->type, e, true);
}
@@ -1092,8 +1112,8 @@ gb_internal void lb_build_unroll_range_stmt(lbProcedure *p, AstUnrollRangeStmt *
lbAddr val0_addr = {};
lbAddr val1_addr = {};
if (val0_type) val0_addr = lb_build_addr(p, rs->val0);
if (val1_type) val1_addr = lb_build_addr(p, rs->val1);
if (val0_type) val0_addr = lb_build_addr(p, val0);
if (val1_type) val1_addr = lb_build_addr(p, val1);
TokenKind op = expr->BinaryExpr.op.kind;
Ast *start_expr = expr->BinaryExpr.left;
@@ -1135,8 +1155,8 @@ gb_internal void lb_build_unroll_range_stmt(lbProcedure *p, AstUnrollRangeStmt *
lbAddr val0_addr = {};
lbAddr val1_addr = {};
if (val0_type) val0_addr = lb_build_addr(p, rs->val0);
if (val1_type) val1_addr = lb_build_addr(p, rs->val1);
if (val0_type) val0_addr = lb_build_addr(p, val0);
if (val1_type) val1_addr = lb_build_addr(p, val1);
for_array(i, bet->Enum.fields) {
Entity *field = bet->Enum.fields[i];
@@ -1149,8 +1169,8 @@ gb_internal void lb_build_unroll_range_stmt(lbProcedure *p, AstUnrollRangeStmt *
} else {
lbAddr val0_addr = {};
lbAddr val1_addr = {};
if (val0_type) val0_addr = lb_build_addr(p, rs->val0);
if (val1_type) val1_addr = lb_build_addr(p, rs->val1);
if (val0_type) val0_addr = lb_build_addr(p, val0);
if (val1_type) val1_addr = lb_build_addr(p, val1);
GB_ASSERT(expr->tav.mode == Addressing_Constant);