From 0de7df9eab9b256e0d1c8da7c9fc8c422c5ac1a7 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 11:37:14 +0100 Subject: [PATCH] Improve `//+vet`; remove `using` in many places; add `//+vet !using-stmt` where necessary --- core/encoding/entity/entity.odin | 37 +++--- core/encoding/xml/debug_print.odin | 30 +++-- core/image/netpbm/netpbm.odin | 1 + core/image/png/helpers.odin | 7 +- core/image/png/png.odin | 16 +-- core/net/url.odin | 70 ++++++------ core/odin/printer/visit.odin | 178 ++++++++++++++--------------- src/build_settings.cpp | 8 +- src/check_decl.cpp | 4 +- src/check_expr.cpp | 6 +- src/check_stmt.cpp | 4 +- src/checker.cpp | 22 ++++ src/checker.hpp | 8 +- src/parser.cpp | 23 ++-- 14 files changed, 212 insertions(+), 202 deletions(-) diff --git a/core/encoding/entity/entity.odin b/core/encoding/entity/entity.odin index 694fcdffc..ec640c69f 100644 --- a/core/encoding/entity/entity.odin +++ b/core/encoding/entity/entity.odin @@ -184,28 +184,26 @@ decode_xml :: proc(input: string, options := XML_Decode_Options{}, allocator := advance :: proc(t: ^Tokenizer) -> (err: Error) { if t == nil { return .Tokenizer_Is_Nil } - using t - #no_bounds_check { - if read_offset < len(src) { - offset = read_offset - r, w = rune(src[read_offset]), 1 + if t.read_offset < len(t.src) { + t.offset = t.read_offset + t.r, t.w = rune(t.src[t.read_offset]), 1 switch { - case r == 0: + case t.r == 0: return .Illegal_NUL_Character - case r >= utf8.RUNE_SELF: - r, w = utf8.decode_rune_in_string(src[read_offset:]) - if r == utf8.RUNE_ERROR && w == 1 { + case t.r >= utf8.RUNE_SELF: + t.r, t.w = utf8.decode_rune_in_string(t.src[t.read_offset:]) + if t.r == utf8.RUNE_ERROR && t.w == 1 { return .Illegal_UTF_Encoding - } else if r == utf8.RUNE_BOM && offset > 0 { + } else if t.r == utf8.RUNE_BOM && t.offset > 0 { return .Illegal_BOM } } - read_offset += w + t.read_offset += t.w return .None } else { - offset = len(src) - r = -1 + t.offset = len(t.src) + t.r = -1 return } } @@ 
-273,26 +271,25 @@ _extract_xml_entity :: proc(t: ^Tokenizer) -> (entity: string, err: Error) { All of these would be in the ASCII range. Even if one is not, it doesn't matter. All characters we need to compare to extract are. */ - using t length := len(t.src) found := false #no_bounds_check { - for read_offset < length { - if src[read_offset] == ';' { + for t.read_offset < length { + if t.src[t.read_offset] == ';' { + t.read_offset += 1 found = true - read_offset += 1 break } - read_offset += 1 + t.read_offset += 1 } } if found { - return string(src[offset + 1 : read_offset - 1]), .None + return string(t.src[t.offset + 1 : t.read_offset - 1]), .None } - return string(src[offset : read_offset]), .Invalid_Entity_Encoding + return string(t.src[t.offset : t.read_offset]), .Invalid_Entity_Encoding } /* diff --git a/core/encoding/xml/debug_print.odin b/core/encoding/xml/debug_print.odin index 0736e8893..b97617a8a 100644 --- a/core/encoding/xml/debug_print.odin +++ b/core/encoding/xml/debug_print.odin @@ -19,43 +19,39 @@ import "core:fmt" */ print :: proc(writer: io.Writer, doc: ^Document) -> (written: int, err: io.Error) { if doc == nil { return } - using fmt - - written += wprintf(writer, "[XML Prolog]\n") + written += fmt.wprintf(writer, "[XML Prolog]\n") for attr in doc.prologue { - written += wprintf(writer, "\t%v: %v\n", attr.key, attr.val) + written += fmt.wprintf(writer, "\t%v: %v\n", attr.key, attr.val) } - written += wprintf(writer, "[Encoding] %v\n", doc.encoding) + written += fmt.wprintf(writer, "[Encoding] %v\n", doc.encoding) if len(doc.doctype.ident) > 0 { - written += wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident) + written += fmt.wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident) if len(doc.doctype.rest) > 0 { - wprintf(writer, "\t%v\n", doc.doctype.rest) + fmt.wprintf(writer, "\t%v\n", doc.doctype.rest) } } for comment in doc.comments { - written += wprintf(writer, "[Pre-root comment] %v\n", comment) + written += fmt.wprintf(writer, "[Pre-root 
comment] %v\n", comment) } if len(doc.elements) > 0 { - wprintln(writer, " --- ") + fmt.wprintln(writer, " --- ") print_element(writer, doc, 0) - wprintln(writer, " --- ") + fmt.wprintln(writer, " --- ") } return written, .None } print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, indent := 0) -> (written: int, err: io.Error) { - using fmt - tab :: proc(writer: io.Writer, indent: int) { for _ in 0..=indent { - wprintf(writer, "\t") + fmt.wprintf(writer, "\t") } } @@ -64,13 +60,13 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, element := doc.elements[element_id] if element.kind == .Element { - wprintf(writer, "<%v>\n", element.ident) + fmt.wprintf(writer, "<%v>\n", element.ident) for value in element.value { switch v in value { case string: tab(writer, indent + 1) - wprintf(writer, "[Value] %v\n", v) + fmt.wprintf(writer, "[Value] %v\n", v) case Element_ID: print_element(writer, doc, v, indent + 1) } @@ -78,10 +74,10 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, for attr in element.attribs { tab(writer, indent + 1) - wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) + fmt.wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) } } else if element.kind == .Comment { - wprintf(writer, "[COMMENT] %v\n", element.value) + fmt.wprintf(writer, "[COMMENT] %v\n", element.value) } return written, .None diff --git a/core/image/netpbm/netpbm.odin b/core/image/netpbm/netpbm.odin index 74e482cb4..24df76c8e 100644 --- a/core/image/netpbm/netpbm.odin +++ b/core/image/netpbm/netpbm.odin @@ -1,3 +1,4 @@ +//+vet !using-stmt package netpbm import "core:bytes" diff --git a/core/image/png/helpers.odin b/core/image/png/helpers.odin index 889b3cb6b..f0209d4d7 100644 --- a/core/image/png/helpers.odin +++ b/core/image/png/helpers.odin @@ -80,11 +80,10 @@ time :: proc(c: image.PNG_Chunk) -> (res: tIME, ok: bool) { } core_time :: proc(c: image.PNG_Chunk) -> (t: coretime.Time, ok: bool) { 
- if png_time, png_ok := time(c); png_ok { - using png_time + if t, png_ok := time(c); png_ok { return coretime.datetime_to_time( - int(year), int(month), int(day), - int(hour), int(minute), int(second), + int(t.year), int(t.month), int(t.day), + int(t.hour), int(t.minute), int(t.second), ) } else { return {}, false diff --git a/core/image/png/png.odin b/core/image/png/png.odin index caa1e6e8a..1821e55cd 100644 --- a/core/image/png/png.odin +++ b/core/image/png/png.odin @@ -11,6 +11,7 @@ // package png implements a PNG image reader // // The PNG specification is at https://www.w3.org/TR/PNG/. +//+vet !using-stmt package png import "core:compress" @@ -444,15 +445,14 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a img.width = int(header.width) img.height = int(header.height) - using header h := image.PNG_IHDR{ - width = width, - height = height, - bit_depth = bit_depth, - color_type = color_type, - compression_method = compression_method, - filter_method = filter_method, - interlace_method = interlace_method, + width = header.width, + height = header.height, + bit_depth = header.bit_depth, + color_type = header.color_type, + compression_method = header.compression_method, + filter_method = header.filter_method, + interlace_method = header.interlace_method, } info.header = h diff --git a/core/net/url.odin b/core/net/url.odin index ed39f7859..ef43d6c9f 100644 --- a/core/net/url.odin +++ b/core/net/url.odin @@ -63,100 +63,100 @@ split_url :: proc(url: string, allocator := context.allocator) -> (scheme, host, } join_url :: proc(scheme, host, path: string, queries: map[string]string, allocator := context.allocator) -> string { - using strings + b := strings.builder_make(allocator) + strings.builder_grow(&b, len(scheme) + 3 + len(host) + 1 + len(path)) - b := builder_make(allocator) - builder_grow(&b, len(scheme) + 3 + len(host) + 1 + len(path)) - - write_string(&b, scheme) - write_string(&b, "://") - write_string(&b, trim_space(host)) + 
strings.write_string(&b, scheme) + strings.write_string(&b, "://") + strings.write_string(&b, strings.trim_space(host)) if path != "" { - if path[0] != '/' do write_string(&b, "/") - write_string(&b, trim_space(path)) + if path[0] != '/' { + strings.write_string(&b, "/") + } + strings.write_string(&b, strings.trim_space(path)) } query_length := len(queries) - if query_length > 0 do write_string(&b, "?") + if query_length > 0 { + strings.write_string(&b, "?") + } i := 0 for query_name, query_value in queries { - write_string(&b, query_name) + strings.write_string(&b, query_name) if query_value != "" { - write_string(&b, "=") - write_string(&b, query_value) + strings.write_string(&b, "=") + strings.write_string(&b, query_value) } if i < query_length - 1 { - write_string(&b, "&") + strings.write_string(&b, "&") } i += 1 } - return to_string(b) + return strings.to_string(b) } percent_encode :: proc(s: string, allocator := context.allocator) -> string { - using strings - - b := builder_make(allocator) - builder_grow(&b, len(s) + 16) // NOTE(tetra): A reasonable number to allow for the number of things we need to escape. + b := strings.builder_make(allocator) + strings.builder_grow(&b, len(s) + 16) // NOTE(tetra): A reasonable number to allow for the number of things we need to escape. 
for ch in s { switch ch { case 'A'..='Z', 'a'..='z', '0'..='9', '-', '_', '.', '~': - write_rune(&b, ch) + strings.write_rune(&b, ch) case: bytes, n := utf8.encode_rune(ch) for byte in bytes[:n] { buf: [2]u8 = --- t := strconv.append_int(buf[:], i64(byte), 16) - write_rune(&b, '%') - write_string(&b, t) + strings.write_rune(&b, '%') + strings.write_string(&b, t) } } } - return to_string(b) + return strings.to_string(b) } percent_decode :: proc(encoded_string: string, allocator := context.allocator) -> (decoded_string: string, ok: bool) { - using strings - - b := builder_make(allocator) - builder_grow(&b, len(encoded_string)) - defer if !ok do builder_destroy(&b) + b := strings.builder_make(allocator) + strings.builder_grow(&b, len(encoded_string)) + defer if !ok do strings.builder_destroy(&b) s := encoded_string for len(s) > 0 { - i := index_byte(s, '%') + i := strings.index_byte(s, '%') if i == -1 { - write_string(&b, s) // no '%'s; the string is already decoded + strings.write_string(&b, s) // no '%'s; the string is already decoded break } - write_string(&b, s[:i]) + strings.write_string(&b, s[:i]) s = s[i:] if len(s) == 0 do return // percent without anything after it s = s[1:] if s[0] == '%' { - write_byte(&b, '%') + strings.write_byte(&b, '%') s = s[1:] continue } - if len(s) < 2 do return // percent without encoded value + if len(s) < 2 { + return // percent without encoded value + } val := hex.decode_sequence(s[:2]) or_return - write_byte(&b, val) + strings.write_byte(&b, val) s = s[2:] } ok = true - decoded_string = to_string(b) + decoded_string = strings.to_string(b) return } diff --git a/core/odin/printer/visit.odin b/core/odin/printer/visit.odin index 66166aa81..d3701fcc5 100644 --- a/core/odin/printer/visit.odin +++ b/core/odin/printer/visit.odin @@ -336,22 +336,20 @@ hint_current_line :: proc(p: ^Printer, hint: Line_Type) { @(private) visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { - using ast - if decl == nil { return } 
#partial switch v in decl.derived_stmt { - case ^Expr_Stmt: + case ^ast.Expr_Stmt: move_line(p, decl.pos) visit_expr(p, v.expr) if p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^When_Stmt: - visit_stmt(p, cast(^Stmt)decl) - case ^Foreign_Import_Decl: + case ^ast.When_Stmt: + visit_stmt(p, cast(^ast.Stmt)decl) + case ^ast.Foreign_Import_Decl: if len(v.attributes) > 0 { sort.sort(sort_attribute(&v.attributes)) move_line(p, v.attributes[0].pos) @@ -370,7 +368,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { for path in v.fullpaths { push_ident_token(p, path, 0) } - case ^Foreign_Block_Decl: + case ^ast.Foreign_Block_Decl: if len(v.attributes) > 0 { sort.sort(sort_attribute(&v.attributes)) move_line(p, v.attributes[0].pos) @@ -383,7 +381,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { visit_expr(p, v.foreign_library) visit_stmt(p, v.body) - case ^Import_Decl: + case ^ast.Import_Decl: move_line(p, decl.pos) if v.name.text != "" { @@ -395,7 +393,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { push_ident_token(p, v.fullpath, 1) } - case ^Value_Decl: + case ^ast.Value_Decl: if len(v.attributes) > 0 { sort.sort(sort_attribute(&v.attributes)) move_line(p, v.attributes[0].pos) @@ -447,9 +445,9 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { for value in v.values { #partial switch a in value.derived { - case ^Union_Type, ^Enum_Type, ^Struct_Type: + case ^ast.Union_Type, ^ast.Enum_Type, ^ast.Struct_Type: add_semicolon = false || called_in_stmt - case ^Proc_Lit: + case ^ast.Proc_Lit: add_semicolon = false } } @@ -510,40 +508,38 @@ visit_attributes :: proc(p: ^Printer, attributes: [dynamic]^ast.Attribute) { @(private) visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Generic, empty_block := false, block_stmt := false) { - using ast - if stmt == nil { return } switch v in stmt.derived_stmt { - case 
^Bad_Stmt: - case ^Bad_Decl: - case ^Package_Decl: + case ^ast.Bad_Stmt: + case ^ast.Bad_Decl: + case ^ast.Package_Decl: - case ^Empty_Stmt: + case ^ast.Empty_Stmt: push_generic_token(p, .Semicolon, 0) - case ^Tag_Stmt: + case ^ast.Tag_Stmt: push_generic_token(p, .Hash, 1) push_generic_token(p, v.op.kind, 1, v.op.text) visit_stmt(p, v.stmt) - case ^Import_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Import_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Value_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Value_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Foreign_Import_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Foreign_Import_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Foreign_Block_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Foreign_Block_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Using_Stmt: + case ^ast.Using_Stmt: move_line(p, v.pos) push_generic_token(p, .Using, 1) @@ -553,7 +549,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^Block_Stmt: + case ^ast.Block_Stmt: move_line(p, v.pos) if v.pos.line == v.end.line { @@ -583,7 +579,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_end_brace(p, v.end) } } - case ^If_Stmt: + case ^ast.If_Stmt: move_line(p, v.pos) if v.label != nil { @@ -606,7 +602,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener uses_do := false - if check_stmt, ok := v.body.derived.(^Block_Stmt); ok && check_stmt.uses_do { + if check_stmt, ok := v.body.derived.(^ast.Block_Stmt); ok && check_stmt.uses_do { uses_do = true } @@ -637,7 +633,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.else_stmt) } - case ^Switch_Stmt: + case ^ast.Switch_Stmt: move_line(p, v.pos) if v.label != nil { @@ 
-665,7 +661,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_expr(p, v.cond) visit_stmt(p, v.body) - case ^Case_Clause: + case ^ast.Case_Clause: move_line(p, v.pos) if !p.config.indent_cases { @@ -689,7 +685,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if !p.config.indent_cases { indent(p) } - case ^Type_Switch_Stmt: + case ^ast.Type_Switch_Stmt: move_line(p, v.pos) hint_current_line(p, {.Switch_Stmt}) @@ -707,7 +703,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.tag) visit_stmt(p, v.body) - case ^Assign_Stmt: + case ^ast.Assign_Stmt: move_line(p, v.pos) hint_current_line(p, {.Assign}) @@ -721,13 +717,13 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if block_stmt && p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^Expr_Stmt: + case ^ast.Expr_Stmt: move_line(p, v.pos) visit_expr(p, v.expr) if block_stmt && p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^For_Stmt: + case ^ast.For_Stmt: // this should be simplified move_line(p, v.pos) @@ -764,7 +760,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.body) - case ^Inline_Range_Stmt: + case ^ast.Inline_Range_Stmt: move_line(p, v.pos) if v.label != nil { @@ -790,7 +786,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_expr(p, v.expr) visit_stmt(p, v.body) - case ^Range_Stmt: + case ^ast.Range_Stmt: move_line(p, v.pos) if v.label != nil { @@ -816,7 +812,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_expr(p, v.expr) visit_stmt(p, v.body) - case ^Return_Stmt: + case ^ast.Return_Stmt: move_line(p, v.pos) push_generic_token(p, .Return, 1) @@ -828,7 +824,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if block_stmt && p.config.semicolons { 
push_generic_token(p, .Semicolon, 0) } - case ^Defer_Stmt: + case ^ast.Defer_Stmt: move_line(p, v.pos) push_generic_token(p, .Defer, 0) @@ -837,7 +833,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^When_Stmt: + case ^ast.When_Stmt: move_line(p, v.pos) push_generic_token(p, .When, 1) visit_expr(p, v.cond) @@ -857,7 +853,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.else_stmt) } - case ^Branch_Stmt: + case ^ast.Branch_Stmt: move_line(p, v.pos) push_generic_token(p, v.tok.kind, 0) @@ -921,8 +917,6 @@ push_poly_params :: proc(p: ^Printer, poly_params: ^ast.Field_List) { @(private) visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { - using ast - if expr == nil { return } @@ -930,14 +924,14 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { set_source_position(p, expr.pos) switch v in expr.derived_expr { - case ^Bad_Expr: + case ^ast.Bad_Expr: - case ^Tag_Expr: + case ^ast.Tag_Expr: push_generic_token(p, .Hash, 1) push_generic_token(p, v.op.kind, 1, v.op.text) visit_expr(p, v.expr) - case ^Inline_Asm_Expr: + case ^ast.Inline_Asm_Expr: push_generic_token(p, v.tok.kind, 1, v.tok.text) push_generic_token(p, .Open_Paren, 1) @@ -954,42 +948,42 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Comma, 0) visit_expr(p, v.constraints_string) push_generic_token(p, .Close_Brace, 0) - case ^Undef: + case ^ast.Undef: push_generic_token(p, .Undef, 1) - case ^Auto_Cast: + case ^ast.Auto_Cast: push_generic_token(p, v.op.kind, 1) visit_expr(p, v.expr) - case ^Ternary_If_Expr: + case ^ast.Ternary_If_Expr: visit_expr(p, v.x) push_generic_token(p, v.op1.kind, 1) visit_expr(p, v.cond) push_generic_token(p, v.op2.kind, 1) visit_expr(p, v.y) - case ^Ternary_When_Expr: + case ^ast.Ternary_When_Expr: visit_expr(p, v.x) 
push_generic_token(p, v.op1.kind, 1) visit_expr(p, v.cond) push_generic_token(p, v.op2.kind, 1) visit_expr(p, v.y) - case ^Or_Else_Expr: + case ^ast.Or_Else_Expr: visit_expr(p, v.x) push_generic_token(p, v.token.kind, 1) visit_expr(p, v.y) - case ^Or_Return_Expr: + case ^ast.Or_Return_Expr: visit_expr(p, v.expr) push_generic_token(p, v.token.kind, 1) - case ^Selector_Call_Expr: + case ^ast.Selector_Call_Expr: visit_expr(p, v.call.expr) push_generic_token(p, .Open_Paren, 1) visit_exprs(p, v.call.args, {.Add_Comma}) push_generic_token(p, .Close_Paren, 0) - case ^Ellipsis: + case ^ast.Ellipsis: push_generic_token(p, .Ellipsis, 1) visit_expr(p, v.expr) - case ^Relative_Type: + case ^ast.Relative_Type: visit_expr(p, v.tag) visit_expr(p, v.type) - case ^Slice_Expr: + case ^ast.Slice_Expr: visit_expr(p, v.expr) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.low) @@ -999,37 +993,37 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { visit_expr(p, v.high) } push_generic_token(p, .Close_Bracket, 0) - case ^Ident: + case ^ast.Ident: if .Enforce_Poly_Names in options { push_generic_token(p, .Dollar, 1) push_ident_token(p, v.name, 0) } else { push_ident_token(p, v.name, 1) } - case ^Deref_Expr: + case ^ast.Deref_Expr: visit_expr(p, v.expr) push_generic_token(p, v.op.kind, 0) - case ^Type_Cast: + case ^ast.Type_Cast: push_generic_token(p, v.tok.kind, 1) push_generic_token(p, .Open_Paren, 0) visit_expr(p, v.type) push_generic_token(p, .Close_Paren, 0) merge_next_token(p) visit_expr(p, v.expr) - case ^Basic_Directive: + case ^ast.Basic_Directive: push_generic_token(p, v.tok.kind, 1) push_ident_token(p, v.name, 0) - case ^Distinct_Type: + case ^ast.Distinct_Type: push_generic_token(p, .Distinct, 1) visit_expr(p, v.type) - case ^Dynamic_Array_Type: + case ^ast.Dynamic_Array_Type: visit_expr(p, v.tag) push_generic_token(p, .Open_Bracket, 1) push_generic_token(p, .Dynamic, 0) push_generic_token(p, .Close_Bracket, 0) merge_next_token(p) 
visit_expr(p, v.elem) - case ^Bit_Set_Type: + case ^ast.Bit_Set_Type: push_generic_token(p, .Bit_Set, 1) push_generic_token(p, .Open_Bracket, 0) @@ -1041,7 +1035,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } push_generic_token(p, .Close_Bracket, 0) - case ^Union_Type: + case ^ast.Union_Type: push_generic_token(p, .Union, 1) push_poly_params(p, v.poly_params) @@ -1066,7 +1060,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { visit_exprs(p, v.variants, {.Add_Comma, .Trailing}) visit_end_brace(p, v.end) } - case ^Enum_Type: + case ^ast.Enum_Type: push_generic_token(p, .Enum, 1) hint_current_line(p, {.Enum}) @@ -1089,7 +1083,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } set_source_position(p, v.end) - case ^Struct_Type: + case ^ast.Struct_Type: push_generic_token(p, .Struct, 1) hint_current_line(p, {.Struct}) @@ -1124,7 +1118,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } set_source_position(p, v.end) - case ^Proc_Lit: + case ^ast.Proc_Lit: switch v.inlining { case .None: case .Inline: @@ -1143,16 +1137,16 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } else { push_generic_token(p, .Undef, 1) } - case ^Proc_Type: + case ^ast.Proc_Type: visit_proc_type(p, v) - case ^Basic_Lit: + case ^ast.Basic_Lit: push_generic_token(p, v.tok.kind, 1, v.tok.text) - case ^Binary_Expr: + case ^ast.Binary_Expr: visit_binary_expr(p, v) - case ^Implicit_Selector_Expr: + case ^ast.Implicit_Selector_Expr: push_generic_token(p, .Period, 1) push_ident_token(p, v.field.name, 0) - case ^Call_Expr: + case ^ast.Call_Expr: visit_expr(p, v.expr) push_format_token(p, @@ -1167,34 +1161,34 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { visit_call_exprs(p, v.args, v.ellipsis.kind == .Ellipsis) push_generic_token(p, .Close_Paren, 0) - case ^Typeid_Type: + case ^ast.Typeid_Type: 
push_generic_token(p, .Typeid, 1) if v.specialization != nil { push_generic_token(p, .Quo, 0) visit_expr(p, v.specialization) } - case ^Selector_Expr: + case ^ast.Selector_Expr: visit_expr(p, v.expr) push_generic_token(p, v.op.kind, 0) visit_expr(p, v.field) - case ^Paren_Expr: + case ^ast.Paren_Expr: push_generic_token(p, .Open_Paren, 1) visit_expr(p, v.expr) push_generic_token(p, .Close_Paren, 0) - case ^Index_Expr: + case ^ast.Index_Expr: visit_expr(p, v.expr) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.index) push_generic_token(p, .Close_Bracket, 0) - case ^Matrix_Index_Expr: + case ^ast.Matrix_Index_Expr: visit_expr(p, v.expr) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.row_index) push_generic_token(p, .Comma, 0) visit_expr(p, v.column_index) push_generic_token(p, .Close_Bracket, 0) - case ^Proc_Group: + case ^ast.Proc_Group: push_generic_token(p, v.tok.kind, 1) if len(v.args) != 0 && v.pos.line != v.args[len(v.args) - 1].pos.line { @@ -1209,7 +1203,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Close_Brace, 0) } - case ^Comp_Lit: + case ^ast.Comp_Lit: if v.type != nil { visit_expr(p, v.type) } @@ -1226,18 +1220,18 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Close_Brace, 0) } - case ^Unary_Expr: + case ^ast.Unary_Expr: push_generic_token(p, v.op.kind, 1) merge_next_token(p) visit_expr(p, v.expr) - case ^Field_Value: + case ^ast.Field_Value: visit_expr(p, v.field) push_generic_token(p, .Eq, 1) visit_expr(p, v.value) - case ^Type_Assertion: + case ^ast.Type_Assertion: visit_expr(p, v.expr) - if unary, ok := v.type.derived.(^Unary_Expr); ok && unary.op.text == "?" { + if unary, ok := v.type.derived.(^ast.Unary_Expr); ok && unary.op.text == "?" 
{ push_generic_token(p, .Period, 0) visit_expr(p, v.type) } else { @@ -1247,13 +1241,13 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Close_Paren, 0) } - case ^Pointer_Type: + case ^ast.Pointer_Type: push_generic_token(p, .Pointer, 1) merge_next_token(p) visit_expr(p, v.elem) - case ^Implicit: + case ^ast.Implicit: push_generic_token(p, v.tok.kind, 1) - case ^Poly_Type: + case ^ast.Poly_Type: push_generic_token(p, .Dollar, 1) merge_next_token(p) visit_expr(p, v.type) @@ -1263,28 +1257,28 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { merge_next_token(p) visit_expr(p, v.specialization) } - case ^Array_Type: + case ^ast.Array_Type: visit_expr(p, v.tag) push_generic_token(p, .Open_Bracket, 1) visit_expr(p, v.len) push_generic_token(p, .Close_Bracket, 0) merge_next_token(p) visit_expr(p, v.elem) - case ^Map_Type: + case ^ast.Map_Type: push_generic_token(p, .Map, 1) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.key) push_generic_token(p, .Close_Bracket, 0) merge_next_token(p) visit_expr(p, v.value) - case ^Helper_Type: + case ^ast.Helper_Type: visit_expr(p, v.type) - case ^Multi_Pointer_Type: + case ^ast.Multi_Pointer_Type: push_generic_token(p, .Open_Bracket, 1) push_generic_token(p, .Pointer, 0) push_generic_token(p, .Close_Bracket, 0) visit_expr(p, v.elem) - case ^Matrix_Type: + case ^ast.Matrix_Type: push_generic_token(p, .Matrix, 1) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.row_count) diff --git a/src/build_settings.cpp b/src/build_settings.cpp index f234ff2ce..b46ea10e0 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -218,10 +218,10 @@ enum BuildPath : u8 { enum VetFlags : u64 { VetFlag_NONE = 0, - VetFlag_Unused = 1u<<0, - VetFlag_Shadowing = 1u<<1, - VetFlag_UsingStmt = 1u<<2, - VetFlag_UsingParam = 1u<<3, + VetFlag_Unused = 1u<<0, // 1 + VetFlag_Shadowing = 1u<<1, // 2 + VetFlag_UsingStmt = 1u<<2, // 4 + VetFlag_UsingParam = 
1u<<3, // 8 VetFlag_Extra = 1u<<16, diff --git a/src/check_decl.cpp b/src/check_decl.cpp index 3dca7aafa..9e96dae1c 100644 --- a/src/check_decl.cpp +++ b/src/check_decl.cpp @@ -1064,7 +1064,7 @@ gb_internal void check_proc_decl(CheckerContext *ctx, Entity *e, DeclInfo *d) { auto *fp = &ctx->info->foreigns; StringHashKey key = string_hash_string(name); Entity **found = string_map_get(fp, key); - if (found) { + if (found && e != *found) { Entity *f = *found; TokenPos pos = f->token.pos; Type *this_type = base_type(e->type); @@ -1636,7 +1636,7 @@ gb_internal bool check_proc_body(CheckerContext *ctx_, Token token, DeclInfo *de } check_close_scope(ctx); - check_scope_usage(ctx->checker, ctx->scope, check_vet_flags(ctx)); + check_scope_usage(ctx->checker, ctx->scope, check_vet_flags(body)); add_deps_from_child_to_parent(decl); diff --git a/src/check_expr.cpp b/src/check_expr.cpp index fe389e027..f9c62b506 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -3099,7 +3099,7 @@ gb_internal void check_cast(CheckerContext *c, Operand *x, Type *type) { update_untyped_expr_type(c, x->expr, final_type, true); } - if (check_vet_flags(c) & VetFlag_Extra) { + if (check_vet_flags(x->expr) & VetFlag_Extra) { if (are_types_identical(x->type, type)) { gbString str = type_to_string(type); warning(x->expr, "Unneeded cast to the same type '%s'", str); @@ -3171,7 +3171,7 @@ gb_internal bool check_transmute(CheckerContext *c, Ast *node, Operand *o, Type return false; } - if (check_vet_flags(c) & VetFlag_Extra) { + if (check_vet_flags(node) & VetFlag_Extra) { if (are_types_identical(o->type, dst_t)) { gbString str = type_to_string(dst_t); warning(o->expr, "Unneeded transmute to the same type '%s'", str); @@ -10028,7 +10028,7 @@ gb_internal ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast Type *type = type_of_expr(ac->expr); check_cast(c, o, type_hint); if (is_type_typed(type) && are_types_identical(type, type_hint)) { - if (check_vet_flags(c) & VetFlag_Extra) { + 
if (check_vet_flags(node) & VetFlag_Extra) { error(node, "Redundant 'auto_cast' applied to expression"); } } diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index 2c1ee8331..b6bb7d819 100644 --- a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -2464,9 +2464,9 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags) error(us->token, "Empty 'using' list"); return; } - if (check_vet_flags(ctx) & VetFlag_UsingStmt) { + if (check_vet_flags(node) & VetFlag_UsingStmt) { ERROR_BLOCK(); - error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied"); + error(node, "'using' as a statement is not allowed when '-vet' or '-vet-using' is applied %llu %llu", check_vet_flags(ctx), node->file()->vet_flags); error_line("\t'using' is considered bad practice to use as a statement outside of immediate refactoring\n"); } diff --git a/src/checker.cpp b/src/checker.cpp index a6b66f809..fbc550f00 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -521,6 +521,28 @@ GB_COMPARE_PROC(entity_variable_pos_cmp) { } + + +gb_internal u64 check_vet_flags(CheckerContext *c) { + AstFile *file = c->file; + if (file == nullptr && + c->curr_proc_decl && + c->curr_proc_decl->proc_lit) { + file = c->curr_proc_decl->proc_lit->file(); + } + if (file && file->vet_flags_set) { + return file->vet_flags; + } + return build_context.vet_flags; +} + +gb_internal u64 check_vet_flags(Ast *node) { + AstFile *file = node->file(); + if (file && file->vet_flags_set) { + return file->vet_flags; + } + return build_context.vet_flags; +} + enum VettedEntityKind { VettedEntity_Invalid, diff --git a/src/checker.hpp b/src/checker.hpp index 12090cbca..8a63f7e88 100644 --- a/src/checker.hpp +++ b/src/checker.hpp @@ -449,12 +449,8 @@ struct CheckerContext { Ast *assignment_lhs_hint; }; -u64 check_vet_flags(CheckerContext *c) { - if (c->file && c->file->vet_flags_set) { - return c->file->vet_flags; - } - return build_context.vet_flags; -} +gb_internal u64 
check_vet_flags(CheckerContext *c); +gb_internal u64 check_vet_flags(Ast *node); struct Checker { diff --git a/src/parser.cpp b/src/parser.cpp index b99182189..7d1c37d84 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -5563,7 +5563,9 @@ gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { while (s.len > 0) { String p = string_trim_whitespace(vet_tag_get_token(s, &s)); - if (p.len == 0) break; + if (p.len == 0) { + break; + } bool is_notted = false; if (p[0] == '!') { @@ -5571,14 +5573,10 @@ gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { p = substring(p, 1, p.len); if (p.len == 0) { syntax_error(token_for_pos, "Expected a vet flag name after '!'"); - break; + return build_context.vet_flags; } } - if (p.len == 0) { - continue; - } - u64 flag = get_vet_flag_from_name(p); if (flag != VetFlag_NONE) { if (is_notted) { @@ -5595,13 +5593,20 @@ gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { error_line("\tusing-stmt\n"); error_line("\tusing-param\n"); error_line("\textra\n"); - break; + return build_context.vet_flags; } } - if (vet_flags == 0 && vet_not_flags != 0) { - vet_flags = VetFlag_All; + if (vet_flags == 0 && vet_not_flags == 0) { + return build_context.vet_flags; } + if (vet_flags == 0 && vet_not_flags != 0) { + return build_context.vet_flags &~ vet_not_flags; + } + if (vet_flags != 0 && vet_not_flags == 0) { + return vet_flags; + } + GB_ASSERT(vet_flags != 0 && vet_not_flags != 0); return vet_flags &~ vet_not_flags; }