From 74338733ba0f672124c413a6af103f2fa48049c2 Mon Sep 17 00:00:00 2001 From: Laytan Laats Date: Wed, 19 Jul 2023 20:27:34 +0200 Subject: [PATCH 01/20] Fix #2666 by checking for disabled when generating parapoly procs --- src/check_expr.cpp | 4 ++++ tests/issues/run.bat | 1 + tests/issues/run.sh | 1 + tests/issues/test_issue_2666.odin | 26 ++++++++++++++++++++++++++ 4 files changed, 32 insertions(+) create mode 100644 tests/issues/test_issue_2666.odin diff --git a/src/check_expr.cpp b/src/check_expr.cpp index 98154f33d..91422addd 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -349,6 +349,10 @@ gb_internal bool find_or_generate_polymorphic_procedure(CheckerContext *old_c, E return false; } + if (base_entity->flags & EntityFlag_Disabled) { + return false; + } + String name = base_entity->token.string; Type *src = base_type(base_entity->type); diff --git a/tests/issues/run.bat b/tests/issues/run.bat index 63d722e09..41c52c02f 100644 --- a/tests/issues/run.bat +++ b/tests/issues/run.bat @@ -15,6 +15,7 @@ set COMMON=-collection:tests=..\.. ..\..\..\odin test ..\test_issue_2466.odin %COMMON% -file || exit /b ..\..\..\odin test ..\test_issue_2615.odin %COMMON% -file || exit /b ..\..\..\odin test ..\test_issue_2637.odin %COMMON% -file || exit /b +..\..\..\odin test ..\test_issue_2666.odin %COMMON% -file || exit /b @echo off diff --git a/tests/issues/run.sh b/tests/issues/run.sh index 7d2101dc6..6d53388a7 100755 --- a/tests/issues/run.sh +++ b/tests/issues/run.sh @@ -18,6 +18,7 @@ $ODIN build ../test_issue_2113.odin $COMMON -file -debug $ODIN test ../test_issue_2466.odin $COMMON -file $ODIN test ../test_issue_2615.odin $COMMON -file $ODIN test ../test_issue_2637.odin $COMMON -file +$ODIN test ../test_issue_2666.odin $COMMON -file if [[ $($ODIN build ../test_issue_2395.odin $COMMON -file 2>&1 >/dev/null | grep -c "$NO_NIL_ERR") -eq 2 ]] ; then echo "SUCCESSFUL 1/1" else diff --git a/tests/issues/test_issue_2666.odin b/tests/issues/test_issue_2666.odin new file mode 100644 index 000000000..dd77129ea --- /dev/null +++ b/tests/issues/test_issue_2666.odin @@ -0,0 +1,26 @@ +// Tests issue https://github.com/odin-lang/Odin/issues/2666 +// @(disabled=) does not work with polymorphic procs +package test_issues + +import "core:testing" + +@(test) +test_disabled_parapoly :: proc(t: ^testing.T) { + disabled_parapoly(t, 1) + disabled_parapoly_constant(t, 1) +} + +@(private="file") +@(disabled = true) +disabled_parapoly :: proc(t: ^testing.T, num: $T) { + testing.error(t, "disabled_parapoly should be disabled") +} + +@(private="file") +DISABLE :: true + +@(disabled = DISABLE) +@(private = "file") +disabled_parapoly_constant :: proc(t: ^testing.T, num: $T) { + testing.error(t, "disabled_parapoly_constant should be disabled") +} From f9c083073e3d4c64c5e53f79db95e76deb67a39d Mon Sep 17 00:00:00 2001 From: jason Date: Fri, 21 Jul 2023 15:44:39 -0400 Subject: [PATCH 02/20] coalesce tombstones in map insert --- core/runtime/dynamic_map_internal.odin | 216 ++++++++++++++----------- 1 file changed, 118 insertions(+), 98 deletions(-) diff --git a/core/runtime/dynamic_map_internal.odin b/core/runtime/dynamic_map_internal.odin index 05c03028f..d34c29d4b 100644 --- a/core/runtime/dynamic_map_internal.odin +++ b/core/runtime/dynamic_map_internal.odin @@ -414,68 +414,21 @@ map_insert_hash_dynamic :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^ tk := map_cell_index_dynamic(sk, info.ks, 1) tv := map_cell_index_dynamic(sv, info.vs, 1) - for { - hp := &hs[pos] - element_hash := hp^ + swap_loop: for { + 
element_hash := hs[pos] if map_hash_is_empty(element_hash) { - kp := map_cell_index_dynamic(ks, info.ks, pos) - vp := map_cell_index_dynamic(vs, info.vs, pos) - intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(k), size_of_k) - intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(v), size_of_v) - hp^ = h + k_dst := map_cell_index_dynamic(ks, info.ks, pos) + v_dst := map_cell_index_dynamic(vs, info.vs, pos) + intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v) + hs[pos] = h - return result if result != 0 else vp + return result if result != 0 else v_dst } if map_hash_is_deleted(element_hash) { - next_pos := (pos + 1) & mask - - // backward shift - for !map_hash_is_empty(hs[next_pos]) { - probe_distance := map_probe_distance(m^, hs[next_pos], next_pos) - if probe_distance == 0 { - break - } - probe_distance -= 1 - - kp := map_cell_index_dynamic(ks, info.ks, pos) - vp := map_cell_index_dynamic(vs, info.vs, pos) - kn := map_cell_index_dynamic(ks, info.ks, next_pos) - vn := map_cell_index_dynamic(vs, info.vs, next_pos) - - if distance > probe_distance { - if result == 0 { - result = vp - } - // move stored into pos; store next - intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(k), size_of_k) - intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(v), size_of_v) - hs[pos] = h - - intrinsics.mem_copy_non_overlapping(rawptr(k), rawptr(kn), size_of_k) - intrinsics.mem_copy_non_overlapping(rawptr(v), rawptr(vn), size_of_v) - h = hs[next_pos] - } else { - // move next back 1 - intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(kn), size_of_k) - intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(vn), size_of_v) - hs[pos] = hs[next_pos] - distance = probe_distance - } - hs[next_pos] = 0 - pos = (pos + 1) & mask - next_pos = (next_pos + 1) & mask - distance += 1 - } - - kp := map_cell_index_dynamic(ks, info.ks, pos) - vp := map_cell_index_dynamic(vs, info.vs, pos) - intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(k), size_of_k) - intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(v), size_of_v) - hs[pos] = h - - return result if result != 0 else vp + break swap_loop } if probe_distance := map_probe_distance(m^, element_hash, pos); distance > probe_distance { @@ -495,8 +448,8 @@ map_insert_hash_dynamic :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^ intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(tv), size_of_v) th := h - h = hp^ - hp^ = th + h = hs[pos] + hs[pos] = th distance = probe_distance } @@ -504,6 +457,103 @@ map_insert_hash_dynamic :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^ pos = (pos + 1) & mask distance += 1 } + + // backward shift loop + hs[pos] = 0 + look_ahead: uintptr = 1 + for { + la_pos := (pos + look_ahead) & mask + element_hash := hs[la_pos] + + if map_hash_is_deleted(element_hash) { + look_ahead += 1 + hs[la_pos] = 0 + continue + } + + k_dst := map_cell_index_dynamic(ks, info.ks, pos) + v_dst := map_cell_index_dynamic(vs, info.vs, pos) + + if map_hash_is_empty(element_hash) { + intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v) + hs[pos] = h + + return result if result != 0 else v_dst + } + + k_src := map_cell_index_dynamic(ks, info.ks, la_pos) + v_src := map_cell_index_dynamic(vs, info.vs, la_pos) + probe_distance := map_probe_distance(m^, element_hash, la_pos) + + if probe_distance < look_ahead { + // probed can be made ideal 
while placing saved (ending condition) + if result == 0 { + result = v_dst + } + intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v) + hs[pos] = h + + // This will be an ideal move + pos = (la_pos - probe_distance) & mask + look_ahead -= probe_distance + + // shift until we hit ideal/empty + for probe_distance != 0 { + k_dst = map_cell_index_dynamic(ks, info.ks, pos) + v_dst = map_cell_index_dynamic(vs, info.vs, pos) + + intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k_src), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v_src), size_of_v) + hs[pos] = element_hash + hs[la_pos] = 0 + + pos = (pos + 1) & mask + la_pos = (la_pos + 1) & mask + look_ahead = (la_pos - pos) & mask + element_hash = hs[la_pos] + if map_hash_is_empty(element_hash) { + return + } + + probe_distance = map_probe_distance(m^, element_hash, la_pos) + if probe_distance == 0 { + return + } + // can be ideal? + if probe_distance < look_ahead { + pos = (la_pos - probe_distance) & mask + } + k_src = map_cell_index_dynamic(ks, info.ks, la_pos) + v_src = map_cell_index_dynamic(vs, info.vs, la_pos) + } + return + } else if distance < probe_distance - look_ahead { + // shift back probed + intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k_src), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v_src), size_of_v) + hs[pos] = element_hash + hs[la_pos] = 0 + } else { + // place saved, save probed + if result == 0 { + result = v_dst + } + intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v) + hs[pos] = h + + intrinsics.mem_copy_non_overlapping(rawptr(k), rawptr(k_src), size_of_k) + intrinsics.mem_copy_non_overlapping(rawptr(v), rawptr(v_src), size_of_v) + h = hs[la_pos] + hs[la_pos] = 0 + distance = probe_distance - look_ahead + } + + pos = (pos + 1) & mask + distance += 1 + } } @(require_results) @@ -696,49 +746,19 @@ map_erase_dynamic :: #force_inline proc "contextless" (#no_alias m: ^Raw_Map, #n m.len -= 1 ok = true - { // coalesce tombstones - // HACK NOTE(bill): This is an ugly bodge but it is coalescing the tombstone slots - mask := (uintptr(1)< Date: Thu, 27 Jul 2023 22:13:55 +0200 Subject: [PATCH 03/20] Assign element to Slice not Array when alloc a Type_Slice --- src/types.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/types.cpp b/src/types.cpp index 847aea9f3..d11c27be6 100644 --- a/src/types.cpp +++ b/src/types.cpp @@ -988,7 +988,7 @@ gb_internal Type *alloc_type_enumerated_array(Type *elem, Type *index, ExactValu gb_internal Type *alloc_type_slice(Type *elem) { Type *t = alloc_type(Type_Slice); - t->Array.elem = elem; + t->Slice.elem = elem; return t; } From 683ee75703f9bde9ecf34ae3ec6ab2c3b68b52b2 Mon Sep 17 00:00:00 2001 From: Jeroen van Rijn Date: Fri, 28 Jul 2023 15:53:39 +0200 Subject: [PATCH 04/20] Fix #2684 --- core/encoding/xml/debug_print.odin | 16 +++++---- core/encoding/xml/example/xml_example.odin | 6 ++-- core/encoding/xml/helpers.odin | 29 ++++++++------- core/encoding/xml/xml_reader.odin | 42 ++++++++-------------- core/text/i18n/i18n.odin | 2 ++ core/text/i18n/qt_linguist.odin | 42 +++++++++++++++++----- tests/core/encoding/xml/test_core_xml.odin | 34 +++++++++--------- 7 files changed, 98 insertions(+), 73 deletions(-) diff --git a/core/encoding/xml/debug_print.odin b/core/encoding/xml/debug_print.odin index 
e9a1cb160..0736e8893 100644
--- a/core/encoding/xml/debug_print.odin
+++ b/core/encoding/xml/debug_print.odin
@@ -65,19 +65,21 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID,
 	if element.kind == .Element {
 		wprintf(writer, "<%v>\n", element.ident)
-		if len(element.value) > 0 {
-			tab(writer, indent + 1)
-			wprintf(writer, "[Value] %v\n", element.value)
+
+		for value in element.value {
+			switch v in value {
+			case string:
+				tab(writer, indent + 1)
+				wprintf(writer, "[Value] %v\n", v)
+			case Element_ID:
+				print_element(writer, doc, v, indent + 1)
+			}
 		}
 
 		for attr in element.attribs {
 			tab(writer, indent + 1)
 			wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val)
 		}
-
-		for child in element.children {
-			print_element(writer, doc, child, indent + 1)
-		}
 	} else if element.kind == .Comment {
 		wprintf(writer, "[COMMENT] %v\n", element.value)
 	}
diff --git a/core/encoding/xml/example/xml_example.odin b/core/encoding/xml/example/xml_example.odin
index 887b40764..aebb8d0ea 100644
--- a/core/encoding/xml/example/xml_example.odin
+++ b/core/encoding/xml/example/xml_example.odin
@@ -72,10 +72,10 @@ example :: proc() {
 		return
 	}
 
-	printf("Found `<charlist>` with %v children, %v elements total\n", len(docs[0].elements[charlist].children), docs[0].element_count)
+	printf("Found `<charlist>` with %v children, %v elements total\n", len(docs[0].elements[charlist].value), docs[0].element_count)
 
-	crc32 := doc_hash(docs[0])
-	printf("[%v] CRC32: 0x%08x\n", "🎉" if crc32 == 0xcaa042b9 else "🤬", crc32)
+	crc32 := doc_hash(docs[0], false)
+	printf("[%v] CRC32: 0x%08x\n", "🎉" if crc32 == 0x420dbac5 else "🤬", crc32)
 
 	for round in 0.. (id: Element_ID) {
 	cur := doc.element_count
 	doc.element_count += 1
-
 	return cur
 }
\ No newline at end of file
diff --git a/core/text/i18n/i18n.odin b/core/text/i18n/i18n.odin
index 8513f30c8..151f9e129 100644
--- a/core/text/i18n/i18n.odin
+++ b/core/text/i18n/i18n.odin
@@ -71,6 +71,8 @@ Error :: enum {
 	TS_File_Expected_Source,
 	TS_File_Expected_Translation,
 	TS_File_Expected_NumerusForm,
+	Bad_Str,
+	Bad_Id,
 }
diff --git a/core/text/i18n/qt_linguist.odin b/core/text/i18n/qt_linguist.odin
index e7c1f9974..f4d2d78d6 100644
--- a/core/text/i18n/qt_linguist.odin
+++ b/core/text/i18n/qt_linguist.odin
@@ -30,10 +30,26 @@ TS_XML_Options := xml.Options{
 parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTIONS, pluralizer: proc(int) -> int = nil, allocator := context.allocator) -> (translation: ^Translation, err: Error) {
 	context.allocator = allocator
 
+	get_str :: proc(val: xml.Value) -> (str: string, err: Error) {
+		v, ok := val.(string)
+		if ok {
+			return v, .None
+		}
+		return "", .Bad_Str
+	}
+
+	get_id :: proc(val: xml.Value) -> (str: xml.Element_ID, err: Error) {
+		v, ok := val.(xml.Element_ID)
+		if ok {
+			return v, .None
+		}
+		return 0, .Bad_Id
+	}
+
 	ts, xml_err := xml.parse(data, TS_XML_Options)
 	defer xml.destroy(ts)
 
-	if xml_err != .None || ts.element_count < 1 || ts.elements[0].ident != "TS" || len(ts.elements[0].children) == 0 {
+	if xml_err != .None || ts.element_count < 1 || ts.elements[0].ident != "TS" || len(ts.elements[0].value) == 0 {
 		return nil, .TS_File_Parse_Error
 	}
@@ -46,10 +62,12 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 
 	section: ^Section
 
-	for child_id in ts.elements[0].children {
+	for value in ts.elements[0].value {
+		child_id := get_id(value) or_return
+
 		// These should be <context>s.
- child := ts.elements[child_id] - if child.ident != "context" { + + if ts.elements[child_id].ident != "context" { return translation, .TS_File_Expected_Context } @@ -61,7 +79,8 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI section_name, _ := strings.intern_get(&translation.intern, "") if !options.merge_sections { - section_name, _ = strings.intern_get(&translation.intern, ts.elements[section_name_id].value) + value_text := get_str(ts.elements[section_name_id].value[0]) or_return + section_name, _ = strings.intern_get(&translation.intern, value_text) } if section_name not_in translation.k_v { @@ -92,8 +111,14 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI return translation, .TS_File_Expected_Translation } - source, _ := strings.intern_get(&translation.intern, ts.elements[source_id].value) - xlat, _ := strings.intern_get(&translation.intern, ts.elements[translation_id].value) + source := get_str(ts.elements[source_id].value[0]) or_return + source, _ = strings.intern_get(&translation.intern, source) + + xlat := "" + if !has_plurals { + xlat = get_str(ts.elements[translation_id].value[0]) or_return + xlat, _ = strings.intern_get(&translation.intern, xlat) + } if source in section { return translation, .Duplicate_Key @@ -124,7 +149,8 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI if !numerus_found { break } - numerus, _ := strings.intern_get(&translation.intern, ts.elements[numerus_id].value) + numerus := get_str(ts.elements[numerus_id].value[0]) or_return + numerus, _ = strings.intern_get(&translation.intern, numerus) section[source][num_plurals] = numerus num_plurals += 1 diff --git a/tests/core/encoding/xml/test_core_xml.odin b/tests/core/encoding/xml/test_core_xml.odin index 3cfc75a65..a05db377a 100644 --- a/tests/core/encoding/xml/test_core_xml.odin +++ b/tests/core/encoding/xml/test_core_xml.odin @@ -47,7 +47,7 @@ TESTS :: []TEST{ }, expected_doctype = "恥ずべきフクロウ", }, - crc32 = 0x30d82264, + crc32 = 0xe9b62f03, }, { @@ -62,7 +62,7 @@ TESTS :: []TEST{ }, expected_doctype = "恥ずべきフクロウ", }, - crc32 = 0xad31d8e8, + crc32 = 0x9c2643ed, }, { @@ -77,7 +77,7 @@ TESTS :: []TEST{ }, expected_doctype = "TS", }, - crc32 = 0x7bce2630, + crc32 = 0x859b7443, }, { @@ -92,7 +92,7 @@ TESTS :: []TEST{ }, expected_doctype = "xliff", }, - crc32 = 0x43f19d61, + crc32 = 0x3deaf329, }, { @@ -107,7 +107,7 @@ TESTS :: []TEST{ }, expected_doctype = "xliff", }, - crc32 = 0x961e7635, + crc32 = 0x0c55e287, }, { @@ -118,7 +118,7 @@ TESTS :: []TEST{ }, expected_doctype = "html", }, - crc32 = 0x573c1033, + crc32 = 0x05373317, }, { @@ -129,7 +129,7 @@ TESTS :: []TEST{ }, expected_doctype = "html", }, - crc32 = 0x82588917, + crc32 = 0x3b6d4a90, }, { @@ -140,7 +140,7 @@ TESTS :: []TEST{ }, expected_doctype = "html", }, - crc32 = 0x5e74d8a6, + crc32 = 0x5be2ffdc, }, /* @@ -170,7 +170,7 @@ TESTS :: []TEST{ expected_doctype = "", }, err = .None, - crc32 = 0xcaa042b9, + crc32 = 0x420dbac5, }, } @@ -260,19 +260,21 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) { if element.kind == .Element { wprintf(writer, "<%v>\n", element.ident) - if len(element.value) > 0 { - tab(writer, indent + 1) - wprintf(writer, "[Value] %v\n", element.value) + + for value in element.value { + switch v in value { + case string: + tab(writer, indent + 1) + wprintf(writer, "[Value] %v\n", v) + case xml.Element_ID: + print_element(writer, doc, v, indent + 1) + } } for attr in element.attribs { tab(writer, indent + 1) 
wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) } - - for child in element.children { - print_element(writer, doc, child, indent + 1) - } } else if element.kind == .Comment { wprintf(writer, "[COMMENT] %v\n", element.value) } From 60e509b1e066da14461b3832307065726e651153 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 11:09:19 +0100 Subject: [PATCH 05/20] Add separate `-vet` flags; `-vet-using-*` flags; `//+vet` file flags --- examples/demo/demo.odin | 1 + src/build_settings.cpp | 35 ++++++++++++++++-- src/check_decl.cpp | 2 +- src/check_expr.cpp | 6 ++-- src/check_stmt.cpp | 6 ++++ src/check_type.cpp | 6 ++++ src/checker.cpp | 24 ++++++++----- src/checker.hpp | 7 ++++ src/main.cpp | 57 +++++++++++++++++++++++++---- src/parser.cpp | 80 +++++++++++++++++++++++++++++++++++++++++ src/parser.hpp | 2 ++ 11 files changed, 204 insertions(+), 22 deletions(-) diff --git a/examples/demo/demo.odin b/examples/demo/demo.odin index 7c98ca728..5f1e84bbf 100644 --- a/examples/demo/demo.odin +++ b/examples/demo/demo.odin @@ -1,3 +1,4 @@ +//+vet !using-stmt !using-param package main import "core:fmt" diff --git a/src/build_settings.cpp b/src/build_settings.cpp index 866631f9a..f234ff2ce 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -216,6 +216,37 @@ enum BuildPath : u8 { BuildPathCOUNT, }; +enum VetFlags : u64 { + VetFlag_NONE = 0, + VetFlag_Unused = 1u<<0, + VetFlag_Shadowing = 1u<<1, + VetFlag_UsingStmt = 1u<<2, + VetFlag_UsingParam = 1u<<3, + + VetFlag_Extra = 1u<<16, + + VetFlag_All = VetFlag_Unused|VetFlag_Shadowing|VetFlag_UsingStmt, // excluding extra + + VetFlag_Using = VetFlag_UsingStmt|VetFlag_UsingParam, +}; + +u64 get_vet_flag_from_name(String const &name) { + if (name == "unused") { + return VetFlag_Unused; + } else if (name == "shadowing") { + return VetFlag_Shadowing; + } else if (name == "using-stmt") { + return VetFlag_UsingStmt; + } else if (name == "using-param") { + return VetFlag_UsingParam; + } else if (name == "extra") { + return VetFlag_Extra; + } + return VetFlag_NONE; +} + + + // This stores the information for the specify architecture of this build struct BuildContext { // Constants @@ -255,6 +286,8 @@ struct BuildContext { String resource_filepath; String pdb_filepath; + u64 vet_flags; + bool has_resource; String link_flags; String extra_linker_flags; @@ -280,8 +313,6 @@ struct BuildContext { bool no_entry_point; bool no_thread_local; bool use_lld; - bool vet; - bool vet_extra; bool cross_compiling; bool different_os; bool keep_object_files; diff --git a/src/check_decl.cpp b/src/check_decl.cpp index 2b2fb867c..3dca7aafa 100644 --- a/src/check_decl.cpp +++ b/src/check_decl.cpp @@ -1636,7 +1636,7 @@ gb_internal bool check_proc_body(CheckerContext *ctx_, Token token, DeclInfo *de } check_close_scope(ctx); - check_scope_usage(ctx->checker, ctx->scope); + check_scope_usage(ctx->checker, ctx->scope, check_vet_flags(ctx)); add_deps_from_child_to_parent(decl); diff --git a/src/check_expr.cpp b/src/check_expr.cpp index 98154f33d..fe389e027 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -3099,7 +3099,7 @@ gb_internal void check_cast(CheckerContext *c, Operand *x, Type *type) { update_untyped_expr_type(c, x->expr, final_type, true); } - if (build_context.vet_extra) { + if (check_vet_flags(c) & VetFlag_Extra) { if (are_types_identical(x->type, type)) { gbString str = type_to_string(type); warning(x->expr, "Unneeded cast to the same type '%s'", str); @@ -3171,7 +3171,7 @@ gb_internal bool check_transmute(CheckerContext *c, Ast *node, 
Operand *o, Type return false; } - if (build_context.vet_extra) { + if (check_vet_flags(c) & VetFlag_Extra) { if (are_types_identical(o->type, dst_t)) { gbString str = type_to_string(dst_t); warning(o->expr, "Unneeded transmute to the same type '%s'", str); @@ -10028,7 +10028,7 @@ gb_internal ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast Type *type = type_of_expr(ac->expr); check_cast(c, o, type_hint); if (is_type_typed(type) && are_types_identical(type, type_hint)) { - if (build_context.vet_extra) { + if (check_vet_flags(c) & VetFlag_Extra) { error(node, "Redundant 'auto_cast' applied to expression"); } } diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index a15977b7d..2c1ee8331 100644 --- a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -2464,6 +2464,12 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags) error(us->token, "Empty 'using' list"); return; } + if (check_vet_flags(ctx) & VetFlag_UsingStmt) { + ERROR_BLOCK(); + error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied"); + error_line("\t'using' is considered bad practice to use as a statement outside of immediate refactoring\n"); + } + for (Ast *expr : us->list) { expr = unparen_expr(expr); Entity *e = nullptr; diff --git a/src/check_type.cpp b/src/check_type.cpp index a68f83ba9..c52f32f1a 100644 --- a/src/check_type.cpp +++ b/src/check_type.cpp @@ -1474,6 +1474,12 @@ gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_para Type *specialization = nullptr; bool is_using = (p->flags&FieldFlag_using) != 0; + if ((build_context.vet_flags & VetFlag_UsingParam) && is_using) { + ERROR_BLOCK(); + error(param, "'using' on a procedure parameter is now allowed when '-vet' or '-vet-using-stmt' is applied"); + error_line("\t'using' is considered bad practice to use as a statement/procedure parameter outside of immediate refactoring\n"); + + } if (type_expr == nullptr) { param_value = handle_parameter_value(ctx, nullptr, &type, default_value, true); diff --git a/src/checker.cpp b/src/checker.cpp index 2a2cb5c42..a6b66f809 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -655,9 +655,9 @@ gb_internal bool check_vet_unused(Checker *c, Entity *e, VettedEntity *ve) { return false; } -gb_internal void check_scope_usage(Checker *c, Scope *scope) { - bool vet_unused = true; - bool vet_shadowing = true; +gb_internal void check_scope_usage(Checker *c, Scope *scope, u64 vet_flags) { + bool vet_unused = (vet_flags & VetFlag_Unused) != 0; + bool vet_shadowing = (vet_flags & (VetFlag_Shadowing|VetFlag_Using)) != 0; Array vetted_entities = {}; array_init(&vetted_entities, heap_allocator()); @@ -691,15 +691,17 @@ gb_internal void check_scope_usage(Checker *c, Scope *scope) { if (ve.kind == VettedEntity_Shadowed_And_Unused) { error(e->token, "'%.*s' declared but not used, possibly shadows declaration at line %d", LIT(name), other->token.pos.line); - } else if (build_context.vet) { + } else if (vet_flags) { switch (ve.kind) { case VettedEntity_Unused: - error(e->token, "'%.*s' declared but not used", LIT(name)); + if (vet_flags & VetFlag_Unused) { + error(e->token, "'%.*s' declared but not used", LIT(name)); + } break; case VettedEntity_Shadowed: - if (e->flags&EntityFlag_Using) { + if ((vet_flags & (VetFlag_Shadowing|VetFlag_Using)) != 0 && e->flags&EntityFlag_Using) { error(e->token, "Declaration of '%.*s' from 'using' shadows declaration at line %d", LIT(name), other->token.pos.line); - } else { + } else if ((vet_flags & 
(VetFlag_Shadowing)) != 0) { error(e->token, "Declaration of '%.*s' shadows declaration at line %d", LIT(name), other->token.pos.line); } break; @@ -726,7 +728,7 @@ gb_internal void check_scope_usage(Checker *c, Scope *scope) { if (child->flags & (ScopeFlag_Proc|ScopeFlag_Type|ScopeFlag_File)) { // Ignore these } else { - check_scope_usage(c, child); + check_scope_usage(c, child, vet_flags); } } } @@ -5952,7 +5954,11 @@ gb_internal void check_parsed_files(Checker *c) { TIME_SECTION("check scope usage"); for (auto const &entry : c->info.files) { AstFile *f = entry.value; - check_scope_usage(c, f->scope); + u64 vet_flags = build_context.vet_flags; + if (f->vet_flags_set) { + vet_flags = f->vet_flags; + } + check_scope_usage(c, f->scope, vet_flags); } TIME_SECTION("add basic type information"); diff --git a/src/checker.hpp b/src/checker.hpp index b06d0a8f9..12090cbca 100644 --- a/src/checker.hpp +++ b/src/checker.hpp @@ -449,6 +449,13 @@ struct CheckerContext { Ast *assignment_lhs_hint; }; +u64 check_vet_flags(CheckerContext *c) { + if (c->file && c->file->vet_flags_set) { + return c->file->vet_flags; + } + return build_context.vet_flags; +} + struct Checker { Parser * parser; diff --git a/src/main.cpp b/src/main.cpp index db2702b19..1802e2984 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -654,6 +654,10 @@ enum BuildFlagKind { BuildFlag_NoThreadedChecker, BuildFlag_ShowDebugMessages, BuildFlag_Vet, + BuildFlag_VetShadowing, + BuildFlag_VetUnused, + BuildFlag_VetUsingStmt, + BuildFlag_VetUsingParam, BuildFlag_VetExtra, BuildFlag_IgnoreUnknownAttributes, BuildFlag_ExtraLinkerFlags, @@ -830,8 +834,14 @@ gb_internal bool parse_build_flags(Array args) { add_flag(&build_flags, BuildFlag_UseSeparateModules, str_lit("use-separate-modules"), BuildFlagParam_None, Command__does_build); add_flag(&build_flags, BuildFlag_NoThreadedChecker, str_lit("no-threaded-checker"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_ShowDebugMessages, str_lit("show-debug-messages"), BuildFlagParam_None, Command_all); + add_flag(&build_flags, BuildFlag_Vet, str_lit("vet"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetUnused, str_lit("vet-unused"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetShadowing, str_lit("vet-shadowing"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetUsingStmt, str_lit("vet-using-stmt"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetUsingParam, str_lit("vet-using-param"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_VetExtra, str_lit("vet-extra"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_IgnoreUnknownAttributes, str_lit("ignore-unknown-attributes"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_ExtraLinkerFlags, str_lit("extra-linker-flags"), BuildFlagParam_String, Command__does_build); add_flag(&build_flags, BuildFlag_ExtraAssemblerFlags, str_lit("extra-assembler-flags"), BuildFlagParam_String, Command__does_build); @@ -1362,13 +1372,23 @@ gb_internal bool parse_build_flags(Array args) { build_context.show_debug_messages = true; break; case BuildFlag_Vet: - build_context.vet = true; + if (build_context.vet_flags & VetFlag_Extra) { + build_context.vet_flags |= VetFlag_All; + } else { + build_context.vet_flags &= ~VetFlag_Extra; + build_context.vet_flags |= VetFlag_All; + } break; - case BuildFlag_VetExtra: { - build_context.vet = 
true; - build_context.vet_extra = true; + + case BuildFlag_VetUnused: build_context.vet_flags |= VetFlag_Unused; break; + case BuildFlag_VetShadowing: build_context.vet_flags |= VetFlag_Shadowing; break; + case BuildFlag_VetUsingStmt: build_context.vet_flags |= VetFlag_UsingStmt; break; + case BuildFlag_VetUsingParam: build_context.vet_flags |= VetFlag_UsingParam; break; + + case BuildFlag_VetExtra: + build_context.vet_flags = VetFlag_All | VetFlag_Extra; break; - } + case BuildFlag_IgnoreUnknownAttributes: build_context.ignore_unknown_attributes = true; break; @@ -2124,19 +2144,42 @@ gb_internal void print_show_help(String const arg0, String const &command) { print_usage_line(2, "Multithread the semantic checker stage"); print_usage_line(0, ""); #endif + } + if (check) { print_usage_line(1, "-vet"); print_usage_line(2, "Do extra checks on the code"); print_usage_line(2, "Extra checks include:"); - print_usage_line(3, "Variable shadowing within procedures"); - print_usage_line(3, "Unused declarations"); + print_usage_line(2, "-vet-unused"); + print_usage_line(2, "-vet-shadowing"); + print_usage_line(2, "-vet-using-stmt"); + print_usage_line(0, ""); + + print_usage_line(1, "-vet-unused"); + print_usage_line(2, "Checks for unused declarations"); + print_usage_line(0, ""); + + print_usage_line(1, "-vet-shadowing"); + print_usage_line(2, "Checks for variable shadowing within procedures"); + print_usage_line(0, ""); + + print_usage_line(1, "-vet-using-stmt"); + print_usage_line(2, "Checks for the use of 'using' as a statement"); + print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring"); + print_usage_line(0, ""); + + print_usage_line(1, "-vet-using-param"); + print_usage_line(2, "Checks for the use of 'using' on procedure parameters"); + print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring"); print_usage_line(0, ""); print_usage_line(1, "-vet-extra"); print_usage_line(2, "Do even more checks than standard vet on the code"); print_usage_line(2, "To treat the extra warnings as errors, use -warnings-as-errors"); print_usage_line(0, ""); + } + if (check) { print_usage_line(1, "-ignore-unknown-attributes"); print_usage_line(2, "Ignores unknown attributes"); print_usage_line(2, "This can be used with metaprogramming tools"); diff --git a/src/parser.cpp b/src/parser.cpp index b756412ff..b99182189 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -5528,6 +5528,83 @@ gb_internal bool parse_build_tag(Token token_for_pos, String s) { return any_correct; } +gb_internal String vet_tag_get_token(String s, String *out) { + s = string_trim_whitespace(s); + isize n = 0; + while (n < s.len) { + Rune rune = 0; + isize width = utf8_decode(&s[n], s.len-n, &rune); + if (n == 0 && rune == '!') { + + } else if (!rune_is_letter(rune) && !rune_is_digit(rune) && rune != '-') { + isize k = gb_max(gb_max(n, width), 1); + *out = substring(s, k, s.len); + return substring(s, 0, k); + } + n += width; + } + out->len = 0; + return s; +} + + +gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { + String const prefix = str_lit("+vet"); + GB_ASSERT(string_starts_with(s, prefix)); + s = string_trim_whitespace(substring(s, prefix.len, s.len)); + + if (s.len == 0) { + return VetFlag_All; + } + + + u64 vet_flags = 0; + u64 vet_not_flags = 0; + + while (s.len > 0) { + String p = string_trim_whitespace(vet_tag_get_token(s, &s)); + if (p.len == 0) break; + + bool is_notted = false; + if (p[0] == '!') { + is_notted = true; + p = substring(p, 1, p.len); + if 
(p.len == 0) { + syntax_error(token_for_pos, "Expected a vet flag name after '!'"); + break; + } + } + + if (p.len == 0) { + continue; + } + + u64 flag = get_vet_flag_from_name(p); + if (flag != VetFlag_NONE) { + if (is_notted) { + vet_not_flags |= flag; + } else { + vet_flags |= flag; + } + } else { + ERROR_BLOCK(); + syntax_error(token_for_pos, "Invalid vet flag name: %.*s", LIT(p)); + error_line("\tExpected one of the following\n"); + error_line("\tunused\n"); + error_line("\tshadowing\n"); + error_line("\tusing-stmt\n"); + error_line("\tusing-param\n"); + error_line("\textra\n"); + break; + } + } + + if (vet_flags == 0 && vet_not_flags != 0) { + vet_flags = VetFlag_All; + } + return vet_flags &~ vet_not_flags; +} + gb_internal String dir_from_path(String path) { String base_dir = path; for (isize i = path.len-1; i >= 0; i--) { @@ -5679,6 +5756,9 @@ gb_internal bool parse_file(Parser *p, AstFile *f) { if (!parse_build_tag(tok, lc)) { return false; } + } else if (string_starts_with(lc, str_lit("+vet"))) { + f->vet_flags = parse_vet_tag(tok, lc); + f->vet_flags_set = true; } else if (string_starts_with(lc, str_lit("+ignore"))) { return false; } else if (string_starts_with(lc, str_lit("+private"))) { diff --git a/src/parser.hpp b/src/parser.hpp index 900fddbab..fa169d3ad 100644 --- a/src/parser.hpp +++ b/src/parser.hpp @@ -104,6 +104,8 @@ struct AstFile { Token package_token; String package_name; + u64 vet_flags; + bool vet_flags_set; // >= 0: In Expression // < 0: In Control Clause From 0de7df9eab9b256e0d1c8da7c9fc8c422c5ac1a7 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 11:37:14 +0100 Subject: [PATCH 06/20] Improve `//+vet`; remove `using` in many places; add `//+vet !using-stmt` where necessary --- core/encoding/entity/entity.odin | 37 +++--- core/encoding/xml/debug_print.odin | 30 +++-- core/image/netpbm/netpbm.odin | 1 + core/image/png/helpers.odin | 7 +- core/image/png/png.odin | 16 +-- core/net/url.odin | 70 ++++++------ core/odin/printer/visit.odin | 178 ++++++++++++++--------------- src/build_settings.cpp | 8 +- src/check_decl.cpp | 4 +- src/check_expr.cpp | 6 +- src/check_stmt.cpp | 4 +- src/checker.cpp | 22 ++++ src/checker.hpp | 8 +- src/parser.cpp | 23 ++-- 14 files changed, 212 insertions(+), 202 deletions(-) diff --git a/core/encoding/entity/entity.odin b/core/encoding/entity/entity.odin index 694fcdffc..ec640c69f 100644 --- a/core/encoding/entity/entity.odin +++ b/core/encoding/entity/entity.odin @@ -184,28 +184,26 @@ decode_xml :: proc(input: string, options := XML_Decode_Options{}, allocator := advance :: proc(t: ^Tokenizer) -> (err: Error) { if t == nil { return .Tokenizer_Is_Nil } - using t - #no_bounds_check { - if read_offset < len(src) { - offset = read_offset - r, w = rune(src[read_offset]), 1 + if t.read_offset < len(t.src) { + t.offset = t.read_offset + t.r, t.w = rune(t.src[t.read_offset]), 1 switch { - case r == 0: + case t.r == 0: return .Illegal_NUL_Character - case r >= utf8.RUNE_SELF: - r, w = utf8.decode_rune_in_string(src[read_offset:]) - if r == utf8.RUNE_ERROR && w == 1 { + case t.r >= utf8.RUNE_SELF: + t.r, t.w = utf8.decode_rune_in_string(t.src[t.read_offset:]) + if t.r == utf8.RUNE_ERROR && t.w == 1 { return .Illegal_UTF_Encoding - } else if r == utf8.RUNE_BOM && offset > 0 { + } else if t.r == utf8.RUNE_BOM && t.offset > 0 { return .Illegal_BOM } } - read_offset += w + t.read_offset += t.w return .None } else { - offset = len(src) - r = -1 + t.offset = len(t.src) + t.r = -1 return } } @@ -273,26 +271,25 @@ _extract_xml_entity :: 
proc(t: ^Tokenizer) -> (entity: string, err: Error) { All of these would be in the ASCII range. Even if one is not, it doesn't matter. All characters we need to compare to extract are. */ - using t length := len(t.src) found := false #no_bounds_check { - for read_offset < length { - if src[read_offset] == ';' { + for t.read_offset < length { + if t.src[t.read_offset] == ';' { + t.read_offset += 1 found = true - read_offset += 1 break } - read_offset += 1 + t.read_offset += 1 } } if found { - return string(src[offset + 1 : read_offset - 1]), .None + return string(t.src[t.offset + 1 : t.read_offset - 1]), .None } - return string(src[offset : read_offset]), .Invalid_Entity_Encoding + return string(t.src[t.offset : t.read_offset]), .Invalid_Entity_Encoding } /* diff --git a/core/encoding/xml/debug_print.odin b/core/encoding/xml/debug_print.odin index 0736e8893..b97617a8a 100644 --- a/core/encoding/xml/debug_print.odin +++ b/core/encoding/xml/debug_print.odin @@ -19,43 +19,39 @@ import "core:fmt" */ print :: proc(writer: io.Writer, doc: ^Document) -> (written: int, err: io.Error) { if doc == nil { return } - using fmt - - written += wprintf(writer, "[XML Prolog]\n") + written += fmt.wprintf(writer, "[XML Prolog]\n") for attr in doc.prologue { - written += wprintf(writer, "\t%v: %v\n", attr.key, attr.val) + written += fmt.wprintf(writer, "\t%v: %v\n", attr.key, attr.val) } - written += wprintf(writer, "[Encoding] %v\n", doc.encoding) + written += fmt.wprintf(writer, "[Encoding] %v\n", doc.encoding) if len(doc.doctype.ident) > 0 { - written += wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident) + written += fmt.wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident) if len(doc.doctype.rest) > 0 { - wprintf(writer, "\t%v\n", doc.doctype.rest) + fmt.wprintf(writer, "\t%v\n", doc.doctype.rest) } } for comment in doc.comments { - written += wprintf(writer, "[Pre-root comment] %v\n", comment) + written += fmt.wprintf(writer, "[Pre-root comment] %v\n", comment) } if len(doc.elements) > 0 { - wprintln(writer, " --- ") + fmt.wprintln(writer, " --- ") print_element(writer, doc, 0) - wprintln(writer, " --- ") + fmt.wprintln(writer, " --- ") } return written, .None } print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, indent := 0) -> (written: int, err: io.Error) { - using fmt - tab :: proc(writer: io.Writer, indent: int) { for _ in 0..=indent { - wprintf(writer, "\t") + fmt.wprintf(writer, "\t") } } @@ -64,13 +60,13 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, element := doc.elements[element_id] if element.kind == .Element { - wprintf(writer, "<%v>\n", element.ident) + fmt.wprintf(writer, "<%v>\n", element.ident) for value in element.value { switch v in value { case string: tab(writer, indent + 1) - wprintf(writer, "[Value] %v\n", v) + fmt.wprintf(writer, "[Value] %v\n", v) case Element_ID: print_element(writer, doc, v, indent + 1) } @@ -78,10 +74,10 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, for attr in element.attribs { tab(writer, indent + 1) - wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) + fmt.wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) } } else if element.kind == .Comment { - wprintf(writer, "[COMMENT] %v\n", element.value) + fmt.wprintf(writer, "[COMMENT] %v\n", element.value) } return written, .None diff --git a/core/image/netpbm/netpbm.odin b/core/image/netpbm/netpbm.odin index 74e482cb4..24df76c8e 100644 --- a/core/image/netpbm/netpbm.odin +++ 
b/core/image/netpbm/netpbm.odin @@ -1,3 +1,4 @@ +//+vet !using-stmt package netpbm import "core:bytes" diff --git a/core/image/png/helpers.odin b/core/image/png/helpers.odin index 889b3cb6b..f0209d4d7 100644 --- a/core/image/png/helpers.odin +++ b/core/image/png/helpers.odin @@ -80,11 +80,10 @@ time :: proc(c: image.PNG_Chunk) -> (res: tIME, ok: bool) { } core_time :: proc(c: image.PNG_Chunk) -> (t: coretime.Time, ok: bool) { - if png_time, png_ok := time(c); png_ok { - using png_time + if t, png_ok := time(c); png_ok { return coretime.datetime_to_time( - int(year), int(month), int(day), - int(hour), int(minute), int(second), + int(t.year), int(t.month), int(t.day), + int(t.hour), int(t.minute), int(t.second), ) } else { return {}, false diff --git a/core/image/png/png.odin b/core/image/png/png.odin index caa1e6e8a..1821e55cd 100644 --- a/core/image/png/png.odin +++ b/core/image/png/png.odin @@ -11,6 +11,7 @@ // package png implements a PNG image reader // // The PNG specification is at https://www.w3.org/TR/PNG/. +//+vet !using-stmt package png import "core:compress" @@ -444,15 +445,14 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a img.width = int(header.width) img.height = int(header.height) - using header h := image.PNG_IHDR{ - width = width, - height = height, - bit_depth = bit_depth, - color_type = color_type, - compression_method = compression_method, - filter_method = filter_method, - interlace_method = interlace_method, + width = header.width, + height = header.height, + bit_depth = header.bit_depth, + color_type = header.color_type, + compression_method = header.compression_method, + filter_method = header.filter_method, + interlace_method = header.interlace_method, } info.header = h diff --git a/core/net/url.odin b/core/net/url.odin index ed39f7859..ef43d6c9f 100644 --- a/core/net/url.odin +++ b/core/net/url.odin @@ -63,100 +63,100 @@ split_url :: proc(url: string, allocator := context.allocator) -> (scheme, host, } join_url :: proc(scheme, host, path: string, queries: map[string]string, allocator := context.allocator) -> string { - using strings + b := strings.builder_make(allocator) + strings.builder_grow(&b, len(scheme) + 3 + len(host) + 1 + len(path)) - b := builder_make(allocator) - builder_grow(&b, len(scheme) + 3 + len(host) + 1 + len(path)) - - write_string(&b, scheme) - write_string(&b, "://") - write_string(&b, trim_space(host)) + strings.write_string(&b, scheme) + strings.write_string(&b, "://") + strings.write_string(&b, strings.trim_space(host)) if path != "" { - if path[0] != '/' do write_string(&b, "/") - write_string(&b, trim_space(path)) + if path[0] != '/' { + strings.write_string(&b, "/") + } + strings.write_string(&b, strings.trim_space(path)) } query_length := len(queries) - if query_length > 0 do write_string(&b, "?") + if query_length > 0 { + strings.write_string(&b, "?") + } i := 0 for query_name, query_value in queries { - write_string(&b, query_name) + strings.write_string(&b, query_name) if query_value != "" { - write_string(&b, "=") - write_string(&b, query_value) + strings.write_string(&b, "=") + strings.write_string(&b, query_value) } if i < query_length - 1 { - write_string(&b, "&") + strings.write_string(&b, "&") } i += 1 } - return to_string(b) + return strings.to_string(b) } percent_encode :: proc(s: string, allocator := context.allocator) -> string { - using strings - - b := builder_make(allocator) - builder_grow(&b, len(s) + 16) // NOTE(tetra): A reasonable number to allow for the number of things we need to 
escape. + b := strings.builder_make(allocator) + strings.builder_grow(&b, len(s) + 16) // NOTE(tetra): A reasonable number to allow for the number of things we need to escape. for ch in s { switch ch { case 'A'..='Z', 'a'..='z', '0'..='9', '-', '_', '.', '~': - write_rune(&b, ch) + strings.write_rune(&b, ch) case: bytes, n := utf8.encode_rune(ch) for byte in bytes[:n] { buf: [2]u8 = --- t := strconv.append_int(buf[:], i64(byte), 16) - write_rune(&b, '%') - write_string(&b, t) + strings.write_rune(&b, '%') + strings.write_string(&b, t) } } } - return to_string(b) + return strings.to_string(b) } percent_decode :: proc(encoded_string: string, allocator := context.allocator) -> (decoded_string: string, ok: bool) { - using strings - - b := builder_make(allocator) - builder_grow(&b, len(encoded_string)) - defer if !ok do builder_destroy(&b) + b := strings.builder_make(allocator) + strings.builder_grow(&b, len(encoded_string)) + defer if !ok do strings.builder_destroy(&b) s := encoded_string for len(s) > 0 { - i := index_byte(s, '%') + i := strings.index_byte(s, '%') if i == -1 { - write_string(&b, s) // no '%'s; the string is already decoded + strings.write_string(&b, s) // no '%'s; the string is already decoded break } - write_string(&b, s[:i]) + strings.write_string(&b, s[:i]) s = s[i:] if len(s) == 0 do return // percent without anything after it s = s[1:] if s[0] == '%' { - write_byte(&b, '%') + strings.write_byte(&b, '%') s = s[1:] continue } - if len(s) < 2 do return // percent without encoded value + if len(s) < 2 { + return // percent without encoded value + } val := hex.decode_sequence(s[:2]) or_return - write_byte(&b, val) + strings.write_byte(&b, val) s = s[2:] } ok = true - decoded_string = to_string(b) + decoded_string = strings.to_string(b) return } diff --git a/core/odin/printer/visit.odin b/core/odin/printer/visit.odin index 66166aa81..d3701fcc5 100644 --- a/core/odin/printer/visit.odin +++ b/core/odin/printer/visit.odin @@ -336,22 +336,20 @@ hint_current_line :: proc(p: ^Printer, hint: Line_Type) { @(private) visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { - using ast - if decl == nil { return } #partial switch v in decl.derived_stmt { - case ^Expr_Stmt: + case ^ast.Expr_Stmt: move_line(p, decl.pos) visit_expr(p, v.expr) if p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^When_Stmt: - visit_stmt(p, cast(^Stmt)decl) - case ^Foreign_Import_Decl: + case ^ast.When_Stmt: + visit_stmt(p, cast(^ast.Stmt)decl) + case ^ast.Foreign_Import_Decl: if len(v.attributes) > 0 { sort.sort(sort_attribute(&v.attributes)) move_line(p, v.attributes[0].pos) @@ -370,7 +368,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { for path in v.fullpaths { push_ident_token(p, path, 0) } - case ^Foreign_Block_Decl: + case ^ast.Foreign_Block_Decl: if len(v.attributes) > 0 { sort.sort(sort_attribute(&v.attributes)) move_line(p, v.attributes[0].pos) @@ -383,7 +381,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { visit_expr(p, v.foreign_library) visit_stmt(p, v.body) - case ^Import_Decl: + case ^ast.Import_Decl: move_line(p, decl.pos) if v.name.text != "" { @@ -395,7 +393,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) { push_ident_token(p, v.fullpath, 1) } - case ^Value_Decl: + case ^ast.Value_Decl: if len(v.attributes) > 0 { sort.sort(sort_attribute(&v.attributes)) move_line(p, v.attributes[0].pos) @@ -447,9 +445,9 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, 
called_in_stmt := false) { for value in v.values { #partial switch a in value.derived { - case ^Union_Type, ^Enum_Type, ^Struct_Type: + case ^ast.Union_Type, ^ast.Enum_Type, ^ast.Struct_Type: add_semicolon = false || called_in_stmt - case ^Proc_Lit: + case ^ast.Proc_Lit: add_semicolon = false } } @@ -510,40 +508,38 @@ visit_attributes :: proc(p: ^Printer, attributes: [dynamic]^ast.Attribute) { @(private) visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Generic, empty_block := false, block_stmt := false) { - using ast - if stmt == nil { return } switch v in stmt.derived_stmt { - case ^Bad_Stmt: - case ^Bad_Decl: - case ^Package_Decl: + case ^ast.Bad_Stmt: + case ^ast.Bad_Decl: + case ^ast.Package_Decl: - case ^Empty_Stmt: + case ^ast.Empty_Stmt: push_generic_token(p, .Semicolon, 0) - case ^Tag_Stmt: + case ^ast.Tag_Stmt: push_generic_token(p, .Hash, 1) push_generic_token(p, v.op.kind, 1, v.op.text) visit_stmt(p, v.stmt) - case ^Import_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Import_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Value_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Value_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Foreign_Import_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Foreign_Import_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Foreign_Block_Decl: - visit_decl(p, cast(^Decl)stmt, true) + case ^ast.Foreign_Block_Decl: + visit_decl(p, cast(^ast.Decl)stmt, true) return - case ^Using_Stmt: + case ^ast.Using_Stmt: move_line(p, v.pos) push_generic_token(p, .Using, 1) @@ -553,7 +549,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^Block_Stmt: + case ^ast.Block_Stmt: move_line(p, v.pos) if v.pos.line == v.end.line { @@ -583,7 +579,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_end_brace(p, v.end) } } - case ^If_Stmt: + case ^ast.If_Stmt: move_line(p, v.pos) if v.label != nil { @@ -606,7 +602,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener uses_do := false - if check_stmt, ok := v.body.derived.(^Block_Stmt); ok && check_stmt.uses_do { + if check_stmt, ok := v.body.derived.(^ast.Block_Stmt); ok && check_stmt.uses_do { uses_do = true } @@ -637,7 +633,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.else_stmt) } - case ^Switch_Stmt: + case ^ast.Switch_Stmt: move_line(p, v.pos) if v.label != nil { @@ -665,7 +661,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_expr(p, v.cond) visit_stmt(p, v.body) - case ^Case_Clause: + case ^ast.Case_Clause: move_line(p, v.pos) if !p.config.indent_cases { @@ -689,7 +685,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if !p.config.indent_cases { indent(p) } - case ^Type_Switch_Stmt: + case ^ast.Type_Switch_Stmt: move_line(p, v.pos) hint_current_line(p, {.Switch_Stmt}) @@ -707,7 +703,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.tag) visit_stmt(p, v.body) - case ^Assign_Stmt: + case ^ast.Assign_Stmt: move_line(p, v.pos) hint_current_line(p, {.Assign}) @@ -721,13 +717,13 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if block_stmt && p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^Expr_Stmt: + case 
^ast.Expr_Stmt: move_line(p, v.pos) visit_expr(p, v.expr) if block_stmt && p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^For_Stmt: + case ^ast.For_Stmt: // this should be simplified move_line(p, v.pos) @@ -764,7 +760,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.body) - case ^Inline_Range_Stmt: + case ^ast.Inline_Range_Stmt: move_line(p, v.pos) if v.label != nil { @@ -790,7 +786,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_expr(p, v.expr) visit_stmt(p, v.body) - case ^Range_Stmt: + case ^ast.Range_Stmt: move_line(p, v.pos) if v.label != nil { @@ -816,7 +812,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_expr(p, v.expr) visit_stmt(p, v.body) - case ^Return_Stmt: + case ^ast.Return_Stmt: move_line(p, v.pos) push_generic_token(p, .Return, 1) @@ -828,7 +824,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if block_stmt && p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^Defer_Stmt: + case ^ast.Defer_Stmt: move_line(p, v.pos) push_generic_token(p, .Defer, 0) @@ -837,7 +833,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener if p.config.semicolons { push_generic_token(p, .Semicolon, 0) } - case ^When_Stmt: + case ^ast.When_Stmt: move_line(p, v.pos) push_generic_token(p, .When, 1) visit_expr(p, v.cond) @@ -857,7 +853,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener visit_stmt(p, v.else_stmt) } - case ^Branch_Stmt: + case ^ast.Branch_Stmt: move_line(p, v.pos) push_generic_token(p, v.tok.kind, 0) @@ -921,8 +917,6 @@ push_poly_params :: proc(p: ^Printer, poly_params: ^ast.Field_List) { @(private) visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { - using ast - if expr == nil { return } @@ -930,14 +924,14 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { set_source_position(p, expr.pos) switch v in expr.derived_expr { - case ^Bad_Expr: + case ^ast.Bad_Expr: - case ^Tag_Expr: + case ^ast.Tag_Expr: push_generic_token(p, .Hash, 1) push_generic_token(p, v.op.kind, 1, v.op.text) visit_expr(p, v.expr) - case ^Inline_Asm_Expr: + case ^ast.Inline_Asm_Expr: push_generic_token(p, v.tok.kind, 1, v.tok.text) push_generic_token(p, .Open_Paren, 1) @@ -954,42 +948,42 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Comma, 0) visit_expr(p, v.constraints_string) push_generic_token(p, .Close_Brace, 0) - case ^Undef: + case ^ast.Undef: push_generic_token(p, .Undef, 1) - case ^Auto_Cast: + case ^ast.Auto_Cast: push_generic_token(p, v.op.kind, 1) visit_expr(p, v.expr) - case ^Ternary_If_Expr: + case ^ast.Ternary_If_Expr: visit_expr(p, v.x) push_generic_token(p, v.op1.kind, 1) visit_expr(p, v.cond) push_generic_token(p, v.op2.kind, 1) visit_expr(p, v.y) - case ^Ternary_When_Expr: + case ^ast.Ternary_When_Expr: visit_expr(p, v.x) push_generic_token(p, v.op1.kind, 1) visit_expr(p, v.cond) push_generic_token(p, v.op2.kind, 1) visit_expr(p, v.y) - case ^Or_Else_Expr: + case ^ast.Or_Else_Expr: visit_expr(p, v.x) push_generic_token(p, v.token.kind, 1) visit_expr(p, v.y) - case ^Or_Return_Expr: + case ^ast.Or_Return_Expr: visit_expr(p, v.expr) push_generic_token(p, v.token.kind, 1) - case ^Selector_Call_Expr: + case ^ast.Selector_Call_Expr: visit_expr(p, v.call.expr) push_generic_token(p, .Open_Paren, 1) visit_exprs(p, 
v.call.args, {.Add_Comma}) push_generic_token(p, .Close_Paren, 0) - case ^Ellipsis: + case ^ast.Ellipsis: push_generic_token(p, .Ellipsis, 1) visit_expr(p, v.expr) - case ^Relative_Type: + case ^ast.Relative_Type: visit_expr(p, v.tag) visit_expr(p, v.type) - case ^Slice_Expr: + case ^ast.Slice_Expr: visit_expr(p, v.expr) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.low) @@ -999,37 +993,37 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { visit_expr(p, v.high) } push_generic_token(p, .Close_Bracket, 0) - case ^Ident: + case ^ast.Ident: if .Enforce_Poly_Names in options { push_generic_token(p, .Dollar, 1) push_ident_token(p, v.name, 0) } else { push_ident_token(p, v.name, 1) } - case ^Deref_Expr: + case ^ast.Deref_Expr: visit_expr(p, v.expr) push_generic_token(p, v.op.kind, 0) - case ^Type_Cast: + case ^ast.Type_Cast: push_generic_token(p, v.tok.kind, 1) push_generic_token(p, .Open_Paren, 0) visit_expr(p, v.type) push_generic_token(p, .Close_Paren, 0) merge_next_token(p) visit_expr(p, v.expr) - case ^Basic_Directive: + case ^ast.Basic_Directive: push_generic_token(p, v.tok.kind, 1) push_ident_token(p, v.name, 0) - case ^Distinct_Type: + case ^ast.Distinct_Type: push_generic_token(p, .Distinct, 1) visit_expr(p, v.type) - case ^Dynamic_Array_Type: + case ^ast.Dynamic_Array_Type: visit_expr(p, v.tag) push_generic_token(p, .Open_Bracket, 1) push_generic_token(p, .Dynamic, 0) push_generic_token(p, .Close_Bracket, 0) merge_next_token(p) visit_expr(p, v.elem) - case ^Bit_Set_Type: + case ^ast.Bit_Set_Type: push_generic_token(p, .Bit_Set, 1) push_generic_token(p, .Open_Bracket, 0) @@ -1041,7 +1035,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } push_generic_token(p, .Close_Bracket, 0) - case ^Union_Type: + case ^ast.Union_Type: push_generic_token(p, .Union, 1) push_poly_params(p, v.poly_params) @@ -1066,7 +1060,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { visit_exprs(p, v.variants, {.Add_Comma, .Trailing}) visit_end_brace(p, v.end) } - case ^Enum_Type: + case ^ast.Enum_Type: push_generic_token(p, .Enum, 1) hint_current_line(p, {.Enum}) @@ -1089,7 +1083,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } set_source_position(p, v.end) - case ^Struct_Type: + case ^ast.Struct_Type: push_generic_token(p, .Struct, 1) hint_current_line(p, {.Struct}) @@ -1124,7 +1118,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } set_source_position(p, v.end) - case ^Proc_Lit: + case ^ast.Proc_Lit: switch v.inlining { case .None: case .Inline: @@ -1143,16 +1137,16 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { } else { push_generic_token(p, .Undef, 1) } - case ^Proc_Type: + case ^ast.Proc_Type: visit_proc_type(p, v) - case ^Basic_Lit: + case ^ast.Basic_Lit: push_generic_token(p, v.tok.kind, 1, v.tok.text) - case ^Binary_Expr: + case ^ast.Binary_Expr: visit_binary_expr(p, v) - case ^Implicit_Selector_Expr: + case ^ast.Implicit_Selector_Expr: push_generic_token(p, .Period, 1) push_ident_token(p, v.field.name, 0) - case ^Call_Expr: + case ^ast.Call_Expr: visit_expr(p, v.expr) push_format_token(p, @@ -1167,34 +1161,34 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { visit_call_exprs(p, v.args, v.ellipsis.kind == .Ellipsis) push_generic_token(p, .Close_Paren, 0) - case ^Typeid_Type: + case ^ast.Typeid_Type: push_generic_token(p, .Typeid, 1) if v.specialization != nil { 
push_generic_token(p, .Quo, 0) visit_expr(p, v.specialization) } - case ^Selector_Expr: + case ^ast.Selector_Expr: visit_expr(p, v.expr) push_generic_token(p, v.op.kind, 0) visit_expr(p, v.field) - case ^Paren_Expr: + case ^ast.Paren_Expr: push_generic_token(p, .Open_Paren, 1) visit_expr(p, v.expr) push_generic_token(p, .Close_Paren, 0) - case ^Index_Expr: + case ^ast.Index_Expr: visit_expr(p, v.expr) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.index) push_generic_token(p, .Close_Bracket, 0) - case ^Matrix_Index_Expr: + case ^ast.Matrix_Index_Expr: visit_expr(p, v.expr) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.row_index) push_generic_token(p, .Comma, 0) visit_expr(p, v.column_index) push_generic_token(p, .Close_Bracket, 0) - case ^Proc_Group: + case ^ast.Proc_Group: push_generic_token(p, v.tok.kind, 1) if len(v.args) != 0 && v.pos.line != v.args[len(v.args) - 1].pos.line { @@ -1209,7 +1203,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Close_Brace, 0) } - case ^Comp_Lit: + case ^ast.Comp_Lit: if v.type != nil { visit_expr(p, v.type) } @@ -1226,18 +1220,18 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Close_Brace, 0) } - case ^Unary_Expr: + case ^ast.Unary_Expr: push_generic_token(p, v.op.kind, 1) merge_next_token(p) visit_expr(p, v.expr) - case ^Field_Value: + case ^ast.Field_Value: visit_expr(p, v.field) push_generic_token(p, .Eq, 1) visit_expr(p, v.value) - case ^Type_Assertion: + case ^ast.Type_Assertion: visit_expr(p, v.expr) - if unary, ok := v.type.derived.(^Unary_Expr); ok && unary.op.text == "?" { + if unary, ok := v.type.derived.(^ast.Unary_Expr); ok && unary.op.text == "?" { push_generic_token(p, .Period, 0) visit_expr(p, v.type) } else { @@ -1247,13 +1241,13 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { push_generic_token(p, .Close_Paren, 0) } - case ^Pointer_Type: + case ^ast.Pointer_Type: push_generic_token(p, .Pointer, 1) merge_next_token(p) visit_expr(p, v.elem) - case ^Implicit: + case ^ast.Implicit: push_generic_token(p, v.tok.kind, 1) - case ^Poly_Type: + case ^ast.Poly_Type: push_generic_token(p, .Dollar, 1) merge_next_token(p) visit_expr(p, v.type) @@ -1263,28 +1257,28 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) { merge_next_token(p) visit_expr(p, v.specialization) } - case ^Array_Type: + case ^ast.Array_Type: visit_expr(p, v.tag) push_generic_token(p, .Open_Bracket, 1) visit_expr(p, v.len) push_generic_token(p, .Close_Bracket, 0) merge_next_token(p) visit_expr(p, v.elem) - case ^Map_Type: + case ^ast.Map_Type: push_generic_token(p, .Map, 1) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.key) push_generic_token(p, .Close_Bracket, 0) merge_next_token(p) visit_expr(p, v.value) - case ^Helper_Type: + case ^ast.Helper_Type: visit_expr(p, v.type) - case ^Multi_Pointer_Type: + case ^ast.Multi_Pointer_Type: push_generic_token(p, .Open_Bracket, 1) push_generic_token(p, .Pointer, 0) push_generic_token(p, .Close_Bracket, 0) visit_expr(p, v.elem) - case ^Matrix_Type: + case ^ast.Matrix_Type: push_generic_token(p, .Matrix, 1) push_generic_token(p, .Open_Bracket, 0) visit_expr(p, v.row_count) diff --git a/src/build_settings.cpp b/src/build_settings.cpp index f234ff2ce..b46ea10e0 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -218,10 +218,10 @@ enum BuildPath : u8 { enum VetFlags : u64 { VetFlag_NONE = 0, - VetFlag_Unused = 1u<<0, - 
VetFlag_Shadowing = 1u<<1, - VetFlag_UsingStmt = 1u<<2, - VetFlag_UsingParam = 1u<<3, + VetFlag_Unused = 1u<<0, // 1 + VetFlag_Shadowing = 1u<<1, // 2 + VetFlag_UsingStmt = 1u<<2, // 4 + VetFlag_UsingParam = 1u<<3, // 8 VetFlag_Extra = 1u<<16, diff --git a/src/check_decl.cpp b/src/check_decl.cpp index 3dca7aafa..9e96dae1c 100644 --- a/src/check_decl.cpp +++ b/src/check_decl.cpp @@ -1064,7 +1064,7 @@ gb_internal void check_proc_decl(CheckerContext *ctx, Entity *e, DeclInfo *d) { auto *fp = &ctx->info->foreigns; StringHashKey key = string_hash_string(name); Entity **found = string_map_get(fp, key); - if (found) { + if (found && e != *found) { Entity *f = *found; TokenPos pos = f->token.pos; Type *this_type = base_type(e->type); @@ -1636,7 +1636,7 @@ gb_internal bool check_proc_body(CheckerContext *ctx_, Token token, DeclInfo *de } check_close_scope(ctx); - check_scope_usage(ctx->checker, ctx->scope, check_vet_flags(ctx)); + check_scope_usage(ctx->checker, ctx->scope, check_vet_flags(body)); add_deps_from_child_to_parent(decl); diff --git a/src/check_expr.cpp b/src/check_expr.cpp index fe389e027..f9c62b506 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -3099,7 +3099,7 @@ gb_internal void check_cast(CheckerContext *c, Operand *x, Type *type) { update_untyped_expr_type(c, x->expr, final_type, true); } - if (check_vet_flags(c) & VetFlag_Extra) { + if (check_vet_flags(x->expr) & VetFlag_Extra) { if (are_types_identical(x->type, type)) { gbString str = type_to_string(type); warning(x->expr, "Unneeded cast to the same type '%s'", str); @@ -3171,7 +3171,7 @@ gb_internal bool check_transmute(CheckerContext *c, Ast *node, Operand *o, Type return false; } - if (check_vet_flags(c) & VetFlag_Extra) { + if (check_vet_flags(node) & VetFlag_Extra) { if (are_types_identical(o->type, dst_t)) { gbString str = type_to_string(dst_t); warning(o->expr, "Unneeded transmute to the same type '%s'", str); @@ -10028,7 +10028,7 @@ gb_internal ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast Type *type = type_of_expr(ac->expr); check_cast(c, o, type_hint); if (is_type_typed(type) && are_types_identical(type, type_hint)) { - if (check_vet_flags(c) & VetFlag_Extra) { + if (check_vet_flags(node) & VetFlag_Extra) { error(node, "Redundant 'auto_cast' applied to expression"); } } diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index 2c1ee8331..b6bb7d819 100644 --- a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -2464,9 +2464,9 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags) error(us->token, "Empty 'using' list"); return; } - if (check_vet_flags(ctx) & VetFlag_UsingStmt) { + if (check_vet_flags(node) & VetFlag_UsingStmt) { ERROR_BLOCK(); - error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied"); + error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied %llu %llu", check_vet_flags(ctx), node->file()->vet_flags); error_line("\t'using' is considered bad practice to use as a statement outside of immediate refactoring\n"); } diff --git a/src/checker.cpp b/src/checker.cpp index a6b66f809..fbc550f00 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -521,6 +521,28 @@ GB_COMPARE_PROC(entity_variable_pos_cmp) { } + +gb_internal u64 check_vet_flags(CheckerContext *c) { + AstFile *file = c->file; + if (file == nullptr && + c->curr_proc_decl && + c->curr_proc_decl->proc_lit) { + file = c->curr_proc_decl->proc_lit->file(); + } + if (file && file->vet_flags_set) { + return file->vet_flags; + } + 
return build_context.vet_flags; +} + +gb_internal u64 check_vet_flags(Ast *node) { + AstFile *file = node->file(); + if (file && file->vet_flags_set) { + return file->vet_flags; + } + return build_context.vet_flags; +} + enum VettedEntityKind { VettedEntity_Invalid, diff --git a/src/checker.hpp b/src/checker.hpp index 12090cbca..8a63f7e88 100644 --- a/src/checker.hpp +++ b/src/checker.hpp @@ -449,12 +449,8 @@ struct CheckerContext { Ast *assignment_lhs_hint; }; -u64 check_vet_flags(CheckerContext *c) { - if (c->file && c->file->vet_flags_set) { - return c->file->vet_flags; - } - return build_context.vet_flags; -} +gb_internal u64 check_vet_flags(CheckerContext *c); +gb_internal u64 check_vet_flags(Ast *node); struct Checker { diff --git a/src/parser.cpp b/src/parser.cpp index b99182189..7d1c37d84 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -5563,7 +5563,9 @@ gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { while (s.len > 0) { String p = string_trim_whitespace(vet_tag_get_token(s, &s)); - if (p.len == 0) break; + if (p.len == 0) { + break; + } bool is_notted = false; if (p[0] == '!') { @@ -5571,14 +5573,10 @@ gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { p = substring(p, 1, p.len); if (p.len == 0) { syntax_error(token_for_pos, "Expected a vet flag name after '!'"); - break; + return build_context.vet_flags; } } - if (p.len == 0) { - continue; - } - u64 flag = get_vet_flag_from_name(p); if (flag != VetFlag_NONE) { if (is_notted) { @@ -5595,13 +5593,20 @@ gb_internal u64 parse_vet_tag(Token token_for_pos, String s) { error_line("\tusing-stmt\n"); error_line("\tusing-param\n"); error_line("\textra\n"); - break; + return build_context.vet_flags; } } - if (vet_flags == 0 && vet_not_flags != 0) { - vet_flags = VetFlag_All; + if (vet_flags == 0 && vet_not_flags == 0) { + return build_context.vet_flags; } + if (vet_flags == 0 && vet_not_flags != 0) { + return build_context.vet_flags &~ vet_not_flags; + } + if (vet_flags != 0 && vet_not_flags == 0) { + return vet_flags; + } + GB_ASSERT(vet_flags != 0 && vet_not_flags != 0); return vet_flags &~ vet_not_flags; } From 44ea82f8452876c4890884506111e243b8b2a541 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 11:46:40 +0100 Subject: [PATCH 07/20] Clean up usage of `using` throughout core and vendor --- core/compress/zlib/zlib.odin | 1 + core/encoding/xml/tokenizer.odin | 32 ++++----- core/fmt/fmt.odin | 22 +++--- core/image/netpbm/helpers.odin | 15 +++-- core/mem/allocators.odin | 104 ++++++++++++++--------------- core/odin/tokenizer/tokenizer.odin | 32 ++++----- core/runtime/error_checks.odin | 18 ++--- core/runtime/print.odin | 12 ++-- core/thread/thread_windows.odin | 4 +- core/time/time.odin | 36 +++++----- src/check_type.cpp | 4 +- vendor/fontstash/fontstash.odin | 1 + 12 files changed, 143 insertions(+), 138 deletions(-) diff --git a/core/compress/zlib/zlib.odin b/core/compress/zlib/zlib.odin index 21172e8e8..8062c8d3a 100644 --- a/core/compress/zlib/zlib.odin +++ b/core/compress/zlib/zlib.odin @@ -1,3 +1,4 @@ +//+vet !using-param package zlib /* diff --git a/core/encoding/xml/tokenizer.odin b/core/encoding/xml/tokenizer.odin index d225c5d90..cd055475c 100644 --- a/core/encoding/xml/tokenizer.odin +++ b/core/encoding/xml/tokenizer.odin @@ -125,38 +125,38 @@ error :: proc(t: ^Tokenizer, offset: int, msg: string, args: ..any) { } @(optimization_mode="speed") -advance_rune :: proc(using t: ^Tokenizer) { +advance_rune :: proc(t: ^Tokenizer) { #no_bounds_check { /* Already bounds-checked here. 
*/ - if read_offset < len(src) { - offset = read_offset - if ch == '\n' { - line_offset = offset - line_count += 1 + if t.read_offset < len(t.src) { + t.offset = t.read_offset + if t.ch == '\n' { + t.line_offset = t.offset + t.line_count += 1 } - r, w := rune(src[read_offset]), 1 + r, w := rune(t.src[t.read_offset]), 1 switch { case r == 0: error(t, t.offset, "illegal character NUL") case r >= utf8.RUNE_SELF: - r, w = #force_inline utf8.decode_rune_in_string(src[read_offset:]) + r, w = #force_inline utf8.decode_rune_in_string(t.src[t.read_offset:]) if r == utf8.RUNE_ERROR && w == 1 { error(t, t.offset, "illegal UTF-8 encoding") - } else if r == utf8.RUNE_BOM && offset > 0 { + } else if r == utf8.RUNE_BOM && t.offset > 0 { error(t, t.offset, "illegal byte order mark") } } - read_offset += w - ch = r + t.read_offset += w + t.ch = r } else { - offset = len(src) - if ch == '\n' { - line_offset = offset - line_count += 1 + t.offset = len(t.src) + if t.ch == '\n' { + t.line_offset = t.offset + t.line_count += 1 } - ch = -1 + t.ch = -1 } } } diff --git a/core/fmt/fmt.odin b/core/fmt/fmt.odin index f1f94b1b3..e64b621bf 100644 --- a/core/fmt/fmt.odin +++ b/core/fmt/fmt.odin @@ -835,22 +835,22 @@ int_from_arg :: proc(args: []any, arg_index: int) -> (int, int, bool) { // - fi: A pointer to an Info structure // - verb: The invalid format verb // -fmt_bad_verb :: proc(using fi: ^Info, verb: rune) { +fmt_bad_verb :: proc(fi: ^Info, verb: rune) { prev_in_bad := fi.in_bad defer fi.in_bad = prev_in_bad fi.in_bad = true - io.write_string(writer, "%!", &fi.n) - io.write_rune(writer, verb, &fi.n) - io.write_byte(writer, '(', &fi.n) - if arg.id != nil { - reflect.write_typeid(writer, arg.id, &fi.n) - io.write_byte(writer, '=', &fi.n) - fmt_value(fi, arg, 'v') + io.write_string(fi.writer, "%!", &fi.n) + io.write_rune(fi.writer, verb, &fi.n) + io.write_byte(fi.writer, '(', &fi.n) + if fi.arg.id != nil { + reflect.write_typeid(fi.writer, fi.arg.id, &fi.n) + io.write_byte(fi.writer, '=', &fi.n) + fmt_value(fi, fi.arg, 'v') } else { - io.write_string(writer, "", &fi.n) + io.write_string(fi.writer, "", &fi.n) } - io.write_byte(writer, ')', &fi.n) + io.write_byte(fi.writer, ')', &fi.n) } // Formats a boolean value according to the specified format verb // @@ -859,7 +859,7 @@ fmt_bad_verb :: proc(using fi: ^Info, verb: rune) { // - b: The boolean value to format // - verb: The format verb // -fmt_bool :: proc(using fi: ^Info, b: bool, verb: rune) { +fmt_bool :: proc(fi: ^Info, b: bool, verb: rune) { switch verb { case 't', 'v': fmt_string(fi, b ? 
"true" : "false", 's') diff --git a/core/image/netpbm/helpers.odin b/core/image/netpbm/helpers.odin index 016f9453e..5307d764b 100644 --- a/core/image/netpbm/helpers.odin +++ b/core/image/netpbm/helpers.odin @@ -4,13 +4,14 @@ import "core:bytes" import "core:image" destroy :: proc(img: ^image.Image) -> bool { - if img == nil do return false + if img == nil { + return false + } defer free(img) bytes.buffer_destroy(&img.pixels) - info, ok := img.metadata.(^image.Netpbm_Info) - if !ok do return false + info := img.metadata.(^image.Netpbm_Info) or_return header_destroy(&info.header) free(info) @@ -19,9 +20,9 @@ destroy :: proc(img: ^image.Image) -> bool { return true } -header_destroy :: proc(using header: ^Header) { - if format == .P7 && tupltype != "" { - delete(tupltype) - tupltype = "" +header_destroy :: proc(header: ^Header) { + if header.format == .P7 && header.tupltype != "" { + delete(header.tupltype) + header.tupltype = "" } } diff --git a/core/mem/allocators.odin b/core/mem/allocators.odin index 7767740c9..77cdfb3cf 100644 --- a/core/mem/allocators.odin +++ b/core/mem/allocators.odin @@ -111,11 +111,11 @@ begin_arena_temp_memory :: proc(a: ^Arena) -> Arena_Temp_Memory { return tmp } -end_arena_temp_memory :: proc(using tmp: Arena_Temp_Memory) { - assert(arena.offset >= prev_offset) - assert(arena.temp_count > 0) - arena.offset = prev_offset - arena.temp_count -= 1 +end_arena_temp_memory :: proc(tmp: Arena_Temp_Memory) { + assert(tmp.arena.offset >= tmp.prev_offset) + assert(tmp.arena.temp_count > 0) + tmp.arena.offset = tmp.prev_offset + tmp.arena.temp_count -= 1 } @@ -702,11 +702,11 @@ dynamic_pool_init :: proc(pool: ^Dynamic_Pool, pool. used_blocks.allocator = array_allocator } -dynamic_pool_destroy :: proc(using pool: ^Dynamic_Pool) { +dynamic_pool_destroy :: proc(pool: ^Dynamic_Pool) { dynamic_pool_free_all(pool) - delete(unused_blocks) - delete(used_blocks) - delete(out_band_allocations) + delete(pool.unused_blocks) + delete(pool.used_blocks) + delete(pool.out_band_allocations) zero(pool, size_of(pool^)) } @@ -719,90 +719,90 @@ dynamic_pool_alloc :: proc(pool: ^Dynamic_Pool, bytes: int) -> (rawptr, Allocato } @(require_results) -dynamic_pool_alloc_bytes :: proc(using pool: ^Dynamic_Pool, bytes: int) -> ([]byte, Allocator_Error) { - cycle_new_block :: proc(using pool: ^Dynamic_Pool) -> (err: Allocator_Error) { - if block_allocator.procedure == nil { +dynamic_pool_alloc_bytes :: proc(p: ^Dynamic_Pool, bytes: int) -> ([]byte, Allocator_Error) { + cycle_new_block :: proc(p: ^Dynamic_Pool) -> (err: Allocator_Error) { + if p.block_allocator.procedure == nil { panic("You must call pool_init on a Pool before using it") } - if current_block != nil { - append(&used_blocks, current_block) + if p.current_block != nil { + append(&p.used_blocks, p.current_block) } new_block: rawptr - if len(unused_blocks) > 0 { - new_block = pop(&unused_blocks) + if len(p.unused_blocks) > 0 { + new_block = pop(&p.unused_blocks) } else { data: []byte - data, err = block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc, - block_size, alignment, - nil, 0) + data, err = p.block_allocator.procedure(p.block_allocator.data, Allocator_Mode.Alloc, + p.block_size, p.alignment, + nil, 0) new_block = raw_data(data) } - bytes_left = block_size - current_pos = new_block - current_block = new_block + p.bytes_left = p.block_size + p.current_pos = new_block + p.current_block = new_block return } n := bytes - extra := alignment - (n % alignment) + extra := p.alignment - (n % p.alignment) n += extra - if n >= 
out_band_size { - assert(block_allocator.procedure != nil) - memory, err := block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc, - block_size, alignment, - nil, 0) + if n >= p.out_band_size { + assert(p.block_allocator.procedure != nil) + memory, err := p.block_allocator.procedure(p.block_allocator.data, Allocator_Mode.Alloc, + p.block_size, p.alignment, + nil, 0) if memory != nil { - append(&out_band_allocations, raw_data(memory)) + append(&p.out_band_allocations, raw_data(memory)) } return memory, err } - if bytes_left < n { - err := cycle_new_block(pool) + if p.bytes_left < n { + err := cycle_new_block(p) if err != nil { return nil, err } - if current_block == nil { + if p.current_block == nil { return nil, .Out_Of_Memory } } - memory := current_pos - current_pos = ptr_offset((^byte)(current_pos), n) - bytes_left -= n - return byte_slice(memory, bytes), nil + memory := p.current_pos + p.current_pos = ([^]byte)(p.current_pos)[n:] + p.bytes_left -= n + return ([^]byte)(memory)[:bytes], nil } -dynamic_pool_reset :: proc(using pool: ^Dynamic_Pool) { - if current_block != nil { - append(&unused_blocks, current_block) - current_block = nil +dynamic_pool_reset :: proc(p: ^Dynamic_Pool) { + if p.current_block != nil { + append(&p.unused_blocks, p.current_block) + p.current_block = nil } - for block in used_blocks { - append(&unused_blocks, block) + for block in p.used_blocks { + append(&p.unused_blocks, block) } - clear(&used_blocks) + clear(&p.used_blocks) - for a in out_band_allocations { - free(a, block_allocator) + for a in p.out_band_allocations { + free(a, p.block_allocator) } - clear(&out_band_allocations) + clear(&p.out_band_allocations) - bytes_left = 0 // Make new allocations call `cycle_new_block` again. + p.bytes_left = 0 // Make new allocations call `cycle_new_block` again. 
} -dynamic_pool_free_all :: proc(using pool: ^Dynamic_Pool) { - dynamic_pool_reset(pool) +dynamic_pool_free_all :: proc(p: ^Dynamic_Pool) { + dynamic_pool_reset(p) - for block in unused_blocks { - free(block, block_allocator) + for block in p.unused_blocks { + free(block, p.block_allocator) } - clear(&unused_blocks) + clear(&p.unused_blocks) } diff --git a/core/odin/tokenizer/tokenizer.odin b/core/odin/tokenizer/tokenizer.odin index c06d05e1d..0ec57356e 100644 --- a/core/odin/tokenizer/tokenizer.odin +++ b/core/odin/tokenizer/tokenizer.odin @@ -75,34 +75,34 @@ error :: proc(t: ^Tokenizer, offset: int, msg: string, args: ..any) { t.error_count += 1 } -advance_rune :: proc(using t: ^Tokenizer) { - if read_offset < len(src) { - offset = read_offset - if ch == '\n' { - line_offset = offset - line_count += 1 +advance_rune :: proc(t: ^Tokenizer) { + if t.read_offset < len(t.src) { + t.offset = t.read_offset + if t.ch == '\n' { + t.line_offset = t.offset + t.line_count += 1 } - r, w := rune(src[read_offset]), 1 + r, w := rune(t.src[t.read_offset]), 1 switch { case r == 0: error(t, t.offset, "illegal character NUL") case r >= utf8.RUNE_SELF: - r, w = utf8.decode_rune_in_string(src[read_offset:]) + r, w = utf8.decode_rune_in_string(t.src[t.read_offset:]) if r == utf8.RUNE_ERROR && w == 1 { error(t, t.offset, "illegal UTF-8 encoding") - } else if r == utf8.RUNE_BOM && offset > 0 { + } else if r == utf8.RUNE_BOM && t.offset > 0 { error(t, t.offset, "illegal byte order mark") } } - read_offset += w - ch = r + t.read_offset += w + t.ch = r } else { - offset = len(src) - if ch == '\n' { - line_offset = offset - line_count += 1 + t.offset = len(t.src) + if t.ch == '\n' { + t.line_offset = t.offset + t.line_count += 1 } - ch = -1 + t.ch = -1 } } diff --git a/core/runtime/error_checks.odin b/core/runtime/error_checks.odin index c189642af..9d484979a 100644 --- a/core/runtime/error_checks.odin +++ b/core/runtime/error_checks.odin @@ -235,7 +235,7 @@ make_slice_error_loc :: #force_inline proc "contextless" (loc := #caller_locatio handle_error(loc, len) } -make_dynamic_array_error_loc :: #force_inline proc "contextless" (using loc := #caller_location, len, cap: int) { +make_dynamic_array_error_loc :: #force_inline proc "contextless" (loc := #caller_location, len, cap: int) { if 0 <= len && len <= cap { return } @@ -271,18 +271,18 @@ make_map_expr_error_loc :: #force_inline proc "contextless" (loc := #caller_loca -bounds_check_error_loc :: #force_inline proc "contextless" (using loc := #caller_location, index, count: int) { - bounds_check_error(file_path, line, column, index, count) +bounds_check_error_loc :: #force_inline proc "contextless" (loc := #caller_location, index, count: int) { + bounds_check_error(loc.file_path, loc.line, loc.column, index, count) } -slice_expr_error_hi_loc :: #force_inline proc "contextless" (using loc := #caller_location, hi: int, len: int) { - slice_expr_error_hi(file_path, line, column, hi, len) +slice_expr_error_hi_loc :: #force_inline proc "contextless" (loc := #caller_location, hi: int, len: int) { + slice_expr_error_hi(loc.file_path, loc.line, loc.column, hi, len) } -slice_expr_error_lo_hi_loc :: #force_inline proc "contextless" (using loc := #caller_location, lo, hi: int, len: int) { - slice_expr_error_lo_hi(file_path, line, column, lo, hi, len) +slice_expr_error_lo_hi_loc :: #force_inline proc "contextless" (loc := #caller_location, lo, hi: int, len: int) { + slice_expr_error_lo_hi(loc.file_path, loc.line, loc.column, lo, hi, len) } -dynamic_array_expr_error_loc :: 
#force_inline proc "contextless" (using loc := #caller_location, low, high, max: int) { - dynamic_array_expr_error(file_path, line, column, low, high, max) +dynamic_array_expr_error_loc :: #force_inline proc "contextless" (loc := #caller_location, low, high, max: int) { + dynamic_array_expr_error(loc.file_path, loc.line, loc.column, low, high, max) } diff --git a/core/runtime/print.odin b/core/runtime/print.odin index 732ed9c12..20788b66f 100644 --- a/core/runtime/print.odin +++ b/core/runtime/print.odin @@ -215,19 +215,19 @@ print_uint :: proc "contextless" (x: uint) { print_u64(u64(x)) } print_uintptr :: proc "contextless" (x: uintptr) { print_u64(u64(x)) } print_int :: proc "contextless" (x: int) { print_i64(i64(x)) } -print_caller_location :: proc "contextless" (using loc: Source_Code_Location) { - print_string(file_path) +print_caller_location :: proc "contextless" (loc: Source_Code_Location) { + print_string(loc.file_path) when ODIN_ERROR_POS_STYLE == .Default { print_byte('(') - print_u64(u64(line)) + print_u64(u64(loc.line)) print_byte(':') - print_u64(u64(column)) + print_u64(u64(loc.column)) print_byte(')') } else when ODIN_ERROR_POS_STYLE == .Unix { print_byte(':') - print_u64(u64(line)) + print_u64(u64(loc.line)) print_byte(':') - print_u64(u64(column)) + print_u64(u64(loc.column)) print_byte(':') } else { #panic("unhandled ODIN_ERROR_POS_STYLE") diff --git a/core/thread/thread_windows.odin b/core/thread/thread_windows.odin index 0d004c8c3..2d6cad1ad 100644 --- a/core/thread/thread_windows.odin +++ b/core/thread/thread_windows.odin @@ -129,8 +129,8 @@ _destroy :: proc(thread: ^Thread) { free(thread, thread.creation_allocator) } -_terminate :: proc(using thread : ^Thread, exit_code: int) { - win32.TerminateThread(win32_thread, u32(exit_code)) +_terminate :: proc(thread: ^Thread, exit_code: int) { + win32.TerminateThread(thread.win32_thread, u32(exit_code)) } _yield :: proc() { diff --git a/core/time/time.odin b/core/time/time.odin index 6c424a62e..90d051a31 100644 --- a/core/time/time.odin +++ b/core/time/time.odin @@ -59,28 +59,30 @@ sleep :: proc "contextless" (d: Duration) { _sleep(d) } -stopwatch_start :: proc "contextless" (using stopwatch: ^Stopwatch) { - if !running { - _start_time = tick_now() - running = true +stopwatch_start :: proc "contextless" (stopwatch: ^Stopwatch) { + if !stopwatch.running { + stopwatch._start_time = tick_now() + stopwatch.running = true } } -stopwatch_stop :: proc "contextless" (using stopwatch: ^Stopwatch) { - if running { - _accumulation += tick_diff(_start_time, tick_now()) - running = false +stopwatch_stop :: proc "contextless" (stopwatch: ^Stopwatch) { + if stopwatch.running { + stopwatch._accumulation += tick_diff(stopwatch._start_time, tick_now()) + stopwatch.running = false } } -stopwatch_reset :: proc "contextless" (using stopwatch: ^Stopwatch) { - _accumulation = {} - running = false +stopwatch_reset :: proc "contextless" (stopwatch: ^Stopwatch) { + stopwatch._accumulation = {} + stopwatch.running = false } -stopwatch_duration :: proc "contextless" (using stopwatch: Stopwatch) -> Duration { - if !running { return _accumulation } - return _accumulation + tick_diff(_start_time, tick_now()) +stopwatch_duration :: proc "contextless" (stopwatch: Stopwatch) -> Duration { + if !stopwatch.running { + return stopwatch._accumulation + } + return stopwatch._accumulation + tick_diff(stopwatch._start_time, tick_now()) } diff :: proc "contextless" (start, end: Time) -> Duration { @@ -171,9 +173,9 @@ day :: proc "contextless" (t: Time) -> (day: int) 
{ } weekday :: proc "contextless" (t: Time) -> (weekday: Weekday) { - abs := _time_abs(t) - sec := (abs + u64(Weekday.Monday) * SECONDS_PER_DAY) % SECONDS_PER_WEEK - return Weekday(int(sec) / SECONDS_PER_DAY) + abs := _time_abs(t) + sec := (abs + u64(Weekday.Monday) * SECONDS_PER_DAY) % SECONDS_PER_WEEK + return Weekday(int(sec) / SECONDS_PER_DAY) } clock :: proc { clock_from_time, clock_from_duration, clock_from_stopwatch } diff --git a/src/check_type.cpp b/src/check_type.cpp index c52f32f1a..4704f8b9b 100644 --- a/src/check_type.cpp +++ b/src/check_type.cpp @@ -1474,9 +1474,9 @@ gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_para Type *specialization = nullptr; bool is_using = (p->flags&FieldFlag_using) != 0; - if ((build_context.vet_flags & VetFlag_UsingParam) && is_using) { + if ((check_vet_flags(param) & VetFlag_UsingParam) && is_using) { ERROR_BLOCK(); - error(param, "'using' on a procedure parameter is now allowed when '-vet' or '-vet-using-stmt' is applied"); + error(param, "'using' on a procedure parameter is now allowed when '-vet' or '-vet-using-param' is applied"); error_line("\t'using' is considered bad practice to use as a statement/procedure parameter outside of immediate refactoring\n"); } diff --git a/vendor/fontstash/fontstash.odin b/vendor/fontstash/fontstash.odin index edf9e12db..1516e34cf 100644 --- a/vendor/fontstash/fontstash.odin +++ b/vendor/fontstash/fontstash.odin @@ -1,4 +1,5 @@ //+build windows, linux, darwin +//+vet !using-param package fontstash import "core:runtime" From 8aa36072fccdc47ceaef6861ceaa66329fd73c42 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 12:11:17 +0100 Subject: [PATCH 08/20] Remove `using` where easily possible --- tests/core/encoding/hxa/test_core_hxa.odin | 7 +--- tests/core/encoding/xml/test_core_xml.odin | 37 ++++++++----------- .../linalg/glsl/test_linalg_glsl_math.odin | 1 + tests/core/math/test_core_math.odin | 1 + tests/core/text/i18n/test_core_text_i18n.odin | 1 + 5 files changed, 21 insertions(+), 26 deletions(-) diff --git a/tests/core/encoding/hxa/test_core_hxa.odin b/tests/core/encoding/hxa/test_core_hxa.odin index b93562fd5..33d26d707 100644 --- a/tests/core/encoding/hxa/test_core_hxa.odin +++ b/tests/core/encoding/hxa/test_core_hxa.odin @@ -21,16 +21,13 @@ main :: proc() { @test test_read :: proc(t: ^testing.T) { - - using hxa - filename := tc.get_data_path(t, TEAPOT_PATH) defer delete(filename) - file, err := read_from_file(filename) + file, err := hxa.read_from_file(filename) e :: hxa.Read_Error.None tc.expect(t, err == e, fmt.tprintf("%v: read_from_file(%v) -> %v != %v", #procedure, filename, err, e)) - defer file_destroy(file) + defer hxa.file_destroy(file) /* Header */ tc.expect(t, file.magic_number == 0x417848, fmt.tprintf("%v: file.magic_number %v != %v", diff --git a/tests/core/encoding/xml/test_core_xml.odin b/tests/core/encoding/xml/test_core_xml.odin index a05db377a..7a900659a 100644 --- a/tests/core/encoding/xml/test_core_xml.odin +++ b/tests/core/encoding/xml/test_core_xml.odin @@ -214,43 +214,40 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) { */ print :: proc(writer: io.Writer, doc: ^xml.Document) -> (written: int, err: io.Error) { if doc == nil { return } - using fmt - written += wprintf(writer, "[XML Prolog]\n") + written += fmt.wprintf(writer, "[XML Prolog]\n") for attr in doc.prologue { - written += wprintf(writer, "\t%v: %v\n", attr.key, attr.val) + written += fmt.wprintf(writer, "\t%v: %v\n", attr.key, attr.val) } - written += 
wprintf(writer, "[Encoding] %v\n", doc.encoding) + written += fmt.wprintf(writer, "[Encoding] %v\n", doc.encoding) if len(doc.doctype.ident) > 0 { - written += wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident) + written += fmt.wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident) if len(doc.doctype.rest) > 0 { - wprintf(writer, "\t%v\n", doc.doctype.rest) + fmt.wprintf(writer, "\t%v\n", doc.doctype.rest) } } for comment in doc.comments { - written += wprintf(writer, "[Pre-root comment] %v\n", comment) + written += fmt.wprintf(writer, "[Pre-root comment] %v\n", comment) } if doc.element_count > 0 { - wprintln(writer, " --- ") + fmt.wprintln(writer, " --- ") print_element(writer, doc, 0) - wprintln(writer, " --- ") + fmt.wprintln(writer, " --- ") } return written, .None } print_element :: proc(writer: io.Writer, doc: ^xml.Document, element_id: xml.Element_ID, indent := 0) -> (written: int, err: io.Error) { - using fmt - tab :: proc(writer: io.Writer, indent: int) { for _ in 0..=indent { - wprintf(writer, "\t") + fmt.wprintf(writer, "\t") } } @@ -259,13 +256,13 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) { element := doc.elements[element_id] if element.kind == .Element { - wprintf(writer, "<%v>\n", element.ident) + fmt.wprintf(writer, "<%v>\n", element.ident) for value in element.value { switch v in value { case string: tab(writer, indent + 1) - wprintf(writer, "[Value] %v\n", v) + fmt.wprintf(writer, "[Value] %v\n", v) case xml.Element_ID: print_element(writer, doc, v, indent + 1) } @@ -273,10 +270,10 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) { for attr in element.attribs { tab(writer, indent + 1) - wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) + fmt.wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val) } } else if element.kind == .Comment { - wprintf(writer, "[COMMENT] %v\n", element.value) + fmt.wprintf(writer, "[COMMENT] %v\n", element.value) } return written, .None @@ -291,8 +288,6 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) { @test run_tests :: proc(t: ^testing.T) { - using fmt - for test in TESTS { path := test_file_path(test.filename) log(t, fmt.tprintf("Trying to parse %v", path)) @@ -307,11 +302,11 @@ run_tests :: proc(t: ^testing.T) { crc32 := hash.crc32(tree_bytes) failed := err != test.err - err_msg := tprintf("Expected return value %v, got %v", test.err, err) + err_msg := fmt.tprintf("Expected return value %v, got %v", test.err, err) expect(t, err == test.err, err_msg) failed |= crc32 != test.crc32 - err_msg = tprintf("Expected CRC 0x%08x, got 0x%08x, with options %v", test.crc32, crc32, test.options) + err_msg = fmt.tprintf("Expected CRC 0x%08x, got 0x%08x, with options %v", test.crc32, crc32, test.options) expect(t, crc32 == test.crc32, err_msg) if failed { @@ -319,7 +314,7 @@ run_tests :: proc(t: ^testing.T) { Don't fully print big trees. */ tree_string = tree_string[:min(2_048, len(tree_string))] - println(tree_string) + fmt.println(tree_string) } } } diff --git a/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin b/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin index e0b4f5145..1d0218f5c 100644 --- a/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin +++ b/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin @@ -1,6 +1,7 @@ // Tests "linalg_glsl_math.odin" in "core:math/linalg/glsl". // Must be run with `-collection:tests=` flag, e.g. 
// ./odin run tests/core/math/linalg/glsl/test_linalg_glsl_math.odin -collection:tests=./tests +//+vet !using-stmt package test_core_math_linalg_glsl_math import glsl "core:math/linalg/glsl" diff --git a/tests/core/math/test_core_math.odin b/tests/core/math/test_core_math.odin index 9131c163d..f6b7345ed 100644 --- a/tests/core/math/test_core_math.odin +++ b/tests/core/math/test_core_math.odin @@ -1,6 +1,7 @@ // Tests "math.odin" in "core:math". // Must be run with `-collection:tests=` flag, e.g. // ./odin run tests/core/math/test_core_math.odin -collection:tests=./tests +//+vet !using-stmt package test_core_math import "core:fmt" diff --git a/tests/core/text/i18n/test_core_text_i18n.odin b/tests/core/text/i18n/test_core_text_i18n.odin index ba668c4fd..69ed65467 100644 --- a/tests/core/text/i18n/test_core_text_i18n.odin +++ b/tests/core/text/i18n/test_core_text_i18n.odin @@ -1,3 +1,4 @@ +//+vet !using-stmt package test_core_text_i18n import "core:mem" From f17077c05cfcb9632ee12619bdc01f4e10ea8930 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 12:11:31 +0100 Subject: [PATCH 09/20] Remove debug code --- src/check_stmt.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index b6bb7d819..a97a8d312 100644 --- a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -2466,7 +2466,7 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags) } if (check_vet_flags(node) & VetFlag_UsingStmt) { ERROR_BLOCK(); - error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied %llu %llu", check_vet_flags(ctx), node->file()->vet_flags); + error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied"); error_line("\t'using' is considered bad practice to use as a statement outside of immediate refactoring\n"); } From 5dba08fb3b3e4f851d569f91f2cb222dd8ddce15 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 12:19:25 +0100 Subject: [PATCH 10/20] Keep -vet happy --- core/math/math.odin | 24 +++++++++---------- tests/core/encoding/hxa/test_core_hxa.odin | 27 ++++++++++------------ tests/core/math/test_core_math.odin | 6 ++--- 3 files changed, 27 insertions(+), 30 deletions(-) diff --git a/core/math/math.odin b/core/math/math.odin index 6f7a36bab..4215a8075 100644 --- a/core/math/math.odin +++ b/core/math/math.odin @@ -2286,20 +2286,20 @@ F64_MASK :: 0x7ff F64_SHIFT :: 64 - 12 F64_BIAS :: 0x3ff -INF_F16 :f16: 0h7C00 -NEG_INF_F16 :f16: 0hFC00 +INF_F16 :: f16(0h7C00) +NEG_INF_F16 :: f16(0hFC00) -SNAN_F16 :f16: 0h7C01 -QNAN_F16 :f16: 0h7E01 +SNAN_F16 :: f16(0h7C01) +QNAN_F16 :: f16(0h7E01) -INF_F32 :f32: 0h7F80_0000 -NEG_INF_F32 :f32: 0hFF80_0000 +INF_F32 :: f32(0h7F80_0000) +NEG_INF_F32 :: f32(0hFF80_0000) -SNAN_F32 :f32: 0hFF80_0001 -QNAN_F32 :f32: 0hFFC0_0001 +SNAN_F32 :: f32(0hFF80_0001) +QNAN_F32 :: f32(0hFFC0_0001) -INF_F64 :f64: 0h7FF0_0000_0000_0000 -NEG_INF_F64 :f64: 0hFFF0_0000_0000_0000 +INF_F64 :: f64(0h7FF0_0000_0000_0000) +NEG_INF_F64 :: f64(0hFFF0_0000_0000_0000) -SNAN_F64 :f64: 0h7FF0_0000_0000_0001 -QNAN_F64 :f64: 0h7FF8_0000_0000_0001 +SNAN_F64 :: f64(0h7FF0_0000_0000_0001) +QNAN_F64 :: f64(0h7FF8_0000_0000_0001) diff --git a/tests/core/encoding/hxa/test_core_hxa.odin b/tests/core/encoding/hxa/test_core_hxa.odin index 33d26d707..5465f5d87 100644 --- a/tests/core/encoding/hxa/test_core_hxa.odin +++ b/tests/core/encoding/hxa/test_core_hxa.odin @@ -131,38 +131,35 @@ test_read :: proc(t: ^testing.T) { @test test_write :: proc(t: ^testing.T) { 
- - using hxa - - n1 :Node + n1: hxa.Node n1_m1_value := []f64le{0.4, -1.23, 2341.6, -333.333} - n1_m1 := Meta{"m1", n1_m1_value} + n1_m1 := hxa.Meta{"m1", n1_m1_value} - n1.meta_data = []Meta{n1_m1} + n1.meta_data = []hxa.Meta{n1_m1} - n1_l1 := Layer{"l1", 2, []f32le{32.1, -41.3}} - n1_l2 := Layer{"l2", 3, []f64le{0.64, 1.64, -2.64}} + n1_l1 := hxa.Layer{"l1", 2, []f32le{32.1, -41.3}} + n1_l2 := hxa.Layer{"l2", 3, []f64le{0.64, 1.64, -2.64}} - n1_content := Node_Image{Image_Type.Image_1D, [3]u32le{1, 1, 2}, Layer_Stack{n1_l1, n1_l2}} + n1_content := hxa.Node_Image{.Image_1D, [3]u32le{1, 1, 2}, hxa.Layer_Stack{n1_l1, n1_l2}} n1.content = n1_content - w_file :File - w_file.nodes = []Node{n1} + w_file: hxa.File + w_file.nodes = []hxa.Node{n1} - required_size := required_write_size(w_file) + required_size := hxa.required_write_size(w_file) buf := make([]u8, required_size) - n, write_err := write(buf, w_file) + n, write_err := hxa.write(buf, w_file) write_e :: hxa.Write_Error.None tc.expect(t, write_err == write_e, fmt.tprintf("%v: write_err %v != %v", #procedure, write_err, write_e)) tc.expect(t, n == required_size, fmt.tprintf("%v: n %v != %v", #procedure, n, required_size)) - file, read_err := read(buf) + file, read_err := hxa.read(buf) read_e :: hxa.Read_Error.None tc.expect(t, read_err == read_e, fmt.tprintf("%v: read_err %v != %v", #procedure, read_err, read_e)) - defer file_destroy(file) + defer hxa.file_destroy(file) delete(buf) diff --git a/tests/core/math/test_core_math.odin b/tests/core/math/test_core_math.odin index f6b7345ed..d358dc936 100644 --- a/tests/core/math/test_core_math.odin +++ b/tests/core/math/test_core_math.odin @@ -73,10 +73,10 @@ test_classify_f16 :: proc(t: ^testing.T) { } /* Check all subnormals (exponent 0, 10-bit significand non-zero) */ - for i :u16 = 1; i < 0x400; i += 1 { - v :f16 = transmute(f16)i + for i in u16(1)..<0x400 { + v := transmute(f16)i r = classify_f16(v) - e :Float_Class: Subnormal + e :: Float_Class.Subnormal tc.expect(t, r == e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, v, r, e)) } } From be6f355665a0e9d469f7c405e918b0bb64d4f793 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 12:32:30 +0100 Subject: [PATCH 11/20] Keep `-vet` happy by removing `using` --- .../linalg/glsl/test_linalg_glsl_math.odin | 15 +- tests/core/math/test_core_math.odin | 164 ++++++++---------- .../path/filepath/test_core_filepath.odin | 18 +- tests/core/reflect/test_core_reflect.odin | 36 ++-- tests/core/text/i18n/test_core_text_i18n.odin | 9 +- 5 files changed, 99 insertions(+), 143 deletions(-) diff --git a/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin b/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin index 1d0218f5c..cf91b8a97 100644 --- a/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin +++ b/tests/core/math/linalg/glsl/test_linalg_glsl_math.odin @@ -1,7 +1,6 @@ // Tests "linalg_glsl_math.odin" in "core:math/linalg/glsl". // Must be run with `-collection:tests=` flag, e.g. 
// ./odin run tests/core/math/linalg/glsl/test_linalg_glsl_math.odin -collection:tests=./tests -//+vet !using-stmt package test_core_math_linalg_glsl_math import glsl "core:math/linalg/glsl" @@ -23,9 +22,6 @@ main :: proc() { @test test_fract_f32 :: proc(t: ^testing.T) { - - using math - r: f32 Datum :: struct { @@ -36,8 +32,8 @@ test_fract_f32 :: proc(t: ^testing.T) { @static data := []Datum{ { 0, 10.5, 0.5 }, // Issue #1574 fract in linalg/glm is broken { 1, -10.5, -0.5 }, - { 2, F32_MIN, F32_MIN }, // 0x1p-126 - { 3, -F32_MIN, -F32_MIN }, + { 2, math.F32_MIN, math.F32_MIN }, // 0x1p-126 + { 3, -math.F32_MIN, -math.F32_MIN }, { 4, 0.0, 0.0 }, { 5, -0.0, -0.0 }, { 6, 1, 0.0 }, @@ -55,9 +51,6 @@ test_fract_f32 :: proc(t: ^testing.T) { @test test_fract_f64 :: proc(t: ^testing.T) { - - using math - r: f64 Datum :: struct { @@ -68,8 +61,8 @@ test_fract_f64 :: proc(t: ^testing.T) { @static data := []Datum{ { 0, 10.5, 0.5 }, // Issue #1574 fract in linalg/glm is broken { 1, -10.5, -0.5 }, - { 2, F64_MIN, F64_MIN }, // 0x1p-1022 - { 3, -F64_MIN, -F64_MIN }, + { 2, math.F64_MIN, math.F64_MIN }, // 0x1p-1022 + { 3, -math.F64_MIN, -math.F64_MIN }, { 4, 0.0, 0.0 }, { 5, -0.0, -0.0 }, { 6, 1, 0.0 }, diff --git a/tests/core/math/test_core_math.odin b/tests/core/math/test_core_math.odin index d358dc936..30e1875c0 100644 --- a/tests/core/math/test_core_math.odin +++ b/tests/core/math/test_core_math.odin @@ -1,7 +1,6 @@ // Tests "math.odin" in "core:math". // Must be run with `-collection:tests=` flag, e.g. // ./odin run tests/core/math/test_core_math.odin -collection:tests=./tests -//+vet !using-stmt package test_core_math import "core:fmt" @@ -44,11 +43,7 @@ main :: proc() { @test test_classify_f16 :: proc(t: ^testing.T) { - - using math - using Float_Class - - r: Float_Class + r: math.Float_Class Datum :: struct { i: int, @@ -56,38 +51,34 @@ test_classify_f16 :: proc(t: ^testing.T) { e: math.Float_Class, } @static data := []Datum{ - { 0, 1.2, Normal }, - { 1, 0h0001, Subnormal }, - { 2, 0.0, Zero }, - { 3, -0.0, Neg_Zero }, - { 4, SNAN_F16, NaN }, - { 5, QNAN_F16, NaN }, - { 6, INF_F16, Inf }, - { 7, NEG_INF_F16, Neg_Inf }, + { 0, 1.2, .Normal }, + { 1, 0h0001, .Subnormal }, + { 2, 0.0, .Zero }, + { 3, -0.0, .Neg_Zero }, + { 4, math.SNAN_F16, .NaN }, + { 5, math.QNAN_F16, .NaN }, + { 6, math.INF_F16, .Inf }, + { 7, math.NEG_INF_F16, .Neg_Inf }, } for d, i in data { assert(i == d.i) - r = classify_f16(d.v) + r = math.classify_f16(d.v) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e)) } /* Check all subnormals (exponent 0, 10-bit significand non-zero) */ for i in u16(1)..<0x400 { v := transmute(f16)i - r = classify_f16(v) - e :: Float_Class.Subnormal + r = math.classify_f16(v) + e :: math.Float_Class.Subnormal tc.expect(t, r == e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, v, r, e)) } } @test test_classify_f32 :: proc(t: ^testing.T) { - - using math - using Float_Class - - r: Float_Class + r: math.Float_Class Datum :: struct { i: int, @@ -95,30 +86,26 @@ test_classify_f32 :: proc(t: ^testing.T) { e: math.Float_Class, } @static data := []Datum{ - { 0, 1.2, Normal }, - { 1, 0h0000_0001, Subnormal }, - { 2, 0.0, Zero }, - { 3, -0.0, Neg_Zero }, - { 4, SNAN_F32, NaN }, - { 5, QNAN_F32, NaN }, - { 6, INF_F32, Inf }, - { 7, NEG_INF_F32, Neg_Inf }, + { 0, 1.2, .Normal }, + { 1, 0h0000_0001, .Subnormal }, + { 2, 0.0, .Zero }, + { 3, -0.0, .Neg_Zero }, + { 4, math.SNAN_F32, .NaN }, + { 5, math.QNAN_F32, .NaN }, + { 6, math.INF_F32, .Inf }, + { 7, 
math.NEG_INF_F32, .Neg_Inf }, } for d, i in data { assert(i == d.i) - r = classify_f32(d.v) + r = math.classify_f32(d.v) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e)) } } @test test_classify_f64 :: proc(t: ^testing.T) { - - using math - using Float_Class - - r: Float_Class + r: math.Float_Class Datum :: struct { i: int, @@ -126,28 +113,25 @@ test_classify_f64 :: proc(t: ^testing.T) { e: math.Float_Class, } @static data := []Datum{ - { 0, 1.2, Normal }, - { 1, 0h0000_0000_0000_0001, Subnormal }, - { 2, 0.0, Zero }, - { 3, -0.0, Neg_Zero }, - { 4, SNAN_F64, NaN }, - { 5, QNAN_F64, NaN }, - { 6, INF_F64, Inf }, - { 7, NEG_INF_F64, Neg_Inf }, + { 0, 1.2, .Normal }, + { 1, 0h0000_0000_0000_0001, .Subnormal }, + { 2, 0.0, .Zero }, + { 3, -0.0, .Neg_Zero }, + { 4, math.SNAN_F64, .NaN }, + { 5, math.QNAN_F64, .NaN }, + { 6, math.INF_F64, .Inf }, + { 7, math.NEG_INF_F64, .Neg_Inf }, } for d, i in data { assert(i == d.i) - r = classify_f64(d.v) + r = math.classify_f64(d.v) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e)) } } @test test_trunc_f16 :: proc(t: ^testing.T) { - - using math - r, v: f16 Datum :: struct { @@ -159,16 +143,16 @@ test_trunc_f16 :: proc(t: ^testing.T) { { 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken { 1, -10.5, -10 }, - { 2, F16_MAX, F16_MAX }, - { 3, -F16_MAX, -F16_MAX }, - { 4, F16_MIN, 0.0 }, - { 5, -F16_MIN, -0.0 }, + { 2, math.F16_MAX, math.F16_MAX }, + { 3, -math.F16_MAX, -math.F16_MAX }, + { 4, math.F16_MIN, 0.0 }, + { 5, -math.F16_MIN, -0.0 }, { 6, 0.0, 0.0 }, { 7, -0.0, -0.0 }, { 8, 1, 1 }, { 9, -1, -1 }, - { 10, INF_F16, INF_F16 }, - { 11, NEG_INF_F16, NEG_INF_F16 }, + { 10, math.INF_F16, math.INF_F16 }, + { 11, math.NEG_INF_F16, math.NEG_INF_F16 }, /* From https://en.wikipedia.org/wiki/Half-precision_floating-point_format */ { 12, 0h3C01, 1 }, // 0x1.004p+0 (smallest > 1) @@ -186,24 +170,21 @@ test_trunc_f16 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r = trunc_f16(d.v) + r = math.trunc_f16(d.v) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e)) } - v = SNAN_F16 - r = trunc_f16(v) - tc.expect(t, is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) + v = math.SNAN_F16 + r = math.trunc_f16(v) + tc.expect(t, math.is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) - v = QNAN_F16 - r = trunc_f16(v) - tc.expect(t, is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) + v = math.QNAN_F16 + r = math.trunc_f16(v) + tc.expect(t, math.is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) } @test test_trunc_f32 :: proc(t: ^testing.T) { - - using math - r, v: f32 Datum :: struct { @@ -215,16 +196,16 @@ test_trunc_f32 :: proc(t: ^testing.T) { { 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken { 1, -10.5, -10 }, - { 2, F32_MAX, F32_MAX }, - { 3, -F32_MAX, -F32_MAX }, - { 4, F32_MIN, 0.0 }, - { 5, -F32_MIN, -0.0 }, + { 2, math.F32_MAX, math.F32_MAX }, + { 3, -math.F32_MAX, -math.F32_MAX }, + { 4, math.F32_MIN, 0.0 }, + { 5, -math.F32_MIN, -0.0 }, { 6, 0.0, 0.0 }, { 7, -0.0, -0.0 }, { 8, 1, 1 }, { 9, -1, -1 }, - { 10, INF_F32, INF_F32 }, - { 11, NEG_INF_F32, NEG_INF_F32 }, + { 10, math.INF_F32, math.INF_F32 }, + { 11, math.NEG_INF_F32, math.NEG_INF_F32 }, /* From https://en.wikipedia.org/wiki/Single-precision_floating-point_format */ { 12, 0h3F80_0001, 1 }, // 0x1.000002p+0 (smallest > 1) @@ -251,24 +232,21 @@ test_trunc_f32 :: proc(t: ^testing.T) { 
for d, i in data { assert(i == d.i) - r = trunc_f32(d.v) + r = math.trunc_f32(d.v) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e)) } - v = SNAN_F32 - r = trunc_f32(v) - tc.expect(t, is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) + v = math.SNAN_F32 + r = math.trunc_f32(v) + tc.expect(t, math.is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) - v = QNAN_F32 - r = trunc_f32(v) - tc.expect(t, is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) + v = math.QNAN_F32 + r = math.trunc_f32(v) + tc.expect(t, math.is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) } @test test_trunc_f64 :: proc(t: ^testing.T) { - - using math - r, v: f64 Datum :: struct { @@ -280,16 +258,16 @@ test_trunc_f64 :: proc(t: ^testing.T) { { 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken { 1, -10.5, -10 }, - { 2, F64_MAX, F64_MAX }, - { 3, -F64_MAX, -F64_MAX }, - { 4, F64_MIN, 0.0 }, - { 5, -F64_MIN, -0.0 }, + { 2, math.F64_MAX, math.F64_MAX }, + { 3, -math.F64_MAX, -math.F64_MAX }, + { 4, math.F64_MIN, 0.0 }, + { 5, -math.F64_MIN, -0.0 }, { 6, 0.0, 0.0 }, { 7, -0.0, -0.0 }, { 8, 1, 1 }, { 9, -1, -1 }, - { 10, INF_F64, INF_F64 }, - { 11, NEG_INF_F64, NEG_INF_F64 }, + { 10, math.INF_F64, math.INF_F64 }, + { 11, math.NEG_INF_F64, math.NEG_INF_F64 }, /* From https://en.wikipedia.org/wiki/Double-precision_floating-point_format */ { 12, 0h3FF0_0000_0000_0001, 1 }, // 0x1.0000000000001p+0 (smallest > 1) @@ -316,17 +294,17 @@ test_trunc_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r = trunc_f64(d.v) + r = math.trunc_f64(d.v) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e)) } - v = SNAN_F64 - r = trunc_f64(v) - tc.expect(t, is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) + v = math.SNAN_F64 + r = math.trunc_f64(v) + tc.expect(t, math.is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) - v = QNAN_F64 - r = trunc_f64(v) - tc.expect(t, is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) + v = math.QNAN_F64 + r = math.trunc_f64(v) + tc.expect(t, math.is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r)) } diff --git a/tests/core/path/filepath/test_core_filepath.odin b/tests/core/path/filepath/test_core_filepath.odin index 0268fb62c..4c70e5f28 100644 --- a/tests/core/path/filepath/test_core_filepath.odin +++ b/tests/core/path/filepath/test_core_filepath.odin @@ -22,9 +22,6 @@ main :: proc() { @test test_split_list_windows :: proc(t: ^testing.T) { - - using filepath - Datum :: struct { i: int, v: string, @@ -43,7 +40,7 @@ test_split_list_windows :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i, fmt.tprintf("wrong data index: i %d != d.i %d\n", i, d.i)) - r := split_list(d.v) + r := filepath.split_list(d.v) defer delete(r) tc.expect(t, len(r) == len(d.e), fmt.tprintf("i:%d %s(%s) len(r) %d != len(d.e) %d", i, #procedure, d.v, len(r), len(d.e))) @@ -57,12 +54,12 @@ test_split_list_windows :: proc(t: ^testing.T) { { v := "" - r := split_list(v) + r := filepath.split_list(v) tc.expect(t, r == nil, fmt.tprintf("%s(%s) -> %v != nil", #procedure, v, r)) } { v := "a" - r := split_list(v) + r := filepath.split_list(v) defer delete(r) tc.expect(t, len(r) == 1, fmt.tprintf("%s(%s) len(r) %d != 1", #procedure, v, len(r))) if len(r) == 1 { @@ -73,9 +70,6 @@ test_split_list_windows :: proc(t: ^testing.T) { @test test_split_list_unix :: proc(t: ^testing.T) { - - using filepath - 
Datum :: struct { i: int, v: string, @@ -94,7 +88,7 @@ test_split_list_unix :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i, fmt.tprintf("wrong data index: i %d != d.i %d\n", i, d.i)) - r := split_list(d.v) + r := filepath.split_list(d.v) defer delete(r) tc.expect(t, len(r) == len(d.e), fmt.tprintf("i:%d %s(%s) len(r) %d != len(d.e) %d", i, #procedure, d.v, len(r), len(d.e))) @@ -108,12 +102,12 @@ test_split_list_unix :: proc(t: ^testing.T) { { v := "" - r := split_list(v) + r := filepath.split_list(v) tc.expect(t, r == nil, fmt.tprintf("%s(%s) -> %v != nil", #procedure, v, r)) } { v := "a" - r := split_list(v) + r := filepath.split_list(v) defer delete(r) tc.expect(t, len(r) == 1, fmt.tprintf("%s(%s) len(r) %d != 1", #procedure, v, len(r))) if len(r) == 1 { diff --git a/tests/core/reflect/test_core_reflect.odin b/tests/core/reflect/test_core_reflect.odin index 039501735..a3a66f968 100644 --- a/tests/core/reflect/test_core_reflect.odin +++ b/tests/core/reflect/test_core_reflect.odin @@ -19,8 +19,6 @@ main :: proc() { @test test_as_u64 :: proc(t: ^testing.T) { - using reflect - { /* i8 */ Datum :: struct { i: int, v: i8, e: u64 } @@ -32,7 +30,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i8 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i8 %v) -> %v (0x%X) != %v (0x%X)\n", i, #procedure, d.v, r, r, d.e, d.e)) @@ -49,7 +47,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i16 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i16 %v) -> %v (0x%X) != %v (0x%X)\n", i, #procedure, d.v, r, r, d.e, d.e)) @@ -66,7 +64,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i32 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i32 %v) -> %v (0x%X) != %v (0x%X)\n", i, #procedure, d.v, r, r, d.e, d.e)) @@ -83,7 +81,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i64 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i64 %v) -> %v (0x%X) != %v (0x%X)\n", i, #procedure, d.v, r, r, d.e, d.e)) @@ -103,7 +101,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i128 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i128 %v) -> %v (0x%X) != %v (0x%X)\n", i, #procedure, d.v, r, r, d.e, d.e)) @@ -119,7 +117,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(f16 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f16 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } @@ -133,7 +131,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(f32 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f32 %v) -> %v != %v\n", i, 
#procedure, d.v, r, d.e)) } @@ -147,7 +145,7 @@ test_as_u64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_u64(d.v) + r, valid := reflect.as_u64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(f64 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } @@ -156,8 +154,6 @@ test_as_u64 :: proc(t: ^testing.T) { @test test_as_f64 :: proc(t: ^testing.T) { - using reflect - { /* i8 */ Datum :: struct { i: int, v: i8, e: f64 } @@ -169,7 +165,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i8 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i8 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } @@ -185,7 +181,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i16 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i16 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } @@ -201,7 +197,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i32 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i32 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } @@ -217,7 +213,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i64 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } @@ -234,7 +230,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(i128 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i128 %v) -> %v (%H) != %v (%H)\n", i, #procedure, d.v, r, r, d.e, d.e)) @@ -250,7 +246,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(f16 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f16 %v (%H)) -> %v (%H) != %v (%H)\n", i, #procedure, d.v, d.v, r, r, d.e, d.e)) @@ -265,7 +261,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(f32 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f32 %v (%H)) -> %v (%H) != %v (%H)\n", i, #procedure, d.v, d.v, r, r, d.e, d.e)) @@ -280,7 +276,7 @@ test_as_f64 :: proc(t: ^testing.T) { for d, i in data { assert(i == d.i) - r, valid := as_f64(d.v) + r, valid := reflect.as_f64(d.v) tc.expect(t, valid, fmt.tprintf("i:%d %s(f64 %v) !valid\n", i, #procedure, d.v)) tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e)) } diff --git a/tests/core/text/i18n/test_core_text_i18n.odin b/tests/core/text/i18n/test_core_text_i18n.odin index 69ed65467..ec632d432 100644 --- a/tests/core/text/i18n/test_core_text_i18n.odin +++ b/tests/core/text/i18n/test_core_text_i18n.odin @@ -1,4 +1,3 @@ -//+vet !using-stmt package 
test_core_text_i18n import "core:mem" @@ -119,8 +118,6 @@ TESTS := []Test_Suite{ @test tests :: proc(t: ^testing.T) { - using fmt - cat: ^i18n.Translation err: i18n.Error @@ -143,8 +140,6 @@ tests :: proc(t: ^testing.T) { } main :: proc() { - using fmt - track: mem.Tracking_Allocator mem.tracking_allocator_init(&track, context.allocator) context.allocator = mem.tracking_allocator(&track) @@ -158,9 +153,9 @@ main :: proc() { } if len(track.allocation_map) > 0 { - println() + fmt.println() for _, v in track.allocation_map { - printf("%v Leaked %v bytes.\n", v.location, v.size) + fmt.printf("%v Leaked %v bytes.\n", v.location, v.size) } } } \ No newline at end of file From 3d16880d95e1649cc375cfe41ba10a26a6ebadb8 Mon Sep 17 00:00:00 2001 From: Hasan Yasin Ozturk Date: Mon, 31 Jul 2023 17:18:06 +0300 Subject: [PATCH 12/20] Set version date to commit date instead of build date Fixes #2688 This commit updates `build_odin.sh` and `build.bat` so that date of the commit is used for the version tag, instead of the build time. --- build.bat | 5 ++++- build_odin.sh | 11 +++++++---- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/build.bat b/build.bat index b7537fba6..cee679773 100644 --- a/build.bat +++ b/build.bat @@ -51,7 +51,10 @@ set compiler_flags= -nologo -Oi -TP -fp:precise -Gm- -MP -FC -EHsc- -GR- -GF set compiler_defines= -DODIN_VERSION_RAW=\"%odin_version_raw%\" if not exist .git\ goto skip_git_hash -for /f %%i in ('git rev-parse --short HEAD') do set GIT_SHA=%%i +for /f "tokens=1,2" %%i IN ('git show "--pretty=%%cd %%h" "--date=format:%%Y-%%m" --no-patch --no-notes HEAD') do ( + set odin_version_raw=%%i + set GIT_SHA=%%j +) if %ERRORLEVEL% equ 0 set compiler_defines=%compiler_defines% -DGIT_SHA=\"%GIT_SHA%\" :skip_git_hash diff --git a/build_odin.sh b/build_odin.sh index 9b90a80e2..31fe4d54c 100755 --- a/build_odin.sh +++ b/build_odin.sh @@ -8,17 +8,20 @@ set -eu : ${ODIN_VERSION=dev-$(date +"%Y-%m")} : ${GIT_SHA=} -CPPFLAGS="$CPPFLAGS -DODIN_VERSION_RAW=\"$ODIN_VERSION\"" CXXFLAGS="$CXXFLAGS -std=c++14" LDFLAGS="$LDFLAGS -pthread -lm -lstdc++" -if [ -d ".git" ]; then - GIT_SHA=$(git rev-parse --short HEAD || :) - if [ "$GIT_SHA" ]; then +if [ -d ".git" ] && [ $(which git) ]; then + versionTag=( $(git show --pretty='%cd %h' --date=format:%Y-%m --no-patch --no-notes HEAD) ) + if [ $? -eq 0 ]; then + ODIN_VERSION="${versionTag[0]}" + GIT_SHA="${versionTag[1]}" CPPFLAGS="$CPPFLAGS -DGIT_SHA=\"$GIT_SHA\"" fi fi +CPPFLAGS="$CPPFLAGS -DODIN_VERSION_RAW=\"$ODIN_VERSION\"" + DISABLED_WARNINGS="-Wno-switch -Wno-macro-redefined -Wno-unused-value" OS=$(uname) From 963eeee361ed59c16a990836ee1132dfba105a89 Mon Sep 17 00:00:00 2001 From: Hasan Yasin Ozturk Date: Mon, 31 Jul 2023 17:46:45 +0300 Subject: [PATCH 13/20] Fix dropped "dev-" prefix from version tag In commit c3a31666, "dev-" prefix was dropped unintentionally. This commit fixes that. 
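For reference, the `git show` invocation used for the version tag prints the commit month followed by the short hash, along the lines of:

    $ git show --pretty='%cd %h' --date=format:%Y-%m --no-patch --no-notes HEAD
    2023-07 <short hash>

The first field, prefixed with "dev-", becomes ODIN_VERSION; the second field becomes GIT_SHA.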
--- build.bat | 2 +- build_odin.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build.bat b/build.bat index cee679773..30faf43db 100644 --- a/build.bat +++ b/build.bat @@ -52,7 +52,7 @@ set compiler_defines= -DODIN_VERSION_RAW=\"%odin_version_raw%\" if not exist .git\ goto skip_git_hash for /f "tokens=1,2" %%i IN ('git show "--pretty=%%cd %%h" "--date=format:%%Y-%%m" --no-patch --no-notes HEAD') do ( - set odin_version_raw=%%i + set odin_version_raw=dev-%%i set GIT_SHA=%%j ) if %ERRORLEVEL% equ 0 set compiler_defines=%compiler_defines% -DGIT_SHA=\"%GIT_SHA%\" diff --git a/build_odin.sh b/build_odin.sh index 31fe4d54c..cbda51bfc 100755 --- a/build_odin.sh +++ b/build_odin.sh @@ -14,7 +14,7 @@ LDFLAGS="$LDFLAGS -pthread -lm -lstdc++" if [ -d ".git" ] && [ $(which git) ]; then versionTag=( $(git show --pretty='%cd %h' --date=format:%Y-%m --no-patch --no-notes HEAD) ) if [ $? -eq 0 ]; then - ODIN_VERSION="${versionTag[0]}" + ODIN_VERSION="dev-${versionTag[0]}" GIT_SHA="${versionTag[1]}" CPPFLAGS="$CPPFLAGS -DGIT_SHA=\"$GIT_SHA\"" fi From 19b24fcce2e012b18decfd8c31b810eb8a571fd5 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 17:27:23 +0100 Subject: [PATCH 14/20] Add require_results for proc groups --- src/checker.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/checker.cpp b/src/checker.cpp index fbc550f00..895e3c528 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -3002,6 +3002,12 @@ gb_internal DECL_ATTRIBUTE_PROC(proc_group_attribute) { } } return true; + } else if (name == "require_results") { + if (value != nullptr) { + error(elem, "Expected no value for '%.*s'", LIT(name)); + } + ac->require_results = true; + return true; } return false; } From 4b57aec1c6b926dedd77054b82b05a29c152a099 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Mon, 31 Jul 2023 17:30:03 +0100 Subject: [PATCH 15/20] Fix typo --- src/check_stmt.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index a97a8d312..b497c0afb 100644 --- a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -2466,7 +2466,7 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags) } if (check_vet_flags(node) & VetFlag_UsingStmt) { ERROR_BLOCK(); - error(node, "'using' as a statement is now allowed when '-vet' or '-vet-using' is applied"); + error(node, "'using' as a statement is not allowed when '-vet' or '-vet-using' is applied"); error_line("\t'using' is considered bad practice to use as a statement outside of immediate refactoring\n"); } From c35c58b023ec98aa7d42498b9ece68cf481f2c32 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Tue, 1 Aug 2023 11:03:15 +0100 Subject: [PATCH 16/20] Add `-vet-style` and `-vet-semicolon` --- src/build_settings.cpp | 6 ++++ src/check_expr.cpp | 4 +-- src/main.cpp | 30 ++++++++++++++--- src/parser.cpp | 75 +++++++++++++++++++++++++----------------- 4 files changed, 77 insertions(+), 38 deletions(-) diff --git a/src/build_settings.cpp b/src/build_settings.cpp index b46ea10e0..48891e89c 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -222,6 +222,8 @@ enum VetFlags : u64 { VetFlag_Shadowing = 1u<<1, // 2 VetFlag_UsingStmt = 1u<<2, // 4 VetFlag_UsingParam = 1u<<3, // 8 + VetFlag_Style = 1u<<4, // 16 + VetFlag_Semicolon = 1u<<5, // 32 VetFlag_Extra = 1u<<16, @@ -239,6 +241,10 @@ u64 get_vet_flag_from_name(String const &name) { return VetFlag_UsingStmt; } else if (name == "using-param") { return VetFlag_UsingParam; + } else if (name == "style") { + return 
VetFlag_Style; + } else if (name == "semicolon") { + return VetFlag_Semicolon; } else if (name == "extra") { return VetFlag_Extra; } diff --git a/src/check_expr.cpp b/src/check_expr.cpp index f9c62b506..8d159d920 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -2261,7 +2261,7 @@ gb_internal bool check_is_not_addressable(CheckerContext *c, Operand *o) { } gb_internal void check_old_for_or_switch_value_usage(Ast *expr) { - if (!build_context.strict_style) { + if (!(build_context.strict_style || (check_vet_flags(expr) & VetFlag_Style))) { return; } @@ -2351,7 +2351,7 @@ gb_internal void check_unary_expr(CheckerContext *c, Operand *o, Token op, Ast * o->type = alloc_type_pointer(o->type); } } else { - if (build_context.strict_style && ast_node_expect(node, Ast_UnaryExpr)) { + if (ast_node_expect(node, Ast_UnaryExpr)) { ast_node(ue, UnaryExpr, node); check_old_for_or_switch_value_usage(ue->expr); } diff --git a/src/main.cpp b/src/main.cpp index 1802e2984..5cecb5682 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -653,12 +653,16 @@ enum BuildFlagKind { BuildFlag_UseSeparateModules, BuildFlag_NoThreadedChecker, BuildFlag_ShowDebugMessages, + BuildFlag_Vet, BuildFlag_VetShadowing, BuildFlag_VetUnused, BuildFlag_VetUsingStmt, BuildFlag_VetUsingParam, + BuildFlag_VetStyle, + BuildFlag_VetSemicolon, BuildFlag_VetExtra, + BuildFlag_IgnoreUnknownAttributes, BuildFlag_ExtraLinkerFlags, BuildFlag_ExtraAssemblerFlags, @@ -839,7 +843,9 @@ gb_internal bool parse_build_flags(Array args) { add_flag(&build_flags, BuildFlag_VetUnused, str_lit("vet-unused"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_VetShadowing, str_lit("vet-shadowing"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_VetUsingStmt, str_lit("vet-using-stmt"), BuildFlagParam_None, Command__does_check); - add_flag(&build_flags, BuildFlag_VetUsingParam, str_lit("vet-using-param"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetUsingParam, str_lit("vet-using-param"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetStyle, str_lit("vet-style"), BuildFlagParam_None, Command__does_check); + add_flag(&build_flags, BuildFlag_VetSemicolon, str_lit("vet-semicolon"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_VetExtra, str_lit("vet-extra"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_IgnoreUnknownAttributes, str_lit("ignore-unknown-attributes"), BuildFlagParam_None, Command__does_check); @@ -1380,10 +1386,12 @@ gb_internal bool parse_build_flags(Array args) { } break; - case BuildFlag_VetUnused: build_context.vet_flags |= VetFlag_Unused; break; - case BuildFlag_VetShadowing: build_context.vet_flags |= VetFlag_Shadowing; break; - case BuildFlag_VetUsingStmt: build_context.vet_flags |= VetFlag_UsingStmt; break; + case BuildFlag_VetUnused: build_context.vet_flags |= VetFlag_Unused; break; + case BuildFlag_VetShadowing: build_context.vet_flags |= VetFlag_Shadowing; break; + case BuildFlag_VetUsingStmt: build_context.vet_flags |= VetFlag_UsingStmt; break; case BuildFlag_VetUsingParam: build_context.vet_flags |= VetFlag_UsingParam; break; + case BuildFlag_VetStyle: build_context.vet_flags |= VetFlag_Style; break; + case BuildFlag_VetSemicolon: build_context.vet_flags |= VetFlag_Semicolon; break; case BuildFlag_VetExtra: build_context.vet_flags = VetFlag_All | VetFlag_Extra; @@ -2173,6 +2181,16 @@ gb_internal void print_show_help(String const arg0, String const 
&command) { print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring"); print_usage_line(0, ""); + print_usage_line(1, "-vet-style"); + print_usage_line(2, "Errs on missing trailing commas followed by a newline"); + print_usage_line(2, "Errs on deprecated syntax"); + print_usage_line(2, "Does not err on unneeded tokens (unlike -strict-style)"); + print_usage_line(0, ""); + + print_usage_line(1, "-vet-semicolon"); + print_usage_line(2, "Errs on unneeded semicolons"); + print_usage_line(0, ""); + print_usage_line(1, "-vet-extra"); print_usage_line(2, "Do even more checks than standard vet on the code"); print_usage_line(2, "To treat the extra warnings as errors, use -warnings-as-errors"); @@ -2249,10 +2267,12 @@ gb_internal void print_show_help(String const arg0, String const &command) { print_usage_line(1, "-strict-style"); print_usage_line(2, "Errs on unneeded tokens, such as unneeded semicolons"); + print_usage_line(2, "Errs on missing trailing commas followed by a newline"); + print_usage_line(2, "Errs on deprecated syntax"); print_usage_line(0, ""); print_usage_line(1, "-strict-style-init-only"); - print_usage_line(2, "Errs on unneeded tokens, such as unneeded semicolons, only on the initial project"); + print_usage_line(2, "Same as -strict-style but only on the initial package"); print_usage_line(0, ""); print_usage_line(1, "-ignore-warnings"); diff --git a/src/parser.cpp b/src/parser.cpp index 7d1c37d84..c991f5741 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -1,7 +1,21 @@ #include "parser_pos.cpp" -// #undef at the bottom of this file -#define ALLOW_NEWLINE (!build_context.strict_style) +gb_internal u64 ast_file_vet_flags(AstFile *f) { + if (f->vet_flags_set) { + return f->vet_flags; + } + return build_context.vet_flags; +} + +gb_internal bool ast_file_vet_style(AstFile *f) { + return (ast_file_vet_flags(f) & VetFlag_Style) != 0; +} + + +gb_internal bool file_allow_newline(AstFile *f) { + bool is_strict = build_context.strict_style || ast_file_vet_style(f); + return !is_strict; +} gb_internal Token token_end_of_line(AstFile *f, Token tok) { u8 const *start = f->tokenizer.start + tok.pos.offset; @@ -1567,29 +1581,31 @@ gb_internal void assign_removal_flag_to_semicolon(AstFile *f) { Token *prev_token = &f->tokens[f->prev_token_index]; Token *curr_token = &f->tokens[f->curr_token_index]; GB_ASSERT(prev_token->kind == Token_Semicolon); - if (prev_token->string == ";") { - bool ok = false; - if (curr_token->pos.line > prev_token->pos.line) { + if (prev_token->string != ";") { + return; + } + bool ok = false; + if (curr_token->pos.line > prev_token->pos.line) { + ok = true; + } else if (curr_token->pos.line == prev_token->pos.line) { + switch (curr_token->kind) { + case Token_CloseBrace: + case Token_CloseParen: + case Token_EOF: ok = true; - } else if (curr_token->pos.line == prev_token->pos.line) { - switch (curr_token->kind) { - case Token_CloseBrace: - case Token_CloseParen: - case Token_EOF: - ok = true; - break; - } - } - - if (ok) { - if (build_context.strict_style) { - syntax_error(*prev_token, "Found unneeded semicolon"); - } else if (build_context.strict_style_init_only && f->pkg->kind == Package_Init) { - syntax_error(*prev_token, "Found unneeded semicolon"); - } - prev_token->flags |= TokenFlag_Remove; + break; } } + if (!ok) { + return; + } + + if (build_context.strict_style || (ast_file_vet_flags(f) & VetFlag_Semicolon)) { + syntax_error(*prev_token, "Found unneeded semicolon"); + } else if (build_context.strict_style_init_only && 
f->pkg->kind == Package_Init) { + syntax_error(*prev_token, "Found unneeded semicolon"); + } + prev_token->flags |= TokenFlag_Remove; } gb_internal void expect_semicolon(AstFile *f) { @@ -2748,7 +2764,7 @@ gb_internal Ast *parse_call_expr(AstFile *f, Ast *operand) { isize prev_expr_level = f->expr_level; bool prev_allow_newline = f->allow_newline; f->expr_level = 0; - f->allow_newline = ALLOW_NEWLINE; + f->allow_newline = file_allow_newline(f); open_paren = expect_token(f, Token_OpenParen); @@ -3147,7 +3163,7 @@ gb_internal Ast *parse_expr(AstFile *f, bool lhs) { gb_internal Array parse_expr_list(AstFile *f, bool lhs) { bool allow_newline = f->allow_newline; - f->allow_newline = ALLOW_NEWLINE; + f->allow_newline = file_allow_newline(f); auto list = array_make(heap_allocator()); for (;;) { @@ -3472,7 +3488,7 @@ gb_internal Ast *parse_results(AstFile *f, bool *diverging) { Ast *list = nullptr; expect_token(f, Token_OpenParen); list = parse_field_list(f, nullptr, FieldFlag_Results, Token_CloseParen, true, false); - if (ALLOW_NEWLINE) { + if (file_allow_newline(f)) { skip_possible_newline(f); } expect_token_after(f, Token_CloseParen, "parameter list"); @@ -3532,7 +3548,7 @@ gb_internal Ast *parse_proc_type(AstFile *f, Token proc_token) { expect_token(f, Token_OpenParen); params = parse_field_list(f, nullptr, FieldFlag_Signature, Token_CloseParen, true, true); - if (ALLOW_NEWLINE) { + if (file_allow_newline(f)) { skip_possible_newline(f); } expect_token_after(f, Token_CloseParen, "parameter list"); @@ -3754,7 +3770,7 @@ gb_internal bool allow_field_separator(AstFile *f) { } if (token.kind == Token_Semicolon) { bool ok = false; - if (ALLOW_NEWLINE && token_is_newline(token)) { + if (file_allow_newline(f) && token_is_newline(token)) { TokenKind next = peek_token(f).kind; switch (next) { case Token_CloseBrace: @@ -3818,7 +3834,7 @@ gb_internal bool check_procedure_name_list(Array const &names) { gb_internal Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKind follow, bool allow_default_parameters, bool allow_typeid_token) { bool prev_allow_newline = f->allow_newline; defer (f->allow_newline = prev_allow_newline); - f->allow_newline = ALLOW_NEWLINE; + f->allow_newline = file_allow_newline(f); Token start_token = f->curr_token; @@ -6005,6 +6021,3 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) { return ParseFile_None; } - - -#undef ALLOW_NEWLINE From 69e1f42aedad0d1992e64989aac1d236bee3d4d9 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Tue, 1 Aug 2023 11:11:15 +0100 Subject: [PATCH 17/20] Replace a lot of warnings with errors; remove deprecated stuff --- src/check_builtin.cpp | 2 +- src/check_decl.cpp | 26 +------------------------- src/check_expr.cpp | 2 +- src/checker.cpp | 6 +++--- src/main.cpp | 7 ------- src/tokenizer.cpp | 4 ++-- 6 files changed, 8 insertions(+), 39 deletions(-) diff --git a/src/check_builtin.cpp b/src/check_builtin.cpp index 269a0ec48..35720c914 100644 --- a/src/check_builtin.cpp +++ b/src/check_builtin.cpp @@ -1406,7 +1406,7 @@ gb_internal bool check_builtin_procedure_directive(CheckerContext *c, Operand *o } return false; } else if (name == "load_or") { - warning(call, "'#load_or' is deprecated in favour of '#load(path) or_else default'"); + error(call, "'#load_or' has now been removed in favour of '#load(path) or_else default'"); if (ce->args.count != 2) { if (ce->args.count == 0) { diff --git a/src/check_decl.cpp b/src/check_decl.cpp index 9e96dae1c..4a1a636f8 100644 --- a/src/check_decl.cpp +++ 
b/src/check_decl.cpp @@ -354,31 +354,7 @@ gb_internal void check_type_decl(CheckerContext *ctx, Entity *e, Ast *init_expr, // using decl if (decl->is_using) { - warning(init_expr, "'using' an enum declaration is not allowed, prefer using implicit selector expressions e.g. '.A'"); - #if 1 - // NOTE(bill): Must be an enum declaration - if (te->kind == Ast_EnumType) { - Scope *parent = e->scope; - if (parent->flags&ScopeFlag_File) { - // NOTE(bill): Use package scope - parent = parent->parent; - } - - Type *t = base_type(e->type); - if (t->kind == Type_Enum) { - for (Entity *f : t->Enum.fields) { - if (f->kind != Entity_Constant) { - continue; - } - String name = f->token.string; - if (is_blank_ident(name)) { - continue; - } - add_entity(ctx, parent, nullptr, f); - } - } - } - #endif + error(init_expr, "'using' an enum declaration is not allowed, prefer using implicit selector expressions e.g. '.A'"); } } diff --git a/src/check_expr.cpp b/src/check_expr.cpp index 8d159d920..40bf729c1 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -7153,7 +7153,7 @@ gb_internal ExprKind check_call_expr(CheckerContext *c, Operand *operand, Ast *c i32 id = operand->builtin_id; Entity *e = entity_of_node(operand->expr); if (e != nullptr && e->token.string == "expand_to_tuple") { - warning(operand->expr, "'expand_to_tuple' has been replaced with 'expand_values'"); + error(operand->expr, "'expand_to_tuple' has been replaced with 'expand_values'"); } if (!check_builtin_procedure(c, operand, call, id, type_hint)) { operand->mode = Addressing_Invalid; diff --git a/src/checker.cpp b/src/checker.cpp index 895e3c528..91c62c20c 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -3085,7 +3085,7 @@ gb_internal DECL_ATTRIBUTE_PROC(proc_decl_attribute) { check_expr(c, &o, value); Entity *e = entity_of_node(o.expr); if (e != nullptr && e->kind == Entity_Procedure) { - warning(elem, "'%.*s' is deprecated, please use one of the following instead: 'deferred_none', 'deferred_in', 'deferred_out'", LIT(name)); + error(elem, "'%.*s' is not allowed any more, please use one of the following instead: 'deferred_none', 'deferred_in', 'deferred_out'", LIT(name)); if (ac->deferred_procedure.entity != nullptr) { error(elem, "Previous usage of a 'deferred_*' attribute"); } @@ -4584,7 +4584,7 @@ gb_internal DECL_ATTRIBUTE_PROC(foreign_import_decl_attribute) { if (value != nullptr) { error(elem, "Expected no parameter for '%.*s'", LIT(name)); } else if (name == "force") { - warning(elem, "'force' is deprecated and is identical to 'require'"); + error(elem, "'force' was replaced with 'require'"); } ac->require_declaration = true; return true; @@ -6104,7 +6104,7 @@ gb_internal void check_parsed_files(Checker *c) { while (mpsc_dequeue(&c->info.intrinsics_entry_point_usage, &node)) { if (c->info.entry_point == nullptr && node != nullptr) { if (node->file()->pkg->kind != Package_Runtime) { - warning(node, "usage of intrinsics.__entry_point will be a no-op"); + error(node, "usage of intrinsics.__entry_point will be a no-op"); } } } diff --git a/src/main.cpp b/src/main.cpp index 5cecb5682..aa5b2ed34 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -2142,16 +2142,9 @@ gb_internal void print_show_help(String const arg0, String const &command) { } if (check) { - #if defined(GB_SYSTEM_WINDOWS) print_usage_line(1, "-no-threaded-checker"); print_usage_line(2, "Disabled multithreading in the semantic checker stage"); print_usage_line(0, ""); - #else - print_usage_line(1, "-threaded-checker"); - print_usage_line(1, "[EXPERIMENTAL]"); - 
print_usage_line(2, "Multithread the semantic checker stage"); - print_usage_line(0, ""); - #endif } if (check) { diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp index 17a396b9f..ad7aa81de 100644 --- a/src/tokenizer.cpp +++ b/src/tokenizer.cpp @@ -696,8 +696,8 @@ gb_internal void tokenizer_get_token(Tokenizer *t, Token *token, int repeat=0) { if (entry->kind != Token_Invalid && entry->hash == hash) { if (str_eq(entry->text, token->string)) { token->kind = entry->kind; - if (token->kind == Token_not_in && entry->text == "notin") { - syntax_warning(*token, "'notin' is deprecated in favour of 'not_in'"); + if (token->kind == Token_not_in && entry->text.len == 5) { + syntax_error(*token, "Did you mean 'not_in'?"); } } } From 2f094134a3e54cb6b99daf09b6a257b36f182b6e Mon Sep 17 00:00:00 2001 From: gingerBill Date: Tue, 1 Aug 2023 11:14:52 +0100 Subject: [PATCH 18/20] Remove `-strict-style-init-only` --- src/build_settings.cpp | 1 - src/main.cpp | 19 +------------------ src/parser.cpp | 2 -- 3 files changed, 1 insertion(+), 21 deletions(-) diff --git a/src/build_settings.cpp b/src/build_settings.cpp index 48891e89c..97098e545 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -325,7 +325,6 @@ struct BuildContext { bool disallow_do; bool strict_style; - bool strict_style_init_only; bool ignore_warnings; bool warnings_as_errors; diff --git a/src/main.cpp b/src/main.cpp index aa5b2ed34..abd01b7db 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -679,7 +679,6 @@ enum BuildFlagKind { BuildFlag_DisallowDo, BuildFlag_DefaultToNilAllocator, BuildFlag_StrictStyle, - BuildFlag_StrictStyleInitOnly, BuildFlag_ForeignErrorProcedures, BuildFlag_NoRTTI, BuildFlag_DynamicMapCalls, @@ -863,7 +862,6 @@ gb_internal bool parse_build_flags(Array args) { add_flag(&build_flags, BuildFlag_DisallowDo, str_lit("disallow-do"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_DefaultToNilAllocator, str_lit("default-to-nil-allocator"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_StrictStyle, str_lit("strict-style"), BuildFlagParam_None, Command__does_check); - add_flag(&build_flags, BuildFlag_StrictStyleInitOnly, str_lit("strict-style-init-only"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_ForeignErrorProcedures, str_lit("foreign-error-procedures"), BuildFlagParam_None, Command__does_check); add_flag(&build_flags, BuildFlag_NoRTTI, str_lit("no-rtti"), BuildFlagParam_None, Command__does_check); @@ -1484,20 +1482,9 @@ gb_internal bool parse_build_flags(Array args) { case BuildFlag_ForeignErrorProcedures: build_context.ODIN_FOREIGN_ERROR_PROCEDURES = true; break; - case BuildFlag_StrictStyle: { - if (build_context.strict_style_init_only) { - gb_printf_err("-strict-style and -strict-style-init-only cannot be used together\n"); - } + case BuildFlag_StrictStyle: build_context.strict_style = true; break; - } - case BuildFlag_StrictStyleInitOnly: { - if (build_context.strict_style) { - gb_printf_err("-strict-style and -strict-style-init-only cannot be used together\n"); - } - build_context.strict_style_init_only = true; - break; - } case BuildFlag_Short: build_context.cmd_doc_flags |= CmdDocFlag_Short; break; @@ -2264,10 +2251,6 @@ gb_internal void print_show_help(String const arg0, String const &command) { print_usage_line(2, "Errs on deprecated syntax"); print_usage_line(0, ""); - print_usage_line(1, "-strict-style-init-only"); - print_usage_line(2, "Same as -strict-style but only on the initial package"); - 
print_usage_line(0, ""); - print_usage_line(1, "-ignore-warnings"); print_usage_line(2, "Ignores warning messages"); print_usage_line(0, ""); diff --git a/src/parser.cpp b/src/parser.cpp index c991f5741..1aa03033e 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -1602,8 +1602,6 @@ gb_internal void assign_removal_flag_to_semicolon(AstFile *f) { if (build_context.strict_style || (ast_file_vet_flags(f) & VetFlag_Semicolon)) { syntax_error(*prev_token, "Found unneeded semicolon"); - } else if (build_context.strict_style_init_only && f->pkg->kind == Package_Init) { - syntax_error(*prev_token, "Found unneeded semicolon"); } prev_token->flags |= TokenFlag_Remove; } From 65206fe33e52a707134c919c01a0f8ae2d19c2d8 Mon Sep 17 00:00:00 2001 From: gingerBill Date: Tue, 1 Aug 2023 11:39:04 +0100 Subject: [PATCH 19/20] Go through loads of `TODO`s --- src/check_decl.cpp | 13 +++++++------ src/check_expr.cpp | 30 +++--------------------------- src/check_stmt.cpp | 11 +++++------ src/checker.cpp | 5 ----- src/checker.hpp | 2 -- src/entity.cpp | 7 +++---- src/exact_value.cpp | 7 ++----- src/llvm_backend_general.cpp | 3 +-- src/llvm_backend_proc.cpp | 5 +---- src/llvm_backend_stmt.cpp | 5 +---- src/llvm_backend_type.cpp | 1 - src/parser.cpp | 2 -- src/types.cpp | 22 +++++++++++++--------- 13 files changed, 36 insertions(+), 77 deletions(-) diff --git a/src/check_decl.cpp b/src/check_decl.cpp index 4a1a636f8..587d749b4 100644 --- a/src/check_decl.cpp +++ b/src/check_decl.cpp @@ -7,13 +7,15 @@ gb_internal Type *check_init_variable(CheckerContext *ctx, Entity *e, Operand *o e->type == t_invalid) { if (operand->mode == Addressing_Builtin) { + ERROR_BLOCK(); gbString expr_str = expr_to_string(operand->expr); - // TODO(bill): is this a good enough error message? error(operand->expr, - "Cannot assign built-in procedure '%s' in %.*s", - expr_str, - LIT(context_name)); + "Cannot assign built-in procedure '%s' in %.*s", + expr_str, + LIT(context_name)); + + error_line("\tBuilt-in procedures are implemented by the compiler and might not be actually instantiated procedure\n"); operand->mode = Addressing_Invalid; @@ -159,9 +161,8 @@ gb_internal void check_init_constant(CheckerContext *ctx, Entity *e, Operand *op } if (operand->mode != Addressing_Constant) { - // TODO(bill): better error gbString str = expr_to_string(operand->expr); - error(operand->expr, "'%s' is not a constant", str); + error(operand->expr, "'%s' is not a compile-time known constant", str); gb_string_free(str); if (e->type == nullptr) { e->type = t_invalid; diff --git a/src/check_expr.cpp b/src/check_expr.cpp index 40bf729c1..d2616ca72 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -462,7 +462,7 @@ gb_internal bool find_or_generate_polymorphic_procedure(CheckerContext *old_c, E { - // LEAK TODO(bill): This is technically a memory leak as it has to generate the type twice + // LEAK NOTE(bill): This is technically a memory leak as it has to generate the type twice bool prev_no_polymorphic_errors = nctx.no_polymorphic_errors; defer (nctx.no_polymorphic_errors = prev_no_polymorphic_errors); nctx.no_polymorphic_errors = false; @@ -470,7 +470,7 @@ gb_internal bool find_or_generate_polymorphic_procedure(CheckerContext *old_c, E // NOTE(bill): Reset scope from the failed procedure type scope_reset(scope); - // LEAK TODO(bill): Cloning this AST may be leaky + // LEAK NOTE(bill): Cloning this AST may be leaky but this is not really an issue due to arena-based allocation Ast *cloned_proc_type_node = clone_ast(pt->node); success = 
check_procedure_type(&nctx, final_proc_type, cloned_proc_type_node, &operands); if (!success) { @@ -778,16 +778,6 @@ gb_internal i64 check_distance_between_types(CheckerContext *c, Operand *operand } } - // ^T <- rawptr -#if 0 - // TODO(bill): Should C-style (not C++) pointer cast be allowed? - if (is_type_pointer(dst) && is_type_rawptr(src)) { - return true; - } -#endif -#if 1 - - // rawptr <- ^T if (are_types_identical(type, t_rawptr) && is_type_pointer(src)) { return 5; @@ -808,7 +798,6 @@ gb_internal i64 check_distance_between_types(CheckerContext *c, Operand *operand return 4; } } -#endif if (is_type_polymorphic(dst) && !is_type_polymorphic(src)) { bool modify_type = !c->no_polymorphic_errors; @@ -824,7 +813,6 @@ gb_internal i64 check_distance_between_types(CheckerContext *c, Operand *operand } } - // TODO(bill): Determine which rule is a better on in practice if (dst->Union.variants.count == 1) { Type *vt = dst->Union.variants[0]; i64 score = check_distance_between_types(c, operand, vt); @@ -1093,7 +1081,7 @@ gb_internal void check_assignment(CheckerContext *c, Operand *operand, Type *typ // TODO(bill): is this a good enough error message? error(operand->expr, - "Cannot assign overloaded procedure '%s' to '%s' in %.*s", + "Cannot assign overloaded procedure group '%s' to '%s' in %.*s", expr_str, op_type_str, LIT(context_name)); @@ -1120,7 +1108,6 @@ gb_internal void check_assignment(CheckerContext *c, Operand *operand, Type *typ switch (operand->mode) { case Addressing_Builtin: - // TODO(bill): Actually allow built in procedures to be passed around and thus be created on use error(operand->expr, "Cannot assign built-in procedure '%s' in %.*s", expr_str, @@ -1412,9 +1399,6 @@ gb_internal bool is_polymorphic_type_assignable(CheckerContext *c, Type *poly, T return false; case Type_Proc: if (source->kind == Type_Proc) { - // return check_is_assignable_to(c, &o, poly); - // TODO(bill): Polymorphic type assignment - #if 1 TypeProc *x = &poly->Proc; TypeProc *y = &source->Proc; if (x->calling_convention != y->calling_convention) { @@ -1447,7 +1431,6 @@ gb_internal bool is_polymorphic_type_assignable(CheckerContext *c, Type *poly, T } return true; - #endif } return false; case Type_Map: @@ -1699,7 +1682,6 @@ gb_internal bool check_unary_op(CheckerContext *c, Operand *o, Token op) { gb_string_free(str); return false; } - // TODO(bill): Handle errors correctly Type *type = base_type(core_array_type(o->type)); gbString str = nullptr; switch (op.kind) { @@ -1743,7 +1725,6 @@ gb_internal bool check_unary_op(CheckerContext *c, Operand *o, Token op) { gb_internal bool check_binary_op(CheckerContext *c, Operand *o, Token op) { Type *main_type = o->type; - // TODO(bill): Handle errors correctly Type *type = base_type(core_array_type(main_type)); Type *ct = core_type(type); @@ -2775,8 +2756,6 @@ gb_internal void check_shift(CheckerContext *c, Operand *x, Operand *y, Ast *nod gb_string_free(err_str); } - // TODO(bill): Should we support shifts for fixed arrays and #simd vectors? - if (!is_type_integer(x->type)) { gbString err_str = expr_to_string(x->expr); error(node, "Shift operand '%s' must be an integer", err_str); @@ -4437,7 +4416,6 @@ gb_internal ExactValue get_constant_field_single(CheckerContext *c, ExactValue v case_end; default: - // TODO(bill): Should this be a general fallback? 
if (success_) *success_ = true; if (finish_) *finish_ = true; return empty_exact_value; @@ -4793,8 +4771,6 @@ gb_internal Entity *check_selector(CheckerContext *c, Operand *operand, Ast *nod } if (entity == nullptr && selector->kind == Ast_Ident && is_type_array(type_deref(operand->type))) { - // TODO(bill): Simd_Vector swizzling - String field_name = selector->Ident.token.string; if (1 < field_name.len && field_name.len <= 4) { u8 swizzles_xyzw[4] = {'x', 'y', 'z', 'w'}; diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index b497c0afb..fa5f8f428 100644 --- a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -384,7 +384,6 @@ gb_internal Type *check_assignment_variable(CheckerContext *ctx, Operand *lhs, O } if (e != nullptr) { - // HACK TODO(bill): Should the entities be freed as it's technically a leak rhs->mode = Addressing_Value; rhs->type = e->type; rhs->proc_group = nullptr; @@ -394,7 +393,7 @@ gb_internal Type *check_assignment_variable(CheckerContext *ctx, Operand *lhs, O ast_node(i, Ident, node); e = scope_lookup(ctx->scope, i->token.string); if (e != nullptr && e->kind == Entity_Variable) { - used = (e->flags & EntityFlag_Used) != 0; // TODO(bill): Make backup just in case + used = (e->flags & EntityFlag_Used) != 0; // NOTE(bill): Make backup just in case } } @@ -888,7 +887,7 @@ gb_internal void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags check_open_scope(ctx, node); defer (check_close_scope(ctx)); - check_label(ctx, ss->label, node); // TODO(bill): What should the label's "scope" be? + check_label(ctx, ss->label, node); if (ss->init != nullptr) { check_stmt(ctx, ss->init, 0); @@ -1125,7 +1124,7 @@ gb_internal void check_type_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_ check_open_scope(ctx, node); defer (check_close_scope(ctx)); - check_label(ctx, ss->label, node); // TODO(bill): What should the label's "scope" be? + check_label(ctx, ss->label, node); if (ss->tag->kind != Ast_AssignStmt) { error(ss->tag, "Expected an 'in' assignment for this type switch statement"); @@ -1960,7 +1959,7 @@ gb_internal void check_value_decl_stmt(CheckerContext *ctx, Ast *node, u32 mod_f Token token = ast_token(node); if (vd->type != nullptr && entity_count > 1) { error(token, "'using' can only be applied to one variable of the same type"); - // TODO(bill): Should a 'continue' happen here? + // NOTE(bill): `using` will only be applied to a single declaration } for (isize entity_index = 0; entity_index < 1; entity_index++) { @@ -2294,7 +2293,7 @@ gb_internal void check_for_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { mod_flags |= Stmt_BreakAllowed | Stmt_ContinueAllowed; check_open_scope(ctx, node); - check_label(ctx, fs->label, node); // TODO(bill): What should the label's "scope" be? 
+ check_label(ctx, fs->label, node); if (fs->init != nullptr) { check_stmt(ctx, fs->init, 0); diff --git a/src/checker.cpp b/src/checker.cpp index 91c62c20c..da4fae76c 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -967,7 +967,6 @@ gb_internal void init_universal(void) { add_global_bool_constant("true", true); add_global_bool_constant("false", false); - // TODO(bill): Set through flags in the compiler add_global_string_constant("ODIN_VENDOR", bc->ODIN_VENDOR); add_global_string_constant("ODIN_VERSION", bc->ODIN_VERSION); add_global_string_constant("ODIN_ROOT", bc->ODIN_ROOT); @@ -1477,7 +1476,6 @@ gb_internal void add_type_and_value(CheckerContext *ctx, Ast *expr, AddressingMo if (ctx->decl) { mutex = &ctx->decl->type_and_value_mutex; } else if (ctx->pkg) { - // TODO(bill): is a per package mutex is a good idea here? mutex = &ctx->pkg->type_and_value_mutex; } @@ -2580,9 +2578,6 @@ gb_internal Array generate_entity_dependency_graph(CheckerInf } } - // TODO(bill): This could be multithreaded to improve performance - // This means that the entity graph node set will have to be thread safe - TIME_SECTION("generate_entity_dependency_graph: Calculate edges for graph M - Part 2"); auto G = array_make(allocator, 0, M.count); diff --git a/src/checker.hpp b/src/checker.hpp index 8a63f7e88..bf956393c 100644 --- a/src/checker.hpp +++ b/src/checker.hpp @@ -387,8 +387,6 @@ struct CheckerInfo { BlockingMutex foreign_mutex; // NOT recursive StringMap foreigns; - // NOTE(bill): These are actually MPSC queues - // TODO(bill): Convert them to be MPSC queues MPSCQueue definition_queue; MPSCQueue entity_queue; MPSCQueue required_global_variable_queue; diff --git a/src/entity.cpp b/src/entity.cpp index 649dd900d..291ae8c83 100644 --- a/src/entity.cpp +++ b/src/entity.cpp @@ -287,7 +287,6 @@ gb_internal bool is_entity_kind_exported(EntityKind kind, bool allow_builtin = f } gb_internal bool is_entity_exported(Entity *e, bool allow_builtin = false) { - // TODO(bill): Determine the actual exportation rules for imports of entities GB_ASSERT(e != nullptr); if (!is_entity_kind_exported(e->kind, allow_builtin)) { return false; @@ -401,7 +400,7 @@ gb_internal Entity *alloc_entity_array_elem(Scope *scope, Token token, Type *typ return entity; } -gb_internal Entity *alloc_entity_procedure(Scope *scope, Token token, Type *signature_type, u64 tags) { +gb_internal Entity *alloc_entity_procedure(Scope *scope, Token token, Type *signature_type, u64 tags=0) { Entity *entity = alloc_entity(Entity_Procedure, scope, token, signature_type); entity->Procedure.tags = tags; return entity; @@ -418,7 +417,7 @@ gb_internal Entity *alloc_entity_import_name(Scope *scope, Token token, Type *ty entity->ImportName.path = path; entity->ImportName.name = name; entity->ImportName.scope = import_scope; - entity->state = EntityState_Resolved; // TODO(bill): Is this correct? + entity->state = EntityState_Resolved; return entity; } @@ -427,7 +426,7 @@ gb_internal Entity *alloc_entity_library_name(Scope *scope, Token token, Type *t Entity *entity = alloc_entity(Entity_LibraryName, scope, token, type); entity->LibraryName.paths = paths; entity->LibraryName.name = name; - entity->state = EntityState_Resolved; // TODO(bill): Is this correct? 
+ entity->state = EntityState_Resolved; return entity; } diff --git a/src/exact_value.cpp b/src/exact_value.cpp index ff940aabb..cd499272f 100644 --- a/src/exact_value.cpp +++ b/src/exact_value.cpp @@ -26,8 +26,8 @@ enum ExactValueKind { ExactValue_Complex = 5, ExactValue_Quaternion = 6, ExactValue_Pointer = 7, - ExactValue_Compound = 8, // TODO(bill): Is this good enough? - ExactValue_Procedure = 9, // TODO(bill): Is this good enough? + ExactValue_Compound = 8, + ExactValue_Procedure = 9, ExactValue_Typeid = 10, ExactValue_Count, @@ -101,7 +101,6 @@ gb_internal ExactValue exact_value_bool(bool b) { } gb_internal ExactValue exact_value_string(String string) { - // TODO(bill): Allow for numbers with underscores in them ExactValue result = {ExactValue_String}; result.value_string = string; return result; @@ -702,7 +701,6 @@ gb_internal void match_exact_values(ExactValue *x, ExactValue *y) { compiler_error("match_exact_values: How'd you get here? Invalid ExactValueKind %d", x->kind); } -// TODO(bill): Allow for pointer arithmetic? Or are pointer slices good enough? gb_internal ExactValue exact_binary_operator_value(TokenKind op, ExactValue x, ExactValue y) { match_exact_values(&x, &y); @@ -943,7 +941,6 @@ gb_internal bool compare_exact_values(TokenKind op, ExactValue x, ExactValue y) case ExactValue_String: { String a = x.value_string; String b = y.value_string; - // TODO(bill): gb_memcompare is used because the strings are UTF-8 switch (op) { case Token_CmpEq: return a == b; case Token_NotEq: return a != b; diff --git a/src/llvm_backend_general.cpp b/src/llvm_backend_general.cpp index ad8a1816a..eb6389763 100644 --- a/src/llvm_backend_general.cpp +++ b/src/llvm_backend_general.cpp @@ -1895,8 +1895,8 @@ gb_internal LLVMTypeRef lb_type_internal(lbModule *m, Type *type) { case Type_SimdVector: return lb_type_internal(m, base); - // TODO(bill): Deal with this correctly. Can this be named? case Type_Proc: + // TODO(bill): Deal with this correctly. Can this be named? return lb_type_internal(m, base); case Type_Tuple: @@ -2869,7 +2869,6 @@ gb_internal lbValue lb_find_value_from_entity(lbModule *m, Entity *e) { if (USE_SEPARATE_MODULES) { lbModule *other_module = lb_module_of_entity(m->gen, e); - // TODO(bill): correct this logic bool is_external = other_module != m; if (!is_external) { if (e->code_gen_module != nullptr) { diff --git a/src/llvm_backend_proc.cpp b/src/llvm_backend_proc.cpp index c27c55337..a3156a7ed 100644 --- a/src/llvm_backend_proc.cpp +++ b/src/llvm_backend_proc.cpp @@ -362,7 +362,6 @@ gb_internal lbProcedure *lb_create_dummy_procedure(lbModule *m, String link_name Type *pt = p->type; lbCallingConventionKind cc_kind = lbCallingConvention_C; - // TODO(bill): Clean up this logic if (!is_arch_wasm()) { cc_kind = lb_calling_convention_map[pt->Proc.calling_convention]; } @@ -1702,7 +1701,6 @@ gb_internal lbValue lb_build_builtin_proc(lbProcedure *p, Ast *expr, TypeAndValu lbValue v = lb_build_expr(p, ce->args[0]); Type *t = base_type(v.type); if (is_type_pointer(t)) { - // IMPORTANT TODO(bill): Should there be a nil pointer check? v = lb_emit_load(p, v); t = type_deref(t); } @@ -1730,7 +1728,6 @@ gb_internal lbValue lb_build_builtin_proc(lbProcedure *p, Ast *expr, TypeAndValu lbValue v = lb_build_expr(p, ce->args[0]); Type *t = base_type(v.type); if (is_type_pointer(t)) { - // IMPORTANT TODO(bill): Should there be a nil pointer check? 
v = lb_emit_load(p, v); t = type_deref(t); } @@ -3144,7 +3141,7 @@ gb_internal lbValue lb_build_call_expr(lbProcedure *p, Ast *expr) { lbValue res = lb_build_call_expr_internal(p, expr); - if (ce->optional_ok_one) { // TODO(bill): Minor hack for #optional_ok procedures + if (ce->optional_ok_one) { GB_ASSERT(is_type_tuple(res.type)); GB_ASSERT(res.type->Tuple.variables.count == 2); return lb_emit_struct_ev(p, res, 0); diff --git a/src/llvm_backend_stmt.cpp b/src/llvm_backend_stmt.cpp index 60420402a..9d688be6a 100644 --- a/src/llvm_backend_stmt.cpp +++ b/src/llvm_backend_stmt.cpp @@ -1688,7 +1688,6 @@ gb_internal void lb_build_type_switch_stmt(lbProcedure *p, AstTypeSwitchStmt *ss lb_add_entity(p->module, case_entity, ptr); lb_add_debug_local_variable(p, ptr.value, case_entity->type, case_entity->token); } else { - // TODO(bill): is the correct expected behaviour? lb_store_type_case_implicit(p, clause, parent_value); } @@ -2014,12 +2013,10 @@ gb_internal void lb_build_if_stmt(lbProcedure *p, Ast *node) { defer (lb_close_scope(p, lbDeferExit_Default, nullptr)); if (is->init != nullptr) { - // TODO(bill): Should this have a separate block to begin with? - #if 1 lbBlock *init = lb_create_block(p, "if.init"); lb_emit_jump(p, init); lb_start_block(p, init); - #endif + lb_build_stmt(p, is->init); } lbBlock *then = lb_create_block(p, "if.then"); diff --git a/src/llvm_backend_type.cpp b/src/llvm_backend_type.cpp index 4716733cc..c85840517 100644 --- a/src/llvm_backend_type.cpp +++ b/src/llvm_backend_type.cpp @@ -731,7 +731,6 @@ gb_internal void lb_setup_type_info_data(lbProcedure *p) { // NOTE(bill): Setup type_set_offsets(t); // NOTE(bill): Just incase the offsets have not been set yet for (isize source_index = 0; source_index < count; source_index++) { - // TODO(bill): Order fields in source order not layout order Entity *f = t->Struct.fields[source_index]; lbValue tip = lb_type_info(m, f->type); i64 foffset = 0; diff --git a/src/parser.cpp b/src/parser.cpp index 1aa03033e..78120507d 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -4968,7 +4968,6 @@ gb_internal bool init_parser(Parser *p) { gb_internal void destroy_parser(Parser *p) { GB_ASSERT(p != nullptr); - // TODO(bill): Fix memory leak for (AstPackage *pkg : p->packages) { for (AstFile *file : pkg->files) { destroy_ast_file(file); @@ -5012,7 +5011,6 @@ gb_internal WORKER_TASK_PROC(parser_worker_proc) { gb_internal void parser_add_file_to_process(Parser *p, AstPackage *pkg, FileInfo fi, TokenPos pos) { - // TODO(bill): Use a better allocator ImportedFile f = {pkg, fi, pos, p->file_to_process_count++}; auto wd = gb_alloc_item(permanent_allocator(), ParserWorkerData); wd->parser = p; diff --git a/src/types.cpp b/src/types.cpp index 847aea9f3..67f42adca 100644 --- a/src/types.cpp +++ b/src/types.cpp @@ -143,6 +143,7 @@ struct TypeStruct { Type * soa_elem; i32 soa_count; StructSoaKind soa_kind; + BlockingMutex mutex; // for settings offsets bool is_polymorphic; bool are_offsets_set : 1; @@ -244,6 +245,7 @@ struct TypeProc { TYPE_KIND(Tuple, struct { \ Slice variables; /* Entity_Variable */ \ i64 * offsets; \ + BlockingMutex mutex; /* for settings offsets */ \ bool are_offsets_being_processed; \ bool are_offsets_set; \ bool is_packed; \ @@ -821,6 +823,9 @@ gb_internal void type_path_pop(TypePath *tp) { #define FAILURE_ALIGNMENT 0 gb_internal bool type_ptr_set_update(PtrSet *s, Type *t) { + if (t == nullptr) { + return true; + } if (ptr_set_exists(s, t)) { return true; } @@ -829,13 +834,17 @@ gb_internal bool type_ptr_set_update(PtrSet *s, 
Type *t) { } gb_internal bool type_ptr_set_exists(PtrSet *s, Type *t) { + if (t == nullptr) { + return true; + } + if (ptr_set_exists(s, t)) { return true; } // TODO(bill, 2019-10-05): This is very slow and it's probably a lot // faster to cache types correctly - for (Type *f : *s) { + for (Type *f : *s) if (f->kind == t->kind) { if (are_types_identical(t, f)) { ptr_set_add(s, t); return true; @@ -2666,7 +2675,6 @@ gb_internal bool are_types_identical_internal(Type *x, Type *y, bool check_tuple x->Struct.soa_kind == y->Struct.soa_kind && x->Struct.soa_count == y->Struct.soa_count && are_types_identical(x->Struct.soa_elem, y->Struct.soa_elem)) { - // TODO(bill); Fix the custom alignment rule for_array(i, x->Struct.fields) { Entity *xf = x->Struct.fields[i]; Entity *yf = y->Struct.fields[i]; @@ -2807,7 +2815,6 @@ gb_internal i64 union_tag_size(Type *u) { return 0; } - // TODO(bill): Is this an okay approach? i64 max_align = 1; if (u->Union.variants.count < 1ull<<8) { @@ -2817,7 +2824,7 @@ gb_internal i64 union_tag_size(Type *u) { } else if (u->Union.variants.count < 1ull<<32) { max_align = 4; } else { - GB_PANIC("how many variants do you have?!"); + compiler_error("how many variants do you have?! %lld", cast(long long)u->Union.variants.count); } for_array(i, u->Union.variants) { @@ -3136,8 +3143,6 @@ gb_internal Selection lookup_field_with_selection(Type *type_, String field_name switch (type->Basic.kind) { case Basic_any: { #if 1 - // IMPORTANT TODO(bill): Should these members be available to should I only allow them with - // `Raw_Any` type? String data_str = str_lit("data"); String id_str = str_lit("id"); gb_local_persist Entity *entity__any_data = alloc_entity_field(nullptr, make_token_ident(data_str), t_rawptr, false, 0); @@ -3663,10 +3668,9 @@ gb_internal i64 *type_set_offsets_of(Slice const &fields, bool is_pack } gb_internal bool type_set_offsets(Type *t) { - MUTEX_GUARD(&g_type_mutex); // TODO(bill): only per struct - t = base_type(t); if (t->kind == Type_Struct) { + MUTEX_GUARD(&t->Struct.mutex); if (!t->Struct.are_offsets_set) { t->Struct.are_offsets_being_processed = true; t->Struct.offsets = type_set_offsets_of(t->Struct.fields, t->Struct.is_packed, t->Struct.is_raw_union); @@ -3675,6 +3679,7 @@ gb_internal bool type_set_offsets(Type *t) { return true; } } else if (is_type_tuple(t)) { + MUTEX_GUARD(&t->Tuple.mutex); if (!t->Tuple.are_offsets_set) { t->Tuple.are_offsets_being_processed = true; t->Tuple.offsets = type_set_offsets_of(t->Tuple.variables, t->Tuple.is_packed, false); @@ -3849,7 +3854,6 @@ gb_internal i64 type_size_of_internal(Type *t, TypePath *path) { max = size; } } - // TODO(bill): Is this how it should work? 
return align_formula(max, align); } else { i64 count = 0, size = 0, align = 0; From fb30bda7d77ff01548e98a59f70d634b03d44f91 Mon Sep 17 00:00:00 2001 From: Hasan Yasin Ozturk Date: Tue, 1 Aug 2023 15:51:22 +0300 Subject: [PATCH 20/20] Add -show-system-calls flag info to cli usage help --- src/main.cpp | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/main.cpp b/src/main.cpp index abd01b7db..cf61049da 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -174,7 +174,7 @@ gb_internal i32 linker_stage(lbGenerator *gen) { } if (build_context.cross_compiling && selected_target_metrics->metrics == &target_essence_amd64) { -#if defined(GB_SYSTEM_UNIX) +#if defined(GB_SYSTEM_UNIX) result = system_exec_command_line_app("linker", "x86_64-essence-gcc \"%.*s.o\" -o \"%.*s\" %.*s %.*s", LIT(output_filename), LIT(output_filename), LIT(build_context.link_flags), LIT(build_context.extra_linker_flags)); #else @@ -498,13 +498,13 @@ gb_internal i32 linker_stage(lbGenerator *gen) { // line arguments prepared previously are incompatible with ld. if (build_context.metrics.os == TargetOs_darwin) { link_settings = gb_string_appendc(link_settings, "-Wl,-init,'__odin_entry_point' "); - // NOTE(weshardee): __odin_exit_point should also be added, but -fini + // NOTE(weshardee): __odin_exit_point should also be added, but -fini // does not exist on MacOS } else { link_settings = gb_string_appendc(link_settings, "-Wl,-init,'_odin_entry_point' "); link_settings = gb_string_appendc(link_settings, "-Wl,-fini,'_odin_exit_point' "); } - + } else if (build_context.metrics.os != TargetOs_openbsd) { // OpenBSD defaults to PIE executable. do not pass -no-pie for it. link_settings = gb_string_appendc(link_settings, "-no-pie "); @@ -1587,7 +1587,7 @@ gb_internal bool parse_build_flags(Array args) { if (path_is_directory(path)) { gb_printf_err("Invalid -pdb-name path. %.*s, is a directory.\n", LIT(path)); bad_flags = true; - break; + break; } // #if defined(GB_SYSTEM_WINDOWS) // String ext = path_extension(path); @@ -2020,6 +2020,10 @@ gb_internal void print_show_help(String const arg0, String const &command) { print_usage_line(2, "Shows an advanced overview of the timings of different stages within the compiler in milliseconds"); print_usage_line(0, ""); + print_usage_line(1, "-show-system-calls"); + print_usage_line(2, "Prints the whole command and arguments for calls to external tools like linker and assembler"); + print_usage_line(0, ""); + print_usage_line(1, "-export-timings:"); print_usage_line(2, "Export timings to one of a few formats. Requires `-show-timings` or `-show-more-timings`"); print_usage_line(2, "Available options:"); @@ -2853,7 +2857,7 @@ int main(int arg_count, char const **arg_ptr) { for_array(i, build_context.build_paths) { String build_path = path_to_string(heap_allocator(), build_context.build_paths[i]); debugf("build_paths[%ld]: %.*s\n", i, LIT(build_path)); - } + } } init_global_thread_pool();