From eb089a1c9884b39d5a2fbfd449e183661fa51e88 Mon Sep 17 00:00:00 2001
From: Laytan
Date: Sun, 16 Nov 2025 15:06:30 +0100
Subject: [PATCH] skip collision panic when package names aren't unique

---
 src/checker.cpp | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/src/checker.cpp b/src/checker.cpp
index 1daacd9ce..235d4def9 100644
--- a/src/checker.cpp
+++ b/src/checker.cpp
@@ -7031,9 +7031,11 @@ gb_internal void check_objc_context_provider_procedures(Checker *c) {
 	}
 }
 
-gb_internal void check_unique_package_names(Checker *c) {
+gb_internal bool check_unique_package_names(Checker *c) {
 	ERROR_BLOCK();
 
+	bool ok = true;
+
 	StringMap<AstPackage *> pkgs = {}; // Key: package name
 	string_map_init(&pkgs, 2*c->info.packages.count);
 	defer (string_map_destroy(&pkgs));
@@ -7058,6 +7060,7 @@ gb_internal void check_unique_package_names(Checker *c) {
 			continue;
 		}
 
+		ok = false;
 		begin_error_block();
 
 		error(curr, "Duplicate declaration of 'package %.*s'", LIT(name));
@@ -7080,6 +7083,8 @@ gb_internal void check_unique_package_names(Checker *c) {
 
 		end_error_block();
 	}
+
+	return ok;
 }
 
 gb_internal void check_add_entities_from_queues(Checker *c) {
@@ -7462,7 +7467,7 @@ gb_internal void check_parsed_files(Checker *c) {
 	debugf("Total Procedure Bodies Checked: %td\n", total_bodies_checked.load(std::memory_order_relaxed));
 
 	TIME_SECTION("check unique package names");
-	check_unique_package_names(c);
+	bool package_names_are_unique = check_unique_package_names(c);
 
 	TIME_SECTION("sanity checks");
 	check_merge_queues_into_arrays(c);
@@ -7519,7 +7524,8 @@ gb_internal void check_parsed_files(Checker *c) {
 		c->info.type_info_types_hash_map[index] = tt;
 
 		bool exists = map_set_if_not_previously_exists(&c->info.min_dep_type_info_index_map, tt.hash, index);
-		if (exists) {
+		// Because we've already written a nice error about a duplicate package declaration, skip this panic if the package names aren't unique.
+		if (package_names_are_unique && exists) {
 			for (auto const &entry : c->info.min_dep_type_info_index_map) {
 				if (entry.key != tt.hash) {
 					continue;