From f90d7029b405dcdade44d2cf1824586a4631c3a6 Mon Sep 17 00:00:00 2001 From: blob1807 <12388588+blob1807@users.noreply.github.com> Date: Thu, 21 Aug 2025 05:19:16 +1000 Subject: [PATCH] Fix stride in `memory_equal/compare_zero` giving false positives The previous stride of 8 assumed `uintptr` size is 8, which isn't the case on 32-bit & wasm64p32, causing every other set of 4 bytes to be skipped. --- base/runtime/internal.odin | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/base/runtime/internal.odin b/base/runtime/internal.odin index 4f9509b23..77fe09ca8 100644 --- a/base/runtime/internal.odin +++ b/base/runtime/internal.odin @@ -268,8 +268,8 @@ memory_equal :: proc "contextless" (x, y: rawptr, n: int) -> bool { } } - m = (n-i) / 8 * 8 - for /**/; i < m; i += 8 { + m = (n-i) / size_of(uintptr) * size_of(uintptr) + for /**/; i < m; i += size_of(uintptr) { if intrinsics.unaligned_load(cast(^uintptr)&a[i]) != intrinsics.unaligned_load(cast(^uintptr)&b[i]) { return false } @@ -389,8 +389,8 @@ memory_compare_zero :: proc "contextless" (a: rawptr, n: int) -> int #no_bounds_ } } - m = (n-i) / 8 * 8 - for /**/; i < m; i += 8 { + m = (n-i) / size_of(uintptr) * size_of(uintptr) + for /**/; i < m; i += size_of(uintptr) { if intrinsics.unaligned_load(cast(^uintptr)&bytes[i]) != 0 { return 1 }