From 7640fb0483224b4c7a9bfa55fac3203de3f3e5dc Mon Sep 17 00:00:00 2001
From: Yawning Angel
Date: Fri, 17 Nov 2023 17:27:14 +0900
Subject: [PATCH] core/crypto/shake: API cleanup

- shake.Shake_Context -> shake.Context
---
 core/crypto/shake/shake.odin  | 66 ++++++++++++++++-----------------
 vendor/botan/shake/shake.odin | 70 ++++++++++++++++-------------------
 2 files changed, 64 insertions(+), 72 deletions(-)

diff --git a/core/crypto/shake/shake.odin b/core/crypto/shake/shake.odin
index c490de41e..e4b4c1e31 100644
--- a/core/crypto/shake/shake.odin
+++ b/core/crypto/shake/shake.odin
@@ -36,12 +36,11 @@ hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
 // computed hash
 hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
 	hash: [DIGEST_SIZE_128]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_128
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.shake_xof(&ctx)
-	_sha3.shake_out(&ctx, hash[:])
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -56,32 +55,32 @@ hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_128
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.shake_xof(&ctx)
-	_sha3.shake_out(&ctx, hash)
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_128 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
 	hash: [DIGEST_SIZE_128]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_128
-	_sha3.init(&ctx)
+	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
 		if read > 0 {
-			_sha3.update(&ctx, buf[:read])
+			update(&ctx, buf[:read])
 		}
 	}
-	_sha3.shake_xof(&ctx)
-	_sha3.shake_out(&ctx, hash[:])
+	final(&ctx, hash[:])
 	return hash, true
 }
 
@@ -117,12 +116,11 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
 	hash: [DIGEST_SIZE_256]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_256
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.shake_xof(&ctx)
-	_sha3.shake_out(&ctx, hash[:])
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -137,32 +135,32 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_256
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.shake_xof(&ctx)
-	_sha3.shake_out(&ctx, hash)
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
 	hash: [DIGEST_SIZE_256]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_256
-	_sha3.init(&ctx)
+	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
 		if read > 0 {
-			_sha3.update(&ctx, buf[:read])
+			update(&ctx, buf[:read])
 		}
 	}
-	_sha3.shake_xof(&ctx)
-	_sha3.shake_out(&ctx, hash[:])
+	final(&ctx, hash[:])
 	return hash, true
 }
 
@@ -192,17 +190,17 @@ hash_256 :: proc {
     Low level API
 */
 
-Shake_Context :: _sha3.Sha3_Context
+Context :: _sha3.Sha3_Context
 
-init :: proc(ctx: ^_sha3.Sha3_Context) {
+init :: proc(ctx: ^Context) {
 	_sha3.init(ctx)
 }
 
-update :: proc(ctx: ^_sha3.Sha3_Context, data: []byte) {
+update :: proc(ctx: ^Context, data: []byte) {
 	_sha3.update(ctx, data)
 }
 
-final :: proc(ctx: ^_sha3.Sha3_Context, hash: []byte) {
+final :: proc(ctx: ^Context, hash: []byte) {
 	_sha3.shake_xof(ctx)
 	_sha3.shake_out(ctx, hash[:])
 }
diff --git a/vendor/botan/shake/shake.odin b/vendor/botan/shake/shake.odin
index af577f316..fe059f0f9 100644
--- a/vendor/botan/shake/shake.odin
+++ b/vendor/botan/shake/shake.odin
@@ -33,11 +33,10 @@ hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
 // computed hash
 hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
 	hash: [DIGEST_SIZE_128]byte
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0)
-	botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
+	ctx: Context
+	init(&ctx, hash_size = 128)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -53,31 +52,29 @@ hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
 	assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0)
-	botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
+	ctx: Context
+	init(&ctx, hash_size = 128)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_128 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
 	hash: [DIGEST_SIZE_128]byte
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0)
+	ctx: Context
+	init(&ctx, hash_size = 128)
 	buf := make([]byte, 512)
 	defer delete(buf)
 	i := 1
 	for i > 0 {
 		i, _ = io.read(s, buf)
 		if i > 0 {
-			botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-		}
+			update(&ctx, buf[:i])
+		}
 	}
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
-	return hash, true
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_128 will read the file provided by the given handle
@@ -112,11 +109,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
 	hash: [DIGEST_SIZE_256]byte
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0)
-	botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
+	ctx: Context
+	init(&ctx, hash_size = 256)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -132,31 +128,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
 	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0)
-	botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
+	ctx: Context
+	init(&ctx, hash_size = 256)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
 	hash: [DIGEST_SIZE_256]byte
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0)
+	ctx: Context
+	init(&ctx, hash_size = 256)
 	buf := make([]byte, 512)
 	defer delete(buf)
 	i := 1
 	for i > 0 {
 		i, _ = io.read(s, buf)
 		if i > 0 {
-			botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-		}
+			update(&ctx, buf[:i])
+		}
 	}
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
-	return hash, true
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
@@ -185,20 +179,20 @@ hash_256 :: proc {
     Low level API
 */
 
-Shake_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 256) {
+init :: proc "contextless" (ctx: ^Context, hash_size := 256) {
 	switch hash_size {
 	case 128: botan.hash_init(ctx, botan.HASH_SHAKE_128, 0)
 	case 256: botan.hash_init(ctx, botan.HASH_SHAKE_256, 0)
 	}
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
 	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
 	botan.hash_final(ctx^, &hash[0])
 	botan.hash_destroy(ctx^)
 }
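
Usage note (not part of the patch): after the rename, caller code against the core package looks like the sketch below. This is a minimal example assuming only what the hunks above show, i.e. the `Context` alias, `init`/`update`/`final`, the `DIGEST_SIZE_*` constants, and the `ctx.mdlen` assignment pattern the package's own helpers use; the input strings are illustrative.

package main

import "core:crypto/shake"
import "core:fmt"

main :: proc() {
	// One-shot convenience helper.
	want := shake.hash_string_256("Hello, Odin!")

	// Incremental hashing through the renamed low level API;
	// shake.Context replaces the old shake.Shake_Context.
	ctx: shake.Context
	ctx.mdlen = shake.DIGEST_SIZE_256 // select the output length, as the helpers above do
	shake.init(&ctx)
	shake.update(&ctx, transmute([]byte)string("Hello, "))
	shake.update(&ctx, transmute([]byte)string("Odin!"))

	got: [shake.DIGEST_SIZE_256]byte
	shake.final(&ctx, got[:]) // shake_xof + shake_out under the hood

	fmt.println(want == got) // expected: true
}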
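
The vendor/botan wrapper ends up with the same shape, except the output size is selected at `init` time rather than via `ctx.mdlen`, and `final` also destroys the underlying Botan handle, so a context is single-use. A sketch under the same caveats, additionally assuming a build where the Botan bindings are available:

package main

import "core:fmt"
import "vendor:botan/shake"

main :: proc() {
	ctx: shake.Context // formerly shake.Shake_Context
	shake.init(&ctx, hash_size = 256) // 128 or 256; 256 is the default

	shake.update(&ctx, transmute([]byte)string("Hello, Odin!"))

	hash: [shake.DIGEST_SIZE_256]byte
	shake.final(&ctx, hash[:]) // finalizes and calls botan.hash_destroy; ctx is now dead

	fmt.println(hash)
}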