Mirror of https://github.com/odin-lang/Odin.git, synced 2026-02-14 23:33:15 +00:00
core/crypto/md5: API cleanup
- md5.Md5_Context -> md5.Context
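In user code the rename is mechanical: declare the context as md5.Context and keep the init/update/final calls as before. A minimal sketch under that assumption (the sample strings and the final comparison are illustrative, not part of this commit):

package md5_example

import "core:fmt"
import "core:crypto/md5"

main :: proc() {
	// One-shot convenience API, unchanged by this commit.
	digest := md5.hash_string("The quick brown fox jumps over the lazy dog")

	// Incremental API: the context type is now md5.Context
	// (previously md5.Md5_Context); init/update/final keep their signatures.
	ctx: md5.Context
	md5.init(&ctx)

	part1 := "The quick brown fox "
	part2 := "jumps over the lazy dog"
	md5.update(&ctx, transmute([]byte)part1)
	md5.update(&ctx, transmute([]byte)part2)

	incremental: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, incremental[:])

	// Both paths produce the same 16-byte digest.
	fmt.println(digest == incremental)
}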
core/crypto/md5/md5.odin
@@ -32,7 +32,7 @@ hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
 // computed hash
 hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
 	hash: [DIGEST_SIZE]byte
-	ctx: Md5_Context
+	ctx: Context
 	init(&ctx)
 	update(&ctx, data)
 	final(&ctx, hash[:])
@@ -50,7 +50,7 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	ctx: Md5_Context
+	ctx: Context
 	init(&ctx)
 	update(&ctx, data)
 	final(&ctx, hash)
@@ -60,10 +60,12 @@ hash_bytes_to_buffer :: proc(data, hash: []byte) {
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
 	hash: [DIGEST_SIZE]byte
-	ctx: Md5_Context
+	ctx: Context
 	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
@@ -101,7 +103,7 @@ hash :: proc {
 	Low level API
 */

-init :: proc(ctx: ^Md5_Context) {
+init :: proc(ctx: ^Context) {
 	ctx.state[0] = 0x67452301
 	ctx.state[1] = 0xefcdab89
 	ctx.state[2] = 0x98badcfe
@@ -113,7 +115,7 @@ init :: proc(ctx: ^Md5_Context) {
 	ctx.is_initialized = true
 }

-update :: proc(ctx: ^Md5_Context, data: []byte) {
+update :: proc(ctx: ^Context, data: []byte) {
 	assert(ctx.is_initialized)

 	for i := 0; i < len(data); i += 1 {
@@ -127,7 +129,7 @@ update :: proc(ctx: ^Md5_Context, data: []byte) {
 	}
 }

-final :: proc(ctx: ^Md5_Context, hash: []byte) {
+final :: proc(ctx: ^Context, hash: []byte) {
 	assert(ctx.is_initialized)

 	if len(hash) < DIGEST_SIZE {
@@ -171,7 +173,7 @@ final :: proc(ctx: ^Md5_Context, hash: []byte) {

 BLOCK_SIZE :: 64

-Md5_Context :: struct {
+Context :: struct {
 	data: [BLOCK_SIZE]byte,
 	state: [4]u32,
 	bitlen: u64,
@@ -206,7 +208,7 @@ II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u
 }

 @(private)
-transform :: proc "contextless" (ctx: ^Md5_Context, data: []byte) {
+transform :: proc "contextless" (ctx: ^Context, data: []byte) {
 	m: [DIGEST_SIZE]u32

 	for i := 0; i < DIGEST_SIZE; i += 1 {
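Besides the incremental API, the core package keeps the buffer-filling convenience variant shown in the hunks above; callers must supply a destination of at least DIGEST_SIZE (16) bytes. A small hedged sketch of that call (the message and buffer names are illustrative):

package md5_example

import "core:crypto/md5"

main :: proc() {
	msg := "abc"

	// hash_bytes_to_buffer writes the digest into the caller's buffer,
	// which must be at least DIGEST_SIZE bytes long.
	dst: [md5.DIGEST_SIZE]byte
	md5.hash_bytes_to_buffer(transmute([]byte)msg, dst[:])
}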
vendor/botan/md5/md5.odin (vendored, 39 changed lines)
@@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
 // computed hash
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
 	hash: [DIGEST_SIZE]byte
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_MD5, 0)
-	botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }

@@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
 	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_MD5, 0)
-	botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 }

 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
 	hash: [DIGEST_SIZE]byte
-	ctx: botan.hash_t
-	botan.hash_init(&ctx, botan.HASH_MD5, 0)
+	ctx: Context
+	init(&ctx)
 	buf := make([]byte, 512)
 	defer delete(buf)
 	i := 1
 	for i > 0 {
 		i, _ = io.read(s, buf)
 		if i > 0 {
-			botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
+			update(&ctx, buf[:i])
 		}
 	}
-	botan.hash_final(ctx, &hash[0])
-	botan.hash_destroy(ctx)
-	return hash, true
+	final(&ctx, hash[:])
+	return hash, true
 }

 // hash_file will read the file provided by the given handle
@@ -105,17 +102,17 @@ hash :: proc {
 	Low level API
 */

-Md5_Context :: botan.hash_t
+Context :: botan.hash_t

-init :: proc "contextless" (ctx: ^botan.hash_t) {
+init :: proc "contextless" (ctx: ^Context) {
 	botan.hash_init(ctx, botan.HASH_MD5, 0)
 }

-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
 	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }

-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
 	botan.hash_final(ctx^, &hash[0])
 	botan.hash_destroy(ctx^)
 }
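After this change the Botan-backed vendor package mirrors the core package's surface (Context, init, update, final), so switching backends is essentially an import swap. A hedged sketch, assuming the Botan bindings are available and linked on the target system:

package md5_example

import md5 "vendor:botan/md5"

main :: proc() {
	ctx: md5.Context // alias for botan.hash_t after this commit
	md5.init(&ctx)

	data := "hello"
	md5.update(&ctx, transmute([]byte)data)

	digest: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, digest[:]) // finalizes and frees the Botan handle
}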