core/crypto/sha3: odinfmt (NFC)

This commit is contained in:
Yawning Angel
2023-11-16 00:18:59 +09:00
parent 14a46c6d5e
commit d50380709d
4 changed files with 629 additions and 594 deletions


@@ -16,154 +16,159 @@ import "../util"
ROUNDS :: 24
Sha3_Context :: struct {
	st: struct #raw_union {
		b: [200]u8,
		q: [25]u64,
	},
	pt:        int,
	rsiz:      int,
	mdlen:     int,
	is_keccak: bool,
}
keccakf :: proc "contextless" (st: ^[25]u64) {
	keccakf_rndc := [?]u64 {
		0x0000000000000001, 0x0000000000008082, 0x800000000000808a,
		0x8000000080008000, 0x000000000000808b, 0x0000000080000001,
		0x8000000080008081, 0x8000000000008009, 0x000000000000008a,
		0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
		0x000000008000808b, 0x800000000000008b, 0x8000000000008089,
		0x8000000000008003, 0x8000000000008002, 0x8000000000000080,
		0x000000000000800a, 0x800000008000000a, 0x8000000080008081,
		0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
	}
	keccakf_rotc := [?]i32 {
		1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
		27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
	}
	keccakf_piln := [?]i32 {
		10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
		15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
	}

	i, j, r: i32 = ---, ---, ---
	t: u64 = ---
	bc: [5]u64 = ---

	// On big-endian targets, load the state from its little-endian byte order.
	when ODIN_ENDIAN != .Little {
		v: uintptr = ---
		for i = 0; i < 25; i += 1 {
			v = uintptr(&st[i])
			st[i] =
				u64((^u8)(v + 0)^) << 0 |
				u64((^u8)(v + 1)^) << 8 |
				u64((^u8)(v + 2)^) << 16 |
				u64((^u8)(v + 3)^) << 24 |
				u64((^u8)(v + 4)^) << 32 |
				u64((^u8)(v + 5)^) << 40 |
				u64((^u8)(v + 6)^) << 48 |
				u64((^u8)(v + 7)^) << 56
		}
	}

	for r = 0; r < ROUNDS; r += 1 {
		// theta
		for i = 0; i < 5; i += 1 {
			bc[i] = st[i] ~ st[i + 5] ~ st[i + 10] ~ st[i + 15] ~ st[i + 20]
		}

		for i = 0; i < 5; i += 1 {
			t = bc[(i + 4) % 5] ~ util.ROTL64(bc[(i + 1) % 5], 1)
			for j = 0; j < 25; j += 5 {
				st[j + i] ~= t
			}
		}

		// rho pi
		t = st[1]
		for i = 0; i < 24; i += 1 {
			j = keccakf_piln[i]
			bc[0] = st[j]
			st[j] = util.ROTL64(t, u64(keccakf_rotc[i]))
			t = bc[0]
		}

		// chi
		for j = 0; j < 25; j += 5 {
			for i = 0; i < 5; i += 1 {
				bc[i] = st[j + i]
			}
			for i = 0; i < 5; i += 1 {
				st[j + i] ~= ~bc[(i + 1) % 5] & bc[(i + 2) % 5]
			}
		}

		// iota
		st[0] ~= keccakf_rndc[r]
	}

	// On big-endian targets, store the state back in little-endian byte order.
	when ODIN_ENDIAN != .Little {
		for i = 0; i < 25; i += 1 {
			v = uintptr(&st[i])
			t = st[i]
			(^u8)(v + 0)^ = u8((t >> 0) & 0xff)
			(^u8)(v + 1)^ = u8((t >> 8) & 0xff)
			(^u8)(v + 2)^ = u8((t >> 16) & 0xff)
			(^u8)(v + 3)^ = u8((t >> 24) & 0xff)
			(^u8)(v + 4)^ = u8((t >> 32) & 0xff)
			(^u8)(v + 5)^ = u8((t >> 40) & 0xff)
			(^u8)(v + 6)^ = u8((t >> 48) & 0xff)
			(^u8)(v + 7)^ = u8((t >> 56) & 0xff)
		}
	}
}
init :: proc "contextless" (c: ^Sha3_Context) {
	for i := 0; i < 25; i += 1 {
		c.st.q[i] = 0
	}
	c.rsiz = 200 - 2 * c.mdlen
}
update :: proc "contextless" (c: ^Sha3_Context, data: []byte) {
	j := c.pt
	for i := 0; i < len(data); i += 1 {
		c.st.b[j] ~= data[i]
		j += 1
		if j >= c.rsiz {
			keccakf(&c.st.q)
			j = 0
		}
	}
	c.pt = j
}
final :: proc "contextless" (c: ^Sha3_Context, hash: []byte) {
	if c.is_keccak {
		c.st.b[c.pt] ~= 0x01
	} else {
		c.st.b[c.pt] ~= 0x06
	}
	c.st.b[c.rsiz - 1] ~= 0x80
	keccakf(&c.st.q)
	for i := 0; i < c.mdlen; i += 1 {
		hash[i] = c.st.b[i]
	}
}
shake_xof :: proc "contextless" (c: ^Sha3_Context) {
	c.st.b[c.pt] ~= 0x1F
	c.st.b[c.rsiz - 1] ~= 0x80
	keccakf(&c.st.q)
	c.pt = 0
}
shake_out :: proc "contextless" (c: ^Sha3_Context, hash: []byte) {
	j := c.pt
	for i := 0; i < len(hash); i += 1 {
		if j >= c.rsiz {
			keccakf(&c.st.q)
			j = 0
		}
		hash[i] = c.st.b[j]
		j += 1
	}
	c.pt = j
}
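
Taken together, this private API is a small sponge state machine: pick mdlen (init derives the rate as rsiz = 200 - 2 * mdlen), absorb bytes with update, then either pad-and-copy with final or switch to squeezing with shake_xof/shake_out. A minimal sketch of a one-shot digest built directly on it, assuming a caller inside this package; example_sum256 is a hypothetical name for illustration, not part of this commit:

// Hypothetical in-package helper, for illustration only; the public
// sha3 package below wraps exactly this sequence.
example_sum256 :: proc(data: string) -> (digest: [32]byte) {
	ctx: Sha3_Context
	ctx.mdlen = 32 // SHA3-256: init derives rsiz = 200 - 2 * 32 = 136
	init(&ctx)
	update(&ctx, transmute([]byte)(data))
	final(&ctx, digest[:]) // pads with 0x06 ... 0x80, permutes, copies mdlen bytes
	return
}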


@@ -11,8 +11,8 @@ package keccak
This is done because the padding in the SHA3 standard was changed by NIST, resulting in a different output.
*/
import "core:os"
import "core:io"
import "core:os"
import "../_sha3"
@@ -29,329 +29,341 @@ DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_224,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	ctx.is_keccak = true
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_256,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	ctx.is_keccak = true
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_384,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	ctx.is_keccak = true
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_512,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	ctx.is_keccak = true
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	ctx.is_keccak = true
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
@@ -361,14 +373,14 @@ hash_512 :: proc {
Keccak_Context :: _sha3.Sha3_Context
init :: proc(ctx: ^_sha3.Sha3_Context) {
	ctx.is_keccak = true
	_sha3.init(ctx)
}

update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
	_sha3.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
	_sha3.final(ctx, hash)
}
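
As the header comment notes, legacy Keccak differs from standardized SHA-3 only in the domain-separation byte that _sha3.final XORs into the state: 0x01 when is_keccak is set versus 0x06 for SHA-3 (and 0x1F in shake_xof). A usage sketch of the one-shot helpers, not part of this commit, assuming the core:crypto/keccak import path:

package main

import "core:crypto/keccak"
import "core:fmt"

main :: proc() {
	// The hash_256 proc group dispatches on argument type, so a
	// string argument selects hash_string_256.
	digest := keccak.hash_256("The quick brown fox jumps over the lazy dog")
	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()
}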


@@ -11,8 +11,8 @@ package sha3
If you wish to compute a Keccak hash, you can use the keccak package; it will use the original padding.
*/
import "core:os"
import "core:io"
import "core:os"
import "../_sha3"
@@ -28,317 +28,329 @@ DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_224,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_256,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_384,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_512,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
@@ -348,13 +360,13 @@ hash_512 :: proc {
Sha3_Context :: _sha3.Sha3_Context
init :: proc(ctx: ^_sha3.Sha3_Context) {
	_sha3.init(ctx)
}

update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
	_sha3.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
	_sha3.final(ctx, hash)
}
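
The tail of the file re-exports the incremental interface, so multi-part input can be hashed without the one-shot helpers. A sketch, not part of this commit, assuming the core:crypto/sha3 import path; note that mdlen must be set before init, since init derives the rate from it:

package main

import "core:crypto/sha3"
import "core:fmt"

main :: proc() {
	ctx: sha3.Sha3_Context
	ctx.mdlen = sha3.DIGEST_SIZE_256
	sha3.init(&ctx)

	// Equivalent to hashing the concatenation of both parts at once.
	part1, part2 := "hello ", "world"
	sha3.update(&ctx, transmute([]byte)(part1))
	sha3.update(&ctx, transmute([]byte)(part2))

	digest: [sha3.DIGEST_SIZE_256]byte
	sha3.final(&ctx, digest[:])
	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()
}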


@@ -11,8 +11,8 @@ package shake
The SHA3 functionality can be found in package sha3.
*/
import "core:os"
import "core:io"
import "core:os"
import "../_sha3"
@@ -26,165 +26,171 @@ DIGEST_SIZE_256 :: 32
// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
	return hash_bytes_128(transmute([]byte)(data))
}

// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
	hash: [DIGEST_SIZE_128]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_128
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_128,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_128
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash)
}

// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
	hash: [DIGEST_SIZE_128]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_128
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash, true
}

// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
	if !load_at_once {
		return hash_stream_128(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_128(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_128]byte{}, false
}

hash_128 :: proc {
	hash_stream_128,
	hash_file_128,
	hash_bytes_128,
	hash_string_128,
	hash_bytes_to_buffer_128,
	hash_string_to_buffer_128,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(
		len(hash) >= DIGEST_SIZE_256,
		"Size of destination buffer is smaller than the digest size",
	)
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

/*
@@ -194,14 +200,14 @@ hash_256 :: proc {
Shake_Context :: _sha3.Sha3_Context
init :: proc(ctx: ^_sha3.Sha3_Context) {
	_sha3.init(ctx)
}

update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
	_sha3.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
	_sha3.shake_xof(ctx)
	_sha3.shake_out(ctx, hash[:])
}
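
Here mdlen does double duty: it only selects the rate (DIGEST_SIZE_128 gives the SHAKE128 rate of 200 - 2 * 16 = 168 bytes, DIGEST_SIZE_256 the SHAKE256 rate of 136), while the output length is simply the size of the buffer passed to final, since SHAKE is an extendable-output function. A sketch squeezing 64 bytes from SHAKE128, not part of this commit, assuming the core:crypto/shake import path:

package main

import "core:crypto/shake"
import "core:fmt"

main :: proc() {
	ctx: shake.Shake_Context
	ctx.mdlen = shake.DIGEST_SIZE_128 // rate selection only, not output length
	shake.init(&ctx)

	msg := "arbitrary-length output"
	shake.update(&ctx, transmute([]byte)(msg))

	out: [64]byte // any length works: final pads with 0x1F, then squeezes len(out) bytes
	shake.final(&ctx, out[:])
	for b in out {
		fmt.printf("%02x", b)
	}
	fmt.println()
}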