core:crypto/hash: Add a generic higher level hash interface

There is a lot of code duplicated across the convenience methods in each
hash implementation, and having a generic hash type makes implementing
higher-level constructs such as HMAC significantly easier down the road.
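
As a rough illustration of the motivation, a sketch against the generic API this commit adds (not code from the commit itself): one algorithm-agnostic code path covers every supported algorithm, and constructs like HMAC can be written once against `hash.Context` in the same way.

```odin
package generic_hash_example

import "core:crypto/hash"
import "core:fmt"

main :: proc() {
	// One generic code path, instead of one specialized call per
	// per-algorithm package.
	algorithms := []hash.Algorithm{.SHA256, .BLAKE2B, .SM3}
	for algo in algorithms {
		digest := hash.hash(algo, "Feed the fire.")
		defer delete(digest)
		fmt.println(hash.ALGORITHM_NAMES[algo], digest)
	}
}
```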
Yawning Angel
2024-01-25 08:13:46 +09:00
parent ca10fc2d47
commit 00ab3beed9
18 changed files with 1531 additions and 2255 deletions


@@ -1,84 +1,22 @@
# crypto
A cryptography library for the Odin language
A cryptography library for the Odin language.
## Supported
This library offers various algorithms implemented in Odin.
Please see the chart below for some of the options.
## Hashing algorithms
| Algorithm | Supported |
|:-------------------------------------------------------------------------------------------------------------|:-----------------|
| [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ |
| [BLAKE2S](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ |
| [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | ✔️ |
| [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02) | ✔️ |
| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ |
| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ |
#### High level API
Each hash algorithm contains a procedure group named `hash`, or, if the algorithm provides more than one digest size, `hash_<size>`\*.
Included in these groups are six procedures.
- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
- `hash_bytes` - Hash a given byte slice and return the computed hash
- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
- `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (the default) or read at once (set to true); a sketch of this variant follows the example below
\* On some algorithms there is another part to the name, since they might offer control over additional parameters.
For instance, `SHA-2` offers different sizes.
Computing a 512-bit hash is therefore achieved by calling `sha2.hash_512(...)`.
#### Low level API
The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
You may also directly call them, if you wish.
#### Example
```odin
package crypto_example
// Import the desired package
import "core:crypto/blake2b"
main :: proc() {
input := "foo"
// Compute the hash, using the high level API
computed_hash := blake2b.hash(input)
// Variant that takes a destination buffer, instead of returning the computed hash
hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
blake2b.hash(input, hash[:])
// Compute the hash, using the low level API
ctx: blake2b.Context
computed_hash_low: [blake2b.DIGEST_SIZE]byte
blake2b.init(&ctx)
blake2b.update(&ctx, transmute([]byte)input)
blake2b.final(&ctx, computed_hash_low[:])
}
```
For example uses of all available algorithms, please see the tests within `tests/core/crypto`.
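The stream and file variants listed above follow the same pattern. A minimal sketch of `hash_file` with the per-package API (the file name is a placeholder):
```odin
package crypto_example_file

// Import the desired package
import "core:crypto/blake2b"
import "core:fmt"
import "core:os"

main :: proc() {
	hd, err := os.open("input.txt") // Placeholder path.
	if err != os.ERROR_NONE {
		return
	}
	defer os.close(hd)

	// Streamed by default; pass load_at_once = true to read the
	// whole file into memory first.
	digest, ok := blake2b.hash_file(hd)
	if !ok {
		return
	}
	fmt.println("BLAKE2b:", digest[:])
}
```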
This package offers various algorithms implemented in Odin, along with
useful helpers such as access to the system entropy source, and a
constant-time byte comparison.
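A minimal sketch of those helpers (procedure names as assumed from the `core:crypto` package; consult the package documentation):
```odin
package crypto_helpers_example

import "core:crypto"

main :: proc() {
	// Fill a key with bytes from the system entropy source.
	key: [32]byte
	crypto.rand_bytes(key[:])

	// Constant-time comparison (1 iff equal), for checking secrets
	// such as MACs without a timing side-channel.
	equal := crypto.compare_constant_time(key[:], key[:])
	assert(equal == 1)
}
```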
## Implementation considerations
- The crypto packages are not thread-safe.
- Best-effort is made to mitigate timing side-channels on reasonable
architectures. Architectures that are known to be unreasonable include
but are not limited to i386, i486, and WebAssembly.
- Some but not all of the packages attempt to sanitize sensitive data;
however, this is not done consistently through the library at the moment.
As Thomas Pornin puts it, "In general, such memory cleansing is a fool's
quest."
- The packages attempt to sanitize sensitive data; however, this is, and
will remain, a "best-effort" implementation decision. As Thomas Pornin
puts it, "In general, such memory cleansing is a fool's quest."
- None of these packages has received independent third-party review.
## License


@@ -11,6 +11,7 @@ package _blake2
*/
import "core:encoding/endian"
import "core:mem"
BLAKE2S_BLOCK_SIZE :: 64
BLAKE2S_SIZE :: 32
@@ -28,7 +29,6 @@ Blake2s_Context :: struct {
is_keyed: bool,
size: byte,
is_last_node: bool,
cfg: Blake2_Config,
is_initialized: bool,
}
@@ -44,7 +44,6 @@ Blake2b_Context :: struct {
is_keyed: bool,
size: byte,
is_last_node: bool,
cfg: Blake2_Config,
is_initialized: bool,
}
@@ -83,62 +82,61 @@ BLAKE2B_IV := [8]u64 {
0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
}
init :: proc(ctx: ^$T) {
init :: proc(ctx: ^$T, cfg: ^Blake2_Config) {
when T == Blake2s_Context {
block_size :: BLAKE2S_BLOCK_SIZE
max_size :: BLAKE2S_SIZE
} else when T == Blake2b_Context {
block_size :: BLAKE2B_BLOCK_SIZE
max_size :: BLAKE2B_SIZE
}
if ctx.cfg.size > max_size {
if cfg.size > max_size {
panic("blake2: requested output size exceeeds algorithm max")
}
p := make([]byte, block_size)
defer delete(p)
// To save having to allocate a scratch buffer, use the internal
// data buffer (`ctx.x`), as it is exactly the correct size.
p := ctx.x[:]
p[0] = ctx.cfg.size
p[1] = byte(len(ctx.cfg.key))
p[0] = cfg.size
p[1] = byte(len(cfg.key))
if ctx.cfg.salt != nil {
if cfg.salt != nil {
when T == Blake2s_Context {
copy(p[16:], ctx.cfg.salt)
copy(p[16:], cfg.salt)
} else when T == Blake2b_Context {
copy(p[32:], ctx.cfg.salt)
copy(p[32:], cfg.salt)
}
}
if ctx.cfg.person != nil {
if cfg.person != nil {
when T == Blake2s_Context {
copy(p[24:], ctx.cfg.person)
copy(p[24:], cfg.person)
} else when T == Blake2b_Context {
copy(p[48:], ctx.cfg.person)
copy(p[48:], cfg.person)
}
}
if ctx.cfg.tree != nil {
p[2] = ctx.cfg.tree.(Blake2_Tree).fanout
p[3] = ctx.cfg.tree.(Blake2_Tree).max_depth
endian.unchecked_put_u32le(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
if cfg.tree != nil {
p[2] = cfg.tree.(Blake2_Tree).fanout
p[3] = cfg.tree.(Blake2_Tree).max_depth
endian.unchecked_put_u32le(p[4:], cfg.tree.(Blake2_Tree).leaf_size)
when T == Blake2s_Context {
p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
p[10] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 16)
p[11] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 24)
p[12] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 32)
p[13] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 40)
p[14] = ctx.cfg.tree.(Blake2_Tree).node_depth
p[15] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
p[8] = byte(cfg.tree.(Blake2_Tree).node_offset)
p[9] = byte(cfg.tree.(Blake2_Tree).node_offset >> 8)
p[10] = byte(cfg.tree.(Blake2_Tree).node_offset >> 16)
p[11] = byte(cfg.tree.(Blake2_Tree).node_offset >> 24)
p[12] = byte(cfg.tree.(Blake2_Tree).node_offset >> 32)
p[13] = byte(cfg.tree.(Blake2_Tree).node_offset >> 40)
p[14] = cfg.tree.(Blake2_Tree).node_depth
p[15] = cfg.tree.(Blake2_Tree).inner_hash_size
} else when T == Blake2b_Context {
endian.unchecked_put_u64le(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
p[16] = ctx.cfg.tree.(Blake2_Tree).node_depth
p[17] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
endian.unchecked_put_u64le(p[8:], cfg.tree.(Blake2_Tree).node_offset)
p[16] = cfg.tree.(Blake2_Tree).node_depth
p[17] = cfg.tree.(Blake2_Tree).inner_hash_size
}
} else {
p[2], p[3] = 1, 1
}
ctx.size = ctx.cfg.size
ctx.size = cfg.size
for i := 0; i < 8; i += 1 {
when T == Blake2s_Context {
ctx.h[i] = BLAKE2S_IV[i] ~ endian.unchecked_get_u32le(p[i * 4:])
@@ -147,11 +145,14 @@ init :: proc(ctx: ^$T) {
ctx.h[i] = BLAKE2B_IV[i] ~ endian.unchecked_get_u64le(p[i * 8:])
}
}
if ctx.cfg.tree != nil && ctx.cfg.tree.(Blake2_Tree).is_last_node {
mem.zero(&ctx.x, size_of(ctx.x)) // Done with the scratch space, no barrier.
if cfg.tree != nil && cfg.tree.(Blake2_Tree).is_last_node {
ctx.is_last_node = true
}
if len(ctx.cfg.key) > 0 {
copy(ctx.padded_key[:], ctx.cfg.key)
if len(cfg.key) > 0 {
copy(ctx.padded_key[:], cfg.key)
update(ctx, ctx.padded_key[:])
ctx.is_keyed = true
}
@@ -194,22 +195,40 @@ update :: proc(ctx: ^$T, p: []byte) {
ctx.nx += copy(ctx.x[ctx.nx:], p)
}
final :: proc(ctx: ^$T, hash: []byte) {
final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
ctx := ctx
if finalize_clone {
tmp_ctx: T
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
when T == Blake2s_Context {
if len(hash) < int(ctx.cfg.size) {
if len(hash) < int(ctx.size) {
panic("crypto/blake2s: invalid destination digest size")
}
blake2s_final(ctx, hash)
} else when T == Blake2b_Context {
if len(hash) < int(ctx.cfg.size) {
if len(hash) < int(ctx.size) {
panic("crypto/blake2b: invalid destination digest size")
}
blake2b_final(ctx, hash)
}
}
ctx.is_initialized = false
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
}
mem.zero_explicit(ctx, size_of(ctx^))
}
@(private)


@@ -12,10 +12,11 @@ package _sha3
*/
import "core:math/bits"
import "core:mem"
ROUNDS :: 24
Sha3_Context :: struct {
Context :: struct {
st: struct #raw_union {
b: [200]u8,
q: [25]u64,
@@ -103,81 +104,101 @@ keccakf :: proc "contextless" (st: ^[25]u64) {
}
}
init :: proc(c: ^Sha3_Context) {
init :: proc(ctx: ^Context) {
for i := 0; i < 25; i += 1 {
c.st.q[i] = 0
ctx.st.q[i] = 0
}
c.rsiz = 200 - 2 * c.mdlen
c.pt = 0
ctx.rsiz = 200 - 2 * ctx.mdlen
ctx.pt = 0
c.is_initialized = true
c.is_finalized = false
ctx.is_initialized = true
ctx.is_finalized = false
}
update :: proc(c: ^Sha3_Context, data: []byte) {
assert(c.is_initialized)
assert(!c.is_finalized)
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
assert(!ctx.is_finalized)
j := c.pt
j := ctx.pt
for i := 0; i < len(data); i += 1 {
c.st.b[j] ~= data[i]
ctx.st.b[j] ~= data[i]
j += 1
if j >= c.rsiz {
keccakf(&c.st.q)
if j >= ctx.rsiz {
keccakf(&ctx.st.q)
j = 0
}
}
c.pt = j
ctx.pt = j
}
final :: proc(c: ^Sha3_Context, hash: []byte) {
assert(c.is_initialized)
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
if len(hash) < c.mdlen {
if c.is_keccak {
if len(hash) < ctx.mdlen {
if ctx.is_keccak {
panic("crypto/keccac: invalid destination digest size")
}
panic("crypto/sha3: invalid destination digest size")
}
if c.is_keccak {
c.st.b[c.pt] ~= 0x01
ctx := ctx
if finalize_clone {
tmp_ctx: Context
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
if ctx.is_keccak {
ctx.st.b[ctx.pt] ~= 0x01
} else {
c.st.b[c.pt] ~= 0x06
ctx.st.b[ctx.pt] ~= 0x06
}
c.st.b[c.rsiz - 1] ~= 0x80
keccakf(&c.st.q)
for i := 0; i < c.mdlen; i += 1 {
hash[i] = c.st.b[i]
ctx.st.b[ctx.rsiz - 1] ~= 0x80
keccakf(&ctx.st.q)
for i := 0; i < ctx.mdlen; i += 1 {
hash[i] = ctx.st.b[i]
}
}
clone :: proc(ctx, other: ^Context) {
ctx^ = other^
}
reset :: proc(ctx: ^Context) {
if !ctx.is_initialized {
return
}
c.is_initialized = false // No more absorb, no more squeeze.
mem.zero_explicit(ctx, size_of(ctx^))
}
shake_xof :: proc(c: ^Sha3_Context) {
assert(c.is_initialized)
assert(!c.is_finalized)
c.st.b[c.pt] ~= 0x1F
c.st.b[c.rsiz - 1] ~= 0x80
keccakf(&c.st.q)
c.pt = 0
shake_xof :: proc(ctx: ^Context) {
assert(ctx.is_initialized)
assert(!ctx.is_finalized)
c.is_finalized = true // No more absorb, unlimited squeeze.
ctx.st.b[ctx.pt] ~= 0x1F
ctx.st.b[ctx.rsiz - 1] ~= 0x80
keccakf(&ctx.st.q)
ctx.pt = 0
ctx.is_finalized = true // No more absorb, unlimited squeeze.
}
shake_out :: proc(c: ^Sha3_Context, hash: []byte) {
assert(c.is_initialized)
assert(c.is_finalized)
shake_out :: proc(ctx: ^Context, hash: []byte) {
assert(ctx.is_initialized)
assert(ctx.is_finalized)
j := c.pt
j := ctx.pt
for i := 0; i < len(hash); i += 1 {
if j >= c.rsiz {
keccakf(&c.st.q)
if j >= ctx.rsiz {
keccakf(&ctx.st.q)
j = 0
}
hash[i] = c.st.b[j]
hash[i] = ctx.st.b[j]
j += 1
}
c.pt = j
ctx.pt = j
}


@@ -7,121 +7,33 @@ package blake2b
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the BLAKE2b hashing algorithm.
BLAKE2b and BLAKE2s share the implementation in the _blake2 package.
Interface for the vanilla BLAKE2b hashing algorithm.
*/
import "core:io"
import "core:os"
import "../_blake2"
/*
High level API
*/
DIGEST_SIZE :: 64
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2B_SIZE
ctx.cfg = cfg
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
ctx: Context
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2B_SIZE
ctx.cfg = cfg
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2B_SIZE
ctx.cfg = cfg
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
Context :: _blake2.Blake2b_Context
init :: proc(ctx: ^Context) {
_blake2.init(ctx)
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2B_SIZE
_blake2.init(ctx, &cfg)
}
update :: proc(ctx: ^Context, data: []byte) {
_blake2.update(ctx, data)
}
final :: proc(ctx: ^Context, hash: []byte) {
_blake2.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_blake2.final(ctx, hash, finalize_clone)
}
clone :: proc(ctx, other: ^Context) {
_blake2.clone(ctx, other)
}
reset :: proc(ctx: ^Context) {
_blake2.reset(ctx)
}


@@ -7,121 +7,33 @@ package blake2s
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the BLAKE2s hashing algorithm.
BLAKE2s and BLAKE2b share the implementation in the _blake2 package.
Interface for the vanilla BLAKE2s hashing algorithm.
*/
import "core:io"
import "core:os"
import "../_blake2"
/*
High level API
*/
DIGEST_SIZE :: 32
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2S_SIZE
ctx.cfg = cfg
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
ctx: Context
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2S_SIZE
ctx.cfg = cfg
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2S_SIZE
ctx.cfg = cfg
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
Context :: _blake2.Blake2s_Context
init :: proc(ctx: ^Context) {
_blake2.init(ctx)
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2S_SIZE
_blake2.init(ctx, &cfg)
}
update :: proc(ctx: ^Context, data: []byte) {
_blake2.update(ctx, data)
}
final :: proc(ctx: ^Context, hash: []byte) {
_blake2.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_blake2.final(ctx, hash, finalize_clone)
}
clone :: proc(ctx, other: ^Context) {
_blake2.clone(ctx, other)
}
reset :: proc(ctx: ^Context) {
_blake2.reset(ctx)
}

core/crypto/hash/doc.odin Normal file

@@ -0,0 +1,62 @@
/*
package hash provides a generic interface to the supported hash algorithms.
A high-level convenience procedure group `hash` is provided to easily
accomplish common tasks.
- `hash_string` - Hash a given string and return the digest.
- `hash_bytes` - Hash a given byte slice and return the digest.
- `hash_string_to_buffer` - Hash a given string and put the digest in
the third parameter. It requires that the destination buffer
is at least as big as the digest size.
- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed
digest in the third parameter. It requires that the destination
buffer is at least as big as the digest size.
- `hash_stream` - Incrementally fully consume an `io.Stream`, and return
the computed digest.
- `hash_file` - Takes a file handle and returns the computed digest.
A third optional boolean parameter controls if the file is streamed
(default) or read at once; a sketch of this variant follows the
example below.
```odin
package hash_example
import "core:crypto/hash"
main :: proc() {
input := "Feed the fire."
// Compute the digest, using the high level API.
returned_digest := hash.hash(hash.Algorithm.SHA512_256, input)
defer delete(returned_digest)
// Variant that takes a destination buffer, instead of returning
// the digest.
digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.BLAKE2B]) // @note: Destination buffer has to be at least as big as the digest size of the hash.
defer delete(digest)
hash.hash(hash.Algorithm.BLAKE2B, input, digest)
}
```
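The stream and file procedures follow the same pattern. A minimal
sketch of `hash_file` (the file name is a placeholder):
```odin
package hash_example_file

import "core:crypto/hash"
import "core:fmt"
import "core:os"

main :: proc() {
	hd, err := os.open("input.txt") // Placeholder path.
	if err != os.ERROR_NONE {
		return
	}
	defer os.close(hd)

	// Streamed by default; pass load_at_once = true to read the
	// whole file into memory first.
	digest, io_err := hash.hash(hash.Algorithm.SHA256, hd)
	if io_err != .None {
		return
	}
	defer delete(digest)
	fmt.println("SHA-256:", digest)
}
```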
A generic low level API is provided supporting the init/update/final interface
that is typical with cryptographic hash function implementations.
```odin
package hash_example
import "core:crypto/hash"
main :: proc() {
input := "Let the cinders burn."
// Compute the digest, using the low level API.
ctx: hash.Context
digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.SHA3_512])
defer delete(digest)
hash.init(&ctx, hash.Algorithm.SHA3_512)
hash.update(&ctx, transmute([]byte)input)
hash.final(&ctx, digest)
}
```
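The optional `finalize_clone` parameter to `final` finalizes a copy of
the Context, leaving the original usable for further writes, which
allows rolling digests. A minimal sketch:
```odin
package hash_example_rolling

import "core:crypto/hash"

main :: proc() {
	ctx: hash.Context
	hash.init(&ctx, hash.Algorithm.SHA256)

	digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.SHA256])
	defer delete(digest)

	part_1 := "Let the cinders"
	hash.update(&ctx, transmute([]byte)part_1)

	// Finalize a copy of the state; ctx remains valid for more data.
	hash.final(&ctx, digest, true)

	part_2 := " burn."
	hash.update(&ctx, transmute([]byte)part_2)

	// Finalize for real; this also resets ctx and frees its state.
	hash.final(&ctx, digest)
}
```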
*/
package crypto_hash

core/crypto/hash/hash.odin Normal file

@@ -0,0 +1,118 @@
package crypto_hash
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog, dotbmp: Initial implementation.
*/
import "core:io"
import "core:mem"
import "core:os"
// hash_string will hash the given input and return the computed digest
// in a newly allocated slice.
hash_string :: proc(algorithm: Algorithm, data: string, allocator := context.allocator) -> []byte {
return hash_bytes(algorithm, transmute([]byte)(data), allocator)
}
// hash_bytes will hash the given input and return the computed digest
// in a newly allocated slice.
hash_bytes :: proc(algorithm: Algorithm, data: []byte, allocator := context.allocator) -> []byte {
dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
hash_bytes_to_buffer(algorithm, data, dst)
return dst
}
// hash_string_to_buffer will hash the given input and assign the
// computed digest to the third parameter. It requires that the
// destination buffer is at least as big as the digest size.
hash_string_to_buffer :: proc(algorithm: Algorithm, data: string, hash: []byte) {
hash_bytes_to_buffer(algorithm, transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed digest into the third parameter. It requires that the
// destination buffer is at least as big as the digest size.
hash_bytes_to_buffer :: proc(algorithm: Algorithm, data, hash: []byte) {
ctx: Context
init(&ctx, algorithm, context.temp_allocator)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream will incrementally fully consume a stream, and return the
// computed digest in a newly allocated slice.
hash_stream :: proc(
algorithm: Algorithm,
s: io.Stream,
allocator := context.allocator,
) -> (
[]byte,
io.Error,
) {
ctx: Context
init(&ctx, algorithm, context.temp_allocator)
_BUFFER_SIZE :: 512
buf := make([]byte, _BUFFER_SIZE, context.temp_allocator)
defer mem.zero_explicit(raw_data(buf), _BUFFER_SIZE)
defer delete(buf)
loop: for {
n, err := io.read(s, buf)
if n > 0 {
// XXX/yawning: Can io.read return n > 0 and EOF?
update(&ctx, buf[:n])
}
#partial switch err {
case .None:
case .EOF:
break loop
case:
return nil, err
}
}
dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
final(&ctx, dst)
return dst, io.Error.None
}
// hash_file will read the file provided by the given handle and return the
// computed digest in a newly allocated slice.
hash_file :: proc(
algorithm: Algorithm,
hd: os.Handle,
load_at_once := false,
allocator := context.allocator,
) -> (
[]byte,
io.Error,
) {
if !load_at_once {
return hash_stream(algorithm, os.stream_from_handle(hd), allocator)
}
buf, ok := os.read_entire_file(hd, allocator)
if !ok {
return nil, io.Error.Unknown
}
defer delete(buf)
return hash_bytes(algorithm, buf, allocator), io.Error.None
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}


@@ -0,0 +1,382 @@
package crypto_hash
import "core:crypto/blake2b"
import "core:crypto/blake2s"
import "core:crypto/sha2"
import "core:crypto/sha3"
import "core:crypto/shake"
import "core:crypto/sm3"
import "core:crypto/legacy/keccak"
import "core:crypto/legacy/md5"
import "core:crypto/legacy/sha1"
import "core:mem"
// Algorithm is the algorithm identifier associated with a given Context.
Algorithm :: enum {
Invalid,
BLAKE2B,
BLAKE2S,
SHA224,
SHA256,
SHA384,
SHA512,
SHA512_256,
SHA3_224,
SHA3_256,
SHA3_384,
SHA3_512,
SHAKE_128,
SHAKE_256,
SM3,
Legacy_KECCAK_224,
Legacy_KECCAK_256,
Legacy_KECCAK_384,
Legacy_KECCAK_512,
Insecure_MD5,
Insecure_SHA1,
}
// ALGORITHM_NAMES is the Algorithm to algorithm name string.
ALGORITHM_NAMES := [Algorithm]string {
.Invalid = "Invalid",
.BLAKE2B = "BLAKE2b",
.BLAKE2S = "BLAKE2s",
.SHA224 = "SHA-224",
.SHA256 = "SHA-256",
.SHA384 = "SHA-384",
.SHA512 = "SHA-512",
.SHA512_256 = "SHA-512/256",
.SHA3_224 = "SHA3-224",
.SHA3_256 = "SHA3-256",
.SHA3_384 = "SHA3-384",
.SHA3_512 = "SHA3-512",
.SHAKE_128 = "SHAKE-128",
.SHAKE_256 = "SHAKE-256",
.SM3 = "SM3",
.Legacy_KECCAK_224 = "Keccak-224",
.Legacy_KECCAK_256 = "Keccak-256",
.Legacy_KECCAK_384 = "Keccak-384",
.Legacy_KECCAK_512 = "Keccak-512",
.Insecure_MD5 = "MD5",
.Insecure_SHA1 = "SHA-1",
}
// DIGEST_SIZES is the Algorithm to digest size.
DIGEST_SIZES := [Algorithm]int {
.Invalid = 0,
.BLAKE2B = blake2b.DIGEST_SIZE,
.BLAKE2S = blake2s.DIGEST_SIZE,
.SHA224 = sha2.DIGEST_SIZE_224,
.SHA256 = sha2.DIGEST_SIZE_256,
.SHA384 = sha2.DIGEST_SIZE_384,
.SHA512 = sha2.DIGEST_SIZE_512,
.SHA512_256 = sha2.DIGEST_SIZE_512_256,
.SHA3_224 = sha3.DIGEST_SIZE_224,
.SHA3_256 = sha3.DIGEST_SIZE_256,
.SHA3_384 = sha3.DIGEST_SIZE_384,
.SHA3_512 = sha3.DIGEST_SIZE_512,
.SHAKE_128 = shake.DIGEST_SIZE_128,
.SHAKE_256 = shake.DIGEST_SIZE_256,
.SM3 = sm3.DIGEST_SIZE,
.Legacy_KECCAK_224 = keccak.DIGEST_SIZE_224,
.Legacy_KECCAK_256 = keccak.DIGEST_SIZE_256,
.Legacy_KECCAK_384 = keccak.DIGEST_SIZE_384,
.Legacy_KECCAK_512 = keccak.DIGEST_SIZE_512,
.Insecure_MD5 = md5.DIGEST_SIZE,
.Insecure_SHA1 = sha1.DIGEST_SIZE,
}
// Context is a concrete instantiation of a specific hash algorithm.
Context :: struct {
_algo: Algorithm,
_impl: union {
^blake2b.Context,
^blake2s.Context,
^sha2.Context_256,
^sha2.Context_512,
^sha3.Context,
^shake.Context,
^sm3.Context,
^keccak.Context,
^md5.Context,
^sha1.Context,
},
_allocator: mem.Allocator,
}
// init initializes a Context with a specific hash Algorithm.
//
// Warning: Internal state is allocated, and resources must be freed
// either implicitly via a call to final, or explicitly via calling reset.
init :: proc(ctx: ^Context, algorithm: Algorithm, allocator := context.allocator) {
if ctx._impl != nil {
reset(ctx)
}
switch algorithm {
case .BLAKE2B:
impl := new(blake2b.Context, allocator)
blake2b.init(impl)
ctx._impl = impl
case .BLAKE2S:
impl := new(blake2s.Context, allocator)
blake2s.init(impl)
ctx._impl = impl
case .SHA224:
impl := new(sha2.Context_256, allocator)
sha2.init_224(impl)
ctx._impl = impl
case .SHA256:
impl := new(sha2.Context_256, allocator)
sha2.init_256(impl)
ctx._impl = impl
case .SHA384:
impl := new(sha2.Context_512, allocator)
sha2.init_384(impl)
ctx._impl = impl
case .SHA512:
impl := new(sha2.Context_512, allocator)
sha2.init_512(impl)
ctx._impl = impl
case .SHA512_256:
impl := new(sha2.Context_512, allocator)
sha2.init_512_256(impl)
ctx._impl = impl
case .SHA3_224:
impl := new(sha3.Context, allocator)
sha3.init_224(impl)
ctx._impl = impl
case .SHA3_256:
impl := new(sha3.Context, allocator)
sha3.init_256(impl)
ctx._impl = impl
case .SHA3_384:
impl := new(sha3.Context, allocator)
sha3.init_384(impl)
ctx._impl = impl
case .SHA3_512:
impl := new(sha3.Context, allocator)
sha3.init_512(impl)
ctx._impl = impl
case .SHAKE_128:
impl := new(shake.Context, allocator)
shake.init_128(impl)
ctx._impl = impl
case .SHAKE_256:
impl := new(shake.Context, allocator)
shake.init_256(impl)
ctx._impl = impl
case .SM3:
impl := new(sm3.Context, allocator)
sm3.init(impl)
ctx._impl = impl
case .Legacy_KECCAK_224:
impl := new(keccak.Context, allocator)
keccak.init_224(impl)
ctx._impl = impl
case .Legacy_KECCAK_256:
impl := new(keccak.Context, allocator)
keccak.init_256(impl)
ctx._impl = impl
case .Legacy_KECCAK_384:
impl := new(keccak.Context, allocator)
keccak.init_384(impl)
ctx._impl = impl
case .Legacy_KECCAK_512:
impl := new(keccak.Context, allocator)
keccak.init_512(impl)
ctx._impl = impl
case .Insecure_MD5:
impl := new(md5.Context, allocator)
md5.init(impl)
ctx._impl = impl
case .Insecure_SHA1:
impl := new(sha1.Context, allocator)
sha1.init(impl)
ctx._impl = impl
case .Invalid:
panic("crypto/hash: uninitialized algorithm")
case:
panic("crypto/hash: invalid algorithm")
}
ctx._algo = algorithm
ctx._allocator = allocator
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
switch impl in ctx._impl {
case ^blake2b.Context:
blake2b.update(impl, data)
case ^blake2s.Context:
blake2s.update(impl, data)
case ^sha2.Context_256:
sha2.update(impl, data)
case ^sha2.Context_512:
sha2.update(impl, data)
case ^sha3.Context:
sha3.update(impl, data)
case ^shake.Context:
shake.update(impl, data)
case ^sm3.Context:
sm3.update(impl, data)
case ^keccak.Context:
keccak.update(impl, data)
case ^md5.Context:
md5.update(impl, data)
case ^sha1.Context:
sha1.update(impl, data)
case:
panic("crypto/hash: uninitialized algorithm")
}
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
switch impl in ctx._impl {
case ^blake2b.Context:
blake2b.final(impl, hash, finalize_clone)
case ^blake2s.Context:
blake2s.final(impl, hash, finalize_clone)
case ^sha2.Context_256:
sha2.final(impl, hash, finalize_clone)
case ^sha2.Context_512:
sha2.final(impl, hash, finalize_clone)
case ^sha3.Context:
sha3.final(impl, hash, finalize_clone)
case ^shake.Context:
shake.final(impl, hash, finalize_clone)
case ^sm3.Context:
sm3.final(impl, hash, finalize_clone)
case ^keccak.Context:
keccak.final(impl, hash, finalize_clone)
case ^md5.Context:
md5.final(impl, hash, finalize_clone)
case ^sha1.Context:
sha1.final(impl, hash, finalize_clone)
case:
panic("crypto/hash: uninitialized algorithm")
}
if !finalize_clone {
reset(ctx)
}
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context, allocator := context.allocator) {
// XXX/yawning: Maybe these cases should panic, because both cases
// are probably bugs.
if ctx == other {
return
}
if ctx._impl != nil {
reset(ctx)
}
ctx._algo = other._algo
ctx._allocator = allocator
switch src_impl in other._impl {
case ^blake2b.Context:
impl := new(blake2b.Context, allocator)
blake2b.clone(impl, src_impl)
ctx._impl = impl
case ^blake2s.Context:
impl := new(blake2s.Context, allocator)
blake2s.clone(impl, src_impl)
ctx._impl = impl
case ^sha2.Context_256:
impl := new(sha2.Context_256, allocator)
sha2.clone(impl, src_impl)
ctx._impl = impl
case ^sha2.Context_512:
impl := new(sha2.Context_512, allocator)
sha2.clone(impl, src_impl)
ctx._impl = impl
case ^sha3.Context:
impl := new(sha3.Context, allocator)
sha3.clone(impl, src_impl)
ctx._impl = impl
case ^shake.Context:
impl := new(shake.Context, allocator)
shake.clone(impl, src_impl)
ctx._impl = impl
case ^sm3.Context:
impl := new(sm3.Context, allocator)
sm3.clone(impl, src_impl)
ctx._impl = impl
case ^keccak.Context:
impl := new(keccak.Context, allocator)
keccak.clone(impl, src_impl)
ctx._impl = impl
case ^md5.Context:
impl := new(md5.Context, allocator)
md5.clone(impl, src_impl)
ctx._impl = impl
case ^sha1.Context:
impl := new(sha1.Context, allocator)
sha1.clone(impl, src_impl)
ctx._impl = impl
case:
panic("crypto/hash: uninitialized algorithm")
}
}
// reset sanitizes the Context and frees resources internal to the
// Context. The Context must be re-initialized to be used again.
reset :: proc(ctx: ^Context) {
switch impl in ctx._impl {
case ^blake2b.Context:
blake2b.reset(impl)
free(impl, ctx._allocator)
case ^blake2s.Context:
blake2s.reset(impl)
free(impl, ctx._allocator)
case ^sha2.Context_256:
sha2.reset(impl)
free(impl, ctx._allocator)
case ^sha2.Context_512:
sha2.reset(impl)
free(impl, ctx._allocator)
case ^sha3.Context:
sha3.reset(impl)
free(impl, ctx._allocator)
case ^shake.Context:
shake.reset(impl)
free(impl, ctx._allocator)
case ^sm3.Context:
sm3.reset(impl)
free(impl, ctx._allocator)
case ^keccak.Context:
keccak.reset(impl)
free(impl, ctx._allocator)
case ^md5.Context:
md5.reset(impl)
free(impl, ctx._allocator)
case ^sha1.Context:
sha1.reset(impl)
free(impl, ctx._allocator)
case:
// Unlike clone, calling reset repeatedly is fine.
}
ctx._algo = .Invalid
ctx._impl = nil
}
// algorithm returns the Algorithm used by a Context instance.
algorithm :: proc(ctx: ^Context) -> Algorithm {
return ctx._algo
}
// digest_size returns the digest size of a Context instance.
digest_size :: proc(ctx: ^Context) -> int {
return DIGEST_SIZES[ctx._algo]
}


@@ -7,371 +7,59 @@ package keccak
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the Keccak hashing algorithm.
This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
Interface for the Keccak hashing algorithm. Most users will probably
want SHA-3 and/or SHAKE instead; however, the padding was changed by
NIST during the standardization process, so the legacy Keccak
algorithm is provided.
*/
import "core:io"
import "core:os"
import "../../_sha3"
/*
High level API
*/
DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
return hash_bytes_224(transmute([]byte)(data))
}
Context :: distinct _sha3.Context
// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
hash: [DIGEST_SIZE_224]byte
ctx: Context
init_224 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_224
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_224
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
hash: [DIGEST_SIZE_224]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_224
ctx.is_keccak = true
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
if !load_at_once {
return hash_stream_224(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_224(buf[:]), ok
}
}
return [DIGEST_SIZE_224]byte{}, false
}
hash_224 :: proc {
hash_stream_224,
hash_file_224,
hash_bytes_224,
hash_string_224,
hash_bytes_to_buffer_224,
hash_string_to_buffer_224,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_256
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_256
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_256
ctx.is_keccak = true
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
return hash_bytes_384(transmute([]byte)(data))
}
// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
hash: [DIGEST_SIZE_384]byte
ctx: Context
init_384 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_384
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_384
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
hash: [DIGEST_SIZE_384]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_384
ctx.is_keccak = true
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
if !load_at_once {
return hash_stream_384(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_384(buf[:]), ok
}
}
return [DIGEST_SIZE_384]byte{}, false
}
hash_384 :: proc {
hash_stream_384,
hash_file_384,
hash_bytes_384,
hash_string_384,
hash_bytes_to_buffer_384,
hash_string_to_buffer_384,
}
// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
return hash_bytes_512(transmute([]byte)(data))
}
// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init_512 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_512
_init(ctx)
}
@(private)
_init :: proc(ctx: ^Context) {
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_512
ctx.is_keccak = true
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
hash: [DIGEST_SIZE_512]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_512
ctx.is_keccak = true
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
if !load_at_once {
return hash_stream_512(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512(buf[:]), ok
}
}
return [DIGEST_SIZE_512]byte{}, false
}
hash_512 :: proc {
hash_stream_512,
hash_file_512,
hash_bytes_512,
hash_string_512,
hash_bytes_to_buffer_512,
hash_string_to_buffer_512,
}
/*
Low level API
*/
Context :: _sha3.Sha3_Context
init :: proc(ctx: ^Context) {
ctx.is_keccak = true
_sha3.init(ctx)
_sha3.init(transmute(^_sha3.Context)(ctx))
}
update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(ctx, data)
_sha3.update(transmute(^_sha3.Context)(ctx), data)
}
final :: proc(ctx: ^Context, hash: []byte) {
_sha3.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}


@@ -11,98 +11,11 @@ package md5
*/
import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:mem"
import "core:os"
/*
High level API
*/
DIGEST_SIZE :: 16
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
init :: proc(ctx: ^Context) {
ctx.state[0] = 0x67452301
ctx.state[1] = 0xefcdab89
@@ -129,13 +42,21 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
final :: proc(ctx: ^Context, hash: []byte) {
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
if len(hash) < DIGEST_SIZE {
panic("crypto/md5: invalid destination digest size")
}
ctx := ctx
if finalize_clone {
tmp_ctx: Context
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
i := ctx.datalen
if ctx.datalen < 56 {
@@ -163,8 +84,18 @@ final :: proc(ctx: ^Context, hash: []byte) {
for i = 0; i < DIGEST_SIZE / 4; i += 1 {
endian.unchecked_put_u32le(hash[i * 4:], ctx.state[i])
}
}
ctx.is_initialized = false
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
}
mem.zero_explicit(ctx, size_of(ctx^))
}
/*


@@ -11,98 +11,11 @@ package sha1
*/
import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:mem"
import "core:os"
/*
High level API
*/
DIGEST_SIZE :: 20
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
init :: proc(ctx: ^Context) {
ctx.state[0] = 0x67452301
ctx.state[1] = 0xefcdab89
@@ -134,13 +47,21 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
final :: proc(ctx: ^Context, hash: []byte) {
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
if len(hash) < DIGEST_SIZE {
panic("crypto/sha1: invalid destination digest size")
}
ctx := ctx
if finalize_clone {
tmp_ctx: Context
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
i := ctx.datalen
if ctx.datalen < 56 {
@@ -168,8 +89,18 @@ final :: proc(ctx: ^Context, hash: []byte) {
for i = 0; i < DIGEST_SIZE / 4; i += 1 {
endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
}
}
ctx.is_initialized = false
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
}
mem.zero_explicit(ctx, size_of(ctx^))
}
/*
@@ -180,10 +111,10 @@ BLOCK_SIZE :: 64
Context :: struct {
data: [BLOCK_SIZE]byte,
datalen: u32,
bitlen: u64,
state: [5]u32,
k: [4]u32,
bitlen: u64,
datalen: u32,
is_initialized: bool,
}


@@ -12,13 +12,8 @@ package sha2
*/
import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:os"
/*
High level API
*/
import "core:mem"
DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
@@ -26,411 +21,33 @@ DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64
DIGEST_SIZE_512_256 :: 32
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
return hash_bytes_224(transmute([]byte)(data))
}
// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
hash: [DIGEST_SIZE_224]byte
ctx: Context_256
init_224 :: proc(ctx: ^Context_256) {
ctx.md_bits = 224
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
ctx: Context_256
ctx.md_bits = 224
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
hash: [DIGEST_SIZE_224]byte
ctx: Context_256
ctx.md_bits = 224
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
if !load_at_once {
return hash_stream_224(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_224(buf[:]), ok
}
}
return [DIGEST_SIZE_224]byte{}, false
}
hash_224 :: proc {
hash_stream_224,
hash_file_224,
hash_bytes_224,
hash_string_224,
hash_bytes_to_buffer_224,
hash_string_to_buffer_224,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context_256
init_256 :: proc(ctx: ^Context_256) {
ctx.md_bits = 256
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
ctx: Context_256
ctx.md_bits = 256
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context_256
ctx.md_bits = 256
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
return hash_bytes_384(transmute([]byte)(data))
}
// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
hash: [DIGEST_SIZE_384]byte
ctx: Context_512
init_384 :: proc(ctx: ^Context_512) {
ctx.md_bits = 384
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
ctx: Context_512
ctx.md_bits = 384
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
hash: [DIGEST_SIZE_384]byte
ctx: Context_512
ctx.md_bits = 384
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
if !load_at_once {
return hash_stream_384(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_384(buf[:]), ok
}
}
return [DIGEST_SIZE_384]byte{}, false
}
hash_384 :: proc {
hash_stream_384,
hash_file_384,
hash_bytes_384,
hash_string_384,
hash_bytes_to_buffer_384,
hash_string_to_buffer_384,
}
// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
return hash_bytes_512(transmute([]byte)(data))
}
// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
hash: [DIGEST_SIZE_512]byte
ctx: Context_512
init_512 :: proc(ctx: ^Context_512) {
ctx.md_bits = 512
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
ctx: Context_512
ctx.md_bits = 512
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
hash: [DIGEST_SIZE_512]byte
ctx: Context_512
ctx.md_bits = 512
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
if !load_at_once {
return hash_stream_512(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512(buf[:]), ok
}
}
return [DIGEST_SIZE_512]byte{}, false
}
hash_512 :: proc {
hash_stream_512,
hash_file_512,
hash_bytes_512,
hash_string_512,
hash_bytes_to_buffer_512,
hash_string_to_buffer_512,
}
// hash_string_512_256 will hash the given input and return the
// computed hash
hash_string_512_256 :: proc(data: string) -> [DIGEST_SIZE_512_256]byte {
return hash_bytes_512_256(transmute([]byte)(data))
}
// hash_bytes_512_256 will hash the given input and return the
// computed hash
hash_bytes_512_256 :: proc(data: []byte) -> [DIGEST_SIZE_512_256]byte {
hash: [DIGEST_SIZE_512_256]byte
ctx: Context_512
init_512_256 :: proc(ctx: ^Context_512) {
ctx.md_bits = 256
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_512_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512_256 :: proc(data, hash: []byte) {
ctx: Context_512
ctx.md_bits = 256
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_512_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512_256]byte, bool) {
hash: [DIGEST_SIZE_512_256]byte
ctx: Context_512
ctx.md_bits = 256
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512_256 will read the file provided by the given handle
// and compute a hash
hash_file_512_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512_256]byte, bool) {
if !load_at_once {
return hash_stream_512_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512_256(buf[:]), ok
}
}
return [DIGEST_SIZE_512_256]byte{}, false
}
hash_512_256 :: proc {
hash_stream_512_256,
hash_file_512_256,
hash_bytes_512_256,
hash_string_512_256,
hash_bytes_to_buffer_512_256,
hash_string_to_buffer_512_256,
}
/*
Low level API
*/
init :: proc(ctx: ^$T) {
@(private)
_init :: proc(ctx: ^$T) {
when T == Context_256 {
switch ctx.md_bits {
case 224:
@@ -528,13 +145,21 @@ update :: proc(ctx: ^$T, data: []byte) {
}
}
final :: proc(ctx: ^$T, hash: []byte) {
final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
if len(hash) * 8 < ctx.md_bits {
panic("crypto/sha2: invalid destination digest size")
}
ctx := ctx
if finalize_clone {
tmp_ctx: T
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
length := ctx.length
raw_pad: [SHA512_BLOCK_SIZE]byte
@@ -576,8 +201,18 @@ final :: proc(ctx: ^$T, hash: []byte) {
endian.unchecked_put_u64be(hash[i * 8:], ctx.h[i])
}
}
}
ctx.is_initialized = false
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
}
mem.zero_explicit(ctx, size_of(ctx^))
}
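
The `finalize_clone` parameter above enables rolling digests: `final` can emit the digest of everything absorbed so far by finalizing a throwaway copy, leaving the original context usable for further writes. A minimal sketch of that pattern, using only the sha2 procedures shown in this diff:

```odin
package sha2_rolling_example

import "core:crypto/sha2"
import "core:fmt"

main :: proc() {
	ctx: sha2.Context_512
	sha2.init_512(&ctx)

	msg1 := "hello "
	sha2.update(&ctx, transmute([]byte)(msg1))

	// finalize_clone = true finalizes a copy of the state, so ctx
	// stays valid and keeps absorbing data afterwards.
	intermediate: [sha2.DIGEST_SIZE_512]byte
	sha2.final(&ctx, intermediate[:], true)

	msg2 := "world"
	sha2.update(&ctx, transmute([]byte)(msg2))

	// A plain final consumes the context; the deferred reset zeroes it.
	digest: [sha2.DIGEST_SIZE_512]byte
	sha2.final(&ctx, digest[:])

	fmt.println(intermediate)
	fmt.println(digest)
}
```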
/*

View File

@@ -7,358 +7,57 @@ package sha3
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the SHA3 hashing algorithm. The SHAKE functionality can be found in package shake.
If you wish to compute a Keccak hash, you can use the keccak package, it will use the original padding.
Interface for the SHA3 hashing algorithm. The SHAKE functionality can
be found in package shake. If you wish to compute a Keccak hash, you
can use the legacy/keccak package, which uses the original padding.
*/
import "core:io"
import "core:os"
import "../_sha3"
/*
High level API
*/
DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
return hash_bytes_224(transmute([]byte)(data))
}
Context :: distinct _sha3.Context
// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
hash: [DIGEST_SIZE_224]byte
ctx: Context
init_224 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_224
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_224
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
hash: [DIGEST_SIZE_224]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_224
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
if !load_at_once {
return hash_stream_224(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_224(buf[:]), ok
}
}
return [DIGEST_SIZE_224]byte{}, false
}
hash_224 :: proc {
hash_stream_224,
hash_file_224,
hash_bytes_224,
hash_string_224,
hash_bytes_to_buffer_224,
hash_string_to_buffer_224,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_256
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_256
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_256
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
return hash_bytes_384(transmute([]byte)(data))
}
// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
hash: [DIGEST_SIZE_384]byte
ctx: Context
init_384 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_384
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_384
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
hash: [DIGEST_SIZE_384]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_384
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
if !load_at_once {
return hash_stream_384(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_384(buf[:]), ok
}
}
return [DIGEST_SIZE_384]byte{}, false
}
hash_384 :: proc {
hash_stream_384,
hash_file_384,
hash_bytes_384,
hash_string_384,
hash_bytes_to_buffer_384,
hash_string_to_buffer_384,
}
// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
return hash_bytes_512(transmute([]byte)(data))
}
// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init_512 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_512
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_512
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
hash: [DIGEST_SIZE_512]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_512
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
if !load_at_once {
return hash_stream_512(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512(buf[:]), ok
}
}
return [DIGEST_SIZE_512]byte{}, false
}
hash_512 :: proc {
hash_stream_512,
hash_file_512,
hash_bytes_512,
hash_string_512,
hash_bytes_to_buffer_512,
hash_string_to_buffer_512,
}
/*
Low level API
*/
Context :: _sha3.Sha3_Context
init :: proc(ctx: ^Context) {
_sha3.init(ctx)
@(private)
_init :: proc(ctx: ^Context) {
_sha3.init(transmute(^_sha3.Context)(ctx))
}
update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(ctx, data)
_sha3.update(transmute(^_sha3.Context)(ctx), data)
}
final :: proc(ctx: ^Context, hash: []byte) {
_sha3.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}
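
With the per-size convenience procedures removed, the digest size is now selected once via `init_224`/`init_256`/`init_384`/`init_512` and the context is driven directly. A short sketch against the sha3 interface above:

```odin
package sha3_example

import "core:crypto/sha3"
import "core:fmt"

main :: proc() {
	ctx: sha3.Context
	sha3.init_256(&ctx) // sets the digest size to DIGEST_SIZE_256

	msg := "abc"
	sha3.update(&ctx, transmute([]byte)(msg))

	digest: [sha3.DIGEST_SIZE_256]byte
	sha3.final(&ctx, digest[:])

	fmt.println(digest)
}
```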

View File

@@ -7,200 +7,67 @@ package shake
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the SHAKE hashing algorithm.
The SHA3 functionality can be found in package sha3.
Interface for the SHAKE XOF. The SHA3 hashing algorithm can be found
in package sha3.
TODO: This should provide an incremental squeeze interface, in addition
to the one-shot final call.
TODO:
- This should provide an incremental squeeze interface.
- DIGEST_SIZE is inaccurate; SHAKE-128 and SHAKE-256 denote security
strengths, not output sizes.
*/
import "core:io"
import "core:os"
import "../_sha3"
/*
High level API
*/
DIGEST_SIZE_128 :: 16
DIGEST_SIZE_256 :: 32
// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
return hash_bytes_128(transmute([]byte)(data))
}
Context :: distinct _sha3.Context
// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
hash: [DIGEST_SIZE_128]byte
ctx: Context
init_128 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_128
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_128
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
hash: [DIGEST_SIZE_128]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_128
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
if !load_at_once {
return hash_stream_128(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_128(buf[:]), ok
}
}
return [DIGEST_SIZE_128]byte{}, false
}
hash_128 :: proc {
hash_stream_128,
hash_file_128,
hash_bytes_128,
hash_string_128,
hash_bytes_to_buffer_128,
hash_string_to_buffer_128,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_256
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
_init(ctx)
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
ctx: Context
ctx.mdlen = DIGEST_SIZE_256
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context
ctx.mdlen = DIGEST_SIZE_256
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
/*
Low level API
*/
Context :: _sha3.Sha3_Context
init :: proc(ctx: ^Context) {
_sha3.init(ctx)
@(private)
_init :: proc(ctx: ^Context) {
_sha3.init(transmute(^_sha3.Context)(ctx))
}
update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(ctx, data)
_sha3.update(transmute(^_sha3.Context)(ctx), data)
}
final :: proc(ctx: ^Context, hash: []byte) {
_sha3.shake_xof(ctx)
_sha3.shake_out(ctx, hash[:])
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
// Rolling digest support is handled here instead of in the generic
// _sha3 package, as SHAKE is more of an XOF than a hash, so the
// standard notion of "final" doesn't really exist when you can
// squeeze an unlimited amount of data.
//
// TODO/yawning: Strongly consider getting rid of this and rigidly
// defining SHAKE as an XOF.
ctx := ctx
if finalize_clone {
tmp_ctx: Context
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
ctx_ := transmute(^_sha3.Context)(ctx)
_sha3.shake_xof(ctx_)
_sha3.shake_out(ctx_, hash[:])
}
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}
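
Since SHAKE is an XOF, the caller picks the output length: `final` squeezes exactly `len(hash)` bytes, and the `DIGEST_SIZE_*` constants only name the security strength. A sketch using the shake procedures above:

```odin
package shake_example

import "core:crypto/shake"
import "core:fmt"

main :: proc() {
	ctx: shake.Context
	shake.init_128(&ctx)

	msg := "The quick brown fox jumps over the lazy dog"
	shake.update(&ctx, transmute([]byte)(msg))

	// No fixed digest size: request however many bytes are needed.
	// Passing finalize_clone = true would leave ctx usable instead.
	out: [32]byte
	shake.final(&ctx, out[:])

	fmt.println(out)
}
```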

View File

@@ -11,97 +11,11 @@ package sm3
*/
import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:os"
/*
High level API
*/
import "core:mem"
DIGEST_SIZE :: 32
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
init :: proc(ctx: ^Context) {
ctx.state[0] = IV[0]
ctx.state[1] = IV[1]
@@ -143,13 +57,21 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
final :: proc(ctx: ^Context, hash: []byte) {
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
if len(hash) < DIGEST_SIZE {
panic("crypto/sm3: invalid destination digest size")
}
ctx := ctx
if finalize_clone {
tmp_ctx: Context
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
length := ctx.length
pad: [BLOCK_SIZE]byte
@@ -168,8 +90,18 @@ final :: proc(ctx: ^Context, hash: []byte) {
for i := 0; i < DIGEST_SIZE / 4; i += 1 {
endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
}
}
ctx.is_initialized = false
clone :: proc(ctx, other: ^Context) {
ctx^ = other^
}
reset :: proc(ctx: ^Context) {
if !ctx.is_initialized {
return
}
mem.zero_explicit(ctx, size_of(ctx^))
}
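
sm3 gains the same `clone`/`reset` surface, and `clone` can also be used directly to branch a partially absorbed state, e.g. to hash two messages that share a long prefix without re-absorbing it. A sketch with the sm3 procedures above:

```odin
package sm3_example

import "core:crypto/sm3"
import "core:fmt"

main :: proc() {
	ctx: sm3.Context
	sm3.init(&ctx)

	prefix := "The quick brown fox jumps over the lazy "
	sm3.update(&ctx, transmute([]byte)(prefix))

	// Branch the state after the shared prefix has been absorbed.
	ctx2: sm3.Context
	sm3.clone(&ctx2, &ctx)

	dog, cog := "dog", "cog"
	sm3.update(&ctx, transmute([]byte)(dog))
	sm3.update(&ctx2, transmute([]byte)(cog))

	d1, d2: [sm3.DIGEST_SIZE]byte
	sm3.final(&ctx, d1[:])
	sm3.final(&ctx2, d2[:])

	fmt.println(d1)
	fmt.println(d2)
}
```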
/*

View File

@@ -27,6 +27,7 @@ import blake2b "core:crypto/blake2b"
import blake2s "core:crypto/blake2s"
import chacha20 "core:crypto/chacha20"
import chacha20poly1305 "core:crypto/chacha20poly1305"
import crypto_hash "core:crypto/hash"
import keccak "core:crypto/legacy/keccak"
import md5 "core:crypto/legacy/md5"
import sha1 "core:crypto/legacy/sha1"
@@ -137,6 +138,7 @@ _ :: lru
_ :: list
_ :: topological_sort
_ :: crypto
_ :: crypto_hash
_ :: blake2b
_ :: blake2s
_ :: chacha20

View File

@@ -14,18 +14,8 @@ package test_core_crypto
import "core:testing"
import "core:fmt"
import "core:strings"
import "core:crypto/sha2"
import "core:crypto/sha3"
import "core:crypto/shake"
import "core:crypto/blake2b"
import "core:crypto/blake2s"
import "core:crypto/sm3"
import "core:crypto/siphash"
import "core:crypto/legacy/keccak"
import "core:crypto/legacy/md5"
import "core:crypto/legacy/sha1"
import "core:os"
TEST_count := 0
@@ -51,26 +41,8 @@ when ODIN_TEST {
main :: proc() {
t := testing.T{}
test_md5(&t)
test_sha1(&t)
test_sha224(&t)
test_sha256(&t)
test_sha384(&t)
test_sha512(&t)
test_sha512_256(&t)
test_sha3_224(&t)
test_sha3_256(&t)
test_sha3_384(&t)
test_sha3_512(&t)
test_shake_128(&t)
test_shake_256(&t)
test_keccak_224(&t)
test_keccak_256(&t)
test_keccak_384(&t)
test_keccak_512(&t)
test_blake2b(&t)
test_blake2s(&t)
test_sm3(&t)
test_hash(&t)
test_siphash_2_4(&t)
// "modern" crypto tests
@@ -88,11 +60,6 @@ main :: proc() {
}
}
TestHash :: struct {
hash: string,
str: string,
}
hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string {
lut: [16]byte = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}
buf := make([]byte, len(bytes) * 2, allocator)
@@ -103,359 +70,6 @@ hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string
return string(buf)
}
@(test)
test_md5 :: proc(t: ^testing.T) {
// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
test_vectors := [?]TestHash {
TestHash{"d41d8cd98f00b204e9800998ecf8427e", ""},
TestHash{"0cc175b9c0f1b6a831c399e269772661", "a"},
TestHash{"900150983cd24fb0d6963f7d28e17f72", "abc"},
TestHash{"f96b697d7cb7938d525a2f31aaf161d0", "message digest"},
TestHash{"c3fcd3d76192e4007dfb496cca67e13b", "abcdefghijklmnopqrstuvwxyz"},
TestHash{"d174ab98d277d9f5a5611c2c9f419d9f", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
TestHash{"57edf4a22be3c955ac49da2e2107b67a", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
}
for v, _ in test_vectors {
computed := md5.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha1 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
TestHash{"a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
TestHash{"f9537c23893d2014f365adf8ffe33b8eb0297ed1", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"346fb528a24b48f563cb061470bcfd23740427ad", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
TestHash{"c729c8996ee0a6f74f4f3248e8957edf704fb624", "01234567012345670123456701234567"},
TestHash{"84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"a49b2446a02c645bf419f995b67091253a04a259", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha1.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha224 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
// https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
data_1_000_000_a := strings.repeat("a", 1_000_000)
test_vectors := [?]TestHash {
TestHash{"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f", ""},
TestHash{"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7", "abc"},
TestHash{"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
TestHash{"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67", data_1_000_000_a},
}
for v, _ in test_vectors {
computed := sha2.hash_224(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha256 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", ""},
TestHash{"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc"},
TestHash{"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha384 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", ""},
TestHash{"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7", "abc"},
TestHash{"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_384(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha512 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", ""},
TestHash{"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", "abc"},
TestHash{"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_512(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha512_256 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
test_vectors := [?]TestHash {
TestHash{"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23", "abc"},
TestHash{"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_512_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_224 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7", ""},
TestHash{"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf", "abc"},
TestHash{"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b", "a"},
TestHash{"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112", "01234567012345670123456701234567"},
TestHash{"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_224(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_256 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a", ""},
TestHash{"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532", "abc"},
TestHash{"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b", "a"},
TestHash{"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767", "01234567012345670123456701234567"},
TestHash{"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_384 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004", ""},
TestHash{"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25", "abc"},
TestHash{"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9", "a"},
TestHash{"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be", "01234567012345670123456701234567"},
TestHash{"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_384(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_512 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26", ""},
TestHash{"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0", "abc"},
TestHash{"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a", "a"},
TestHash{"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1", "01234567012345670123456701234567"},
TestHash{"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_512(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_shake_128 :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"7f9c2ba4e88f827d616045507605853e", ""},
TestHash{"f4202e3c5852f9182a0430fd8144f0a7", "The quick brown fox jumps over the lazy dog"},
TestHash{"853f4538be0db9621a6cea659a06c110", "The quick brown fox jumps over the lazy dof"},
}
for v, _ in test_vectors {
computed := shake.hash_128(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_shake_256 :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f", ""},
TestHash{"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca", "The quick brown fox jumps over the lazy dog"},
TestHash{"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401", "The quick brown fox jumps over the lazy dof"},
}
for v, _ in test_vectors {
computed := shake.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_keccak_224 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"f71837502ba8e10837bdd8d365adb85591895602fc552b48b7390abd", ""},
TestHash{"c30411768506ebe1c2871b1ee2e87d38df342317300a9b97a95ec6a8", "abc"},
}
for v, _ in test_vectors {
computed := keccak.hash_224(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_keccak_256 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", ""},
TestHash{"4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45", "abc"},
}
for v, _ in test_vectors {
computed := keccak.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_keccak_384 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"2c23146a63a29acf99e73b88f8c24eaa7dc60aa771780ccc006afbfa8fe2479b2dd2b21362337441ac12b515911957ff", ""},
TestHash{"f7df1165f033337be098e7d288ad6a2f74409d7a60b49c36642218de161b1f99f8c681e4afaf31a34db29fb763e3c28e", "abc"},
}
for v, _ in test_vectors {
computed := keccak.hash_384(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_keccak_512 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e", ""},
TestHash{"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96", "abc"},
}
for v, _ in test_vectors {
computed := keccak.hash_512(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_blake2b :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""},
TestHash{"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918", "The quick brown fox jumps over the lazy dog"},
}
for v, _ in test_vectors {
computed := blake2b.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_blake2s :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9", ""},
TestHash{"606beeec743ccbeff6cbcdf5d5302aa855c256c29b88c8ed331ea1a6bf3c8812", "The quick brown fox jumps over the lazy dog"},
}
for v, _ in test_vectors {
computed := blake2s.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sm3 :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""},
TestHash{"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0", "abc"},
TestHash{"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732", "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"},
TestHash{"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea", "The quick brown fox jumps over the lazy dog"},
TestHash{"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098", "The quick brown fox jumps over the lazy cog"},
}
for v, _ in test_vectors {
computed := sm3.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_siphash_2_4 :: proc(t: ^testing.T) {
// Test vectors from

View File

@@ -0,0 +1,613 @@
package test_core_crypto
import "core:bytes"
import "core:fmt"
import "core:strings"
import "core:testing"
import "core:crypto/hash"
TestHash :: struct {
algo: hash.Algorithm,
hash: string,
str: string,
}
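
The `algo` field lets a single vector table drive every implementation through the new `core:crypto/hash` package. A sketch of hashing with an algorithm chosen at runtime; the high-level entry point is assumed here to mirror the old per-package convenience procs with the algorithm as a leading parameter (`hash_bytes_to_buffer` below is that assumed shape, not necessarily the final name):

```odin
package generic_hash_example

import "core:crypto/hash"
import "core:fmt"

main :: proc() {
	msg := "abc"

	// The algorithm is a runtime value, not a package choice.
	algo := hash.Algorithm.SHA256

	// Assumed call shape: algorithm-first, otherwise identical to the
	// per-package hash_bytes_to_buffer procs this commit removes.
	digest: [32]byte
	hash.hash_bytes_to_buffer(algo, transmute([]byte)(msg), digest[:])

	fmt.println(digest)
}
```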
@(test)
test_hash :: proc(t: ^testing.T) {
log(t, "Testing Hashes")
// TODO:
// - Stick the test vectors in a JSON file or something.
data_1_000_000_a := strings.repeat("a", 1_000_000)
digest: [64]byte // 512 bits is enough for every digest, for now.
test_vectors := [?]TestHash {
// BLAKE2b
TestHash {
hash.Algorithm.BLAKE2B,
"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce",
"",
},
TestHash {
hash.Algorithm.BLAKE2B,
"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918",
"The quick brown fox jumps over the lazy dog",
},
// BLAKE2s
TestHash {
hash.Algorithm.BLAKE2S,
"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9",
"",
},
TestHash{
hash.Algorithm.BLAKE2S,
"606beeec743ccbeff6cbcdf5d5302aa855c256c29b88c8ed331ea1a6bf3c8812",
"The quick brown fox jumps over the lazy dog",
},
// SHA-224
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
// - https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
TestHash {
hash.Algorithm.SHA224,
"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f",
"",
},
TestHash {
hash.Algorithm.SHA224,
"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7",
"abc",
},
TestHash {
hash.Algorithm.SHA224,
"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA224,
"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
TestHash {
hash.Algorithm.SHA224,
"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67",
data_1_000_000_a,
},
// SHA-256
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA256,
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"",
},
TestHash {
hash.Algorithm.SHA256,
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
"abc",
},
TestHash {
hash.Algorithm.SHA256,
"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA256,
"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA-384
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA384,
"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b",
"",
},
TestHash {
hash.Algorithm.SHA384,
"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7",
"abc",
},
TestHash {
hash.Algorithm.SHA384,
"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA384,
"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA-512
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA512,
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
"",
},
TestHash {
hash.Algorithm.SHA512,
"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f",
"abc",
},
TestHash {
hash.Algorithm.SHA512,
"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA512,
"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA-512/256
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
TestHash {
hash.Algorithm.SHA512_256,
"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23",
"abc",
},
TestHash {
hash.Algorithm.SHA512_256,
"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA3-224
//
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA3_224,
"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7",
"",
},
TestHash {
hash.Algorithm.SHA3_224,
"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf",
"abc",
},
TestHash {
hash.Algorithm.SHA3_224,
"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e",
"abcdbcdecdefdefgefghfghighijhi",
},
TestHash {
hash.Algorithm.SHA3_224,
"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a",
"jkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_224,
"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b",
"a",
},
TestHash {
hash.Algorithm.SHA3_224,
"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112",
"01234567012345670123456701234567",
},
TestHash {
hash.Algorithm.SHA3_224,
"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_224,
"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA3-256
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA3_256,
"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a",
"",
},
TestHash {
hash.Algorithm.SHA3_256,
"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532",
"abc",
},
TestHash {
hash.Algorithm.SHA3_256,
"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a",
"abcdbcdecdefdefgefghfghighijhi",
},
TestHash {
hash.Algorithm.SHA3_256,
"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98",
"jkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_256,
"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b",
"a",
},
TestHash {
hash.Algorithm.SHA3_256,
"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767",
"01234567012345670123456701234567",
},
TestHash {
hash.Algorithm.SHA3_256,
"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_256,
"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA3-384
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA3_384,
"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004",
"",
},
TestHash {
hash.Algorithm.SHA3_384,
"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25",
"abc",
},
TestHash {
hash.Algorithm.SHA3_384,
"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a",
"abcdbcdecdefdefgefghfghighijhi",
},
TestHash {
hash.Algorithm.SHA3_384,
"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282",
"jkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_384,
"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9",
"a",
},
TestHash {
hash.Algorithm.SHA3_384,
"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be",
"01234567012345670123456701234567",
},
TestHash {
hash.Algorithm.SHA3_384,
"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_384,
"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHA3-512
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.SHA3_512,
"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26",
"",
},
TestHash {
hash.Algorithm.SHA3_512,
"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0",
"abc",
},
TestHash {
hash.Algorithm.SHA3_512,
"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e",
"abcdbcdecdefdefgefghfghighijhi",
},
TestHash {
hash.Algorithm.SHA3_512,
"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891",
"jkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_512,
"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a",
"a",
},
TestHash {
hash.Algorithm.SHA3_512,
"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1",
"01234567012345670123456701234567",
},
TestHash {
hash.Algorithm.SHA3_512,
"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.SHA3_512,
"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
// SHAKE-128
TestHash{hash.Algorithm.SHAKE_128, "7f9c2ba4e88f827d616045507605853e", ""},
TestHash {
hash.Algorithm.SHAKE_128,
"f4202e3c5852f9182a0430fd8144f0a7",
"The quick brown fox jumps over the lazy dog",
},
TestHash {
hash.Algorithm.SHAKE_128,
"853f4538be0db9621a6cea659a06c110",
"The quick brown fox jumps over the lazy dof",
},
// SHAKE-256
TestHash {
hash.Algorithm.SHAKE_256,
"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f",
"",
},
TestHash {
hash.Algorithm.SHAKE_256,
"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca",
"The quick brown fox jumps over the lazy dog",
},
TestHash {
hash.Algorithm.SHAKE_256,
"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401",
"The quick brown fox jumps over the lazy dof",
},
// SM3
TestHash {
hash.Algorithm.SM3,
"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b",
"",
},
TestHash {
hash.Algorithm.SM3,
"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0",
"abc",
},
TestHash {
hash.Algorithm.SM3,
"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732",
"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd",
},
TestHash {
hash.Algorithm.SM3,
"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea",
"The quick brown fox jumps over the lazy dog",
},
TestHash {
hash.Algorithm.SM3,
"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098",
"The quick brown fox jumps over the lazy cog",
},
// Keccak-224 (Legacy)
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.Legacy_KECCAK_224,
"f71837502ba8e10837bdd8d365adb85591895602fc552b48b7390abd",
"",
},
TestHash {
hash.Algorithm.Legacy_KECCAK_224,
"c30411768506ebe1c2871b1ee2e87d38df342317300a9b97a95ec6a8",
"abc",
},
// Keccak-256 (Legacy)
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.Legacy_KECCAK_256,
"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
"",
},
TestHash {
hash.Algorithm.Legacy_KECCAK_256,
"4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45",
"abc",
},
// Keccak-384 (Legacy)
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.Legacy_KECCAK_384,
"2c23146a63a29acf99e73b88f8c24eaa7dc60aa771780ccc006afbfa8fe2479b2dd2b21362337441ac12b515911957ff",
"",
},
TestHash {
hash.Algorithm.Legacy_KECCAK_384,
"f7df1165f033337be098e7d288ad6a2f74409d7a60b49c36642218de161b1f99f8c681e4afaf31a34db29fb763e3c28e",
"abc",
},
// Keccak-512 (Legacy)
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash {
hash.Algorithm.Legacy_KECCAK_512,
"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e",
"",
},
TestHash {
hash.Algorithm.Legacy_KECCAK_512,
"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96",
"abc",
},
// MD5 (Insecure)
// - https://datatracker.ietf.org/doc/html/rfc1321
TestHash{hash.Algorithm.Insecure_MD5, "d41d8cd98f00b204e9800998ecf8427e", ""},
TestHash{hash.Algorithm.Insecure_MD5, "0cc175b9c0f1b6a831c399e269772661", "a"},
TestHash{hash.Algorithm.Insecure_MD5, "900150983cd24fb0d6963f7d28e17f72", "abc"},
TestHash {
hash.Algorithm.Insecure_MD5,
"f96b697d7cb7938d525a2f31aaf161d0",
"message digest",
},
TestHash {
hash.Algorithm.Insecure_MD5,
"c3fcd3d76192e4007dfb496cca67e13b",
"abcdefghijklmnopqrstuvwxyz",
},
TestHash {
hash.Algorithm.Insecure_MD5,
"d174ab98d277d9f5a5611c2c9f419d9f",
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
},
TestHash {
hash.Algorithm.Insecure_MD5,
"57edf4a22be3c955ac49da2e2107b67a",
"12345678901234567890123456789012345678901234567890123456789012345678901234567890",
},
// SHA-1 (Insecure)
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash{hash.Algorithm.Insecure_SHA1, "da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
TestHash{hash.Algorithm.Insecure_SHA1, "a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
TestHash {
hash.Algorithm.Insecure_SHA1,
"f9537c23893d2014f365adf8ffe33b8eb0297ed1",
"abcdbcdecdefdefgefghfghighijhi",
},
TestHash {
hash.Algorithm.Insecure_SHA1,
"346fb528a24b48f563cb061470bcfd23740427ad",
"jkijkljklmklmnlmnomnopnopq",
},
TestHash{hash.Algorithm.Insecure_SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
TestHash {
hash.Algorithm.Insecure_SHA1,
"c729c8996ee0a6f74f4f3248e8957edf704fb624",
"01234567012345670123456701234567",
},
TestHash {
hash.Algorithm.Insecure_SHA1,
"84983e441c3bd26ebaae4aa1f95129e5e54670f1",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
},
TestHash {
hash.Algorithm.Insecure_SHA1,
"a49b2446a02c645bf419f995b67091253a04a259",
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
},
}
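// Exercise the generic low level interface (init/update/final)
// against every test vector.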
for v in test_vectors {
algo_name := hash.ALGORITHM_NAMES[v.algo]
dst := digest[:hash.DIGEST_SIZES[v.algo]]
data := transmute([]byte)(v.str)
ctx: hash.Context
hash.init(&ctx, v.algo)
hash.update(&ctx, data)
hash.final(&ctx, dst)
dst_str := hex_string(dst)
expect(
t,
dst_str == v.hash,
fmt.tprintf(
"%s/incremental: Expected: %s for input of %s, but got %s instead",
algo_name,
v.hash,
v.str,
dst_str,
),
)
}
for algo in hash.Algorithm {
// Skip the sentinel value.
if algo == .Invalid {
continue
}
algo_name := hash.ALGORITHM_NAMES[algo]
// Exercise most of the happy-path for the high level interface.
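// data_1_000_000_a is the standard long-message vector of one
// million repetitions of "a".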
rd: bytes.Reader
bytes.reader_init(&rd, transmute([]byte)(data_1_000_000_a))
st := bytes.reader_to_stream(&rd)
digest_a, _ := hash.hash_stream(algo, st, context.temp_allocator)
digest_b := hash.hash_string(algo, data_1_000_000_a, context.temp_allocator)
a_str, b_str := hex_string(digest_a), hex_string(digest_b)
expect(
t,
a_str == b_str,
fmt.tprintf(
"%s/cmp: Expected: %s (hash_stream) == %s (hash_bytes)",
algo_name,
a_str,
b_str,
),
)
// Exercise the rolling digest functionality, which also covers
// each implementation's clone routine.
ctx: hash.Context
hash.init(&ctx, algo, context.temp_allocator)
api_algo := hash.algorithm(&ctx)
api_digest_size := hash.digest_size(&ctx)
expect(
t,
algo == api_algo,
fmt.tprintf(
"%s/algorithm: Expected: %v but got %v instead",
algo_name,
algo,
api_algo,
),
)
expect(
t,
hash.DIGEST_SIZES[algo] == api_digest_size,
fmt.tprintf(
"%s/digest_size: Expected: %d but got %d instead",
algo_name,
hash.DIGEST_SIZES[algo],
api_digest_size,
),
)
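// final with the trailing boolean set finalizes a clone of the
// state, leaving ctx usable, so the second (destructive) final
// must produce an identical digest.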
hash.update(&ctx, digest_a)
hash.final(&ctx, digest_a, true)
hash.final(&ctx, digest_b)
a_str, b_str = hex_string(digest_a), hex_string(digest_b)
expect(
t,
a_str == b_str,
fmt.tprintf(
"%s/rolling: Expected: %s (first) == %s (second)",
algo_name,
a_str,
b_str,
),
)
}
}