mirror of https://github.com/odin-lang/Odin.git
synced 2026-01-01 19:02:13 +00:00

Merge pull request #3136 from Yawning/feature/crypto-hash

core:crypto/hash - Add and refactor

.gitignore (vendored, 3 changes)
@@ -39,7 +39,7 @@ tests/core/test_core_net
tests/core/test_core_os_exit
tests/core/test_core_reflect
tests/core/test_core_strings
tests/core/test_crypto_hash
tests/core/test_crypto
tests/core/test_hash
tests/core/test_hxa
tests/core/test_json
@@ -49,6 +49,7 @@ tests/core/test_varint
tests/core/test_xml
tests/core/test_core_slice
tests/core/test_core_thread
tests/core/test_core_runtime
tests/vendor/vendor_botan
# Visual Studio 2015 cache/options directory
.vs/
@@ -1,84 +1,22 @@
# crypto

A cryptography library for the Odin language
A cryptography library for the Odin language.

## Supported

This library offers various algorithms implemented in Odin.
Please see the chart below for some of the options.

## Hashing algorithms

| Algorithm                                                                                                     | Supported        |
|:--------------------------------------------------------------------------------------------------------------|:-----------------|
| [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693)                                                      | ✔️               |
| [BLAKE2S](https://datatracker.ietf.org/doc/html/rfc7693)                                                      | ✔️               |
| [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf)  | ✔️               |
| [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                             | ✔️               |
| [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                             | ✔️               |
| [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02)                                            | ✔️               |
| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                     | ✔️               |
| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                   | ✔️               |
| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                 | ✔️               |
#### High level API

Each hash algorithm contains a procedure group named `hash`, or, if the algorithm provides more than one digest size, `hash_<size>`\*.
Included in these groups are six procedures:
- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally.
- `hash_bytes` - Hash a given byte slice and return the computed hash.
- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally.
- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash.
- `hash_stream` - Takes an `io.Stream` and returns the computed hash of its contents.
- `hash_file` - Takes a file handle and returns the computed hash of its contents. A second optional boolean parameter controls whether the file is streamed (the default) or read at once (set to true).

\* On some algorithms there is another part to the name, since they might offer control over additional parameters.
For instance, `SHA-2` offers different sizes.
Computing a 512-bit hash is therefore achieved by calling `sha2.hash_512(...)`.
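A minimal sketch of that sized call, under the API described in this section (the string variant of the `hash_512` group returns the digest by value):

```odin
package sha2_example

import "core:crypto/sha2"

main :: proc() {
	// hash_512 is a procedure group; passing a string selects the
	// string variant, which returns the computed hash directly.
	digest := sha2.hash_512("foo")
	_ = digest
}
```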
#### Low level API

The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
You may also call them directly, if you wish.

#### Example

```odin
package crypto_example

// Import the desired package
import "core:crypto/blake2b"

main :: proc() {
	input := "foo"

	// Compute the hash, using the high level API
	computed_hash := blake2b.hash(input)

	// Variant that takes a destination buffer, instead of returning the computed hash
	hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
	blake2b.hash(input, hash[:])

	// Compute the hash, using the low level API
	ctx: blake2b.Context
	computed_hash_low: [blake2b.DIGEST_SIZE]byte
	blake2b.init(&ctx)
	blake2b.update(&ctx, transmute([]byte)input)
	blake2b.final(&ctx, computed_hash_low[:])
}
```
For example uses of all available algorithms, please see the tests within `tests/core/crypto`.

This package offers various algorithms implemented in Odin, along with
useful helpers such as access to the system entropy source, and a
constant-time byte comparison (sketched after the list below).

## Implementation considerations

- The crypto packages are not thread-safe.
- Best-effort is made to mitigate timing side-channels on reasonable
  architectures. Architectures that are known to be unreasonable include
  but are not limited to i386, i486, and WebAssembly.
- Some but not all of the packages attempt to sanitize sensitive data,
  however this is not done consistently through the library at the moment.
  As Thomas Pornin puts it "In general, such memory cleansing is a fool's
  quest."
- The packages attempt to sanitize sensitive data, however this is, and
  will remain a "best-effort" implementation decision. As Thomas Pornin
  puts it "In general, such memory cleansing is a fool's quest."
- None of these packages have received independent third party review.
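The constant-time byte comparison mentioned above lives in the base `core:crypto` package; a minimal sketch (the `compare_constant_time` call also appears in the HMAC verification code later in this diff):

```odin
package compare_example

import "core:crypto"
import "core:fmt"

main :: proc() {
	a := []byte{0x61, 0x62, 0x63}
	b := []byte{0x61, 0x62, 0x63}

	// compare_constant_time returns 1 iff the slices are equal, and its
	// running time does not depend on where a mismatch occurs, so it is
	// safe for comparing MAC tags and other secrets.
	if crypto.compare_constant_time(a, b) == 1 {
		fmt.println("equal")
	}
}
```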

## License
@@ -11,6 +11,7 @@ package _blake2
*/

import "core:encoding/endian"
import "core:mem"

BLAKE2S_BLOCK_SIZE :: 64
BLAKE2S_SIZE :: 32
@@ -28,7 +29,6 @@ Blake2s_Context :: struct {
	is_keyed:     bool,
	size:         byte,
	is_last_node: bool,
	cfg:          Blake2_Config,

	is_initialized: bool,
}
@@ -44,7 +44,6 @@ Blake2b_Context :: struct {
	is_keyed:     bool,
	size:         byte,
	is_last_node: bool,
	cfg:          Blake2_Config,

	is_initialized: bool,
}
@@ -83,62 +82,61 @@ BLAKE2B_IV := [8]u64 {
	0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
}

init :: proc(ctx: ^$T) {
init :: proc(ctx: ^$T, cfg: ^Blake2_Config) {
	when T == Blake2s_Context {
		block_size :: BLAKE2S_BLOCK_SIZE
		max_size :: BLAKE2S_SIZE
	} else when T == Blake2b_Context {
		block_size :: BLAKE2B_BLOCK_SIZE
		max_size :: BLAKE2B_SIZE
	}

	if ctx.cfg.size > max_size {
	if cfg.size > max_size {
		panic("blake2: requested output size exceeds algorithm max")
	}

	p := make([]byte, block_size)
	defer delete(p)
	// To save having to allocate a scratch buffer, use the internal
	// data buffer (`ctx.x`), as it is exactly the correct size.
	p := ctx.x[:]

	p[0] = ctx.cfg.size
	p[1] = byte(len(ctx.cfg.key))
	p[0] = cfg.size
	p[1] = byte(len(cfg.key))

	if ctx.cfg.salt != nil {
	if cfg.salt != nil {
		when T == Blake2s_Context {
			copy(p[16:], ctx.cfg.salt)
			copy(p[16:], cfg.salt)
		} else when T == Blake2b_Context {
			copy(p[32:], ctx.cfg.salt)
			copy(p[32:], cfg.salt)
		}
	}
	if ctx.cfg.person != nil {
	if cfg.person != nil {
		when T == Blake2s_Context {
			copy(p[24:], ctx.cfg.person)
			copy(p[24:], cfg.person)
		} else when T == Blake2b_Context {
			copy(p[48:], ctx.cfg.person)
			copy(p[48:], cfg.person)
		}
	}

	if ctx.cfg.tree != nil {
		p[2] = ctx.cfg.tree.(Blake2_Tree).fanout
		p[3] = ctx.cfg.tree.(Blake2_Tree).max_depth
		endian.unchecked_put_u32le(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
	if cfg.tree != nil {
		p[2] = cfg.tree.(Blake2_Tree).fanout
		p[3] = cfg.tree.(Blake2_Tree).max_depth
		endian.unchecked_put_u32le(p[4:], cfg.tree.(Blake2_Tree).leaf_size)
		when T == Blake2s_Context {
			p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
			p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
			p[10] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 16)
			p[11] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 24)
			p[12] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 32)
			p[13] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 40)
			p[14] = ctx.cfg.tree.(Blake2_Tree).node_depth
			p[15] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
			p[8] = byte(cfg.tree.(Blake2_Tree).node_offset)
			p[9] = byte(cfg.tree.(Blake2_Tree).node_offset >> 8)
			p[10] = byte(cfg.tree.(Blake2_Tree).node_offset >> 16)
			p[11] = byte(cfg.tree.(Blake2_Tree).node_offset >> 24)
			p[12] = byte(cfg.tree.(Blake2_Tree).node_offset >> 32)
			p[13] = byte(cfg.tree.(Blake2_Tree).node_offset >> 40)
			p[14] = cfg.tree.(Blake2_Tree).node_depth
			p[15] = cfg.tree.(Blake2_Tree).inner_hash_size
		} else when T == Blake2b_Context {
			endian.unchecked_put_u64le(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
			p[16] = ctx.cfg.tree.(Blake2_Tree).node_depth
			p[17] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
			endian.unchecked_put_u64le(p[8:], cfg.tree.(Blake2_Tree).node_offset)
			p[16] = cfg.tree.(Blake2_Tree).node_depth
			p[17] = cfg.tree.(Blake2_Tree).inner_hash_size
		}
	} else {
		p[2], p[3] = 1, 1
	}
	ctx.size = ctx.cfg.size
	ctx.size = cfg.size
	for i := 0; i < 8; i += 1 {
		when T == Blake2s_Context {
			ctx.h[i] = BLAKE2S_IV[i] ~ endian.unchecked_get_u32le(p[i * 4:])
@@ -147,11 +145,14 @@ init :: proc(ctx: ^$T) {
			ctx.h[i] = BLAKE2B_IV[i] ~ endian.unchecked_get_u64le(p[i * 8:])
		}
	}
	if ctx.cfg.tree != nil && ctx.cfg.tree.(Blake2_Tree).is_last_node {

	mem.zero(&ctx.x, size_of(ctx.x)) // Done with the scratch space, no barrier.

	if cfg.tree != nil && cfg.tree.(Blake2_Tree).is_last_node {
		ctx.is_last_node = true
	}
	if len(ctx.cfg.key) > 0 {
		copy(ctx.padded_key[:], ctx.cfg.key)
	if len(cfg.key) > 0 {
		copy(ctx.padded_key[:], cfg.key)
		update(ctx, ctx.padded_key[:])
		ctx.is_keyed = true
	}
@@ -194,22 +195,40 @@ update :: proc(ctx: ^$T, p: []byte) {
	ctx.nx += copy(ctx.x[ctx.nx:], p)
}

final :: proc(ctx: ^$T, hash: []byte) {
final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
	assert(ctx.is_initialized)

	ctx := ctx
	if finalize_clone {
		tmp_ctx: T
		clone(&tmp_ctx, ctx)
		ctx = &tmp_ctx
	}
	defer(reset(ctx))

	when T == Blake2s_Context {
		if len(hash) < int(ctx.cfg.size) {
		if len(hash) < int(ctx.size) {
			panic("crypto/blake2s: invalid destination digest size")
		}
		blake2s_final(ctx, hash)
	} else when T == Blake2b_Context {
		if len(hash) < int(ctx.cfg.size) {
		if len(hash) < int(ctx.size) {
			panic("crypto/blake2b: invalid destination digest size")
		}
		blake2b_final(ctx, hash)
	}
}

	ctx.is_initialized = false
clone :: proc(ctx, other: ^$T) {
	ctx^ = other^
}

reset :: proc(ctx: ^$T) {
	if !ctx.is_initialized {
		return
	}

	mem.zero_explicit(ctx, size_of(ctx^))
}

@(private)
@@ -12,10 +12,16 @@ package _sha3
*/

import "core:math/bits"
import "core:mem"

ROUNDS :: 24

Sha3_Context :: struct {
RATE_224 :: 1152 / 8
RATE_256 :: 1088 / 8
RATE_384 :: 832 / 8
RATE_512 :: 576 / 8

Context :: struct {
	st: struct #raw_union {
		b: [200]u8,
		q: [25]u64,
@@ -103,81 +109,100 @@ keccakf :: proc "contextless" (st: ^[25]u64) {
	}
}

init :: proc(c: ^Sha3_Context) {
init :: proc(ctx: ^Context) {
	for i := 0; i < 25; i += 1 {
		c.st.q[i] = 0
		ctx.st.q[i] = 0
	}
	c.rsiz = 200 - 2 * c.mdlen
	c.pt = 0
	ctx.rsiz = 200 - 2 * ctx.mdlen
	ctx.pt = 0

	c.is_initialized = true
	c.is_finalized = false
	ctx.is_initialized = true
	ctx.is_finalized = false
}

update :: proc(c: ^Sha3_Context, data: []byte) {
	assert(c.is_initialized)
	assert(!c.is_finalized)
update :: proc(ctx: ^Context, data: []byte) {
	assert(ctx.is_initialized)
	assert(!ctx.is_finalized)

	j := c.pt
	j := ctx.pt
	for i := 0; i < len(data); i += 1 {
		c.st.b[j] ~= data[i]
		ctx.st.b[j] ~= data[i]
		j += 1
		if j >= c.rsiz {
			keccakf(&c.st.q)
		if j >= ctx.rsiz {
			keccakf(&ctx.st.q)
			j = 0
		}
	}
	c.pt = j
	ctx.pt = j
}

final :: proc(c: ^Sha3_Context, hash: []byte) {
	assert(c.is_initialized)
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
	assert(ctx.is_initialized)

	if len(hash) < c.mdlen {
		if c.is_keccak {
	if len(hash) < ctx.mdlen {
		if ctx.is_keccak {
			panic("crypto/keccak: invalid destination digest size")
		}
		panic("crypto/sha3: invalid destination digest size")
	}
	if c.is_keccak {
		c.st.b[c.pt] ~= 0x01

	ctx := ctx
	if finalize_clone {
		tmp_ctx: Context
		clone(&tmp_ctx, ctx)
		ctx = &tmp_ctx
	}
	defer(reset(ctx))

	if ctx.is_keccak {
		ctx.st.b[ctx.pt] ~= 0x01
	} else {
		c.st.b[c.pt] ~= 0x06
		ctx.st.b[ctx.pt] ~= 0x06
	}

	c.st.b[c.rsiz - 1] ~= 0x80
	keccakf(&c.st.q)
	for i := 0; i < c.mdlen; i += 1 {
		hash[i] = c.st.b[i]
	ctx.st.b[ctx.rsiz - 1] ~= 0x80
	keccakf(&ctx.st.q)
	for i := 0; i < ctx.mdlen; i += 1 {
		hash[i] = ctx.st.b[i]
	}
}

clone :: proc(ctx, other: ^Context) {
	ctx^ = other^
}

reset :: proc(ctx: ^Context) {
	if !ctx.is_initialized {
		return
	}

	c.is_initialized = false // No more absorb, no more squeeze.
	mem.zero_explicit(ctx, size_of(ctx^))
}

shake_xof :: proc(c: ^Sha3_Context) {
	assert(c.is_initialized)
	assert(!c.is_finalized)
shake_xof :: proc(ctx: ^Context) {
	assert(ctx.is_initialized)
	assert(!ctx.is_finalized)

	c.st.b[c.pt] ~= 0x1F
	c.st.b[c.rsiz - 1] ~= 0x80
	keccakf(&c.st.q)
	c.pt = 0
	ctx.st.b[ctx.pt] ~= 0x1F
	ctx.st.b[ctx.rsiz - 1] ~= 0x80
	keccakf(&ctx.st.q)
	ctx.pt = 0

	c.is_finalized = true // No more absorb, unlimited squeeze.
	ctx.is_finalized = true // No more absorb, unlimited squeeze.
}

shake_out :: proc(c: ^Sha3_Context, hash: []byte) {
	assert(c.is_initialized)
	assert(c.is_finalized)
shake_out :: proc(ctx: ^Context, hash: []byte) {
	assert(ctx.is_initialized)
	assert(ctx.is_finalized)

	j := c.pt
	j := ctx.pt
	for i := 0; i < len(hash); i += 1 {
		if j >= c.rsiz {
			keccakf(&c.st.q)
		if j >= ctx.rsiz {
			keccakf(&ctx.st.q)
			j = 0
		}
		hash[i] = c.st.b[j]
		hash[i] = ctx.st.b[j]
		j += 1
	}
	c.pt = j
	ctx.pt = j
}
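The `shake_xof`/`shake_out` split is what gives SHAKE its extensible-output behavior: absorbing stops at `shake_xof`, after which `shake_out` may be called repeatedly to squeeze any number of bytes. A minimal sketch of that flow against the internal API shown above (a hypothetical caller inside the library, since `_sha3` is a private package; `mdlen = 32` yields the SHAKE-256 rate via `rsiz = 200 - 2 * mdlen`):

```odin
package shake_sketch

import "../_sha3" // Private package; real callers are the core:crypto wrappers.

shake256_example :: proc(data: []byte, out: []byte) {
	ctx: _sha3.Context
	ctx.mdlen = 32 // rsiz = 200 - 2*32 = 136, the SHAKE-256 rate.
	_sha3.init(&ctx)

	_sha3.update(&ctx, data)

	_sha3.shake_xof(&ctx)      // No more absorbing after this point.
	_sha3.shake_out(&ctx, out) // Squeeze; may be called again for more output.
}
```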
@@ -1,3 +1,10 @@
/*
package blake2b implements the BLAKE2b hash algorithm.

See:
- https://datatracker.ietf.org/doc/html/rfc7693
- https://www.blake2.net
*/
package blake2b

/*
@@ -6,122 +13,47 @@ package blake2b

List of contributors:
	zhibog, dotbmp: Initial implementation.

Interface for the BLAKE2b hashing algorithm.
BLAKE2b and BLAKE2s share the implementation in the _blake2 package.
*/

import "core:io"
import "core:os"

import "../_blake2"

/*
	High level API
*/

// DIGEST_SIZE is the BLAKE2b digest size in bytes.
DIGEST_SIZE :: 64

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
	Low level API
*/
// BLOCK_SIZE is the BLAKE2b block size in bytes.
BLOCK_SIZE :: _blake2.BLAKE2B_BLOCK_SIZE

// Context is a BLAKE2b instance.
Context :: _blake2.Blake2b_Context

// init initializes a Context with the default BLAKE2b config.
init :: proc(ctx: ^Context) {
	_blake2.init(ctx)
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	_blake2.init(ctx, &cfg)
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
	_blake2.update(ctx, data)
}

final :: proc(ctx: ^Context, hash: []byte) {
	_blake2.final(ctx, hash)
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
	_blake2.final(ctx, hash, finalize_clone)
}
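A minimal sketch of the rolling-digest use case that `finalize_clone` enables (assuming the post-refactor API above):

```odin
package blake2b_rolling_example

import "core:crypto/blake2b"

main :: proc() {
	ctx: blake2b.Context
	blake2b.init(&ctx)

	digest: [blake2b.DIGEST_SIZE]byte

	part1 := "hello, "
	blake2b.update(&ctx, transmute([]byte)part1)
	// finalize_clone = true: digest over the data so far, while ctx
	// remains usable for further updates.
	blake2b.final(&ctx, digest[:], true)

	part2 := "world"
	blake2b.update(&ctx, transmute([]byte)part2)
	// The default finalize resets ctx; re-initialize before reusing it.
	blake2b.final(&ctx, digest[:])
}
```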

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
	_blake2.clone(ctx, other)
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
	_blake2.reset(ctx)
}

@@ -1,3 +1,10 @@
/*
package blake2s implements the BLAKE2s hash algorithm.

See:
- https://datatracker.ietf.org/doc/html/rfc7693
- https://www.blake2.net/
*/
package blake2s

/*
@@ -6,122 +13,47 @@ package blake2s

List of contributors:
	zhibog, dotbmp: Initial implementation.

Interface for the BLAKE2s hashing algorithm.
BLAKE2s and BLAKE2b share the implementation in the _blake2 package.
*/

import "core:io"
import "core:os"

import "../_blake2"

/*
	High level API
*/

// DIGEST_SIZE is the BLAKE2s digest size in bytes.
DIGEST_SIZE :: 32

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
	Low level API
*/
// BLOCK_SIZE is the BLAKE2s block size in bytes.
BLOCK_SIZE :: _blake2.BLAKE2S_BLOCK_SIZE

// Context is a BLAKE2s instance.
Context :: _blake2.Blake2s_Context

// init initializes a Context with the default BLAKE2s config.
init :: proc(ctx: ^Context) {
	_blake2.init(ctx)
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	_blake2.init(ctx, &cfg)
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
	_blake2.update(ctx, data)
}

final :: proc(ctx: ^Context, hash: []byte) {
	_blake2.final(ctx, hash)
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
	_blake2.final(ctx, hash, finalize_clone)
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
	_blake2.clone(ctx, other)
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
	_blake2.reset(ctx)
}
core/crypto/hash/doc.odin (new file, 62 lines)
@@ -0,0 +1,62 @@
/*
package hash provides a generic interface to the supported hash algorithms.

A high-level convenience procedure group `hash` is provided to easily
accomplish common tasks.
- `hash_string` - Hash a given string and return the digest.
- `hash_bytes` - Hash a given byte slice and return the digest.
- `hash_string_to_buffer` - Hash a given string and put the digest in
  the third parameter. It requires that the destination buffer
  is at least as big as the digest size.
- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed
  digest in the third parameter. It requires that the destination
  buffer is at least as big as the digest size.
- `hash_stream` - Incrementally fully consume an `io.Stream`, and return
  the computed digest.
- `hash_file` - Takes a file handle and returns the computed digest.
  A third optional boolean parameter controls if the file is streamed
  (default), or read at once.

```odin
package hash_example

import "core:crypto/hash"

main :: proc() {
	input := "Feed the fire."

	// Compute the digest, using the high level API.
	returned_digest := hash.hash(hash.Algorithm.SHA512_256, input)
	defer delete(returned_digest)

	// Variant that takes a destination buffer, instead of returning
	// the digest.
	digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.BLAKE2B]) // @note: Destination buffer has to be at least as big as the digest size of the hash.
	defer delete(digest)
	hash.hash(hash.Algorithm.BLAKE2B, input, digest)
}
```

A generic low level API is provided supporting the init/update/final interface
that is typical with cryptographic hash function implementations.

```odin
package hash_example

import "core:crypto/hash"

main :: proc() {
	input := "Let the cinders burn."

	// Compute the digest, using the low level API.
	ctx: hash.Context
	digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.SHA3_512])
	defer delete(digest)

	hash.init(&ctx, hash.Algorithm.SHA3_512)
	hash.update(&ctx, transmute([]byte)input)
	hash.final(&ctx, digest)
}
```
*/
package crypto_hash
core/crypto/hash/hash.odin (new file, 116 lines)
@@ -0,0 +1,116 @@
package crypto_hash

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog, dotbmp: Initial implementation.
*/

import "core:io"
import "core:mem"
import "core:os"

// hash_string will hash the given input and return the computed digest
// in a newly allocated slice.
hash_string :: proc(algorithm: Algorithm, data: string, allocator := context.allocator) -> []byte {
	return hash_bytes(algorithm, transmute([]byte)(data), allocator)
}

// hash_bytes will hash the given input and return the computed digest
// in a newly allocated slice.
hash_bytes :: proc(algorithm: Algorithm, data: []byte, allocator := context.allocator) -> []byte {
	dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
	hash_bytes_to_buffer(algorithm, data, dst)
	return dst
}

// hash_string_to_buffer will hash the given input and assign the
// computed digest to the third parameter. It requires that the
// destination buffer is at least as big as the digest size.
hash_string_to_buffer :: proc(algorithm: Algorithm, data: string, hash: []byte) {
	hash_bytes_to_buffer(algorithm, transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed digest into the third parameter. It requires that the
// destination buffer is at least as big as the digest size.
hash_bytes_to_buffer :: proc(algorithm: Algorithm, data, hash: []byte) {
	ctx: Context

	init(&ctx, algorithm)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will incrementally fully consume a stream, and return the
// computed digest in a newly allocated slice.
hash_stream :: proc(
	algorithm: Algorithm,
	s: io.Stream,
	allocator := context.allocator,
) -> (
	[]byte,
	io.Error,
) {
	ctx: Context

	buf: [MAX_BLOCK_SIZE * 4]byte
	defer mem.zero_explicit(&buf, size_of(buf))

	init(&ctx, algorithm)

	loop: for {
		n, err := io.read(s, buf[:])
		if n > 0 {
			// XXX/yawning: Can io.read return n > 0 and EOF?
			update(&ctx, buf[:n])
		}
		#partial switch err {
		case .None:
		case .EOF:
			break loop
		case:
			return nil, err
		}
	}

	dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
	final(&ctx, dst)

	return dst, io.Error.None
}
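A short usage sketch for the streaming path, hashing standard input (any `io.Stream` works; `os.stdin` is one convenient source):

```odin
package stream_example

import "core:crypto/hash"
import "core:fmt"
import "core:os"

main :: proc() {
	// Wrap a file handle as an io.Stream and consume it incrementally.
	s := os.stream_from_handle(os.stdin)

	digest, err := hash.hash_stream(hash.Algorithm.SHA256, s)
	if err != .None {
		return
	}
	defer delete(digest)

	fmt.printf("%x\n", digest)
}
```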

// hash_file will read the file provided by the given handle and return the
// computed digest in a newly allocated slice.
hash_file :: proc(
	algorithm: Algorithm,
	hd: os.Handle,
	load_at_once := false,
	allocator := context.allocator,
) -> (
	[]byte,
	io.Error,
) {
	if !load_at_once {
		return hash_stream(algorithm, os.stream_from_handle(hd), allocator)
	}

	buf, ok := os.read_entire_file(hd, allocator)
	if !ok {
		return nil, io.Error.Unknown
	}
	defer delete(buf, allocator)

	return hash_bytes(algorithm, buf, allocator), io.Error.None
}
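And a corresponding sketch for `hash_file` (the path is hypothetical; `load_at_once` trades memory for a single read):

```odin
package file_hash_example

import "core:crypto/hash"
import "core:fmt"
import "core:os"

main :: proc() {
	fd, errno := os.open("input.txt") // Hypothetical input file.
	if errno != os.ERROR_NONE {
		return
	}
	defer os.close(fd)

	// Streamed by default; pass load_at_once = true to read it whole.
	digest, err := hash.hash_file(hash.Algorithm.BLAKE2B, fd)
	if err != .None {
		return
	}
	defer delete(digest)

	fmt.printf("%x\n", digest)
}
```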

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}
core/crypto/hash/low_level.odin (new file, 353 lines)
@@ -0,0 +1,353 @@
package crypto_hash

import "core:crypto/blake2b"
import "core:crypto/blake2s"
import "core:crypto/sha2"
import "core:crypto/sha3"
import "core:crypto/sm3"
import "core:crypto/legacy/keccak"
import "core:crypto/legacy/md5"
import "core:crypto/legacy/sha1"

import "core:reflect"

// MAX_DIGEST_SIZE is the maximum size digest that can be returned by any
// of the Algorithms supported via this package.
MAX_DIGEST_SIZE :: 64
// MAX_BLOCK_SIZE is the maximum block size used by any of the Algorithms
// supported by this package.
MAX_BLOCK_SIZE :: sha3.BLOCK_SIZE_224

// Algorithm is the algorithm identifier associated with a given Context.
Algorithm :: enum {
	Invalid,
	BLAKE2B,
	BLAKE2S,
	SHA224,
	SHA256,
	SHA384,
	SHA512,
	SHA512_256,
	SHA3_224,
	SHA3_256,
	SHA3_384,
	SHA3_512,
	SM3,
	Legacy_KECCAK_224,
	Legacy_KECCAK_256,
	Legacy_KECCAK_384,
	Legacy_KECCAK_512,
	Insecure_MD5,
	Insecure_SHA1,
}

// ALGORITHM_NAMES is the Algorithm to algorithm name string.
ALGORITHM_NAMES := [Algorithm]string {
	.Invalid           = "Invalid",
	.BLAKE2B           = "BLAKE2b",
	.BLAKE2S           = "BLAKE2s",
	.SHA224            = "SHA-224",
	.SHA256            = "SHA-256",
	.SHA384            = "SHA-384",
	.SHA512            = "SHA-512",
	.SHA512_256        = "SHA-512/256",
	.SHA3_224          = "SHA3-224",
	.SHA3_256          = "SHA3-256",
	.SHA3_384          = "SHA3-384",
	.SHA3_512          = "SHA3-512",
	.SM3               = "SM3",
	.Legacy_KECCAK_224 = "Keccak-224",
	.Legacy_KECCAK_256 = "Keccak-256",
	.Legacy_KECCAK_384 = "Keccak-384",
	.Legacy_KECCAK_512 = "Keccak-512",
	.Insecure_MD5      = "MD5",
	.Insecure_SHA1     = "SHA-1",
}

// DIGEST_SIZES is the Algorithm to digest size in bytes.
DIGEST_SIZES := [Algorithm]int {
	.Invalid           = 0,
	.BLAKE2B           = blake2b.DIGEST_SIZE,
	.BLAKE2S           = blake2s.DIGEST_SIZE,
	.SHA224            = sha2.DIGEST_SIZE_224,
	.SHA256            = sha2.DIGEST_SIZE_256,
	.SHA384            = sha2.DIGEST_SIZE_384,
	.SHA512            = sha2.DIGEST_SIZE_512,
	.SHA512_256        = sha2.DIGEST_SIZE_512_256,
	.SHA3_224          = sha3.DIGEST_SIZE_224,
	.SHA3_256          = sha3.DIGEST_SIZE_256,
	.SHA3_384          = sha3.DIGEST_SIZE_384,
	.SHA3_512          = sha3.DIGEST_SIZE_512,
	.SM3               = sm3.DIGEST_SIZE,
	.Legacy_KECCAK_224 = keccak.DIGEST_SIZE_224,
	.Legacy_KECCAK_256 = keccak.DIGEST_SIZE_256,
	.Legacy_KECCAK_384 = keccak.DIGEST_SIZE_384,
	.Legacy_KECCAK_512 = keccak.DIGEST_SIZE_512,
	.Insecure_MD5      = md5.DIGEST_SIZE,
	.Insecure_SHA1     = sha1.DIGEST_SIZE,
}

// BLOCK_SIZES is the Algorithm to block size in bytes.
BLOCK_SIZES := [Algorithm]int {
	.Invalid           = 0,
	.BLAKE2B           = blake2b.BLOCK_SIZE,
	.BLAKE2S           = blake2s.BLOCK_SIZE,
	.SHA224            = sha2.BLOCK_SIZE_256,
	.SHA256            = sha2.BLOCK_SIZE_256,
	.SHA384            = sha2.BLOCK_SIZE_512,
	.SHA512            = sha2.BLOCK_SIZE_512,
	.SHA512_256        = sha2.BLOCK_SIZE_512,
	.SHA3_224          = sha3.BLOCK_SIZE_224,
	.SHA3_256          = sha3.BLOCK_SIZE_256,
	.SHA3_384          = sha3.BLOCK_SIZE_384,
	.SHA3_512          = sha3.BLOCK_SIZE_512,
	.SM3               = sm3.BLOCK_SIZE,
	.Legacy_KECCAK_224 = keccak.BLOCK_SIZE_224,
	.Legacy_KECCAK_256 = keccak.BLOCK_SIZE_256,
	.Legacy_KECCAK_384 = keccak.BLOCK_SIZE_384,
	.Legacy_KECCAK_512 = keccak.BLOCK_SIZE_512,
	.Insecure_MD5      = md5.BLOCK_SIZE,
	.Insecure_SHA1     = sha1.BLOCK_SIZE,
}
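These tables make algorithm metadata queryable without instantiating a Context. A small sketch that iterates them (assuming Odin's enumerated-array iteration, which yields the value and the enum index):

```odin
package alg_info_example

import "core:crypto/hash"
import "core:fmt"

main :: proc() {
	// Print name, digest size, and block size for every algorithm.
	for name, algo in hash.ALGORITHM_NAMES {
		if algo == .Invalid {
			continue
		}
		fmt.printf(
			"%s: %d byte digest, %d byte block\n",
			name,
			hash.DIGEST_SIZES[algo],
			hash.BLOCK_SIZES[algo],
		)
	}
}
```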

// Context is a concrete instantiation of a specific hash algorithm.
Context :: struct {
	_algo: Algorithm,
	_impl: union {
		blake2b.Context,
		blake2s.Context,
		sha2.Context_256,
		sha2.Context_512,
		sha3.Context,
		sm3.Context,
		keccak.Context,
		md5.Context,
		sha1.Context,
	},
}

@(private)
_IMPL_IDS := [Algorithm]typeid {
	.Invalid           = nil,
	.BLAKE2B           = typeid_of(blake2b.Context),
	.BLAKE2S           = typeid_of(blake2s.Context),
	.SHA224            = typeid_of(sha2.Context_256),
	.SHA256            = typeid_of(sha2.Context_256),
	.SHA384            = typeid_of(sha2.Context_512),
	.SHA512            = typeid_of(sha2.Context_512),
	.SHA512_256        = typeid_of(sha2.Context_512),
	.SHA3_224          = typeid_of(sha3.Context),
	.SHA3_256          = typeid_of(sha3.Context),
	.SHA3_384          = typeid_of(sha3.Context),
	.SHA3_512          = typeid_of(sha3.Context),
	.SM3               = typeid_of(sm3.Context),
	.Legacy_KECCAK_224 = typeid_of(keccak.Context),
	.Legacy_KECCAK_256 = typeid_of(keccak.Context),
	.Legacy_KECCAK_384 = typeid_of(keccak.Context),
	.Legacy_KECCAK_512 = typeid_of(keccak.Context),
	.Insecure_MD5      = typeid_of(md5.Context),
	.Insecure_SHA1     = typeid_of(sha1.Context),
}

// init initializes a Context with a specific hash Algorithm.
init :: proc(ctx: ^Context, algorithm: Algorithm) {
	if ctx._impl != nil {
		reset(ctx)
	}

	// Directly specialize the union by setting the type ID (save a copy).
	reflect.set_union_variant_typeid(
		ctx._impl,
		_IMPL_IDS[algorithm],
	)
	switch algorithm {
	case .BLAKE2B:
		blake2b.init(&ctx._impl.(blake2b.Context))
	case .BLAKE2S:
		blake2s.init(&ctx._impl.(blake2s.Context))
	case .SHA224:
		sha2.init_224(&ctx._impl.(sha2.Context_256))
	case .SHA256:
		sha2.init_256(&ctx._impl.(sha2.Context_256))
	case .SHA384:
		sha2.init_384(&ctx._impl.(sha2.Context_512))
	case .SHA512:
		sha2.init_512(&ctx._impl.(sha2.Context_512))
	case .SHA512_256:
		sha2.init_512_256(&ctx._impl.(sha2.Context_512))
	case .SHA3_224:
		sha3.init_224(&ctx._impl.(sha3.Context))
	case .SHA3_256:
		sha3.init_256(&ctx._impl.(sha3.Context))
	case .SHA3_384:
		sha3.init_384(&ctx._impl.(sha3.Context))
	case .SHA3_512:
		sha3.init_512(&ctx._impl.(sha3.Context))
	case .SM3:
		sm3.init(&ctx._impl.(sm3.Context))
	case .Legacy_KECCAK_224:
		keccak.init_224(&ctx._impl.(keccak.Context))
	case .Legacy_KECCAK_256:
		keccak.init_256(&ctx._impl.(keccak.Context))
	case .Legacy_KECCAK_384:
		keccak.init_384(&ctx._impl.(keccak.Context))
	case .Legacy_KECCAK_512:
		keccak.init_512(&ctx._impl.(keccak.Context))
	case .Insecure_MD5:
		md5.init(&ctx._impl.(md5.Context))
	case .Insecure_SHA1:
		sha1.init(&ctx._impl.(sha1.Context))
	case .Invalid:
		panic("crypto/hash: uninitialized algorithm")
	case:
		panic("crypto/hash: invalid algorithm")
	}

	ctx._algo = algorithm
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
	switch &impl in ctx._impl {
	case blake2b.Context:
		blake2b.update(&impl, data)
	case blake2s.Context:
		blake2s.update(&impl, data)
	case sha2.Context_256:
		sha2.update(&impl, data)
	case sha2.Context_512:
		sha2.update(&impl, data)
	case sha3.Context:
		sha3.update(&impl, data)
	case sm3.Context:
		sm3.update(&impl, data)
	case keccak.Context:
		keccak.update(&impl, data)
	case md5.Context:
		md5.update(&impl, data)
	case sha1.Context:
		sha1.update(&impl, data)
	case:
		panic("crypto/hash: uninitialized algorithm")
	}
}

// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
	switch &impl in ctx._impl {
	case blake2b.Context:
		blake2b.final(&impl, hash, finalize_clone)
	case blake2s.Context:
		blake2s.final(&impl, hash, finalize_clone)
	case sha2.Context_256:
		sha2.final(&impl, hash, finalize_clone)
	case sha2.Context_512:
		sha2.final(&impl, hash, finalize_clone)
	case sha3.Context:
		sha3.final(&impl, hash, finalize_clone)
	case sm3.Context:
		sm3.final(&impl, hash, finalize_clone)
	case keccak.Context:
		keccak.final(&impl, hash, finalize_clone)
	case md5.Context:
		md5.final(&impl, hash, finalize_clone)
	case sha1.Context:
		sha1.final(&impl, hash, finalize_clone)
	case:
		panic("crypto/hash: uninitialized algorithm")
	}

	if !finalize_clone {
		reset(ctx)
	}
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
	// XXX/yawning: Maybe these cases should panic, because both cases
	// are probably bugs.
	if ctx == other {
		return
	}
	if ctx._impl != nil {
		reset(ctx)
	}

	ctx._algo = other._algo

	reflect.set_union_variant_typeid(
		ctx._impl,
		reflect.union_variant_typeid(other._impl),
	)
	switch &src_impl in other._impl {
	case blake2b.Context:
		blake2b.clone(&ctx._impl.(blake2b.Context), &src_impl)
	case blake2s.Context:
		blake2s.clone(&ctx._impl.(blake2s.Context), &src_impl)
	case sha2.Context_256:
		sha2.clone(&ctx._impl.(sha2.Context_256), &src_impl)
	case sha2.Context_512:
		sha2.clone(&ctx._impl.(sha2.Context_512), &src_impl)
	case sha3.Context:
		sha3.clone(&ctx._impl.(sha3.Context), &src_impl)
	case sm3.Context:
		sm3.clone(&ctx._impl.(sm3.Context), &src_impl)
	case keccak.Context:
		keccak.clone(&ctx._impl.(keccak.Context), &src_impl)
	case md5.Context:
		md5.clone(&ctx._impl.(md5.Context), &src_impl)
	case sha1.Context:
		sha1.clone(&ctx._impl.(sha1.Context), &src_impl)
	case:
		panic("crypto/hash: uninitialized algorithm")
	}
}
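A short sketch of using `clone` to snapshot a context, so a prefix digest can be taken without disturbing the original:

```odin
package clone_example

import "core:crypto/hash"

main :: proc() {
	ctx, snapshot: hash.Context
	hash.init(&ctx, hash.Algorithm.SHA3_256)

	prefix := "prefix"
	hash.update(&ctx, transmute([]byte)prefix)

	// Snapshot the state; both contexts can now proceed independently.
	hash.clone(&snapshot, &ctx)

	digest := make([]byte, hash.digest_size(&snapshot))
	defer delete(digest)
	hash.final(&snapshot, digest) // Finalizing the snapshot leaves ctx usable.

	suffix := "suffix"
	hash.update(&ctx, transmute([]byte)suffix)
	hash.final(&ctx, digest)
}
```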

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
	switch &impl in ctx._impl {
	case blake2b.Context:
		blake2b.reset(&impl)
	case blake2s.Context:
		blake2s.reset(&impl)
	case sha2.Context_256:
		sha2.reset(&impl)
	case sha2.Context_512:
		sha2.reset(&impl)
	case sha3.Context:
		sha3.reset(&impl)
	case sm3.Context:
		sm3.reset(&impl)
	case keccak.Context:
		keccak.reset(&impl)
	case md5.Context:
		md5.reset(&impl)
	case sha1.Context:
		sha1.reset(&impl)
	case:
		// Unlike clone, calling reset repeatedly is fine.
	}

	ctx._algo = .Invalid
	ctx._impl = nil
}

// algorithm returns the Algorithm used by a Context instance.
algorithm :: proc(ctx: ^Context) -> Algorithm {
	return ctx._algo
}

// digest_size returns the digest size of a Context instance in bytes.
digest_size :: proc(ctx: ^Context) -> int {
	return DIGEST_SIZES[ctx._algo]
}

// block_size returns the block size of a Context instance in bytes.
block_size :: proc(ctx: ^Context) -> int {
	return BLOCK_SIZES[ctx._algo]
}
core/crypto/hmac/hmac.odin (new file, 162 lines)
@@ -0,0 +1,162 @@
/*
package hmac implements the HMAC MAC algorithm.

See:
- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.198-1.pdf
*/
package hmac

import "core:crypto"
import "core:crypto/hash"
import "core:mem"

// sum will compute the HMAC with the specified algorithm and key
// over msg, and write the computed digest to dst. It requires that
// the dst buffer is exactly the tag size.
sum :: proc(algorithm: hash.Algorithm, dst, msg, key: []byte) {
	ctx: Context

	init(&ctx, algorithm, key)
	update(&ctx, msg)
	final(&ctx, dst)
}

// verify will verify the HMAC tag computed with the specified algorithm
// and key over msg and return true iff the tag is valid. It requires
// that the tag is correctly sized.
verify :: proc(algorithm: hash.Algorithm, tag, msg, key: []byte) -> bool {
	tag_buf: [hash.MAX_DIGEST_SIZE]byte

	derived_tag := tag_buf[:hash.DIGEST_SIZES[algorithm]]
	sum(algorithm, derived_tag, msg, key)

	return crypto.compare_constant_time(derived_tag, tag) == 1
}
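A minimal usage sketch of the one-shot interface (the key and message are placeholders; in practice the key would come from a KDF or the system entropy source):

```odin
package hmac_example

import "core:crypto/hash"
import "core:crypto/hmac"
import "core:fmt"

main :: proc() {
	key := make([]byte, 32) // Placeholder all-zero key; use a real secret.
	defer delete(key)

	msg := "attack at dawn"

	tag := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.SHA256])
	defer delete(tag)

	// Compute the tag, then check it; verify compares in constant time.
	hmac.sum(hash.Algorithm.SHA256, tag, transmute([]byte)msg, key)
	fmt.println(hmac.verify(hash.Algorithm.SHA256, tag, transmute([]byte)msg, key))
}
```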

// Context is a concrete instantiation of HMAC with a specific hash
// algorithm.
Context :: struct {
	_o_hash:         hash.Context, // H(k ^ opad) (not finalized)
	_i_hash:         hash.Context, // H(k ^ ipad) (not finalized)
	_tag_sz:         int,
	_is_initialized: bool,
}

// init initializes a Context with a specific hash Algorithm and key.
init :: proc(ctx: ^Context, algorithm: hash.Algorithm, key: []byte) {
	if ctx._is_initialized {
		reset(ctx)
	}

	_init_hashes(ctx, algorithm, key)

	ctx._tag_sz = hash.DIGEST_SIZES[algorithm]
	ctx._is_initialized = true
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
	assert(ctx._is_initialized)

	hash.update(&ctx._i_hash, data)
}

// final finalizes the Context, writes the tag to dst, and calls
// reset on the Context.
final :: proc(ctx: ^Context, dst: []byte) {
	assert(ctx._is_initialized)

	defer (reset(ctx))

	if len(dst) != ctx._tag_sz {
		panic("crypto/hmac: invalid destination tag size")
	}

	hash.final(&ctx._i_hash, dst) // H((k ^ ipad) || text)

	hash.update(&ctx._o_hash, dst) // H((k ^ opad) || H((k ^ ipad) || text))
	hash.final(&ctx._o_hash, dst)
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
	if !ctx._is_initialized {
		return
	}

	hash.reset(&ctx._o_hash)
	hash.reset(&ctx._i_hash)
	ctx._tag_sz = 0
	ctx._is_initialized = false
}

// algorithm returns the Algorithm used by a Context instance.
algorithm :: proc(ctx: ^Context) -> hash.Algorithm {
	assert(ctx._is_initialized)

	return hash.algorithm(&ctx._i_hash)
}

// tag_size returns the tag size of a Context instance in bytes.
tag_size :: proc(ctx: ^Context) -> int {
	assert(ctx._is_initialized)

	return ctx._tag_sz
}

@(private)
_I_PAD :: 0x36
_O_PAD :: 0x5c

@(private)
_init_hashes :: proc(ctx: ^Context, algorithm: hash.Algorithm, key: []byte) {
	K0_buf: [hash.MAX_BLOCK_SIZE]byte
	kPad_buf: [hash.MAX_BLOCK_SIZE]byte

	kLen := len(key)
	B := hash.BLOCK_SIZES[algorithm]
	K0 := K0_buf[:B]
	defer mem.zero_explicit(raw_data(K0), B)

	switch {
	case kLen == B, kLen < B:
		// If the length of K = B: set K0 = K.
		//
		// If the length of K < B: append zeros to the end of K to
		// create a B-byte string K0 (e.g., if K is 20 bytes in
		// length and B = 64, then K will be appended with 44 zero
		// bytes x'00').
		//
		// K0 is zero-initialized, so the copy handles both cases.
		copy(K0, key)
	case kLen > B:
		// If the length of K > B: hash K to obtain an L byte string,
		// then append (B-L) zeros to create a B-byte string K0
		// (i.e., K0 = H(K) || 00...00).
		tmpCtx := &ctx._o_hash // Saves allocating a hash.Context.
		hash.init(tmpCtx, algorithm)
		hash.update(tmpCtx, key)
		hash.final(tmpCtx, K0)
	}

	// Initialize the hashes, and write the padded keys:
	// - ctx._i_hash -> H(K0 ^ ipad)
	// - ctx._o_hash -> H(K0 ^ opad)

	hash.init(&ctx._o_hash, algorithm)
	hash.init(&ctx._i_hash, algorithm)

	kPad := kPad_buf[:B]
	defer mem.zero_explicit(raw_data(kPad), B)

	for v, i in K0 {
		kPad[i] = v ~ _I_PAD
	}
	hash.update(&ctx._i_hash, kPad)

	for v, i in K0 {
		kPad[i] = v ~ _O_PAD
	}
	hash.update(&ctx._o_hash, kPad)
}
@@ -1,3 +1,11 @@
|
||||
/*
|
||||
package keccak implements the Keccak hash algorithm family.
|
||||
|
||||
During the SHA-3 standardization process, the padding scheme was changed
|
||||
thus Keccac and SHA-3 produce different outputs. Most users should use
|
||||
SHA-3 and/or SHAKE instead, however the legacy algorithm is provided for
|
||||
backward compatibility purposes.
|
||||
*/
|
||||
package keccak
|
||||
|
||||
/*
|
||||
@@ -6,372 +14,82 @@ package keccak
|
||||
|
||||
List of contributors:
|
||||
zhibog, dotbmp: Initial implementation.
|
||||
|
||||
Interface for the Keccak hashing algorithm.
|
||||
This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
|
||||
*/
|
||||
|
||||
import "core:io"
|
||||
import "core:os"
|
||||
|
||||
import "../../_sha3"
|
||||
|
||||
/*
|
||||
High level API
|
||||
*/
|
||||
|
||||
// DIGEST_SIZE_224 is the Keccak-224 digest size.
|
||||
DIGEST_SIZE_224 :: 28
|
||||
// DIGEST_SIZE_256 is the Keccak-256 digest size.
|
||||
DIGEST_SIZE_256 :: 32
|
||||
// DIGEST_SIZE_384 is the Keccak-384 digest size.
|
||||
DIGEST_SIZE_384 :: 48
|
||||
// DIGEST_SIZE_512 is the Keccak-512 digest size.
|
||||
DIGEST_SIZE_512 :: 64
|
||||
|
||||
// hash_string_224 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
|
||||
return hash_bytes_224(transmute([]byte)(data))
|
||||
}
|
||||
// BLOCK_SIZE_224 is the Keccak-224 block size in bytes.
|
||||
BLOCK_SIZE_224 :: _sha3.RATE_224
|
||||
// BLOCK_SIZE_256 is the Keccak-256 block size in bytes.
|
||||
BLOCK_SIZE_256 :: _sha3.RATE_256
|
||||
// BLOCK_SIZE_384 is the Keccak-384 block size in bytes.
|
||||
BLOCK_SIZE_384 :: _sha3.RATE_384
|
||||
// BLOCK_SIZE_512 is the Keccak-512 block size in bytes.
|
||||
BLOCK_SIZE_512 :: _sha3.RATE_512
|
||||
|
||||
// hash_bytes_224 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
|
||||
hash: [DIGEST_SIZE_224]byte
|
||||
ctx: Context
|
||||
// Context is a Keccak instance.
|
||||
Context :: distinct _sha3.Context
|
||||
|
||||
// init_224 initializes a Context for Keccak-224.
|
||||
init_224 :: proc(ctx: ^Context) {
|
||||
ctx.mdlen = DIGEST_SIZE_224
|
||||
ctx.is_keccak = true
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
_init(ctx)
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_224 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_224 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_224
|
||||
ctx.is_keccak = true
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_224 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
|
||||
hash: [DIGEST_SIZE_224]byte
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_224
|
||||
ctx.is_keccak = true
|
||||
init(&ctx)
|
||||
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_224 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_224(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_224(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_224]byte{}, false
|
||||
}
|
||||
|
||||
hash_224 :: proc {
|
||||
hash_stream_224,
|
||||
hash_file_224,
|
||||
hash_bytes_224,
|
||||
hash_string_224,
|
||||
hash_bytes_to_buffer_224,
|
||||
hash_string_to_buffer_224,
|
||||
}
|
||||
|
||||
// hash_string_256 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
|
||||
return hash_bytes_256(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_256 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
|
||||
hash: [DIGEST_SIZE_256]byte
|
||||
ctx: Context
|
||||
// init_256 initializes a Context for Keccak-256.
|
||||
init_256 :: proc(ctx: ^Context) {
|
||||
ctx.mdlen = DIGEST_SIZE_256
|
||||
ctx.is_keccak = true
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
_init(ctx)
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_256 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_256 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_256
|
||||
ctx.is_keccak = true
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_256 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
|
||||
hash: [DIGEST_SIZE_256]byte
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_256
|
||||
ctx.is_keccak = true
|
||||
init(&ctx)
|
||||
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_256 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_256(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_256(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_256]byte{}, false
|
||||
}
|
||||
|
||||
hash_256 :: proc {
|
||||
hash_stream_256,
|
||||
hash_file_256,
|
||||
hash_bytes_256,
|
||||
hash_string_256,
|
||||
hash_bytes_to_buffer_256,
|
||||
hash_string_to_buffer_256,
|
||||
}
|
||||
|
||||

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
    return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_384 initializes a Context for Keccak-384.
init_384 :: proc(ctx: ^Context) {
    ctx.mdlen = DIGEST_SIZE_384
    _init(ctx)
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
    if !load_at_once {
        return hash_stream_384(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_384(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
    hash_stream_384,
    hash_file_384,
    hash_bytes_384,
    hash_string_384,
    hash_bytes_to_buffer_384,
    hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
    return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_512 initializes a Context for Keccak-512.
init_512 :: proc(ctx: ^Context) {
    ctx.mdlen = DIGEST_SIZE_512
    _init(ctx)
}

@(private)
_init :: proc(ctx: ^Context) {
    ctx.is_keccak = true
    _sha3.init(transmute(^_sha3.Context)(ctx))
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
    if !load_at_once {
        return hash_stream_512(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
    hash_stream_512,
    hash_file_512,
    hash_bytes_512,
    hash_string_512,
    hash_bytes_to_buffer_512,
    hash_string_to_buffer_512,
}

/*
    Low level API
*/

Context :: _sha3.Sha3_Context

init :: proc(ctx: ^Context) {
    ctx.is_keccak = true
    _sha3.init(ctx)
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
    _sha3.update(ctx, data)
    _sha3.update(transmute(^_sha3.Context)(ctx), data)
}

final :: proc(ctx: ^Context, hash: []byte) {
    _sha3.final(ctx, hash)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
    _sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
    _sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
    _sha3.reset(transmute(^_sha3.Context)(ctx))
}
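
To make the refactor concrete, here is a minimal usage sketch of the new Keccak interface, assuming the core:crypto/legacy/keccak import path that the sha3 package documentation later in this diff refers to; the `package main` harness is illustrative only.

```odin
package main

import "core:fmt"
import "core:crypto/legacy/keccak"

main :: proc() {
    // One-shot Keccak-256 digest via the refactored low level API:
    // an explicit init_* call, followed by update and final.
    ctx: keccak.Context
    digest: [keccak.DIGEST_SIZE_256]byte

    keccak.init_256(&ctx)
    msg := "hello world"
    keccak.update(&ctx, transmute([]byte)(msg))
    keccak.final(&ctx, digest[:]) // final also resets ctx

    for b in digest {
        fmt.printf("%02x", b)
    }
    fmt.println()
}
```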

@@ -1,3 +1,13 @@
/*
package md5 implements the MD5 hash algorithm.

WARNING: The MD5 algorithm is known to be insecure and should only be
used for interoperating with legacy applications.

See:
- https://eprint.iacr.org/2005/075
- https://datatracker.ietf.org/doc/html/rfc1321
*/
package md5

/*
@@ -6,103 +16,29 @@ package md5

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
*/

import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:mem"
import "core:os"

/*
    High level API
*/

// DIGEST_SIZE is the MD5 digest size in bytes.
DIGEST_SIZE :: 16

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}
// BLOCK_SIZE is the MD5 block size in bytes.
BLOCK_SIZE :: 64

// Context is a MD5 instance.
Context :: struct {
    data:    [BLOCK_SIZE]byte,
    state:   [4]u32,
    bitlen:  u64,
    datalen: u32,

    is_initialized: bool,
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}

/*
    Low level API
*/

// init initializes a Context.
init :: proc(ctx: ^Context) {
    ctx.state[0] = 0x67452301
    ctx.state[1] = 0xefcdab89
@@ -115,6 +51,7 @@ init :: proc(ctx: ^Context) {
    ctx.is_initialized = true
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
    assert(ctx.is_initialized)

@@ -129,13 +66,26 @@ update :: proc(ctx: ^Context, data: []byte) {
    }
}

final :: proc(ctx: ^Context, hash: []byte) {
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
    assert(ctx.is_initialized)

    if len(hash) < DIGEST_SIZE {
        panic("crypto/md5: invalid destination digest size")
    }

    ctx := ctx
    if finalize_clone {
        tmp_ctx: Context
        clone(&tmp_ctx, ctx)
        ctx = &tmp_ctx
    }
    defer(reset(ctx))

    i := ctx.datalen

    if ctx.datalen < 56 {
@@ -163,25 +113,27 @@ final :: proc(ctx: ^Context, hash: []byte) {
    for i = 0; i < DIGEST_SIZE / 4; i += 1 {
        endian.unchecked_put_u32le(hash[i * 4:], ctx.state[i])
    }

    ctx.is_initialized = false
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
    ctx^ = other^
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^$T) {
    if !ctx.is_initialized {
        return
    }

    mem.zero_explicit(ctx, size_of(ctx^))
}

/*
    MD5 implementation
*/

BLOCK_SIZE :: 64

Context :: struct {
    data:    [BLOCK_SIZE]byte,
    state:   [4]u32,
    bitlen:  u64,
    datalen: u32,

    is_initialized: bool,
}

/*
    @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
    and II respectively, instead of declaring them separately.
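
A short sketch of using the md5 package after this refactor; the core:crypto/legacy/md5 import path is an assumption, based on the legacy/MD5 entry in the README.

```odin
package main

import "core:fmt"
import "core:crypto/legacy/md5"

main :: proc() {
    // MD5 is for legacy interop only; see the WARNING in the package docs.
    ctx: md5.Context
    digest: [md5.DIGEST_SIZE]byte

    md5.init(&ctx)
    msg := "The quick brown fox jumps over the lazy dog"
    md5.update(&ctx, transmute([]byte)(msg))
    md5.final(&ctx, digest[:]) // finalizes, writes the digest, and resets ctx

    for b in digest {
        fmt.printf("%02x", b)
    }
    fmt.println()
}
```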

@@ -1,3 +1,14 @@
/*
package sha1 implements the SHA1 hash algorithm.

WARNING: The SHA1 algorithm is known to be insecure and should only be
used for interoperating with legacy applications.

See:
- https://eprint.iacr.org/2017/190
- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
- https://datatracker.ietf.org/doc/html/rfc3174
*/
package sha1

/*
@@ -6,103 +17,30 @@ package sha1

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
*/

import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:mem"
import "core:os"

/*
    High level API
*/

// DIGEST_SIZE is the SHA1 digest size in bytes.
DIGEST_SIZE :: 20

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}
// BLOCK_SIZE is the SHA1 block size in bytes.
BLOCK_SIZE :: 64

// Context is a SHA1 instance.
Context :: struct {
    data:    [BLOCK_SIZE]byte,
    state:   [5]u32,
    k:       [4]u32,
    bitlen:  u64,
    datalen: u32,

    is_initialized: bool,
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}

/*
    Low level API
*/

// init initializes a Context.
init :: proc(ctx: ^Context) {
    ctx.state[0] = 0x67452301
    ctx.state[1] = 0xefcdab89
@@ -120,6 +58,7 @@ init :: proc(ctx: ^Context) {
    ctx.is_initialized = true
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
    assert(ctx.is_initialized)

@@ -134,13 +73,26 @@ update :: proc(ctx: ^Context, data: []byte) {
    }
}

final :: proc(ctx: ^Context, hash: []byte) {
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
    assert(ctx.is_initialized)

    if len(hash) < DIGEST_SIZE {
        panic("crypto/sha1: invalid destination digest size")
    }

    ctx := ctx
    if finalize_clone {
        tmp_ctx: Context
        clone(&tmp_ctx, ctx)
        ctx = &tmp_ctx
    }
    defer(reset(ctx))

    i := ctx.datalen

    if ctx.datalen < 56 {
@@ -168,26 +120,27 @@ final :: proc(ctx: ^Context, hash: []byte) {
    for i = 0; i < DIGEST_SIZE / 4; i += 1 {
        endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
    }

    ctx.is_initialized = false
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
    ctx^ = other^
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^$T) {
    if !ctx.is_initialized {
        return
    }

    mem.zero_explicit(ctx, size_of(ctx^))
}

/*
    SHA1 implementation
*/

BLOCK_SIZE :: 64

Context :: struct {
    data:    [BLOCK_SIZE]byte,
    datalen: u32,
    bitlen:  u64,
    state:   [5]u32,
    k:       [4]u32,

    is_initialized: bool,
}

@(private)
transform :: proc "contextless" (ctx: ^Context, data: []byte) {
    a, b, c, d, e, i, t: u32
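
The new finalize_clone parameter enables rolling digests: snapshotting a digest mid-stream without destroying the running Context. A minimal sketch, assuming a core:crypto/legacy/sha1 import path:

```odin
package main

import "core:fmt"
import "core:crypto/legacy/sha1"

main :: proc() {
    ctx: sha1.Context
    sha1.init(&ctx)

    part1 := "hello "
    sha1.update(&ctx, transmute([]byte)(part1))

    snapshot: [sha1.DIGEST_SIZE]byte
    sha1.final(&ctx, snapshot[:], true) // finalizes a copy; ctx stays usable

    part2 := "world"
    sha1.update(&ctx, transmute([]byte)(part2))

    full: [sha1.DIGEST_SIZE]byte
    sha1.final(&ctx, full[:]) // finalizes and resets ctx

    fmt.printf("digest of prefix: %v\ndigest of whole:  %v\n", snapshot, full)
}
```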

@@ -23,10 +23,6 @@ verify :: proc (tag, msg, key: []byte) -> bool {
    ctx: Context = ---
    derived_tag: [16]byte = ---

    if len(tag) != TAG_SIZE {
        panic("crypto/poly1305: invalid tag size")
    }

    init(&ctx, key)
    update(&ctx, msg)
    final(&ctx, derived_tag[:])
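
For context, a sketch of computing and verifying a MAC with the procs visible in this hunk; the core:crypto/poly1305 import path is an assumption, and a Poly1305 key is 32 bytes and must be single-use in practice.

```odin
package main

import "core:fmt"
import "core:crypto/poly1305"

main :: proc() {
    key: [32]byte // all-zero key, for illustration only, never for real traffic
    msg := "message to authenticate"

    tag: [16]byte // TAG_SIZE bytes
    ctx: poly1305.Context
    poly1305.init(&ctx, key[:])
    poly1305.update(&ctx, transmute([]byte)(msg))
    poly1305.final(&ctx, tag[:])

    fmt.println("tag verifies:", poly1305.verify(tag[:], transmute([]byte)(msg), key[:]))
}
```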

@@ -1,3 +1,10 @@
/*
package sha2 implements the SHA2 hash algorithm family.

See:
- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
- https://datatracker.ietf.org/doc/html/rfc3874
*/
package sha2

/*
@@ -6,431 +13,83 @@ package sha2

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the SHA2 hashing algorithm, as defined in <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf>
    and in RFC 3874 <https://datatracker.ietf.org/doc/html/rfc3874>
*/

import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:os"

/*
    High level API
*/
import "core:mem"

// DIGEST_SIZE_224 is the SHA-224 digest size in bytes.
DIGEST_SIZE_224 :: 28
// DIGEST_SIZE_256 is the SHA-256 digest size in bytes.
DIGEST_SIZE_256 :: 32
// DIGEST_SIZE_384 is the SHA-384 digest size in bytes.
DIGEST_SIZE_384 :: 48
// DIGEST_SIZE_512 is the SHA-512 digest size in bytes.
DIGEST_SIZE_512 :: 64
// DIGEST_SIZE_512_256 is the SHA-512/256 digest size in bytes.
DIGEST_SIZE_512_256 :: 32

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
    return hash_bytes_224(transmute([]byte)(data))
}
// BLOCK_SIZE_256 is the SHA-224 and SHA-256 block size in bytes.
BLOCK_SIZE_256 :: 64
// BLOCK_SIZE_512 is the SHA-384, SHA-512, and SHA-512/256 block size
// in bytes.
BLOCK_SIZE_512 :: 128

// Context_256 is a SHA-224 or SHA-256 instance.
Context_256 :: struct {
    block:     [BLOCK_SIZE_256]byte,
    h:         [8]u32,
    bitlength: u64,
    length:    u64,
    md_bits:   int,

    is_initialized: bool,
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
    hash: [DIGEST_SIZE_224]byte
    ctx: Context_256
    ctx.md_bits = 224
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// Context_512 is a SHA-384, SHA-512 or SHA-512/256 instance.
Context_512 :: struct {
    block:     [BLOCK_SIZE_512]byte,
    h:         [8]u64,
    bitlength: u64,
    length:    u64,
    md_bits:   int,

    is_initialized: bool,
}

// init_224 initializes a Context_256 for SHA-224.
init_224 :: proc(ctx: ^Context_256) {
    ctx.md_bits = 224
    _init(ctx)
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
    ctx: Context_256
    ctx.md_bits = 224
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
    hash: [DIGEST_SIZE_224]byte
    ctx: Context_256
    ctx.md_bits = 224
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
    if !load_at_once {
        return hash_stream_224(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_224(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
    hash_stream_224,
    hash_file_224,
    hash_bytes_224,
    hash_string_224,
    hash_bytes_to_buffer_224,
    hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
    return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
    hash: [DIGEST_SIZE_256]byte
    ctx: Context_256
    ctx.md_bits = 256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_256 initializes a Context_256 for SHA-256.
init_256 :: proc(ctx: ^Context_256) {
    ctx.md_bits = 256
    _init(ctx)
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
    ctx: Context_256
    ctx.md_bits = 256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
    hash: [DIGEST_SIZE_256]byte
    ctx: Context_256
    ctx.md_bits = 256
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
    if !load_at_once {
        return hash_stream_256(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_256(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
    hash_stream_256,
    hash_file_256,
    hash_bytes_256,
    hash_string_256,
    hash_bytes_to_buffer_256,
    hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
    return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context_512
    ctx.md_bits = 384
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_384 initializes a Context_512 for SHA-384.
init_384 :: proc(ctx: ^Context_512) {
    ctx.md_bits = 384
    _init(ctx)
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
    ctx: Context_512
    ctx.md_bits = 384
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context_512
    ctx.md_bits = 384
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
    if !load_at_once {
        return hash_stream_384(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_384(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
    hash_stream_384,
    hash_file_384,
    hash_bytes_384,
    hash_string_384,
    hash_bytes_to_buffer_384,
    hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
    return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context_512
    ctx.md_bits = 512
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_512 initializes a Context_512 for SHA-512.
init_512 :: proc(ctx: ^Context_512) {
    ctx.md_bits = 512
    _init(ctx)
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
    ctx: Context_512
    ctx.md_bits = 512
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context_512
    ctx.md_bits = 512
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
    if !load_at_once {
        return hash_stream_512(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
    hash_stream_512,
    hash_file_512,
    hash_bytes_512,
    hash_string_512,
    hash_bytes_to_buffer_512,
    hash_string_to_buffer_512,
}

// hash_string_512_256 will hash the given input and return the
// computed hash
hash_string_512_256 :: proc(data: string) -> [DIGEST_SIZE_512_256]byte {
    return hash_bytes_512_256(transmute([]byte)(data))
}

// hash_bytes_512_256 will hash the given input and return the
// computed hash
hash_bytes_512_256 :: proc(data: []byte) -> [DIGEST_SIZE_512_256]byte {
    hash: [DIGEST_SIZE_512_256]byte
    ctx: Context_512
    ctx.md_bits = 256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_512_256 initializes a Context_512 for SHA-512/256.
init_512_256 :: proc(ctx: ^Context_512) {
    ctx.md_bits = 256
    _init(ctx)
}

// hash_string_to_buffer_512_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512_256 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512_256 :: proc(data, hash: []byte) {
    ctx: Context_512
    ctx.md_bits = 256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_512_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512_256]byte, bool) {
    hash: [DIGEST_SIZE_512_256]byte
    ctx: Context_512
    ctx.md_bits = 256
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_512_256 will read the file provided by the given handle
// and compute a hash
hash_file_512_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512_256]byte, bool) {
    if !load_at_once {
        return hash_stream_512_256(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512_256(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512_256]byte{}, false
}

hash_512_256 :: proc {
    hash_stream_512_256,
    hash_file_512_256,
    hash_bytes_512_256,
    hash_string_512_256,
    hash_bytes_to_buffer_512_256,
    hash_string_to_buffer_512_256,
}
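
A sketch of the refactored sha2 interface, here computing SHA-512/256: a Context_512 run with a 256-bit digest length, exactly as init_512_256 above sets it up. The core:crypto/sha2 import path is assumed.

```odin
package main

import "core:fmt"
import "core:crypto/sha2"

main :: proc() {
    ctx: sha2.Context_512
    digest: [sha2.DIGEST_SIZE_512_256]byte

    sha2.init_512_256(&ctx)
    msg := "abc"
    sha2.update(&ctx, transmute([]byte)(msg))
    sha2.final(&ctx, digest[:])

    for b in digest {
        fmt.printf("%02x", b)
    }
    fmt.println()
}
```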

/*
    Low level API
*/

init :: proc(ctx: ^$T) {
@(private)
_init :: proc(ctx: ^$T) {
    when T == Context_256 {
        switch ctx.md_bits {
        case 224:
@@ -497,13 +156,14 @@ init :: proc(ctx: ^$T) {
    ctx.is_initialized = true
}

// update adds more data to the Context.
update :: proc(ctx: ^$T, data: []byte) {
    assert(ctx.is_initialized)

    when T == Context_256 {
        CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE
        CURR_BLOCK_SIZE :: BLOCK_SIZE_256
    } else when T == Context_512 {
        CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE
        CURR_BLOCK_SIZE :: BLOCK_SIZE_512
    }

    data := data
@@ -528,21 +188,34 @@ update :: proc(ctx: ^$T, data: []byte) {
    }
}

final :: proc(ctx: ^$T, hash: []byte) {
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
    assert(ctx.is_initialized)

    if len(hash) * 8 < ctx.md_bits {
        panic("crypto/sha2: invalid destination digest size")
    }

    ctx := ctx
    if finalize_clone {
        tmp_ctx: T
        clone(&tmp_ctx, ctx)
        ctx = &tmp_ctx
    }
    defer(reset(ctx))

    length := ctx.length

    raw_pad: [SHA512_BLOCK_SIZE]byte
    raw_pad: [BLOCK_SIZE_512]byte
    when T == Context_256 {
        CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE
        CURR_BLOCK_SIZE :: BLOCK_SIZE_256
        pm_len := 8 // 64-bits for length
    } else when T == Context_512 {
        CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE
        CURR_BLOCK_SIZE :: BLOCK_SIZE_512
        pm_len := 16 // 128-bits for length
    }
    pad := raw_pad[:CURR_BLOCK_SIZE]
@@ -576,37 +249,27 @@ final :: proc(ctx: ^$T, hash: []byte) {
            endian.unchecked_put_u64be(hash[i * 8:], ctx.h[i])
        }
    }

    ctx.is_initialized = false
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
    ctx^ = other^
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^$T) {
    if !ctx.is_initialized {
        return
    }

    mem.zero_explicit(ctx, size_of(ctx^))
}

/*
    SHA2 implementation
*/

SHA256_BLOCK_SIZE :: 64
SHA512_BLOCK_SIZE :: 128

Context_256 :: struct {
    block:     [SHA256_BLOCK_SIZE]byte,
    h:         [8]u32,
    bitlength: u64,
    length:    u64,
    md_bits:   int,

    is_initialized: bool,
}

Context_512 :: struct {
    block:     [SHA512_BLOCK_SIZE]byte,
    h:         [8]u64,
    bitlength: u64,
    length:    u64,
    md_bits:   int,

    is_initialized: bool,
}

@(private)
sha256_k := [64]u32 {
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
@@ -737,12 +400,12 @@ sha2_transf :: proc "contextless" (ctx: ^$T, data: []byte) {
        w: [64]u32
        wv: [8]u32
        t1, t2: u32
        CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE
        CURR_BLOCK_SIZE :: BLOCK_SIZE_256
    } else when T == Context_512 {
        w: [80]u64
        wv: [8]u64
        t1, t2: u64
        CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE
        CURR_BLOCK_SIZE :: BLOCK_SIZE_512
    }

    data := data
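
Since clone is now part of the public surface (`clone :: proc(ctx, other: ^$T)` copies other into ctx), two digests can share a common prefix without re-hashing it. A hedged sketch, again assuming the core:crypto/sha2 import path:

```odin
package main

import "core:fmt"
import "core:crypto/sha2"

main :: proc() {
    // Hash the shared prefix once, then fork the state for each variant.
    common: sha2.Context_256
    sha2.init_256(&common)
    prefix := "common prefix | "
    sha2.update(&common, transmute([]byte)(prefix))

    branch: sha2.Context_256
    sha2.clone(&branch, &common) // branch now holds a copy of common's state

    a := "variant A"
    b := "variant B"
    sha2.update(&common, transmute([]byte)(a))
    sha2.update(&branch, transmute([]byte)(b))

    digest_a, digest_b: [sha2.DIGEST_SIZE_256]byte
    sha2.final(&common, digest_a[:])
    sha2.final(&branch, digest_b[:])
    fmt.printf("%v\n%v\n", digest_a, digest_b)
}
```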

@@ -1,3 +1,13 @@
/*
package sha3 implements the SHA3 hash algorithm family.

The SHAKE XOF can be found in crypto/shake. While discouraged, if the
pre-standardization Keccak algorithm is required, it can be found in
crypto/legacy/keccak.

See:
- https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.202.pdf
*/
package sha3

/*
@@ -6,359 +16,81 @@ package sha3

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the SHA3 hashing algorithm. The SHAKE functionality can be found in package shake.
    If you wish to compute a Keccak hash, you can use the keccak package; it will use the original padding.
*/

import "core:io"
import "core:os"

import "../_sha3"

/*
    High level API
*/

// DIGEST_SIZE_224 is the SHA3-224 digest size.
DIGEST_SIZE_224 :: 28
// DIGEST_SIZE_256 is the SHA3-256 digest size.
DIGEST_SIZE_256 :: 32
// DIGEST_SIZE_384 is the SHA3-384 digest size.
DIGEST_SIZE_384 :: 48
// DIGEST_SIZE_512 is the SHA3-512 digest size.
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
    return hash_bytes_224(transmute([]byte)(data))
}
// BLOCK_SIZE_224 is the SHA3-224 block size in bytes.
BLOCK_SIZE_224 :: _sha3.RATE_224
// BLOCK_SIZE_256 is the SHA3-256 block size in bytes.
BLOCK_SIZE_256 :: _sha3.RATE_256
// BLOCK_SIZE_384 is the SHA3-384 block size in bytes.
BLOCK_SIZE_384 :: _sha3.RATE_384
// BLOCK_SIZE_512 is the SHA3-512 block size in bytes.
BLOCK_SIZE_512 :: _sha3.RATE_512
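
The RATE_* constants aliased above are the Keccak sponge rates. For a SHA3 instance with a d-byte digest, the sponge absorbs input in blocks of 200 - 2*d bytes (the 1600-bit state minus a 2*d-byte capacity), which is where these block sizes come from. A small illustrative computation (the sha3_rate helper is hypothetical, not part of the package):

```odin
package main

import "core:fmt"

// rate = state size (200 bytes) minus capacity (2 * digest size).
sha3_rate :: proc(digest_size: int) -> int {
    return 200 - 2 * digest_size
}

main :: proc() {
    fmt.println(sha3_rate(28)) // 144 (SHA3-224)
    fmt.println(sha3_rate(32)) // 136 (SHA3-256)
    fmt.println(sha3_rate(48)) // 104 (SHA3-384)
    fmt.println(sha3_rate(64)) //  72 (SHA3-512)
}
```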

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
    hash: [DIGEST_SIZE_224]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_224
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// Context is a SHA3 instance.
Context :: distinct _sha3.Context

// init_224 initializes a Context for SHA3-224.
init_224 :: proc(ctx: ^Context) {
    ctx.mdlen = DIGEST_SIZE_224
    _init(ctx)
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_224
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
    hash: [DIGEST_SIZE_224]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_224
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
    if !load_at_once {
        return hash_stream_224(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_224(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
    hash_stream_224,
    hash_file_224,
    hash_bytes_224,
    hash_string_224,
    hash_bytes_to_buffer_224,
    hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
    return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
    hash: [DIGEST_SIZE_256]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_256 initializes a Context for SHA3-256.
init_256 :: proc(ctx: ^Context) {
    ctx.mdlen = DIGEST_SIZE_256
    _init(ctx)
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
    hash: [DIGEST_SIZE_256]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_256
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
    if !load_at_once {
        return hash_stream_256(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_256(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
    hash_stream_256,
    hash_file_256,
    hash_bytes_256,
    hash_string_256,
    hash_bytes_to_buffer_256,
    hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
    return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_384 initializes a Context for SHA3-384.
init_384 :: proc(ctx: ^Context) {
    ctx.mdlen = DIGEST_SIZE_384
    _init(ctx)
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
    if !load_at_once {
        return hash_stream_384(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_384(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
    hash_stream_384,
    hash_file_384,
    hash_bytes_384,
    hash_string_384,
    hash_bytes_to_buffer_384,
    hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
    return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_512 initializes a Context for SHA3-512.
init_512 :: proc(ctx: ^Context) {
    ctx.mdlen = DIGEST_SIZE_512
    _init(ctx)
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
    if !load_at_once {
        return hash_stream_512(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
    hash_stream_512,
    hash_file_512,
    hash_bytes_512,
    hash_string_512,
    hash_bytes_to_buffer_512,
    hash_string_to_buffer_512,
}

/*
    Low level API
*/

Context :: _sha3.Sha3_Context

init :: proc(ctx: ^Context) {
    _sha3.init(ctx)
}
@(private)
_init :: proc(ctx: ^Context) {
    _sha3.init(transmute(^_sha3.Context)(ctx))
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
    _sha3.update(ctx, data)
    _sha3.update(transmute(^_sha3.Context)(ctx), data)
}

final :: proc(ctx: ^Context, hash: []byte) {
    _sha3.final(ctx, hash)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
    _sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}

// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
    _sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
    _sha3.reset(transmute(^_sha3.Context)(ctx))
}
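
A one-shot SHA3-512 digest with the refactored sha3 interface; the core:crypto/sha3 import path is assumed.

```odin
package main

import "core:fmt"
import "core:crypto/sha3"

main :: proc() {
    ctx: sha3.Context
    digest: [sha3.DIGEST_SIZE_512]byte

    sha3.init_512(&ctx)
    msg := "abc"
    sha3.update(&ctx, transmute([]byte)(msg))
    sha3.final(&ctx, digest[:])

    for b in digest {
        fmt.printf("%02x", b)
    }
    fmt.println()
}
```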

@@ -1,3 +1,11 @@
/*
package shake implements the SHAKE XOF algorithm family.

The SHA3 hash algorithm can be found in crypto/sha3.

See:
- https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.202.pdf
*/
package shake

/*
@@ -6,201 +14,55 @@ package shake

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the SHAKE hashing algorithm.
    The SHA3 functionality can be found in package sha3.

    TODO: This should provide an incremental squeeze interface, in addition
    to the one-shot final call.
*/

import "core:io"
import "core:os"

import "../_sha3"

/*
    High level API
*/
// Context is a SHAKE128 or SHAKE256 instance.
Context :: distinct _sha3.Context

DIGEST_SIZE_128 :: 16
DIGEST_SIZE_256 :: 32

// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
    return hash_bytes_128(transmute([]byte)(data))
}
// init_128 initializes a Context for SHAKE128.
init_128 :: proc(ctx: ^Context) {
    ctx.mdlen = 128 / 8
    _init(ctx)
}

// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
    hash: [DIGEST_SIZE_128]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_128
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}
// init_256 initializes a Context for SHAKE256.
init_256 :: proc(ctx: ^Context) {
    ctx.mdlen = 256 / 8
    _init(ctx)
}

// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}
@(private)
_init :: proc(ctx: ^Context) {
    _sha3.init(transmute(^_sha3.Context)(ctx))
}

// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_128
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}
// write writes more data into the SHAKE instance. This MUST not be called
// after any reads have been done, and attempts to do so will panic.
write :: proc(ctx: ^Context, data: []byte) {
    _sha3.update(transmute(^_sha3.Context)(ctx), data)
}

// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
    hash: [DIGEST_SIZE_128]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_128
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}
// read reads output from the SHAKE instance. There is no practical upper
// limit to the amount of data that can be read from SHAKE. After read has
// been called one or more times, further calls to write will panic.
read :: proc(ctx: ^Context, dst: []byte) {
    ctx_ := transmute(^_sha3.Context)(ctx)
    if !ctx.is_finalized {
        _sha3.shake_xof(ctx_)
    }

    _sha3.shake_out(ctx_, dst)
}

// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
    if !load_at_once {
        return hash_stream_128(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_128(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_128]byte{}, false
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
    _sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
hash_128 :: proc {
|
||||
hash_stream_128,
|
||||
hash_file_128,
|
||||
hash_bytes_128,
|
||||
hash_string_128,
|
||||
hash_bytes_to_buffer_128,
|
||||
hash_string_to_buffer_128,
|
||||
}
|
||||
|
||||
// hash_string_256 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
|
||||
return hash_bytes_256(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_256 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
|
||||
hash: [DIGEST_SIZE_256]byte
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_256
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_256 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_256 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_256
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
}
|
||||
|
||||
// hash_stream_256 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
|
||||
hash: [DIGEST_SIZE_256]byte
|
||||
ctx: Context
|
||||
ctx.mdlen = DIGEST_SIZE_256
|
||||
init(&ctx)
|
||||
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_256 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_256(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_256(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_256]byte{}, false
|
||||
}
|
||||
|
||||
hash_256 :: proc {
|
||||
hash_stream_256,
|
||||
hash_file_256,
|
||||
hash_bytes_256,
|
||||
hash_string_256,
|
||||
hash_bytes_to_buffer_256,
|
||||
hash_string_to_buffer_256,
|
||||
}
|
||||
|
||||
/*
|
||||
Low level API
|
||||
*/
|
||||
|
||||
Context :: _sha3.Sha3_Context
|
||||
|
||||
init :: proc(ctx: ^Context) {
|
||||
_sha3.init(ctx)
|
||||
}
|
||||
|
||||
update :: proc(ctx: ^Context, data: []byte) {
|
||||
_sha3.update(ctx, data)
|
||||
}
|
||||
|
||||
final :: proc(ctx: ^Context, hash: []byte) {
|
||||
_sha3.shake_xof(ctx)
|
||||
_sha3.shake_out(ctx, hash[:])
|
||||
// reset sanitizes the Context. The Context must be re-initialized to
|
||||
// be used again.
|
||||
reset :: proc(ctx: ^Context) {
|
||||
_sha3.reset(transmute(^_sha3.Context)(ctx))
|
||||
}
|
||||
|
||||
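Taken together, init_128/init_256, write, and read give the package a proper streaming XOF interface alongside the fixed-length hash_* entry points. A minimal usage sketch, assuming only the procedures added above (the 32-byte output length is arbitrary, since an XOF can emit any amount of output):

package shake_example

import "core:crypto/shake"
import "core:fmt"

main :: proc() {
    msg := "The quick brown fox jumps over the lazy dog"

    ctx: shake.Context
    shake.init_128(&ctx)

    // Absorb the input; write may be called any number of times,
    // but only before the first read.
    shake.write(&ctx, transmute([]byte)(msg))

    // Squeeze 32 bytes of output; further reads would keep
    // extending the same output stream.
    dst: [32]byte
    shake.read(&ctx, dst[:])

    fmt.println(dst[:])
}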
@@ -1,3 +1,9 @@
/*
package sm3 implements the SM3 hash algorithm.

See:
- https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
*/
package sm3

/*
@@ -6,102 +12,29 @@ package sm3

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the SM3 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02>
*/

import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:os"

/*
    High level API
*/
import "core:mem"

// DIGEST_SIZE is the SM3 digest size in bytes.
DIGEST_SIZE :: 32

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// BLOCK_SIZE is the SM3 block size in bytes.
BLOCK_SIZE :: 64

// Context is a SM3 instance.
Context :: struct {
    state:     [8]u32,
    x:         [BLOCK_SIZE]byte,
    bitlength: u64,
    length:    u64,

    is_initialized: bool,
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}

/*
    Low level API
*/

// init initializes a Context.
init :: proc(ctx: ^Context) {
    ctx.state[0] = IV[0]
    ctx.state[1] = IV[1]
@@ -118,6 +51,7 @@ init :: proc(ctx: ^Context) {
    ctx.is_initialized = true
}

// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
    assert(ctx.is_initialized)

@@ -143,13 +77,26 @@ update :: proc(ctx: ^Context, data: []byte) {
        }
    }

final :: proc(ctx: ^Context, hash: []byte) {
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
    assert(ctx.is_initialized)

    if len(hash) < DIGEST_SIZE {
        panic("crypto/sm3: invalid destination digest size")
    }

    ctx := ctx
    if finalize_clone {
        tmp_ctx: Context
        clone(&tmp_ctx, ctx)
        ctx = &tmp_ctx
    }
    defer(reset(ctx))

    length := ctx.length

    pad: [BLOCK_SIZE]byte
@@ -168,25 +115,27 @@ final :: proc(ctx: ^Context, hash: []byte) {
    for i := 0; i < DIGEST_SIZE / 4; i += 1 {
        endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
    }
}

    ctx.is_initialized = false
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
    ctx^ = other^
}

// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
    if !ctx.is_initialized {
        return
    }

    mem.zero_explicit(ctx, size_of(ctx^))
}

/*
    SM3 implementation
*/

BLOCK_SIZE :: 64

Context :: struct {
    state:     [8]u32,
    x:         [BLOCK_SIZE]byte,
    bitlength: u64,
    length:    u64,

    is_initialized: bool,
}

@(private)
IV := [8]u32 {
    0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600,
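The reworked final makes rolling digests straightforward: with finalize_clone set, it finalizes a copy of the state, so the caller can keep absorbing data afterwards. A small sketch using only the procedures shown above:

package sm3_example

import "core:crypto/sm3"
import "core:fmt"

main :: proc() {
    msg := "abc"

    ctx: sm3.Context
    sm3.init(&ctx)
    sm3.update(&ctx, transmute([]byte)(msg))

    // Snapshot the digest so far; finalize_clone = true works on a
    // clone, so ctx keeps running.
    rolling: [sm3.DIGEST_SIZE]byte
    sm3.final(&ctx, rolling[:], true)

    // Keep absorbing, then finalize for real (this resets ctx).
    sm3.update(&ctx, transmute([]byte)(msg))
    digest: [sm3.DIGEST_SIZE]byte
    sm3.final(&ctx, digest[:])

    fmt.println(rolling[:]) // SM3("abc")
    fmt.println(digest[:])  // SM3("abcabc")
}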
@@ -27,6 +27,8 @@ import blake2b "core:crypto/blake2b"
import blake2s "core:crypto/blake2s"
import chacha20 "core:crypto/chacha20"
import chacha20poly1305 "core:crypto/chacha20poly1305"
import crypto_hash "core:crypto/hash"
import hmac "core:crypto/hmac"
import keccak "core:crypto/legacy/keccak"
import md5 "core:crypto/legacy/md5"
import sha1 "core:crypto/legacy/sha1"
@@ -137,10 +139,12 @@ _ :: lru
_ :: list
_ :: topological_sort
_ :: crypto
_ :: crypto_hash
_ :: blake2b
_ :: blake2s
_ :: chacha20
_ :: chacha20poly1305
_ :: hmac
_ :: keccak
_ :: md5
_ :: poly1305

@@ -39,7 +39,7 @@ hash_test:
    $(ODIN) run hash -o:speed -no-bounds-check -out:test_hash

crypto_test:
    $(ODIN) run crypto -o:speed -no-bounds-check -out:test_crypto_hash
    $(ODIN) run crypto -o:speed -no-bounds-check -out:test_crypto

noise_test:
    $(ODIN) run math/noise -out:test_noise

@@ -29,9 +29,9 @@ echo ---
%PATH_TO_ODIN% run odin %COMMON% -o:size -out:test_core_odin.exe || exit /b

echo ---
echo Running core:crypto hash tests
echo Running core:crypto tests
echo ---
%PATH_TO_ODIN% run crypto %COMMON% -out:test_crypto_hash.exe || exit /b
%PATH_TO_ODIN% run crypto %COMMON% -out:test_crypto.exe || exit /b

echo ---
echo Running core:encoding tests
@@ -8,34 +8,31 @@ package test_core_crypto
    zhibog, dotbmp: Initial implementation.
    Jeroen van Rijn: Test runner setup.

    Tests for the hashing algorithms within the crypto library.
    Tests for the various algorithms within the crypto library.
    Where possible, the official test vectors are used to validate the implementation.
*/

import "core:testing"
import "core:encoding/hex"
import "core:fmt"
import "core:strings"

import "core:crypto/sha2"
import "core:crypto/sha3"
import "core:crypto/shake"
import "core:crypto/blake2b"
import "core:crypto/blake2s"
import "core:crypto/sm3"
import "core:crypto/siphash"
import "core:crypto/legacy/keccak"
import "core:crypto/legacy/md5"
import "core:crypto/legacy/sha1"
import "core:mem"
import "core:os"
import "core:testing"

import "core:crypto"
import "core:crypto/chacha20"
import "core:crypto/chacha20poly1305"

import "core:crypto/shake"
import "core:crypto/x25519"

TEST_count := 0
TEST_fail := 0

when ODIN_TEST {
    expect :: testing.expect
    log    :: testing.log
} else {
    expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) {
        TEST_count += 1
        if !condition {
            TEST_fail += 1
@@ -51,36 +48,18 @@ when ODIN_TEST {

main :: proc() {
    t := testing.T{}
    test_md5(&t)
    test_sha1(&t)
    test_sha224(&t)
    test_sha256(&t)
    test_sha384(&t)
    test_sha512(&t)
    test_sha512_256(&t)
    test_sha3_224(&t)
    test_sha3_256(&t)
    test_sha3_384(&t)
    test_sha3_512(&t)
    test_shake_128(&t)
    test_shake_256(&t)
    test_keccak_224(&t)
    test_keccak_256(&t)
    test_keccak_384(&t)
    test_keccak_512(&t)
    test_blake2b(&t)
    test_blake2s(&t)
    test_sm3(&t)
    test_siphash_2_4(&t)

    // "modern" crypto tests
    test_chacha20(&t)
    test_poly1305(&t)
    test_chacha20poly1305(&t)
    test_x25519(&t)
    test_rand_bytes(&t)

    bench_modern(&t)
    test_hash(&t)
    test_mac(&t)

    test_chacha20(&t)
    test_chacha20poly1305(&t)
    test_shake(&t)
    test_x25519(&t)

    bench_crypto(&t)

    fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count)
    if TEST_fail > 0 {
@@ -88,411 +67,422 @@ main :: proc() {
    }
}

TestHash :: struct {
    hash: string,
    str:  string,
}

_PLAINTEXT_SUNSCREEN_STR := "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it."

hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string {
    lut: [16]byte = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}
    buf := make([]byte, len(bytes) * 2, allocator)
    for i := 0; i < len(bytes); i += 1 {
        buf[i * 2 + 0] = lut[bytes[i] >> 4 & 0xf]
        buf[i * 2 + 1] = lut[bytes[i] & 0xf]
    }
    return string(buf)
}

@(test)
test_chacha20 :: proc(t: ^testing.T) {
    log(t, "Testing (X)ChaCha20")

    // Test cases taken from RFC 8439, and draft-irtf-cfrg-xchacha-03
    plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)

    key := [chacha20.KEY_SIZE]byte {
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
        0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
        0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
        0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
    }

    nonce := [chacha20.NONCE_SIZE]byte {
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a,
        0x00, 0x00, 0x00, 0x00,
    }

    ciphertext := [114]byte {
        0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
        0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
        0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
        0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b,
        0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab,
        0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57,
        0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab,
        0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8,
        0x07, 0xca, 0x0d, 0xbf, 0x50, 0x0d, 0x6a, 0x61,
        0x56, 0xa3, 0x8e, 0x08, 0x8a, 0x22, 0xb6, 0x5e,
        0x52, 0xbc, 0x51, 0x4d, 0x16, 0xcc, 0xf8, 0x06,
        0x81, 0x8c, 0xe9, 0x1a, 0xb7, 0x79, 0x37, 0x36,
        0x5a, 0xf9, 0x0b, 0xbf, 0x74, 0xa3, 0x5b, 0xe6,
        0xb4, 0x0b, 0x8e, 0xed, 0xf2, 0x78, 0x5e, 0x42,
        0x87, 0x4d,
    }
    ciphertext_str := string(hex.encode(ciphertext[:], context.temp_allocator))

    derived_ciphertext: [114]byte
    ctx: chacha20.Context = ---
    chacha20.init(&ctx, key[:], nonce[:])
    chacha20.seek(&ctx, 1) // The test vectors start the counter at 1.
    chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])

    derived_ciphertext_str := string(hex.encode(derived_ciphertext[:], context.temp_allocator))
    expect(
        t,
        derived_ciphertext_str == ciphertext_str,
        fmt.tprintf(
            "Expected %s for xor_bytes(plaintext_str), but got %s instead",
            ciphertext_str,
            derived_ciphertext_str,
        ),
    )

    xkey := [chacha20.KEY_SIZE]byte {
        0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
        0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
        0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
        0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
    }

    xnonce := [chacha20.XNONCE_SIZE]byte {
        0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
        0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
        0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
    }

    xciphertext := [114]byte {
        0xbd, 0x6d, 0x17, 0x9d, 0x3e, 0x83, 0xd4, 0x3b,
        0x95, 0x76, 0x57, 0x94, 0x93, 0xc0, 0xe9, 0x39,
        0x57, 0x2a, 0x17, 0x00, 0x25, 0x2b, 0xfa, 0xcc,
        0xbe, 0xd2, 0x90, 0x2c, 0x21, 0x39, 0x6c, 0xbb,
        0x73, 0x1c, 0x7f, 0x1b, 0x0b, 0x4a, 0xa6, 0x44,
        0x0b, 0xf3, 0xa8, 0x2f, 0x4e, 0xda, 0x7e, 0x39,
        0xae, 0x64, 0xc6, 0x70, 0x8c, 0x54, 0xc2, 0x16,
        0xcb, 0x96, 0xb7, 0x2e, 0x12, 0x13, 0xb4, 0x52,
        0x2f, 0x8c, 0x9b, 0xa4, 0x0d, 0xb5, 0xd9, 0x45,
        0xb1, 0x1b, 0x69, 0xb9, 0x82, 0xc1, 0xbb, 0x9e,
        0x3f, 0x3f, 0xac, 0x2b, 0xc3, 0x69, 0x48, 0x8f,
        0x76, 0xb2, 0x38, 0x35, 0x65, 0xd3, 0xff, 0xf9,
        0x21, 0xf9, 0x66, 0x4c, 0x97, 0x63, 0x7d, 0xa9,
        0x76, 0x88, 0x12, 0xf6, 0x15, 0xc6, 0x8b, 0x13,
        0xb5, 0x2e,
    }
    xciphertext_str := string(hex.encode(xciphertext[:], context.temp_allocator))

    chacha20.init(&ctx, xkey[:], xnonce[:])
    chacha20.seek(&ctx, 1)
    chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])

    derived_ciphertext_str = string(hex.encode(derived_ciphertext[:], context.temp_allocator))
    expect(
        t,
        derived_ciphertext_str == xciphertext_str,
        fmt.tprintf(
            "Expected %s for xor_bytes(plaintext_str), but got %s instead",
            xciphertext_str,
            derived_ciphertext_str,
        ),
    )
}

@(test)
test_md5 :: proc(t: ^testing.T) {
    // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
    test_vectors := [?]TestHash {
        TestHash{"d41d8cd98f00b204e9800998ecf8427e", ""},
        TestHash{"0cc175b9c0f1b6a831c399e269772661", "a"},
        TestHash{"900150983cd24fb0d6963f7d28e17f72", "abc"},
        TestHash{"f96b697d7cb7938d525a2f31aaf161d0", "message digest"},
        TestHash{"c3fcd3d76192e4007dfb496cca67e13b", "abcdefghijklmnopqrstuvwxyz"},
        TestHash{"d174ab98d277d9f5a5611c2c9f419d9f", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
        TestHash{"57edf4a22be3c955ac49da2e2107b67a", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
    }
    for v, _ in test_vectors {
        computed := md5.hash(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_chacha20poly1305 :: proc(t: ^testing.T) {
    log(t, "Testing chacha20poly1305")

    plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)

    aad := [12]byte {
        0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3,
        0xc4, 0xc5, 0xc6, 0xc7,
    }

    key := [chacha20poly1305.KEY_SIZE]byte {
        0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
        0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
        0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
        0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
    }

    nonce := [chacha20poly1305.NONCE_SIZE]byte {
        0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43,
        0x44, 0x45, 0x46, 0x47,
    }

    ciphertext := [114]byte {
        0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
        0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
        0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
        0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
        0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
        0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
        0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
        0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
        0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
        0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
        0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
        0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
        0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
        0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
        0x61, 0x16,
    }
    ciphertext_str := string(hex.encode(ciphertext[:], context.temp_allocator))

    tag := [chacha20poly1305.TAG_SIZE]byte {
        0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
        0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91,
    }
    tag_str := string(hex.encode(tag[:], context.temp_allocator))

    derived_tag: [chacha20poly1305.TAG_SIZE]byte
    derived_ciphertext: [114]byte

    chacha20poly1305.encrypt(
        derived_ciphertext[:],
        derived_tag[:],
        key[:],
        nonce[:],
        aad[:],
        plaintext,
    )

    derived_ciphertext_str := string(hex.encode(derived_ciphertext[:], context.temp_allocator))
    expect(
        t,
        derived_ciphertext_str == ciphertext_str,
        fmt.tprintf(
            "Expected ciphertext %s for encrypt(aad, plaintext), but got %s instead",
            ciphertext_str,
            derived_ciphertext_str,
        ),
    )

    derived_tag_str := string(hex.encode(derived_tag[:], context.temp_allocator))
    expect(
        t,
        derived_tag_str == tag_str,
        fmt.tprintf(
            "Expected tag %s for encrypt(aad, plaintext), but got %s instead",
            tag_str,
            derived_tag_str,
        ),
    )

    derived_plaintext: [114]byte
    ok := chacha20poly1305.decrypt(
        derived_plaintext[:],
        tag[:],
        key[:],
        nonce[:],
        aad[:],
        ciphertext[:],
    )
    derived_plaintext_str := string(derived_plaintext[:])
    expect(t, ok, "Expected true for decrypt(tag, aad, ciphertext)")
    expect(
        t,
        derived_plaintext_str == _PLAINTEXT_SUNSCREEN_STR,
        fmt.tprintf(
            "Expected plaintext %s for decrypt(tag, aad, ciphertext), but got %s instead",
            _PLAINTEXT_SUNSCREEN_STR,
            derived_plaintext_str,
        ),
    )

    derived_ciphertext[0] ~= 0xa5
    ok = chacha20poly1305.decrypt(
        derived_plaintext[:],
        tag[:],
        key[:],
        nonce[:],
        aad[:],
        derived_ciphertext[:],
    )
    expect(t, !ok, "Expected false for decrypt(tag, aad, corrupted_ciphertext)")

    aad[0] ~= 0xa5
    ok = chacha20poly1305.decrypt(
        derived_plaintext[:],
        tag[:],
        key[:],
        nonce[:],
        aad[:],
        ciphertext[:],
    )
    expect(t, !ok, "Expected false for decrypt(tag, corrupted_aad, ciphertext)")
}

TestECDH :: struct {
    scalar:  string,
    point:   string,
    product: string,
}

@(test)
test_x25519 :: proc(t: ^testing.T) {
    log(t, "Testing X25519")

    // Local copy of this so that the base point doesn't need to be exported.
    _BASE_POINT: [32]byte = {
        9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    }

    test_vectors := [?]TestECDH {
        // Test vectors from RFC 7748
        {
            "a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
            "e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c",
            "c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552",
        },
        {
            "4b66e9d4d1b4673c5ad22691957d6af5c11b6421e0ea01d42ca4169e7918ba0d",
            "e5210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493",
            "95cbde9476e8907d7aade45cb4b873f88b595a68799fa152e6f8f7647aac7957",
        },
    }
    for v, _ in test_vectors {
        scalar, _ := hex.decode(transmute([]byte)(v.scalar), context.temp_allocator)
        point, _ := hex.decode(transmute([]byte)(v.point), context.temp_allocator)

        derived_point: [x25519.POINT_SIZE]byte
        x25519.scalarmult(derived_point[:], scalar[:], point[:])
        derived_point_str := string(hex.encode(derived_point[:], context.temp_allocator))

        expect(
            t,
            derived_point_str == v.product,
            fmt.tprintf(
                "Expected %s for %s * %s, but got %s instead",
                v.product,
                v.scalar,
                v.point,
                derived_point_str,
            ),
        )

        // Abuse the test vectors to sanity-check the scalar-basepoint multiply.
        p1, p2: [x25519.POINT_SIZE]byte
        x25519.scalarmult_basepoint(p1[:], scalar[:])
        x25519.scalarmult(p2[:], scalar[:], _BASE_POINT[:])
        p1_str := string(hex.encode(p1[:], context.temp_allocator))
        p2_str := string(hex.encode(p2[:], context.temp_allocator))
        expect(
            t,
            p1_str == p2_str,
            fmt.tprintf(
                "Expected %s for %s * basepoint, but got %s instead",
                p2_str,
                v.scalar,
                p1_str,
            ),
        )
    }

    // TODO/tests: Run the wycheproof test vectors, once I figure out
    // how to work with JSON.
}

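The two calls exercised above, scalarmult and scalarmult_basepoint, are all a Diffie-Hellman key agreement needs. A rough sketch under stated assumptions: crypto.rand_bytes is platform-dependent (the rand_bytes test below skips everything but Linux), and a plain 32-byte scalar is used since no SCALAR_SIZE constant appears in this diff.

package x25519_example

import "core:crypto"
import "core:crypto/x25519"

main :: proc() {
    // Each side draws a random 32-byte private scalar.
    alice_priv, bob_priv: [32]byte
    crypto.rand_bytes(alice_priv[:])
    crypto.rand_bytes(bob_priv[:])

    // Public keys are the scalar times the curve basepoint.
    alice_pub, bob_pub: [x25519.POINT_SIZE]byte
    x25519.scalarmult_basepoint(alice_pub[:], alice_priv[:])
    x25519.scalarmult_basepoint(bob_pub[:], bob_priv[:])

    // Each side multiplies its own scalar by the peer's public key;
    // both arrive at the same shared point.
    shared_a, shared_b: [x25519.POINT_SIZE]byte
    x25519.scalarmult(shared_a[:], alice_priv[:], bob_pub[:])
    x25519.scalarmult(shared_b[:], bob_priv[:], alice_pub[:])

    assert(shared_a == shared_b)
}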
@(test)
test_sha1 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
        TestHash{"a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
        TestHash{"f9537c23893d2014f365adf8ffe33b8eb0297ed1", "abcdbcdecdefdefgefghfghighijhi"},
        TestHash{"346fb528a24b48f563cb061470bcfd23740427ad", "jkijkljklmklmnlmnomnopnopq"},
        TestHash{"86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
        TestHash{"c729c8996ee0a6f74f4f3248e8957edf704fb624", "01234567012345670123456701234567"},
        TestHash{"84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"a49b2446a02c645bf419f995b67091253a04a259", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha1.hash(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_rand_bytes :: proc(t: ^testing.T) {
    log(t, "Testing rand_bytes")

    if ODIN_OS != .Linux {
        log(t, "rand_bytes not supported - skipping")
        return
    }

    allocator := context.allocator

    buf := make([]byte, 1 << 25, allocator)
    defer delete(buf)

    // Testing a CSPRNG for correctness is incredibly involved and
    // beyond the scope of an implementation that offloads
    // responsibility for correctness to the OS.
    //
    // Just attempt to randomize a sufficiently large buffer, where
    // sufficiently large is:
    //  * Larger than the maximum getentropy request size (256 bytes).
    //  * Larger than the maximum getrandom request size (2^25 - 1 bytes).
    //
    // While theoretically non-deterministic, if this fails, chances
    // are the CSPRNG is busted.
    seems_ok := false
    for i := 0; i < 256; i = i + 1 {
        mem.zero_explicit(raw_data(buf), len(buf))
        crypto.rand_bytes(buf)

        if buf[0] != 0 && buf[len(buf) - 1] != 0 {
            seems_ok = true
            break
        }
    }

    expect(
        t,
        seems_ok,
        "Expected to randomize the head and tail of the buffer within a handful of attempts",
    )
}

TestXOF :: struct {
    sec_strength: int,
    output:       string,
    str:          string,
}

@(test)
test_sha224 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    // https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
    data_1_000_000_a := strings.repeat("a", 1_000_000)
    test_vectors := [?]TestHash {
        TestHash{"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f", ""},
        TestHash{"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7", "abc"},
        TestHash{"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
        TestHash{"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67", data_1_000_000_a},
    }
    for v, _ in test_vectors {
        computed := sha2.hash_224(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_shake :: proc(t: ^testing.T) {
    test_vectors := [?]TestXOF {
        // SHAKE128
        {
            128,
            "7f9c2ba4e88f827d616045507605853e",
            "",
        },
        {
            128,
            "f4202e3c5852f9182a0430fd8144f0a7",
            "The quick brown fox jumps over the lazy dog",
        },
        {
            128,
            "853f4538be0db9621a6cea659a06c110",
            "The quick brown fox jumps over the lazy dof",
        },

        // SHAKE256
        {
            256,
            "46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f",
            "",
        },
        {
            256,
            "2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca",
            "The quick brown fox jumps over the lazy dog",
        },
        {
            256,
            "46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401",
            "The quick brown fox jumps over the lazy dof",
        },
    }
    for v in test_vectors {
        dst := make([]byte, len(v.output)/2, context.temp_allocator)

        data := transmute([]byte)(v.str)

        ctx: shake.Context
        switch v.sec_strength {
        case 128:
            shake.init_128(&ctx)
        case 256:
            shake.init_256(&ctx)
        }

        shake.write(&ctx, data)
        shake.read(&ctx, dst)

        dst_str := string(hex.encode(dst, context.temp_allocator))

        expect(
            t,
            dst_str == v.output,
            fmt.tprintf(
                "SHAKE%d: Expected: %s for input of %s, but got %s instead",
                v.sec_strength,
                v.output,
                v.str,
                dst_str,
            ),
        )
    }
}

@(test)
test_sha256 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", ""},
        TestHash{"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc"},
        TestHash{"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha2.hash_256(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha384 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", ""},
        TestHash{"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7", "abc"},
        TestHash{"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha2.hash_384(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha512 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", ""},
        TestHash{"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", "abc"},
        TestHash{"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha2.hash_512(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha512_256 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    test_vectors := [?]TestHash {
        TestHash{"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23", "abc"},
        TestHash{"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha2.hash_512_256(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha3_224 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7", ""},
        TestHash{"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf", "abc"},
        TestHash{"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e", "abcdbcdecdefdefgefghfghighijhi"},
        TestHash{"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a", "jkijkljklmklmnlmnomnopnopq"},
        TestHash{"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b", "a"},
        TestHash{"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112", "01234567012345670123456701234567"},
        TestHash{"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha3.hash_224(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha3_256 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a", ""},
        TestHash{"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532", "abc"},
        TestHash{"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a", "abcdbcdecdefdefgefghfghighijhi"},
        TestHash{"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98", "jkijkljklmklmnlmnomnopnopq"},
        TestHash{"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b", "a"},
        TestHash{"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767", "01234567012345670123456701234567"},
        TestHash{"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha3.hash_256(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha3_384 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004", ""},
        TestHash{"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25", "abc"},
        TestHash{"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a", "abcdbcdecdefdefgefghfghighijhi"},
        TestHash{"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282", "jkijkljklmklmnlmnomnopnopq"},
        TestHash{"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9", "a"},
        TestHash{"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be", "01234567012345670123456701234567"},
        TestHash{"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha3.hash_384(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sha3_512 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26", ""},
        TestHash{"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0", "abc"},
        TestHash{"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e", "abcdbcdecdefdefgefghfghighijhi"},
        TestHash{"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891", "jkijkljklmklmnlmnomnopnopq"},
        TestHash{"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a", "a"},
        TestHash{"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1", "01234567012345670123456701234567"},
        TestHash{"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
        TestHash{"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
    }
    for v, _ in test_vectors {
        computed := sha3.hash_512(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_shake_128 :: proc(t: ^testing.T) {
    test_vectors := [?]TestHash {
        TestHash{"7f9c2ba4e88f827d616045507605853e", ""},
        TestHash{"f4202e3c5852f9182a0430fd8144f0a7", "The quick brown fox jumps over the lazy dog"},
        TestHash{"853f4538be0db9621a6cea659a06c110", "The quick brown fox jumps over the lazy dof"},
    }
    for v, _ in test_vectors {
        computed := shake.hash_128(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_shake_256 :: proc(t: ^testing.T) {
    test_vectors := [?]TestHash {
        TestHash{"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f", ""},
        TestHash{"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca", "The quick brown fox jumps over the lazy dog"},
        TestHash{"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401", "The quick brown fox jumps over the lazy dof"},
    }
    for v, _ in test_vectors {
        computed := shake.hash_256(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_keccak_224 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"f71837502ba8e10837bdd8d365adb85591895602fc552b48b7390abd", ""},
        TestHash{"c30411768506ebe1c2871b1ee2e87d38df342317300a9b97a95ec6a8", "abc"},
    }
    for v, _ in test_vectors {
        computed := keccak.hash_224(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_keccak_256 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", ""},
        TestHash{"4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45", "abc"},
    }
    for v, _ in test_vectors {
        computed := keccak.hash_256(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_keccak_384 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"2c23146a63a29acf99e73b88f8c24eaa7dc60aa771780ccc006afbfa8fe2479b2dd2b21362337441ac12b515911957ff", ""},
        TestHash{"f7df1165f033337be098e7d288ad6a2f74409d7a60b49c36642218de161b1f99f8c681e4afaf31a34db29fb763e3c28e", "abc"},
    }
    for v, _ in test_vectors {
        computed := keccak.hash_384(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_keccak_512 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
    // https://www.di-mgt.com.au/sha_testvectors.html
    test_vectors := [?]TestHash {
        TestHash{"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e", ""},
        TestHash{"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96", "abc"},
    }
    for v, _ in test_vectors {
        computed := keccak.hash_512(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_blake2b :: proc(t: ^testing.T) {
    test_vectors := [?]TestHash {
        TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""},
        TestHash{"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918", "The quick brown fox jumps over the lazy dog"},
    }
    for v, _ in test_vectors {
        computed := blake2b.hash(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_blake2s :: proc(t: ^testing.T) {
    test_vectors := [?]TestHash {
        TestHash{"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9", ""},
        TestHash{"606beeec743ccbeff6cbcdf5d5302aa855c256c29b88c8ed331ea1a6bf3c8812", "The quick brown fox jumps over the lazy dog"},
    }
    for v, _ in test_vectors {
        computed := blake2s.hash(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_sm3 :: proc(t: ^testing.T) {
    test_vectors := [?]TestHash {
        TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""},
        TestHash{"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0", "abc"},
        TestHash{"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732", "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"},
        TestHash{"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea", "The quick brown fox jumps over the lazy dog"},
        TestHash{"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098", "The quick brown fox jumps over the lazy cog"},
    }
    for v, _ in test_vectors {
        computed := sm3.hash(v.str)
        computed_str := hex_string(computed[:])
        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
    }
}

@(test)
test_siphash_2_4 :: proc(t: ^testing.T) {
    // Test vectors from
    // https://github.com/veorq/SipHash/blob/master/vectors.h
    test_vectors := [?]u64 {
        0x726fdb47dd0e0e31, 0x74f839c593dc67fd, 0x0d6c8009d9a94f5a, 0x85676696d7fb7e2d,
        0xcf2794e0277187b7, 0x18765564cd99a68d, 0xcbc9466e58fee3ce, 0xab0200f58b01d137,
        0x93f5f5799a932462, 0x9e0082df0ba9e4b0, 0x7a5dbbc594ddb9f3, 0xf4b32f46226bada7,
        0x751e8fbc860ee5fb, 0x14ea5627c0843d90, 0xf723ca908e7af2ee, 0xa129ca6149be45e5,
        0x3f2acc7f57c29bdb, 0x699ae9f52cbe4794, 0x4bc1b3f0968dd39c, 0xbb6dc91da77961bd,
        0xbed65cf21aa2ee98, 0xd0f2cbb02e3b67c7, 0x93536795e3a33e88, 0xa80c038ccd5ccec8,
        0xb8ad50c6f649af94, 0xbce192de8a85b8ea, 0x17d835b85bbb15f3, 0x2f2e6163076bcfad,
        0xde4daaaca71dc9a5, 0xa6a2506687956571, 0xad87a3535c49ef28, 0x32d892fad841c342,
        0x7127512f72f27cce, 0xa7f32346f95978e3, 0x12e0b01abb051238, 0x15e034d40fa197ae,
        0x314dffbe0815a3b4, 0x027990f029623981, 0xcadcd4e59ef40c4d, 0x9abfd8766a33735c,
        0x0e3ea96b5304a7d0, 0xad0c42d6fc585992, 0x187306c89bc215a9, 0xd4a60abcf3792b95,
        0xf935451de4f21df2, 0xa9538f0419755787, 0xdb9acddff56ca510, 0xd06c98cd5c0975eb,
        0xe612a3cb9ecba951, 0xc766e62cfcadaf96, 0xee64435a9752fe72, 0xa192d576b245165a,
        0x0a8787bf8ecb74b2, 0x81b3e73d20b49b6f, 0x7fa8220ba3b2ecea, 0x245731c13ca42499,
        0xb78dbfaf3a8d83bd, 0xea1ad565322a1a0b, 0x60e61c23a3795013, 0x6606d7e446282b93,
        0x6ca4ecb15c5f91e1, 0x9f626da15c9625f3, 0xe51b38608ef25f57, 0x958a324ceb064572,
    }

    key: [16]byte
    for i in 0..<16 {
        key[i] = byte(i)
    }

    for i in 0..<len(test_vectors) {
        data := make([]byte, i)
        for j in 0..<i {
            data[j] = byte(j)
        }

        vector := test_vectors[i]
        computed := siphash.sum_2_4(data[:], key[:])

        expect(t, computed == vector, fmt.tprintf("Expected: 0x%x for input of %v, but got 0x%x instead", vector, data, computed))
    }
}

616
tests/core/crypto/test_core_crypto_hash.odin
Normal file
@@ -0,0 +1,616 @@
package test_core_crypto

import "core:bytes"
import "core:encoding/hex"
import "core:fmt"
import "core:strings"
import "core:testing"

import "core:crypto/hash"

TestHash :: struct {
    algo: hash.Algorithm,
    hash: string,
    str:  string,
}

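Pairing each vector with an Algorithm value lets one loop drive every digest through the new unified front-end. None of that front-end's procedures appear in this diff, so the call below is only an assumption modelled on the per-package hash_*_to_buffer convention described in the README; hash_string_to_buffer and the 32-byte SHA-256 digest size are illustrative, not confirmed here.

package hash_example

import "core:crypto/hash"
import "core:fmt"

main :: proc() {
    // Hypothetical one-shot call: algorithm selector first, then the
    // input, then a destination at least as large as the digest.
    digest: [32]byte // SHA-256 digest size
    hash.hash_string_to_buffer(hash.Algorithm.SHA256, "abc", digest[:])
    fmt.println(digest[:])
}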
@(test)
test_hash :: proc(t: ^testing.T) {
    log(t, "Testing Hashes")

    // TODO:
    //  - Stick the test vectors in a JSON file or something.
    data_1_000_000_a := strings.repeat("a", 1_000_000)

    digest: [64]byte // 512-bits is enough for every digest for now.
    test_vectors := [?]TestHash {
        // BLAKE2b
        {
            hash.Algorithm.BLAKE2B,
            "786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce",
            "",
        },
        {
            hash.Algorithm.BLAKE2B,
            "a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918",
            "The quick brown fox jumps over the lazy dog",
        },

        // BLAKE2s
        {
            hash.Algorithm.BLAKE2S,
            "69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9",
            "",
        },
        {
            hash.Algorithm.BLAKE2S,
            "606beeec743ccbeff6cbcdf5d5302aa855c256c29b88c8ed331ea1a6bf3c8812",
            "The quick brown fox jumps over the lazy dog",
        },

        // SHA-224
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        // - https://www.di-mgt.com.au/sha_testvectors.html
        // - https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
        {
            hash.Algorithm.SHA224,
            "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f",
            "",
        },
        {
            hash.Algorithm.SHA224,
            "23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7",
            "abc",
        },
        {
            hash.Algorithm.SHA224,
            "75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525",
            "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
        },
        {
            hash.Algorithm.SHA224,
            "c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3",
            "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
        },
        {
            hash.Algorithm.SHA224,
            "20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67",
            data_1_000_000_a,
        },

        // SHA-256
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        // - https://www.di-mgt.com.au/sha_testvectors.html
        {
            hash.Algorithm.SHA256,
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            "",
        },
        {
            hash.Algorithm.SHA256,
            "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
            "abc",
        },
        {
            hash.Algorithm.SHA256,
            "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1",
            "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
        },
        {
            hash.Algorithm.SHA256,
            "cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1",
            "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
        },

        // SHA-384
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        // - https://www.di-mgt.com.au/sha_testvectors.html
        {
            hash.Algorithm.SHA384,
            "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b",
            "",
        },
        {
            hash.Algorithm.SHA384,
            "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7",
            "abc",
        },
        {
            hash.Algorithm.SHA384,
            "3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b",
            "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
        },
        {
            hash.Algorithm.SHA384,
            "09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039",
            "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
        },

        // SHA-512
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        // - https://www.di-mgt.com.au/sha_testvectors.html
        {
            hash.Algorithm.SHA512,
            "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
            "",
        },
        {
            hash.Algorithm.SHA512,
            "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f",
            "abc",
        },
        {
            hash.Algorithm.SHA512,
            "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445",
            "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
        },
        {
            hash.Algorithm.SHA512,
            "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909",
            "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
        },
        // SHA-512/256
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        {
            hash.Algorithm.SHA512_256,
            "53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23",
            "abc",
        },
        {
            hash.Algorithm.SHA512_256,
            "3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a",
            "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
        },

        // SHA3-224
        //
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        // - https://www.di-mgt.com.au/sha_testvectors.html
        {
            hash.Algorithm.SHA3_224,
            "6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7",
            "",
        },
        {
            hash.Algorithm.SHA3_224,
            "e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf",
            "abc",
        },
        {
            hash.Algorithm.SHA3_224,
            "10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e",
            "abcdbcdecdefdefgefghfghighijhi",
        },
        {
            hash.Algorithm.SHA3_224,
            "fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a",
            "jkijkljklmklmnlmnomnopnopq",
        },
        {
            hash.Algorithm.SHA3_224,
            "9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b",
            "a",
        },
        {
            hash.Algorithm.SHA3_224,
            "6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112",
            "01234567012345670123456701234567",
        },
        {
            hash.Algorithm.SHA3_224,
            "8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33",
            "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
        },
        {
            hash.Algorithm.SHA3_224,
            "543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc",
            "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
        },

        // SHA3-256
        // - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
        // - https://www.di-mgt.com.au/sha_testvectors.html
        {
            hash.Algorithm.SHA3_256,
            "a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a",
            "",
        },
        {
            hash.Algorithm.SHA3_256,
            "3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532",
            "abc",
},
|
||||
{
|
||||
hash.Algorithm.SHA3_256,
|
||||
"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a",
|
||||
"abcdbcdecdefdefgefghfghighijhi",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_256,
|
||||
"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98",
|
||||
"jkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_256,
|
||||
"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b",
|
||||
"a",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_256,
|
||||
"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767",
|
||||
"01234567012345670123456701234567",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_256,
|
||||
"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376",
|
||||
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_256,
|
||||
"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18",
|
||||
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
|
||||
},
|
||||
|
||||
// SHA3-384
|
||||
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// - https://www.di-mgt.com.au/sha_testvectors.html
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25",
|
||||
"abc",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a",
|
||||
"abcdbcdecdefdefgefghfghighijhi",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282",
|
||||
"jkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9",
|
||||
"a",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be",
|
||||
"01234567012345670123456701234567",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22",
|
||||
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_384,
|
||||
"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7",
|
||||
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
|
||||
},
|
||||
|
||||
// SHA3-512
|
||||
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// https://www.di-mgt.com.au/sha_testvectors.html
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0",
|
||||
"abc",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e",
|
||||
"abcdbcdecdefdefgefghfghighijhi",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891",
|
||||
"jkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a",
|
||||
"a",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1",
|
||||
"01234567012345670123456701234567",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e",
|
||||
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SHA3_512,
|
||||
"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185",
|
||||
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
|
||||
},
|
||||
|
||||
// SM3
|
||||
{
|
||||
hash.Algorithm.SM3,
|
||||
"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SM3,
|
||||
"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0",
|
||||
"abc",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SM3,
|
||||
"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732",
|
||||
"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SM3,
|
||||
"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea",
|
||||
"The quick brown fox jumps over the lazy dog",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.SM3,
|
||||
"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098",
|
||||
"The quick brown fox jumps over the lazy cog",
|
||||
},
|
||||
|
||||
// Keccak-224 (Legacy)
|
||||
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// - https://www.di-mgt.com.au/sha_testvectors.html
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_224,
|
||||
"f71837502ba8e10837bdd8d365adb85591895602fc552b48b7390abd",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_224,
|
||||
"c30411768506ebe1c2871b1ee2e87d38df342317300a9b97a95ec6a8",
|
||||
"abc",
|
||||
},
|
||||
|
||||
// Keccak-256 (Legacy)
|
||||
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// - https://www.di-mgt.com.au/sha_testvectors.html
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_256,
|
||||
"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_256,
|
||||
"4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45",
|
||||
"abc",
|
||||
},
|
||||
|
||||
// Keccak-384 (Legacy)
|
||||
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// - https://www.di-mgt.com.au/sha_testvectors.html
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_384,
|
||||
"2c23146a63a29acf99e73b88f8c24eaa7dc60aa771780ccc006afbfa8fe2479b2dd2b21362337441ac12b515911957ff",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_384,
|
||||
"f7df1165f033337be098e7d288ad6a2f74409d7a60b49c36642218de161b1f99f8c681e4afaf31a34db29fb763e3c28e",
|
||||
"abc",
|
||||
},
|
||||
|
||||
// Keccak-512 (Legacy)
|
||||
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// - https://www.di-mgt.com.au/sha_testvectors.html
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_512,
|
||||
"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e",
|
||||
"",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Legacy_KECCAK_512,
|
||||
"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96",
|
||||
"abc",
|
||||
},
|
||||
|
||||
// MD5 (Insecure)
|
||||
// - https://datatracker.ietf.org/doc/html/rfc1321
|
||||
TestHash{hash.Algorithm.Insecure_MD5, "d41d8cd98f00b204e9800998ecf8427e", ""},
|
||||
TestHash{hash.Algorithm.Insecure_MD5, "0cc175b9c0f1b6a831c399e269772661", "a"},
|
||||
TestHash{hash.Algorithm.Insecure_MD5, "900150983cd24fb0d6963f7d28e17f72", "abc"},
|
||||
{
|
||||
hash.Algorithm.Insecure_MD5,
|
||||
"f96b697d7cb7938d525a2f31aaf161d0",
|
||||
"message digest",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Insecure_MD5,
|
||||
"c3fcd3d76192e4007dfb496cca67e13b",
|
||||
"abcdefghijklmnopqrstuvwxyz",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Insecure_MD5,
|
||||
"d174ab98d277d9f5a5611c2c9f419d9f",
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Insecure_MD5,
|
||||
"57edf4a22be3c955ac49da2e2107b67a",
|
||||
"12345678901234567890123456789012345678901234567890123456789012345678901234567890",
|
||||
},
|
||||
|
||||
// SHA-1 (Insecure)
|
||||
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
|
||||
// - https://www.di-mgt.com.au/sha_testvectors.html
|
||||
TestHash{hash.Algorithm.Insecure_SHA1, "da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
|
||||
TestHash{hash.Algorithm.Insecure_SHA1, "a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
|
||||
{
|
||||
hash.Algorithm.Insecure_SHA1,
|
||||
"f9537c23893d2014f365adf8ffe33b8eb0297ed1",
|
||||
"abcdbcdecdefdefgefghfghighijhi",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Insecure_SHA1,
|
||||
"346fb528a24b48f563cb061470bcfd23740427ad",
|
||||
"jkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
TestHash{hash.Algorithm.Insecure_SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
|
||||
{
|
||||
hash.Algorithm.Insecure_SHA1,
|
||||
"c729c8996ee0a6f74f4f3248e8957edf704fb624",
|
||||
"01234567012345670123456701234567",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Insecure_SHA1,
|
||||
"84983e441c3bd26ebaae4aa1f95129e5e54670f1",
|
||||
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
|
||||
},
|
||||
{
|
||||
hash.Algorithm.Insecure_SHA1,
|
||||
"a49b2446a02c645bf419f995b67091253a04a259",
|
||||
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
|
||||
},
|
||||
}
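
	// Each entry above is one TestHash record. Judging from how the loop
	// below consumes it, the definition (which lives earlier in this file)
	// is presumably:
	//
	//	TestHash :: struct {
	//		algo: hash.Algorithm, // algorithm to exercise
	//		hash: string,         // expected digest, as lowercase hex
	//		str:  string,         // input message
	//	}
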
	for v, _ in test_vectors {
		algo_name := hash.ALGORITHM_NAMES[v.algo]
		dst := digest[:hash.DIGEST_SIZES[v.algo]]

		data := transmute([]byte)(v.str)

		ctx: hash.Context
		hash.init(&ctx, v.algo)
		hash.update(&ctx, data)
		hash.final(&ctx, dst)

		dst_str := string(hex.encode(dst, context.temp_allocator))

		expect(
			t,
			dst_str == v.hash,
			fmt.tprintf(
				"%s/incremental: Expected: %s for input of %s, but got %s instead",
				algo_name,
				v.hash,
				v.str,
				dst_str,
			),
		)
	}

	for algo in hash.Algorithm {
		// Skip the sentinel value.
		if algo == .Invalid {
			continue
		}

		algo_name := hash.ALGORITHM_NAMES[algo]

		// Ensure that the MAX_(DIGEST_SIZE, BLOCK_SIZE) constants are
		// still correct.
		digest_sz := hash.DIGEST_SIZES[algo]
		block_sz := hash.BLOCK_SIZES[algo]
		expect(
			t,
			digest_sz <= hash.MAX_DIGEST_SIZE,
			fmt.tprintf(
				"%s: Digest size %d exceeds max %d",
				algo_name,
				digest_sz,
				hash.MAX_DIGEST_SIZE,
			),
		)
		expect(
			t,
			block_sz <= hash.MAX_BLOCK_SIZE,
			fmt.tprintf(
				"%s: Block size %d exceeds max %d",
				algo_name,
				block_sz,
				hash.MAX_BLOCK_SIZE,
			),
		)

		// Exercise most of the happy-path for the high level interface.
		rd: bytes.Reader
		bytes.reader_init(&rd, transmute([]byte)(data_1_000_000_a))
		st := bytes.reader_to_stream(&rd)

		digest_a, _ := hash.hash_stream(algo, st, context.temp_allocator)
		digest_b := hash.hash_string(algo, data_1_000_000_a, context.temp_allocator)

		a_str := string(hex.encode(digest_a, context.temp_allocator))
		b_str := string(hex.encode(digest_b, context.temp_allocator))

		expect(
			t,
			a_str == b_str,
			fmt.tprintf(
				"%s/cmp: Expected: %s (hash_stream) == %s (hash_bytes)",
				algo_name,
				a_str,
				b_str,
			),
		)

		// Exercise the rolling digest functionality, which also covers
		// each implementation's clone routine.
		ctx, ctx_clone: hash.Context
		hash.init(&ctx, algo)

		api_algo := hash.algorithm(&ctx)
		api_digest_size := hash.digest_size(&ctx)
		expect(
			t,
			algo == api_algo,
			fmt.tprintf(
				"%s/algorithm: Expected: %v but got %v instead",
				algo_name,
				algo,
				api_algo,
			),
		)
		expect(
			t,
			hash.DIGEST_SIZES[algo] == api_digest_size,
			fmt.tprintf(
				"%s/digest_size: Expected: %d but got %d instead",
				algo_name,
				hash.DIGEST_SIZES[algo],
				api_digest_size,
			),
		)

		hash.update(&ctx, digest_a)
		hash.clone(&ctx_clone, &ctx)
		hash.final(&ctx, digest_a, true)
		hash.final(&ctx, digest_b)

		digest_c := make([]byte, hash.digest_size(&ctx_clone), context.temp_allocator)
		hash.final(&ctx_clone, digest_c)

		a_str = string(hex.encode(digest_a, context.temp_allocator))
		b_str = string(hex.encode(digest_b, context.temp_allocator))
		c_str := string(hex.encode(digest_c, context.temp_allocator))

		expect(
			t,
			a_str == b_str && b_str == c_str,
			fmt.tprintf(
				"%s/rolling: Expected: %s (first) == %s (second) == %s (third)",
				algo_name,
				a_str,
				b_str,
				c_str,
			),
		)
	}
}
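
A minimal sketch of the rolling-digest pattern the loop above exercises, assuming (as the back-to-back final calls imply) that passing true finalizes a copy of the state and leaves the context usable:

	msg1, msg2 := "abc", "def"

	ctx: hash.Context
	hash.init(&ctx, .SHA256)
	hash.update(&ctx, transmute([]byte)(msg1))

	digest: [hash.MAX_DIGEST_SIZE]byte
	dst := digest[:hash.digest_size(&ctx)]
	hash.final(&ctx, dst, true) // digest of "abc"; ctx remains usable
	hash.update(&ctx, transmute([]byte)(msg2))
	hash.final(&ctx, dst) // digest of "abcdef"; ctx is now spent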
241
tests/core/crypto/test_core_crypto_mac.odin
Normal file
@@ -0,0 +1,241 @@
package test_core_crypto

import "core:encoding/hex"
import "core:fmt"
import "core:mem"
import "core:testing"

import "core:crypto/hash"
import "core:crypto/hmac"
import "core:crypto/poly1305"
import "core:crypto/siphash"

@(test)
test_mac :: proc(t: ^testing.T) {
	log(t, "Testing MACs")

	test_hmac(t)
	test_poly1305(t)
	test_siphash_2_4(t)
}

@(test)
test_hmac :: proc(t: ^testing.T) {
	// Test cases pulled out of RFC 6234. Note that HMAC is a generic
	// construct, so as long as the underlying hash is correct and all
	// the code paths are covered, the implementation is "fine"; this
	// therefore only exercises SHA-256.

	test_keys := [?]string {
		"\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b",
		"Jefe",
		"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa",
		"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19",
		"\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c",
		"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa",
		"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa",
	}

	test_msgs := [?]string {
		"Hi There",
		"what do ya want for nothing?",
		"\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd",
		"\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd",
		"Test With Truncation",
		"Test Using Larger Than Block-Size Key - Hash Key First",
		"This is a test using a larger than block-size key and a larger than block-size data. The key needs to be hashed before being used by the HMAC algorithm.",
	}

	tags_sha256 := [?]string {
		"b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7",
		"5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843",
		"773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe",
		"82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b",
		"a3b6167473100ee06e0c796c2955552b",
		"60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54",
		"9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2",
	}
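	// Note: the fifth expected tag is only 128 bits; that row is the RFC's
	// "Test With Truncation" case.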

	algo := hash.Algorithm.SHA256

	tag: [64]byte // 512 bits is enough for every digest for now.
	for k, i in test_keys {
		algo_name := hash.ALGORITHM_NAMES[algo]
		dst := tag[:hash.DIGEST_SIZES[algo]]

		key := transmute([]byte)(k)
		msg := transmute([]byte)(test_msgs[i])

		ctx: hmac.Context
		hmac.init(&ctx, algo, key)
		hmac.update(&ctx, msg)
		hmac.final(&ctx, dst)

		// For simplicity crypto/hmac does not support truncation, but
		// test it by truncating the tag down as appropriate based on
		// the expected value.
		expected_str := tags_sha256[i]
		tag_len := len(expected_str) / 2
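		// (the expected strings are hex, so two characters per byte)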

		key_str := string(hex.encode(key, context.temp_allocator))
		msg_str := string(hex.encode(msg, context.temp_allocator))
		dst_str := string(hex.encode(dst[:tag_len], context.temp_allocator))

		expect(
			t,
			dst_str == expected_str,
			fmt.tprintf(
				"%s/incremental: Expected: %s for input of %s - %s, but got %s instead",
				algo_name,
				tags_sha256[i],
				key_str,
				msg_str,
				dst_str,
			),
		)

		hmac.sum(algo, dst, msg, key)
		oneshot_str := string(hex.encode(dst[:tag_len], context.temp_allocator))

		expect(
			t,
			oneshot_str == expected_str,
			fmt.tprintf(
				"%s/oneshot: Expected: %s for input of %s - %s, but got %s instead",
				algo_name,
				tags_sha256[i],
				key_str,
				msg_str,
				oneshot_str,
			),
		)
	}
}

@(test)
test_poly1305 :: proc(t: ^testing.T) {
	log(t, "Testing poly1305")

	// Test cases taken from poly1305-donna.
	key := [poly1305.KEY_SIZE]byte {
		0xee, 0xa6, 0xa7, 0x25, 0x1c, 0x1e, 0x72, 0x91,
		0x6d, 0x11, 0xc2, 0xcb, 0x21, 0x4d, 0x3c, 0x25,
		0x25, 0x39, 0x12, 0x1d, 0x8e, 0x23, 0x4e, 0x65,
		0x2d, 0x65, 0x1f, 0xa4, 0xc8, 0xcf, 0xf8, 0x80,
	}

	msg := [131]byte {
		0x8e, 0x99, 0x3b, 0x9f, 0x48, 0x68, 0x12, 0x73,
		0xc2, 0x96, 0x50, 0xba, 0x32, 0xfc, 0x76, 0xce,
		0x48, 0x33, 0x2e, 0xa7, 0x16, 0x4d, 0x96, 0xa4,
		0x47, 0x6f, 0xb8, 0xc5, 0x31, 0xa1, 0x18, 0x6a,
		0xc0, 0xdf, 0xc1, 0x7c, 0x98, 0xdc, 0xe8, 0x7b,
		0x4d, 0xa7, 0xf0, 0x11, 0xec, 0x48, 0xc9, 0x72,
		0x71, 0xd2, 0xc2, 0x0f, 0x9b, 0x92, 0x8f, 0xe2,
		0x27, 0x0d, 0x6f, 0xb8, 0x63, 0xd5, 0x17, 0x38,
		0xb4, 0x8e, 0xee, 0xe3, 0x14, 0xa7, 0xcc, 0x8a,
		0xb9, 0x32, 0x16, 0x45, 0x48, 0xe5, 0x26, 0xae,
		0x90, 0x22, 0x43, 0x68, 0x51, 0x7a, 0xcf, 0xea,
		0xbd, 0x6b, 0xb3, 0x73, 0x2b, 0xc0, 0xe9, 0xda,
		0x99, 0x83, 0x2b, 0x61, 0xca, 0x01, 0xb6, 0xde,
		0x56, 0x24, 0x4a, 0x9e, 0x88, 0xd5, 0xf9, 0xb3,
		0x79, 0x73, 0xf6, 0x22, 0xa4, 0x3d, 0x14, 0xa6,
		0x59, 0x9b, 0x1f, 0x65, 0x4c, 0xb4, 0x5a, 0x74,
		0xe3, 0x55, 0xa5,
	}

	tag := [poly1305.TAG_SIZE]byte {
		0xf3, 0xff, 0xc7, 0x70, 0x3f, 0x94, 0x00, 0xe5,
		0x2a, 0x7d, 0xfb, 0x4b, 0x3d, 0x33, 0x05, 0xd9,
	}
	tag_str := string(hex.encode(tag[:], context.temp_allocator))

	// Verify - oneshot + compare
	ok := poly1305.verify(tag[:], msg[:], key[:])
	expect(t, ok, "oneshot verify call failed")

	// Sum - oneshot
	derived_tag: [poly1305.TAG_SIZE]byte
	poly1305.sum(derived_tag[:], msg[:], key[:])
	derived_tag_str := string(hex.encode(derived_tag[:], context.temp_allocator))
	expect(
		t,
		derived_tag_str == tag_str,
		fmt.tprintf("Expected %s for sum(msg, key), but got %s instead", tag_str, derived_tag_str),
	)

	// Incremental
	mem.zero(&derived_tag, size_of(derived_tag))
	ctx: poly1305.Context = ---
	poly1305.init(&ctx, key[:])
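	// The read lengths below sum to 131 bytes (the whole message) and
	// deliberately exercise partial-block handling in update.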
	read_lengths := [11]int{32, 64, 16, 8, 4, 2, 1, 1, 1, 1, 1}
	off := 0
	for read_length in read_lengths {
		to_read := msg[off:off + read_length]
		poly1305.update(&ctx, to_read)
		off = off + read_length
	}
	poly1305.final(&ctx, derived_tag[:])
	derived_tag_str = string(hex.encode(derived_tag[:], context.temp_allocator))
	expect(
		t,
		derived_tag_str == tag_str,
		fmt.tprintf(
			"Expected %s for init/update/final - incremental, but got %s instead",
			tag_str,
			derived_tag_str,
		),
	)
}

@(test)
test_siphash_2_4 :: proc(t: ^testing.T) {
	log(t, "Testing SipHash-2-4")

	// Test vectors from
	// https://github.com/veorq/SipHash/blob/master/vectors.h
	test_vectors := [?]u64 {
		0x726fdb47dd0e0e31, 0x74f839c593dc67fd, 0x0d6c8009d9a94f5a, 0x85676696d7fb7e2d,
		0xcf2794e0277187b7, 0x18765564cd99a68d, 0xcbc9466e58fee3ce, 0xab0200f58b01d137,
		0x93f5f5799a932462, 0x9e0082df0ba9e4b0, 0x7a5dbbc594ddb9f3, 0xf4b32f46226bada7,
		0x751e8fbc860ee5fb, 0x14ea5627c0843d90, 0xf723ca908e7af2ee, 0xa129ca6149be45e5,
		0x3f2acc7f57c29bdb, 0x699ae9f52cbe4794, 0x4bc1b3f0968dd39c, 0xbb6dc91da77961bd,
		0xbed65cf21aa2ee98, 0xd0f2cbb02e3b67c7, 0x93536795e3a33e88, 0xa80c038ccd5ccec8,
		0xb8ad50c6f649af94, 0xbce192de8a85b8ea, 0x17d835b85bbb15f3, 0x2f2e6163076bcfad,
		0xde4daaaca71dc9a5, 0xa6a2506687956571, 0xad87a3535c49ef28, 0x32d892fad841c342,
		0x7127512f72f27cce, 0xa7f32346f95978e3, 0x12e0b01abb051238, 0x15e034d40fa197ae,
		0x314dffbe0815a3b4, 0x027990f029623981, 0xcadcd4e59ef40c4d, 0x9abfd8766a33735c,
		0x0e3ea96b5304a7d0, 0xad0c42d6fc585992, 0x187306c89bc215a9, 0xd4a60abcf3792b95,
		0xf935451de4f21df2, 0xa9538f0419755787, 0xdb9acddff56ca510, 0xd06c98cd5c0975eb,
		0xe612a3cb9ecba951, 0xc766e62cfcadaf96, 0xee64435a9752fe72, 0xa192d576b245165a,
		0x0a8787bf8ecb74b2, 0x81b3e73d20b49b6f, 0x7fa8220ba3b2ecea, 0x245731c13ca42499,
		0xb78dbfaf3a8d83bd, 0xea1ad565322a1a0b, 0x60e61c23a3795013, 0x6606d7e446282b93,
		0x6ca4ecb15c5f91e1, 0x9f626da15c9625f3, 0xe51b38608ef25f57, 0x958a324ceb064572,
	}

	key: [16]byte
	for i in 0 ..< 16 {
		key[i] = byte(i)
	}
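
	// Vector i is the SipHash-2-4 tag of the message 0x00, 0x01, ..., i-1
	// under that key.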
	for i in 0 ..< len(test_vectors) {
		data := make([]byte, i)
		for j in 0 ..< i {
			data[j] = byte(j)
		}

		vector := test_vectors[i]
		computed := siphash.sum_2_4(data[:], key[:])

		expect(
			t,
			computed == vector,
			fmt.tprintf(
				"Expected: 0x%x for input of %v, but got 0x%x instead",
				vector,
				data,
				computed,
			),
		)
	}
}
@@ -1,541 +0,0 @@
package test_core_crypto

import "core:testing"
import "core:fmt"
import "core:mem"
import "core:time"
import "core:crypto"

import "core:crypto/chacha20"
import "core:crypto/chacha20poly1305"
import "core:crypto/poly1305"
import "core:crypto/x25519"

_digit_value :: proc(r: rune) -> int {
	ri := int(r)
	v: int = 16
	switch r {
	case '0'..='9': v = ri-'0'
	case 'a'..='z': v = ri-'a'+10
	case 'A'..='Z': v = ri-'A'+10
	}
	return v
}

_decode_hex32 :: proc(s: string) -> [32]byte {
	b: [32]byte
	for i := 0; i < len(s); i = i + 2 {
		hi := _digit_value(rune(s[i]))
		lo := _digit_value(rune(s[i+1]))
		b[i/2] = byte(hi << 4 | lo)
	}
	return b
}

_PLAINTEXT_SUNSCREEN_STR := "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it."

@(test)
test_chacha20 :: proc(t: ^testing.T) {
	log(t, "Testing (X)ChaCha20")

	// Test cases taken from RFC 8439, and draft-irtf-cfrg-xchacha-03
	plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)

	key := [chacha20.KEY_SIZE]byte{
		0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
		0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
		0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
	}

	nonce := [chacha20.NONCE_SIZE]byte{
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a,
		0x00, 0x00, 0x00, 0x00,
	}

	ciphertext := [114]byte{
		0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
		0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
		0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
		0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b,
		0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab,
		0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57,
		0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab,
		0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8,
		0x07, 0xca, 0x0d, 0xbf, 0x50, 0x0d, 0x6a, 0x61,
		0x56, 0xa3, 0x8e, 0x08, 0x8a, 0x22, 0xb6, 0x5e,
		0x52, 0xbc, 0x51, 0x4d, 0x16, 0xcc, 0xf8, 0x06,
		0x81, 0x8c, 0xe9, 0x1a, 0xb7, 0x79, 0x37, 0x36,
		0x5a, 0xf9, 0x0b, 0xbf, 0x74, 0xa3, 0x5b, 0xe6,
		0xb4, 0x0b, 0x8e, 0xed, 0xf2, 0x78, 0x5e, 0x42,
		0x87, 0x4d,
	}
	ciphertext_str := hex_string(ciphertext[:])

	derived_ciphertext: [114]byte
	ctx: chacha20.Context = ---
	chacha20.init(&ctx, key[:], nonce[:])
	chacha20.seek(&ctx, 1) // The test vectors start the counter at 1.
	chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])

	derived_ciphertext_str := hex_string(derived_ciphertext[:])
	expect(t, derived_ciphertext_str == ciphertext_str, fmt.tprintf("Expected %s for xor_bytes(plaintext_str), but got %s instead", ciphertext_str, derived_ciphertext_str))

	xkey := [chacha20.KEY_SIZE]byte{
		0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
		0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
		0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
		0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
	}

	xnonce := [chacha20.XNONCE_SIZE]byte{
		0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
		0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
		0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
	}

	xciphertext := [114]byte{
		0xbd, 0x6d, 0x17, 0x9d, 0x3e, 0x83, 0xd4, 0x3b,
		0x95, 0x76, 0x57, 0x94, 0x93, 0xc0, 0xe9, 0x39,
		0x57, 0x2a, 0x17, 0x00, 0x25, 0x2b, 0xfa, 0xcc,
		0xbe, 0xd2, 0x90, 0x2c, 0x21, 0x39, 0x6c, 0xbb,
		0x73, 0x1c, 0x7f, 0x1b, 0x0b, 0x4a, 0xa6, 0x44,
		0x0b, 0xf3, 0xa8, 0x2f, 0x4e, 0xda, 0x7e, 0x39,
		0xae, 0x64, 0xc6, 0x70, 0x8c, 0x54, 0xc2, 0x16,
		0xcb, 0x96, 0xb7, 0x2e, 0x12, 0x13, 0xb4, 0x52,
		0x2f, 0x8c, 0x9b, 0xa4, 0x0d, 0xb5, 0xd9, 0x45,
		0xb1, 0x1b, 0x69, 0xb9, 0x82, 0xc1, 0xbb, 0x9e,
		0x3f, 0x3f, 0xac, 0x2b, 0xc3, 0x69, 0x48, 0x8f,
		0x76, 0xb2, 0x38, 0x35, 0x65, 0xd3, 0xff, 0xf9,
		0x21, 0xf9, 0x66, 0x4c, 0x97, 0x63, 0x7d, 0xa9,
		0x76, 0x88, 0x12, 0xf6, 0x15, 0xc6, 0x8b, 0x13,
		0xb5, 0x2e,
	}
	xciphertext_str := hex_string(xciphertext[:])

	chacha20.init(&ctx, xkey[:], xnonce[:])
	chacha20.seek(&ctx, 1)
	chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])

	derived_ciphertext_str = hex_string(derived_ciphertext[:])
	expect(t, derived_ciphertext_str == xciphertext_str, fmt.tprintf("Expected %s for xor_bytes(plaintext_str), but got %s instead", xciphertext_str, derived_ciphertext_str))
}

@(test)
test_poly1305 :: proc(t: ^testing.T) {
	log(t, "Testing poly1305")

	// Test cases taken from poly1305-donna.
	key := [poly1305.KEY_SIZE]byte{
		0xee,0xa6,0xa7,0x25,0x1c,0x1e,0x72,0x91,
		0x6d,0x11,0xc2,0xcb,0x21,0x4d,0x3c,0x25,
		0x25,0x39,0x12,0x1d,0x8e,0x23,0x4e,0x65,
		0x2d,0x65,0x1f,0xa4,0xc8,0xcf,0xf8,0x80,
	}

	msg := [131]byte{
		0x8e,0x99,0x3b,0x9f,0x48,0x68,0x12,0x73,
		0xc2,0x96,0x50,0xba,0x32,0xfc,0x76,0xce,
		0x48,0x33,0x2e,0xa7,0x16,0x4d,0x96,0xa4,
		0x47,0x6f,0xb8,0xc5,0x31,0xa1,0x18,0x6a,
		0xc0,0xdf,0xc1,0x7c,0x98,0xdc,0xe8,0x7b,
		0x4d,0xa7,0xf0,0x11,0xec,0x48,0xc9,0x72,
		0x71,0xd2,0xc2,0x0f,0x9b,0x92,0x8f,0xe2,
		0x27,0x0d,0x6f,0xb8,0x63,0xd5,0x17,0x38,
		0xb4,0x8e,0xee,0xe3,0x14,0xa7,0xcc,0x8a,
		0xb9,0x32,0x16,0x45,0x48,0xe5,0x26,0xae,
		0x90,0x22,0x43,0x68,0x51,0x7a,0xcf,0xea,
		0xbd,0x6b,0xb3,0x73,0x2b,0xc0,0xe9,0xda,
		0x99,0x83,0x2b,0x61,0xca,0x01,0xb6,0xde,
		0x56,0x24,0x4a,0x9e,0x88,0xd5,0xf9,0xb3,
		0x79,0x73,0xf6,0x22,0xa4,0x3d,0x14,0xa6,
		0x59,0x9b,0x1f,0x65,0x4c,0xb4,0x5a,0x74,
		0xe3,0x55,0xa5,
	}

	tag := [poly1305.TAG_SIZE]byte{
		0xf3,0xff,0xc7,0x70,0x3f,0x94,0x00,0xe5,
		0x2a,0x7d,0xfb,0x4b,0x3d,0x33,0x05,0xd9,
	}
	tag_str := hex_string(tag[:])

	// Verify - oneshot + compare
	ok := poly1305.verify(tag[:], msg[:], key[:])
	expect(t, ok, "oneshot verify call failed")

	// Sum - oneshot
	derived_tag: [poly1305.TAG_SIZE]byte
	poly1305.sum(derived_tag[:], msg[:], key[:])
	derived_tag_str := hex_string(derived_tag[:])
	expect(t, derived_tag_str == tag_str, fmt.tprintf("Expected %s for sum(msg, key), but got %s instead", tag_str, derived_tag_str))

	// Incremental
	mem.zero(&derived_tag, size_of(derived_tag))
	ctx: poly1305.Context = ---
	poly1305.init(&ctx, key[:])
	read_lengths := [11]int{32, 64, 16, 8, 4, 2, 1, 1, 1, 1, 1}
	off := 0
	for read_length in read_lengths {
		to_read := msg[off:off+read_length]
		poly1305.update(&ctx, to_read)
		off = off + read_length
	}
	poly1305.final(&ctx, derived_tag[:])
	derived_tag_str = hex_string(derived_tag[:])
	expect(t, derived_tag_str == tag_str, fmt.tprintf("Expected %s for init/update/final - incremental, but got %s instead", tag_str, derived_tag_str))
}

@(test)
test_chacha20poly1305 :: proc(t: ^testing.T) {
log(t, "Testing chacha20poly1205")

	plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)

	aad := [12]byte{
		0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3,
		0xc4, 0xc5, 0xc6, 0xc7,
	}

	key := [chacha20poly1305.KEY_SIZE]byte{
		0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
		0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
		0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
		0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
	}

	nonce := [chacha20poly1305.NONCE_SIZE]byte{
		0x07, 0x00, 0x00, 0x00,
		0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
	}

	ciphertext := [114]byte{
		0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
		0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
		0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
		0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
		0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
		0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
		0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
		0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
		0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
		0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
		0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
		0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
		0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
		0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
		0x61, 0x16,
	}
	ciphertext_str := hex_string(ciphertext[:])

	tag := [chacha20poly1305.TAG_SIZE]byte{
		0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
		0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91,
	}
	tag_str := hex_string(tag[:])

	derived_tag: [chacha20poly1305.TAG_SIZE]byte
	derived_ciphertext: [114]byte

	chacha20poly1305.encrypt(derived_ciphertext[:], derived_tag[:], key[:], nonce[:], aad[:], plaintext)

	derived_ciphertext_str := hex_string(derived_ciphertext[:])
	expect(t, derived_ciphertext_str == ciphertext_str, fmt.tprintf("Expected ciphertext %s for encrypt(aad, plaintext), but got %s instead", ciphertext_str, derived_ciphertext_str))

	derived_tag_str := hex_string(derived_tag[:])
	expect(t, derived_tag_str == tag_str, fmt.tprintf("Expected tag %s for encrypt(aad, plaintext), but got %s instead", tag_str, derived_tag_str))

	derived_plaintext: [114]byte
	ok := chacha20poly1305.decrypt(derived_plaintext[:], tag[:], key[:], nonce[:], aad[:], ciphertext[:])
	derived_plaintext_str := string(derived_plaintext[:])
	expect(t, ok, "Expected true for decrypt(tag, aad, ciphertext)")
	expect(t, derived_plaintext_str == _PLAINTEXT_SUNSCREEN_STR, fmt.tprintf("Expected plaintext %s for decrypt(tag, aad, ciphertext), but got %s instead", _PLAINTEXT_SUNSCREEN_STR, derived_plaintext_str))

	derived_ciphertext[0] ~= 0xa5
	ok = chacha20poly1305.decrypt(derived_plaintext[:], tag[:], key[:], nonce[:], aad[:], derived_ciphertext[:])
	expect(t, !ok, "Expected false for decrypt(tag, aad, corrupted_ciphertext)")

	aad[0] ~= 0xa5
	ok = chacha20poly1305.decrypt(derived_plaintext[:], tag[:], key[:], nonce[:], aad[:], ciphertext[:])
	expect(t, !ok, "Expected false for decrypt(tag, corrupted_aad, ciphertext)")
}

TestECDH :: struct {
	scalar: string,
	point: string,
	product: string,
}

@(test)
test_x25519 :: proc(t: ^testing.T) {
	log(t, "Testing X25519")

	// Local copy of this so that the base point doesn't need to be exported.
	_BASE_POINT: [32]byte = {
		9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	}

	test_vectors := [?]TestECDH {
		// Test vectors from RFC 7748
		TestECDH{
			"a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
			"e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c",
			"c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552",
		},
		TestECDH{
			"4b66e9d4d1b4673c5ad22691957d6af5c11b6421e0ea01d42ca4169e7918ba0d",
			"e5210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493",
			"95cbde9476e8907d7aade45cb4b873f88b595a68799fa152e6f8f7647aac7957",
		},
	}
	for v, _ in test_vectors {
		scalar := _decode_hex32(v.scalar)
		point := _decode_hex32(v.point)

		derived_point: [x25519.POINT_SIZE]byte
		x25519.scalarmult(derived_point[:], scalar[:], point[:])
		derived_point_str := hex_string(derived_point[:])

		expect(t, derived_point_str == v.product, fmt.tprintf("Expected %s for %s * %s, but got %s instead", v.product, v.scalar, v.point, derived_point_str))

		// Abuse the test vectors to sanity-check the scalar-basepoint multiply.
		p1, p2: [x25519.POINT_SIZE]byte
		x25519.scalarmult_basepoint(p1[:], scalar[:])
		x25519.scalarmult(p2[:], scalar[:], _BASE_POINT[:])
		p1_str, p2_str := hex_string(p1[:]), hex_string(p2[:])
		expect(t, p1_str == p2_str, fmt.tprintf("Expected %s for %s * basepoint, but got %s instead", p2_str, v.scalar, p1_str))
	}

	// TODO/tests: Run the wycheproof test vectors, once I figure out
	// how to work with JSON.
}

@(test)
test_rand_bytes :: proc(t: ^testing.T) {
	log(t, "Testing rand_bytes")

	if ODIN_OS != .Linux {
		log(t, "rand_bytes not supported - skipping")
		return
	}

	allocator := context.allocator

	buf := make([]byte, 1 << 25, allocator)
	defer delete(buf)

	// Testing a CSPRNG for correctness is incredibly involved and
	// beyond the scope of an implementation that offloads
	// responsibility for correctness to the OS.
	//
	// Just attempt to randomize a sufficiently large buffer, where
	// sufficiently large is:
	// * Larger than the maximum getentropy request size (256 bytes).
	// * Larger than the maximum getrandom request size (2^25 - 1 bytes).
	//
	// While theoretically non-deterministic, if this fails, chances
	// are the CSPRNG is busted.
	seems_ok := false
	for i := 0; i < 256; i = i + 1 {
		mem.zero_explicit(raw_data(buf), len(buf))
		crypto.rand_bytes(buf)

		if buf[0] != 0 && buf[len(buf)-1] != 0 {
			seems_ok = true
			break
		}
	}

	expect(t, seems_ok, "Expected to randomize the head and tail of the buffer within a handful of attempts")
}

@(test)
bench_modern :: proc(t: ^testing.T) {
	fmt.println("Starting benchmarks:")

	bench_chacha20(t)
	bench_poly1305(t)
	bench_chacha20poly1305(t)
	bench_x25519(t)
}

_setup_sized_buf :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
	assert(options != nil)

	options.input = make([]u8, options.bytes, allocator)
	return nil if len(options.input) == options.bytes else .Allocation_Error
}

_teardown_sized_buf :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
	assert(options != nil)

	delete(options.input)
	return nil
}

_benchmark_chacha20 :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
	buf := options.input
	key := [chacha20.KEY_SIZE]byte{
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
	}
	nonce := [chacha20.NONCE_SIZE]byte{
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00,
	}

	ctx: chacha20.Context = ---
	chacha20.init(&ctx, key[:], nonce[:])

	for _ in 0..=options.rounds {
		chacha20.xor_bytes(&ctx, buf, buf)
	}
	options.count = options.rounds
	options.processed = options.rounds * options.bytes
	return nil
}

_benchmark_poly1305 :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
	buf := options.input
	key := [poly1305.KEY_SIZE]byte{
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
	}

	tag: [poly1305.TAG_SIZE]byte = ---
	for _ in 0..=options.rounds {
		poly1305.sum(tag[:], buf, key[:])
	}
	options.count = options.rounds
	options.processed = options.rounds * options.bytes
	//options.hash = u128(h)
	return nil
}

_benchmark_chacha20poly1305 :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
	buf := options.input
	key := [chacha20.KEY_SIZE]byte{
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
	}
	nonce := [chacha20.NONCE_SIZE]byte{
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00,
	}

	tag: [chacha20poly1305.TAG_SIZE]byte = ---

	for _ in 0..=options.rounds {
		chacha20poly1305.encrypt(buf, tag[:], key[:], nonce[:], nil, buf)
	}
	options.count = options.rounds
	options.processed = options.rounds * options.bytes
	return nil
}

benchmark_print :: proc(name: string, options: ^time.Benchmark_Options) {
	fmt.printf("\t[%v] %v rounds, %v bytes processed in %v ns\n\t\t%5.3f rounds/s, %5.3f MiB/s\n",
		name,
		options.rounds,
		options.processed,
		time.duration_nanoseconds(options.duration),
		options.rounds_per_second,
		options.megabytes_per_second,
	)
}

bench_chacha20 :: proc(t: ^testing.T) {
	name := "ChaCha20 64 bytes"
	options := &time.Benchmark_Options{
		rounds = 1_000,
		bytes = 64,
		setup = _setup_sized_buf,
		bench = _benchmark_chacha20,
		teardown = _teardown_sized_buf,
	}

	err := time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "ChaCha20 1024 bytes"
	options.bytes = 1024
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "ChaCha20 65536 bytes"
	options.bytes = 65536
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)
}

bench_poly1305 :: proc(t: ^testing.T) {
	name := "Poly1305 64 zero bytes"
	options := &time.Benchmark_Options{
		rounds = 1_000,
		bytes = 64,
		setup = _setup_sized_buf,
		bench = _benchmark_poly1305,
		teardown = _teardown_sized_buf,
	}

	err := time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "Poly1305 1024 zero bytes"
	options.bytes = 1024
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)
}

bench_chacha20poly1305 :: proc(t: ^testing.T) {
	name := "chacha20poly1305 64 bytes"
	options := &time.Benchmark_Options{
		rounds = 1_000,
		bytes = 64,
		setup = _setup_sized_buf,
		bench = _benchmark_chacha20poly1305,
		teardown = _teardown_sized_buf,
	}

	err := time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "chacha20poly1305 1024 bytes"
	options.bytes = 1024
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "chacha20poly1305 65536 bytes"
	options.bytes = 65536
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)
}

bench_x25519 :: proc(t: ^testing.T) {
	point := _decode_hex32("deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef")
	scalar := _decode_hex32("cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe")
	out: [x25519.POINT_SIZE]byte = ---

	iters :: 10000
	start := time.now()
	for i := 0; i < iters; i = i + 1 {
		x25519.scalarmult(out[:], scalar[:], point[:])
	}
	elapsed := time.since(start)

	log(t, fmt.tprintf("x25519.scalarmult: ~%f us/op", time.duration_microseconds(elapsed) / iters))
}
236
tests/core/crypto/test_crypto_benchmark.odin
Normal file
@@ -0,0 +1,236 @@
package test_core_crypto

import "core:encoding/hex"
import "core:fmt"
import "core:testing"
import "core:time"

import "core:crypto/chacha20"
import "core:crypto/chacha20poly1305"
import "core:crypto/poly1305"
import "core:crypto/x25519"

// Cryptographic primitive benchmarks.

@(test)
bench_crypto :: proc(t: ^testing.T) {
	fmt.println("Starting benchmarks:")

	bench_chacha20(t)
	bench_poly1305(t)
	bench_chacha20poly1305(t)
	bench_x25519(t)
}

_setup_sized_buf :: proc(
	options: ^time.Benchmark_Options,
	allocator := context.allocator,
) -> (
	err: time.Benchmark_Error,
) {
	assert(options != nil)

	options.input = make([]u8, options.bytes, allocator)
	return nil if len(options.input) == options.bytes else .Allocation_Error
}

_teardown_sized_buf :: proc(
	options: ^time.Benchmark_Options,
	allocator := context.allocator,
) -> (
	err: time.Benchmark_Error,
) {
	assert(options != nil)

	delete(options.input)
	return nil
}

_benchmark_chacha20 :: proc(
	options: ^time.Benchmark_Options,
	allocator := context.allocator,
) -> (
	err: time.Benchmark_Error,
) {
	buf := options.input
	key := [chacha20.KEY_SIZE]byte {
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
	}
	nonce := [chacha20.NONCE_SIZE]byte {
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00,
	}

	ctx: chacha20.Context = ---
	chacha20.init(&ctx, key[:], nonce[:])

	for _ in 0 ..= options.rounds {
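		// XOR in place so the loop measures keystream generation rather
		// than buffer management.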
		chacha20.xor_bytes(&ctx, buf, buf)
	}
	options.count = options.rounds
	options.processed = options.rounds * options.bytes
	return nil
}

_benchmark_poly1305 :: proc(
	options: ^time.Benchmark_Options,
	allocator := context.allocator,
) -> (
	err: time.Benchmark_Error,
) {
	buf := options.input
	key := [poly1305.KEY_SIZE]byte {
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
	}

	tag: [poly1305.TAG_SIZE]byte = ---
	for _ in 0 ..= options.rounds {
		poly1305.sum(tag[:], buf, key[:])
	}
	options.count = options.rounds
	options.processed = options.rounds * options.bytes
	//options.hash = u128(h)
	return nil
}

_benchmark_chacha20poly1305 :: proc(
	options: ^time.Benchmark_Options,
	allocator := context.allocator,
) -> (
	err: time.Benchmark_Error,
) {
	buf := options.input
	key := [chacha20.KEY_SIZE]byte {
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
	}
	nonce := [chacha20.NONCE_SIZE]byte {
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00,
	}

	tag: [chacha20poly1305.TAG_SIZE]byte = ---

	for _ in 0 ..= options.rounds {
		chacha20poly1305.encrypt(buf, tag[:], key[:], nonce[:], nil, buf)
	}
	options.count = options.rounds
	options.processed = options.rounds * options.bytes
	return nil
}

benchmark_print :: proc(name: string, options: ^time.Benchmark_Options) {
	fmt.printf(
		"\t[%v] %v rounds, %v bytes processed in %v ns\n\t\t%5.3f rounds/s, %5.3f MiB/s\n",
		name,
		options.rounds,
		options.processed,
		time.duration_nanoseconds(options.duration),
		options.rounds_per_second,
		options.megabytes_per_second,
	)
}

bench_chacha20 :: proc(t: ^testing.T) {
	name := "ChaCha20 64 bytes"
	options := &time.Benchmark_Options {
		rounds   = 1_000,
		bytes    = 64,
		setup    = _setup_sized_buf,
		bench    = _benchmark_chacha20,
		teardown = _teardown_sized_buf,
	}

	err := time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "ChaCha20 1024 bytes"
	options.bytes = 1024
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "ChaCha20 65536 bytes"
	options.bytes = 65536
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)
}

bench_poly1305 :: proc(t: ^testing.T) {
	name := "Poly1305 64 zero bytes"
	options := &time.Benchmark_Options {
		rounds   = 1_000,
		bytes    = 64,
		setup    = _setup_sized_buf,
		bench    = _benchmark_poly1305,
		teardown = _teardown_sized_buf,
	}

	err := time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "Poly1305 1024 zero bytes"
	options.bytes = 1024
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)
}

bench_chacha20poly1305 :: proc(t: ^testing.T) {
	name := "chacha20poly1305 64 bytes"
	options := &time.Benchmark_Options {
		rounds   = 1_000,
		bytes    = 64,
		setup    = _setup_sized_buf,
		bench    = _benchmark_chacha20poly1305,
		teardown = _teardown_sized_buf,
	}

	err := time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "chacha20poly1305 1024 bytes"
	options.bytes = 1024
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)

	name = "chacha20poly1305 65536 bytes"
	options.bytes = 65536
	err = time.benchmark(options, context.allocator)
	expect(t, err == nil, name)
	benchmark_print(name, options)
}

bench_x25519 :: proc(t: ^testing.T) {
	point_str := "deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"
	scalar_str := "cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe"

	point, _ := hex.decode(transmute([]byte)(point_str), context.temp_allocator)
	scalar, _ := hex.decode(transmute([]byte)(scalar_str), context.temp_allocator)
	out: [x25519.POINT_SIZE]byte = ---

	iters :: 10000
	start := time.now()
	for i := 0; i < iters; i = i + 1 {
		x25519.scalarmult(out[:], scalar[:], point[:])
	}
	elapsed := time.since(start)

	log(
		t,
		fmt.tprintf("x25519.scalarmult: ~%f us/op", time.duration_microseconds(elapsed) / iters),
	)
}