Merge pull request #3229 from Yawning/feature/moar-crypto

core/crypto: More improvements/additions
gingerBill
2024-03-06 14:49:15 +00:00
committed by GitHub
44 changed files with 1454 additions and 3033 deletions


@@ -7,8 +7,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Download LLVM, botan
run: sudo apt-get install llvm-11 clang-11 libbotan-2-dev botan
- name: Download LLVM
run: sudo apt-get install llvm-11 clang-11
- name: build odin
run: ./build_odin.sh release
- name: Odin version
@@ -61,9 +61,9 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v1
- name: Download LLVM, botan and setup PATH
- name: Download LLVM and setup PATH
run: |
brew install llvm@13 botan
brew install llvm@13
echo "/usr/local/opt/llvm@13/bin" >> $GITHUB_PATH
TMP_PATH=$(xcrun --show-sdk-path)/usr/include
echo "CPATH=$TMP_PATH" >> $GITHUB_ENV
@@ -102,9 +102,9 @@ jobs:
runs-on: macos-14 # This is an arm/m1 runner.
steps:
- uses: actions/checkout@v1
- name: Download LLVM, botan and setup PATH
- name: Download LLVM and setup PATH
run: |
brew install llvm@13 botan
brew install llvm@13
echo "/opt/homebrew/opt/llvm@13/bin" >> $GITHUB_PATH
TMP_PATH=$(xcrun --show-sdk-path)/usr/include
echo "CPATH=$TMP_PATH" >> $GITHUB_ENV


@@ -7,8 +7,12 @@ package _sha3
List of contributors:
zhibog, dotbmp: Initial implementation.
Implementation of the Keccak hashing algorithm, standardized as SHA3 in <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf>
To use the original Keccak padding, set the is_keccak bool to true, otherwise it will use SHA3 padding.
Implementation of the Keccak hashing algorithm, standardized as SHA3
in <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf>.
As the only difference between the legacy Keccak and SHA3 is the domain
separation byte, set dsbyte to the appropriate value to pick the desired
algorithm.
*/
import "core:math/bits"
@@ -16,47 +20,56 @@ import "core:mem"
ROUNDS :: 24
RATE_128 :: 1344 / 8 // ONLY for SHAKE128.
RATE_224 :: 1152 / 8
RATE_256 :: 1088 / 8
RATE_384 :: 832 / 8
RATE_512 :: 576 / 8
DS_KECCAK :: 0x01
DS_SHA3 :: 0x06
DS_SHAKE :: 0x1f
DS_CSHAKE :: 0x04
Context :: struct {
st: struct #raw_union {
st: struct #raw_union {
b: [200]u8,
q: [25]u64,
},
pt: int,
rsiz: int,
mdlen: int,
is_keccak: bool,
pt: int,
rsiz: int,
mdlen: int,
dsbyte: byte,
is_initialized: bool,
is_finalized: bool, // For SHAKE (unlimited squeeze is allowed)
}
@(private)
keccakf_rndc := [?]u64 {
0x0000000000000001, 0x0000000000008082, 0x800000000000808a,
0x8000000080008000, 0x000000000000808b, 0x0000000080000001,
0x8000000080008081, 0x8000000000008009, 0x000000000000008a,
0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
0x000000008000808b, 0x800000000000008b, 0x8000000000008089,
0x8000000000008003, 0x8000000000008002, 0x8000000000000080,
0x000000000000800a, 0x800000008000000a, 0x8000000080008081,
0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
}
@(private)
keccakf_rotc := [?]int {
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
}
@(private)
keccakf_piln := [?]i32 {
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
}
@(private)
keccakf :: proc "contextless" (st: ^[25]u64) {
keccakf_rndc := [?]u64 {
0x0000000000000001, 0x0000000000008082, 0x800000000000808a,
0x8000000080008000, 0x000000000000808b, 0x0000000080000001,
0x8000000080008081, 0x8000000000008009, 0x000000000000008a,
0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
0x000000008000808b, 0x800000000000008b, 0x8000000000008089,
0x8000000000008003, 0x8000000000008002, 0x8000000000000080,
0x000000000000800a, 0x800000008000000a, 0x8000000080008081,
0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
}
keccakf_rotc := [?]int {
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
}
keccakf_piln := [?]i32 {
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
}
i, j, r: i32 = ---, ---, ---
t: u64 = ---
bc: [5]u64 = ---
@@ -140,9 +153,6 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
if len(hash) < ctx.mdlen {
if ctx.is_keccak {
panic("crypto/keccac: invalid destination digest size")
}
panic("crypto/sha3: invalid destination digest size")
}
@@ -152,13 +162,9 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer(reset(ctx))
defer (reset(ctx))
if ctx.is_keccak {
ctx.st.b[ctx.pt] ~= 0x01
} else {
ctx.st.b[ctx.pt] ~= 0x06
}
ctx.st.b[ctx.pt] ~= ctx.dsbyte
ctx.st.b[ctx.rsiz - 1] ~= 0x80
keccakf(&ctx.st.q)
@@ -183,7 +189,7 @@ shake_xof :: proc(ctx: ^Context) {
assert(ctx.is_initialized)
assert(!ctx.is_finalized)
ctx.st.b[ctx.pt] ~= 0x1F
ctx.st.b[ctx.pt] ~= ctx.dsbyte
ctx.st.b[ctx.rsiz - 1] ~= 0x80
keccakf(&ctx.st.q)
ctx.pt = 0


@@ -0,0 +1,145 @@
package _sha3
import "core:encoding/endian"
import "core:math/bits"
init_cshake :: proc(ctx: ^Context, n, s: []byte, sec_strength: int) {
ctx.mdlen = sec_strength / 8
// No domain separator is equivalent to vanilla SHAKE.
if len(n) == 0 && len(s) == 0 {
ctx.dsbyte = DS_SHAKE
init(ctx)
return
}
ctx.dsbyte = DS_CSHAKE
init(ctx)
bytepad(ctx, [][]byte{n, s}, rate_cshake(sec_strength))
}
final_cshake :: proc(ctx: ^Context, dst: []byte, finalize_clone: bool = false) {
ctx := ctx
if finalize_clone {
tmp_ctx: Context
clone(&tmp_ctx, ctx)
ctx = &tmp_ctx
}
defer reset(ctx)
encode_byte_len(ctx, len(dst), false) // right_encode
shake_xof(ctx)
shake_out(ctx, dst)
}
rate_cshake :: #force_inline proc(sec_strength: int) -> int {
switch sec_strength {
case 128:
return RATE_128
case 256:
return RATE_256
}
panic("crypto/sha3: invalid security strength")
}
// right_encode and left_encode are defined to support 0 <= x < 2^2040;
// however, the largest value we will ever need to encode is `max(int) * 8`.
//
// This is unfortunate as the extreme upper edge is larger than
// `max(u64)`. While such values are impractical at present,
// they are possible (ie: https://arxiv.org/pdf/quant-ph/9908043.pdf).
//
// Thus we support 0 <= x < 2^128.
@(private)
_PAD: [RATE_128]byte // Biggest possible value of w per spec.
bytepad :: proc(ctx: ^Context, x_strings: [][]byte, w: int) {
// 1. z = left_encode(w) || X.
z_hi: u64
z_lo := left_right_encode(ctx, 0, u64(w), true)
for x in x_strings {
// All uses of bytepad in SP 800-185 use the output from
// one or more encode_string values for `X`.
hi, lo := encode_string(ctx, x)
carry: u64
z_lo, carry = bits.add_u64(z_lo, lo, 0)
z_hi, carry = bits.add_u64(z_hi, hi, carry)
// This isn't actually possible, at least with the currently
// defined SP 800-185 routines.
if carry != 0 {
panic("crypto/sha3: bytepad input length overflow")
}
}
// We skip this step as we are doing a byte-oriented implementation
// rather than a bit-oriented one.
//
// 2. while len(z) mod 8 ≠ 0:
// z = z || 0
// 3. while (len(z)/8) mod w ≠ 0:
// z = z || 00000000
z_len := u128(z_hi) << 64 | u128(z_lo)
z_rem := int(z_len % u128(w))
pad := _PAD[:w - z_rem]
// We just add the padding to the state, instead of returning z.
//
// 4. return z.
update(ctx, pad)
}
encode_string :: #force_inline proc(ctx: ^Context, s: []byte) -> (u64, u64) {
l := encode_byte_len(ctx, len(s), true) // left_encode
update(ctx, s)
lo, hi := bits.add_u64(l, u64(len(s)), 0)
return hi, lo
}
encode_byte_len :: #force_inline proc(ctx: ^Context, l: int, is_left: bool) -> u64 {
hi, lo := bits.mul_u64(u64(l), 8)
return left_right_encode(ctx, hi, lo, is_left)
}
@(private)
left_right_encode :: proc(ctx: ^Context, hi, lo: u64, is_left: bool) -> u64 {
HI_OFFSET :: 1
LO_OFFSET :: HI_OFFSET + 8
RIGHT_OFFSET :: LO_OFFSET + 8
BUF_LEN :: RIGHT_OFFSET + 1
buf: [BUF_LEN]byte // prefix + largest uint + postfix
endian.unchecked_put_u64be(buf[HI_OFFSET:], hi)
endian.unchecked_put_u64be(buf[LO_OFFSET:], lo)
// 2. Strip leading `0x00` bytes.
off: int
for off = HI_OFFSET; off < RIGHT_OFFSET - 1; off = off + 1 { // Note: Minimum size is 1, not 0.
if buf[off] != 0 {
break
}
}
n := byte(RIGHT_OFFSET - off)
// 3. Prefix (left_encode) or postfix (right_encode) the length in bytes.
b: []byte
switch is_left {
case true:
buf[off - 1] = n // n | x
b = buf[off - 1:RIGHT_OFFSET]
case false:
buf[RIGHT_OFFSET] = n // x | n
b = buf[off:]
}
update(ctx, b)
return u64(len(b))
}

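For reference, a worked example of the encodings implemented above (illustrative values, not code from this commit): left_encode emits the minimal big-endian encoding of the value prefixed by its length in bytes, while right_encode appends the length instead.

// Worked example (illustrative, per NIST SP 800-185):
//
//   left_encode(0)    -> {0x01, 0x00}        // length 1, then the value
//   left_encode(168)  -> {0x01, 0xa8}        // 168 = 0xa8
//   left_encode(4096) -> {0x02, 0x10, 0x00}  // length 2, then 0x1000
//   right_encode(168) -> {0xa8, 0x01}        // the value, then length 1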

@@ -1,11 +1,21 @@
/*
package chacha20 implements the ChaCha20 and XChaCha20 stream ciphers.
See:
- https://datatracker.ietf.org/doc/html/rfc8439
- https://datatracker.ietf.org/doc/draft-irtf-cfrg-xchacha/03/
*/
package chacha20
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
// KEY_SIZE is the (X)ChaCha20 key size in bytes.
KEY_SIZE :: 32
// NONCE_SIZE is the ChaCha20 nonce size in bytes.
NONCE_SIZE :: 12
// XNONCE_SIZE is the XChaCha20 nonce size in bytes.
XNONCE_SIZE :: 24
@(private)
@@ -19,25 +29,26 @@ _STATE_SIZE_U32 :: 16
_ROUNDS :: 20
@(private)
_SIGMA_0 : u32 : 0x61707865
_SIGMA_0: u32 : 0x61707865
@(private)
_SIGMA_1 : u32 : 0x3320646e
_SIGMA_1: u32 : 0x3320646e
@(private)
_SIGMA_2 : u32 : 0x79622d32
_SIGMA_2: u32 : 0x79622d32
@(private)
_SIGMA_3 : u32 : 0x6b206574
_SIGMA_3: u32 : 0x6b206574
// Context is a ChaCha20 or XChaCha20 instance.
Context :: struct {
_s: [_STATE_SIZE_U32]u32,
_buffer: [_BLOCK_SIZE]byte,
_off: int,
_s: [_STATE_SIZE_U32]u32,
_buffer: [_BLOCK_SIZE]byte,
_off: int,
_is_ietf_flavor: bool,
_is_initialized: bool,
}
init :: proc (ctx: ^Context, key, nonce: []byte) {
// init initializes a Context for ChaCha20 or XChaCha20 with the provided
// key and nonce.
init :: proc(ctx: ^Context, key, nonce: []byte) {
if len(key) != KEY_SIZE {
panic("crypto/chacha20: invalid ChaCha20 key size")
}
@@ -89,7 +100,8 @@ init :: proc (ctx: ^Context, key, nonce: []byte) {
ctx._is_initialized = true
}
seek :: proc (ctx: ^Context, block_nr: u64) {
// seek seeks the (X)ChaCha20 stream counter to the specified block.
seek :: proc(ctx: ^Context, block_nr: u64) {
assert(ctx._is_initialized)
if ctx._is_ietf_flavor {
@@ -103,7 +115,10 @@ seek :: proc (ctx: ^Context, block_nr: u64) {
ctx._off = _BLOCK_SIZE
}
xor_bytes :: proc (ctx: ^Context, dst, src: []byte) {
// xor_bytes XORs each byte in src with bytes taken from the (X)ChaCha20
// keystream, and writes the resulting output to dst. The dst and src
// slices MUST alias exactly or not at all.
xor_bytes :: proc(ctx: ^Context, dst, src: []byte) {
assert(ctx._is_initialized)
// TODO: Enforcing that dst and src alias exactly or not at all
@@ -147,7 +162,8 @@ xor_bytes :: proc (ctx: ^Context, dst, src: []byte) {
}
}
keystream_bytes :: proc (ctx: ^Context, dst: []byte) {
// keystream_bytes fills dst with the raw (X)ChaCha20 keystream output.
keystream_bytes :: proc(ctx: ^Context, dst: []byte) {
assert(ctx._is_initialized)
dst := dst
@@ -180,7 +196,9 @@ keystream_bytes :: proc (ctx: ^Context, dst: []byte) {
}
}
reset :: proc (ctx: ^Context) {
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
mem.zero_explicit(&ctx._s, size_of(ctx._s))
mem.zero_explicit(&ctx._buffer, size_of(ctx._buffer))
@@ -188,7 +206,7 @@ reset :: proc (ctx: ^Context) {
}
@(private)
_do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
_do_blocks :: proc(ctx: ^Context, dst, src: []byte, nr_blocks: int) {
// Enforce the maximum consumed keystream per nonce.
//
// While all modern "standard" definitions of ChaCha20 use

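A minimal usage sketch for the API documented above (not part of this commit; the zero-valued key and nonce are placeholders and MUST come from a CSPRNG, with a unique nonce per key, in real use):

package chacha20_example

import "core:crypto/chacha20"

main :: proc() {
	key: [chacha20.KEY_SIZE]byte     // Placeholder; use a randomly generated key.
	nonce: [chacha20.NONCE_SIZE]byte // Placeholder; MUST be unique per key.

	msg := "Ladies and Gentlemen of the class of '99"
	plaintext := transmute([]byte)(msg)
	ciphertext := make([]byte, len(plaintext))
	defer delete(ciphertext)

	ctx: chacha20.Context
	chacha20.init(&ctx, key[:], nonce[:])
	chacha20.xor_bytes(&ctx, ciphertext, plaintext)
	chacha20.reset(&ctx) // Sanitize the Context when done.
}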

@@ -1,3 +1,10 @@
/*
package chacha20poly1305 implements the AEAD_CHACHA20_POLY1305 Authenticated
Encryption with Additional Data algorithm.
See:
- https://www.rfc-editor.org/rfc/rfc8439
*/
package chacha20poly1305
import "core:crypto"
@@ -6,8 +13,11 @@ import "core:crypto/poly1305"
import "core:encoding/endian"
import "core:mem"
// KEY_SIZE is the chacha20poly1305 key size in bytes.
KEY_SIZE :: chacha20.KEY_SIZE
// NONCE_SIZE is the chacha20poly1305 nonce size in bytes.
NONCE_SIZE :: chacha20.NONCE_SIZE
// TAG_SIZE is the chacha20poly1305 tag size in bytes.
TAG_SIZE :: poly1305.TAG_SIZE
@(private)
@@ -49,6 +59,8 @@ _update_mac_pad16 :: #force_inline proc (ctx: ^poly1305.Context, x_len: int) {
}
}
// encrypt encrypts the plaintext and authenticates the aad and ciphertext,
// with the provided key and nonce, and stores the output in ciphertext
// and tag.
encrypt :: proc (ciphertext, tag, key, nonce, aad, plaintext: []byte) {
_validate_common_slice_sizes(tag, key, nonce, aad, plaintext)
if len(ciphertext) != len(plaintext) {
@@ -95,6 +107,11 @@ encrypt :: proc (ciphertext, tag, key, nonce, aad, plaintext: []byte) {
poly1305.final(&mac_ctx, tag) // Implicitly sanitizes context.
}
// decrypt authenticates the aad and ciphertext, and decrypts the ciphertext,
// with the provided key, nonce, and tag, and stores the output in plaintext,
// returning true iff the authentication was successful.
//
// If authentication fails, the destination plaintext buffer will be zeroed.
decrypt :: proc (plaintext, tag, key, nonce, aad, ciphertext: []byte) -> bool {
_validate_common_slice_sizes(tag, key, nonce, aad, ciphertext)
if len(ciphertext) != len(plaintext) {

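A minimal AEAD round-trip sketch for the encrypt/decrypt pair documented above (not part of this commit; the zero-valued key and nonce are placeholders for random values):

package chacha20poly1305_example

import "core:crypto/chacha20poly1305"

main :: proc() {
	key: [chacha20poly1305.KEY_SIZE]byte     // Placeholder; use a random key.
	nonce: [chacha20poly1305.NONCE_SIZE]byte // Placeholder; MUST be unique per key.
	tag: [chacha20poly1305.TAG_SIZE]byte

	msg := "attack at dawn"
	aad := []byte{} // Optional additional authenticated data.
	plaintext := transmute([]byte)(msg)
	ciphertext := make([]byte, len(plaintext))
	defer delete(ciphertext)

	chacha20poly1305.encrypt(ciphertext, tag[:], key[:], nonce[:], aad, plaintext)

	recovered := make([]byte, len(ciphertext))
	defer delete(recovered)
	ok := chacha20poly1305.decrypt(recovered, tag[:], key[:], nonce[:], aad, ciphertext)
	assert(ok) // false iff the tag, aad, or ciphertext was tampered with.
}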
core/crypto/hkdf/hkdf.odin (new file, 103 lines)

@@ -0,0 +1,103 @@
/*
package hkdf implements the HKDF HMAC-based Extract-and-Expand Key
Derivation Function.
See: https://www.rfc-editor.org/rfc/rfc5869
*/
package hkdf
import "core:crypto/hash"
import "core:crypto/hmac"
import "core:mem"
// extract_and_expand derives output keying material (OKM) via the
// HKDF-Extract and HKDF-Expand algorithms, with the specified hash
// function, salt, input keying material (IKM), and optional info.
// The dst buffer must be less-than-or-equal to 255 HMAC tags.
extract_and_expand :: proc(algorithm: hash.Algorithm, salt, ikm, info, dst: []byte) {
h_len := hash.DIGEST_SIZES[algorithm]
tmp: [hash.MAX_DIGEST_SIZE]byte
prk := tmp[:h_len]
defer mem.zero_explicit(raw_data(prk), h_len)
extract(algorithm, salt, ikm, prk)
expand(algorithm, prk, info, dst)
}
// extract derives a pseudorandom key (PRK) via the HKDF-Extract algorithm,
// with the specified hash function, salt, and input keying material (IKM).
// It requires that the dst buffer be the HMAC tag size for the specified
// hash function.
extract :: proc(algorithm: hash.Algorithm, salt, ikm, dst: []byte) {
// PRK = HMAC-Hash(salt, IKM)
hmac.sum(algorithm, dst, ikm, salt)
}
// expand derives output keying material (OKM) via the HKDF-Expand algorithm,
// with the specified hash function, pseudorandom key (PRK), and optional
// info. The dst buffer must be less-than-or-equal to 255 HMAC tags.
expand :: proc(algorithm: hash.Algorithm, prk, info, dst: []byte) {
h_len := hash.DIGEST_SIZES[algorithm]
// (<= 255*HashLen)
dk_len := len(dst)
switch {
case dk_len == 0:
return
case dk_len > h_len * 255:
panic("crypto/hkdf: derived key too long")
case:
}
// The output OKM is calculated as follows:
//
// N = ceil(L/HashLen)
// T = T(1) | T(2) | T(3) | ... | T(N)
// OKM = first L octets of T
//
// where:
// T(0) = empty string (zero length)
// T(1) = HMAC-Hash(PRK, T(0) | info | 0x01)
// T(2) = HMAC-Hash(PRK, T(1) | info | 0x02)
// T(3) = HMAC-Hash(PRK, T(2) | info | 0x03)
// ...
n := dk_len / h_len
r := dk_len % h_len
base: hmac.Context
defer hmac.reset(&base)
hmac.init(&base, algorithm, prk)
dst_blk := dst
prev: []byte
for i in 1 ..= n {
_F(&base, prev, info, i, dst_blk[:h_len])
prev = dst_blk[:h_len]
dst_blk = dst_blk[h_len:]
}
if r > 0 {
tmp: [hash.MAX_DIGEST_SIZE]byte
blk := tmp[:h_len]
defer mem.zero_explicit(raw_data(blk), h_len)
_F(&base, prev, info, n + 1, blk)
copy(dst_blk, blk)
}
}
@(private)
_F :: proc(base: ^hmac.Context, prev, info: []byte, i: int, dst_blk: []byte) {
prf: hmac.Context
hmac.clone(&prf, base)
hmac.update(&prf, prev)
hmac.update(&prf, info)
hmac.update(&prf, []byte{u8(i)})
hmac.final(&prf, dst_blk)
}

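A minimal sketch of using the new package (not part of this commit); the IKM, salt, and info values are illustrative:

package hkdf_example

import "core:crypto/hash"
import "core:crypto/hkdf"

main :: proc() {
	ikm := []byte{0x0b, 0x0b, 0x0b, 0x0b}  // Input keying material (illustrative).
	salt := []byte{0x00, 0x01, 0x02, 0x03} // Illustrative salt.
	info_str := "example application v1"
	info := transmute([]byte)(info_str)

	okm: [32]byte // Output keying material; at most 255 HMAC tags long.
	hkdf.extract_and_expand(hash.Algorithm.SHA256, salt, ikm, info, okm[:])
}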

@@ -11,7 +11,7 @@ import "core:crypto/hash"
import "core:mem"
// sum will compute the HMAC with the specified algorithm and key
// over msg, and write the computed digest to dst. It requires that
// over msg, and write the computed tag to dst. It requires that
// the dst buffer is the tag size.
sum :: proc(algorithm: hash.Algorithm, dst, msg, key: []byte) {
ctx: Context
@@ -78,6 +78,18 @@ final :: proc(ctx: ^Context, dst: []byte) {
hash.final(&ctx._o_hash, dst)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
if ctx == other {
return
}
hash.clone(&ctx._o_hash, &other._o_hash)
hash.clone(&ctx._i_hash, &other._i_hash)
ctx._tag_sz = other._tag_sz
ctx._is_initialized = other._is_initialized
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {

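A one-shot usage sketch for hmac.sum as documented above (not from this commit; the key and message are placeholders, and dst must be exactly the tag size, which is 32 bytes for HMAC-SHA256):

package hmac_example

import "core:crypto/hash"
import "core:crypto/hmac"

main :: proc() {
	key_str := "my-secret-key" // Placeholder; use a randomly generated key.
	msg_str := "message"
	key := transmute([]byte)(key_str)
	msg := transmute([]byte)(msg_str)

	tag: [32]byte // Must be the tag size: 32 bytes for HMAC-SHA256.
	hmac.sum(hash.Algorithm.SHA256, tag[:], msg, key)
}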
core/crypto/kmac/kmac.odin (new file, 116 lines)

@@ -0,0 +1,116 @@
/*
package kmac implements the KMAC MAC algorithm.
See:
- https://nvlpubs.nist.gov/nistpubs/specialpublications/nist.sp.800-185.pdf
*/
package kmac
import "../_sha3"
import "core:crypto"
import "core:crypto/shake"
// MIN_KEY_SIZE_128 is the minimum key size for KMAC128 in bytes.
MIN_KEY_SIZE_128 :: 128 / 8
// MIN_KEY_SIZE_256 is the minimum key size for KMAC256 in bytes.
MIN_KEY_SIZE_256 :: 256 / 8
// MIN_TAG_SIZE is the absolute minimum tag size for KMAC in bytes (8.4.2).
// Most callers SHOULD use at least 128-bits if not 256-bits for the tag
// size.
MIN_TAG_SIZE :: 32 / 8
// sum will compute the KMAC with the specified security strength,
// key, and domain separator over msg, and write the computed digest to
// dst.
sum :: proc(sec_strength: int, dst, msg, key, domain_sep: []byte) {
ctx: Context
_init_kmac(&ctx, key, domain_sep, sec_strength)
update(&ctx, msg)
final(&ctx, dst)
}
// verify will verify the KMAC tag computed with the specified security
// strength, key and domain separator over msg and return true iff the
// tag is valid.
verify :: proc(sec_strength: int, tag, msg, key, domain_sep: []byte, allocator := context.temp_allocator) -> bool {
derived_tag := make([]byte, len(tag), allocator)
sum(sec_strength, derived_tag, msg, key, domain_sep)
return crypto.compare_constant_time(derived_tag, tag) == 1
}
// Context is a KMAC instance.
Context :: distinct shake.Context
// init_128 initializes a Context for KMAC128. This routine will panic if
// the key length is less than MIN_KEY_SIZE_128.
init_128 :: proc(ctx: ^Context, key, domain_sep: []byte) {
_init_kmac(ctx, key, domain_sep, 128)
}
// init_256 initializes a Context for KMAC256. This routine will panic if
// the key length is less than MIN_KEY_SIZE_256.
init_256 :: proc(ctx: ^Context, key, domain_sep: []byte) {
_init_kmac(ctx, key, domain_sep, 256)
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
shake.write(transmute(^shake.Context)(ctx), data)
}
// final finalizes the Context, writes the tag to dst, and calls reset
// on the Context. This routine will panic if the dst length is less than
// MIN_TAG_SIZE.
final :: proc(ctx: ^Context, dst: []byte) {
assert(ctx.is_initialized)
defer reset(ctx)
if len(dst) < MIN_TAG_SIZE {
panic("crypto/kmac: invalid KMAC tag_size, too short")
}
_sha3.final_cshake(transmute(^_sha3.Context)(ctx), dst)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
if ctx == other {
return
}
shake.clone(transmute(^shake.Context)(ctx), transmute(^shake.Context)(other))
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
if !ctx.is_initialized {
return
}
shake.reset(transmute(^shake.Context)(ctx))
}
@(private)
_init_kmac :: proc(ctx: ^Context, key, s: []byte, sec_strength: int) {
if ctx.is_initialized {
reset(ctx)
}
if len(key) < sec_strength / 8 {
panic("crypto/kmac: invalid KMAC key, too short")
}
ctx_ := transmute(^_sha3.Context)(ctx)
_sha3.init_cshake(ctx_, N_KMAC, s, sec_strength)
_sha3.bytepad(ctx_, [][]byte{key}, _sha3.rate_cshake(sec_strength))
}
@(private)
N_KMAC := []byte{'K', 'M', 'A', 'C'}

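A minimal sketch of the new KMAC one-shot API (not part of this commit; the zero-valued key is a placeholder for a random key, and the tag and domain separator are illustrative):

package kmac_example

import "core:crypto/kmac"

main :: proc() {
	key: [kmac.MIN_KEY_SIZE_128]byte // Placeholder; use a randomly generated key.
	msg_str := "example message"
	ds_str := "My Tagged Application"
	msg := transmute([]byte)(msg_str)
	domain_sep := transmute([]byte)(ds_str)

	tag: [16]byte // 128-bit tag; must be at least MIN_TAG_SIZE.
	kmac.sum(128, tag[:], msg, key[:], domain_sep)

	assert(kmac.verify(128, tag[:], msg, key[:], domain_sep))
}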

@@ -65,7 +65,7 @@ init_512 :: proc(ctx: ^Context) {
@(private)
_init :: proc(ctx: ^Context) {
ctx.is_keccak = true
ctx.dsbyte = _sha3.DS_KECCAK
_sha3.init(transmute(^_sha3.Context)(ctx))
}


@@ -0,0 +1,122 @@
/*
package pbkdf2 implements the PBKDF2 password-based key derivation function.
See: https://www.rfc-editor.org/rfc/rfc2898
*/
package pbkdf2
import "core:crypto/hash"
import "core:crypto/hmac"
import "core:encoding/endian"
import "core:mem"
// derive invokes PBKDF2-HMAC with the specified hash algorithm, password,
// salt, iteration count, and outputs the derived key to dst.
derive :: proc(
hmac_hash: hash.Algorithm,
password: []byte,
salt: []byte,
iterations: u32,
dst: []byte,
) {
h_len := hash.DIGEST_SIZES[hmac_hash]
// 1. If dkLen > (2^32 - 1) * hLen, output "derived key too long"
// and stop.
dk_len := len(dst)
switch {
case dk_len == 0:
return
case u64(dk_len) > u64(max(u32)) * u64(h_len):
// This is so beyond anything that is practical or reasonable,
// so just panic instead of returning an error.
panic("crypto/pbkdf2: derived key too long")
case:
}
// 2. Let l be the number of hLen-octet blocks in the derived key,
// rounding up, and let r be the number of octets in the last block.
l := dk_len / h_len // Don't need to round up.
r := dk_len % h_len
// 3. For each block of the derived key apply the function F defined
// below to the password P, the salt S, the iteration count c, and
// the block index to compute the block.
//
// 4. Concatenate the blocks and extract the first dkLen octets to
// produce a derived key DK.
//
// 5. Output the derived key DK.
// Each iteration of F is always `PRF (P, ...)`, so instantiate the
// PRF, and clone since memcpy is faster than having to re-initialize
// HMAC repeatedly.
base: hmac.Context
defer hmac.reset(&base)
hmac.init(&base, hmac_hash, password)
// Process all of the blocks that will be written directly to dst.
dst_blk := dst
for i in 1 ..= l { // F expects i starting at 1.
_F(&base, salt, iterations, u32(i), dst_blk[:h_len])
dst_blk = dst_blk[h_len:]
}
// Instead of rounding l up, just process the one extra block iff
// r != 0.
if r > 0 {
tmp: [hash.MAX_DIGEST_SIZE]byte
blk := tmp[:h_len]
defer mem.zero_explicit(raw_data(blk), h_len)
_F(&base, salt, iterations, u32(l + 1), blk)
copy(dst_blk, blk)
}
}
@(private)
_F :: proc(base: ^hmac.Context, salt: []byte, c: u32, i: u32, dst_blk: []byte) {
h_len := len(dst_blk)
tmp: [hash.MAX_DIGEST_SIZE]byte
u := tmp[:h_len]
defer mem.zero_explicit(raw_data(u), h_len)
// F (P, S, c, i) = U_1 \xor U_2 \xor ... \xor U_c
//
// where
//
// U_1 = PRF (P, S || INT (i)) ,
// U_2 = PRF (P, U_1) ,
// ...
// U_c = PRF (P, U_{c-1}) .
//
// Here, INT (i) is a four-octet encoding of the integer i, most
// significant octet first.
prf: hmac.Context
// U_1: PRF (P, S || INT (i))
hmac.clone(&prf, base)
hmac.update(&prf, salt)
endian.unchecked_put_u32be(u, i) // Use u as scratch space.
hmac.update(&prf, u[:4])
hmac.final(&prf, u)
copy(dst_blk, u)
// U_2 ... U_c: U_n = PRF (P, U_(n-1))
for _ in 1 ..< c {
hmac.clone(&prf, base)
hmac.update(&prf, u)
hmac.final(&prf, u)
// XOR dst_blk and u.
for v, i in u {
dst_blk[i] ~= v
}
}
}

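A minimal sketch of the new derive call (not part of this commit); the password and salt are placeholders, and the iteration count is an illustrative choice, not a recommendation from this PR:

package pbkdf2_example

import "core:crypto/hash"
import "core:crypto/pbkdf2"

main :: proc() {
	password_str := "correct horse battery staple"
	salt_str := "example salt" // Placeholder; use a random per-user salt.
	password := transmute([]byte)(password_str)
	salt := transmute([]byte)(salt_str)

	dk: [32]byte // Derived key.
	pbkdf2.derive(hash.Algorithm.SHA256, password, salt, 600_000, dk[:])
}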

@@ -1,3 +1,9 @@
/*
package poly1305 implements the Poly1305 one-time MAC algorithm.
See:
- https://datatracker.ietf.org/doc/html/rfc8439
*/
package poly1305
import "core:crypto"
@@ -5,13 +11,20 @@ import field "core:crypto/_fiat/field_poly1305"
import "core:encoding/endian"
import "core:mem"
// KEY_SIZE is the Poly1305 key size in bytes.
KEY_SIZE :: 32
// TAG_SIZE is the Poly1305 tag size in bytes.
TAG_SIZE :: 16
@(private)
_BLOCK_SIZE :: 16
sum :: proc (dst, msg, key: []byte) {
// sum will compute the Poly1305 MAC with the key over msg, and write
// the computed tag to dst. It requires that the dst buffer is the tag
// size.
//
// The key SHOULD be unique and MUST be unpredictable for each invocation.
sum :: proc(dst, msg, key: []byte) {
ctx: Context = ---
init(&ctx, key)
@@ -19,9 +32,12 @@ sum :: proc (dst, msg, key: []byte) {
final(&ctx, dst)
}
verify :: proc (tag, msg, key: []byte) -> bool {
// verify will verify the Poly1305 tag computed with the key over msg and
// return true iff the tag is valid. It requires that the tag is correctly
// sized.
verify :: proc(tag, msg, key: []byte) -> bool {
ctx: Context = ---
derived_tag: [16]byte = ---
derived_tag: [TAG_SIZE]byte = ---
init(&ctx, key)
update(&ctx, msg)
@@ -30,18 +46,19 @@ verify :: proc (tag, msg, key: []byte) -> bool {
return crypto.compare_constant_time(derived_tag[:], tag) == 1
}
// Context is a Poly1305 instance.
Context :: struct {
_r: field.Tight_Field_Element,
_a: field.Tight_Field_Element,
_s: field.Tight_Field_Element,
_buffer: [_BLOCK_SIZE]byte,
_leftover: int,
_r: field.Tight_Field_Element,
_a: field.Tight_Field_Element,
_s: field.Tight_Field_Element,
_buffer: [_BLOCK_SIZE]byte,
_leftover: int,
_is_initialized: bool,
}
init :: proc (ctx: ^Context, key: []byte) {
// init initializes a Context with the specified key. The key SHOULD be
// unique and MUST be unpredictable for each invocation.
init :: proc(ctx: ^Context, key: []byte) {
if len(key) != KEY_SIZE {
panic("crypto/poly1305: invalid key size")
}
@@ -64,7 +81,8 @@ init :: proc (ctx: ^Context, key: []byte) {
ctx._is_initialized = true
}
update :: proc (ctx: ^Context, data: []byte) {
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx._is_initialized)
msg := data
@@ -101,8 +119,11 @@ update :: proc (ctx: ^Context, data: []byte) {
}
}
final :: proc (ctx: ^Context, dst: []byte) {
// final finalizes the Context, writes the tag to dst, and calls
// reset on the Context.
final :: proc(ctx: ^Context, dst: []byte) {
assert(ctx._is_initialized)
defer reset(ctx)
if len(dst) != TAG_SIZE {
panic("poly1305: invalid destination tag size")
@@ -125,11 +146,11 @@ final :: proc (ctx: ^Context, dst: []byte) {
tmp: [32]byte = ---
field.fe_to_bytes(&tmp, &ctx._a)
copy_slice(dst, tmp[0:16])
reset(ctx)
}
reset :: proc (ctx: ^Context) {
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
mem.zero_explicit(&ctx._r, size_of(ctx._r))
mem.zero_explicit(&ctx._a, size_of(ctx._a))
mem.zero_explicit(&ctx._s, size_of(ctx._s))
@@ -139,7 +160,7 @@ reset :: proc (ctx: ^Context) {
}
@(private)
_blocks :: proc (ctx: ^Context, msg: []byte, final := false) {
_blocks :: proc(ctx: ^Context, msg: []byte, final := false) {
n: field.Tight_Field_Element = ---
final_byte := byte(!final)

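A one-shot sum/verify sketch for the API documented above (not part of this commit; the zero-valued key is a placeholder, and per the doc comments a real key MUST be unpredictable and used only once):

package poly1305_example

import "core:crypto/poly1305"

main :: proc() {
	key: [poly1305.KEY_SIZE]byte // Placeholder; MUST be unpredictable, one-time.
	msg_str := "message"
	msg := transmute([]byte)(msg_str)

	tag: [poly1305.TAG_SIZE]byte
	poly1305.sum(tag[:], msg, key[:])

	assert(poly1305.verify(tag[:], msg, key[:]))
}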

@@ -67,6 +67,7 @@ init_512 :: proc(ctx: ^Context) {
@(private)
_init :: proc(ctx: ^Context) {
ctx.dsbyte = _sha3.DS_SHA3
_sha3.init(transmute(^_sha3.Context)(ctx))
}


@@ -1,10 +1,11 @@
/*
package shake implements the SHAKE XOF algorithm family.
package shake implements the SHAKE and cSHAKE XOF algorithm families.
The SHA3 hash algorithm can be found in the crypto/sha3 package.
See:
- https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.202.pdf
- https://nvlpubs.nist.gov/nistpubs/specialpublications/nist.sp.800-185.pdf
*/
package shake
@@ -18,24 +19,27 @@ package shake
import "../_sha3"
// Context is a SHAKE128 or SHAKE256 instance.
// Context is a SHAKE128, SHAKE256, cSHAKE128, or cSHAKE256 instance.
Context :: distinct _sha3.Context
// init_128 initializes a Context for SHAKE128.
init_128 :: proc(ctx: ^Context) {
ctx.mdlen = 128 / 8
_init(ctx)
_sha3.init_cshake(transmute(^_sha3.Context)(ctx), nil, nil, 128)
}
// init_256 initializes a Context for SHAKE256.
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = 256 / 8
_init(ctx)
_sha3.init_cshake(transmute(^_sha3.Context)(ctx), nil, nil, 256)
}
@(private)
_init :: proc(ctx: ^Context) {
_sha3.init(transmute(^_sha3.Context)(ctx))
// init_cshake_128 initializes a Context for cSHAKE128.
init_cshake_128 :: proc(ctx: ^Context, domain_sep: []byte) {
_sha3.init_cshake(transmute(^_sha3.Context)(ctx), nil, domain_sep, 128)
}
// init_cshake_256 initializes a Context for cSHAKE256.
init_cshake_256 :: proc(ctx: ^Context, domain_sep: []byte) {
_sha3.init_cshake(transmute(^_sha3.Context)(ctx), nil, domain_sep, 256)
}
// write writes more data into the SHAKE instance. This MUST not be called

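A minimal cSHAKE sketch using the new init_cshake_256 entry point (not part of this commit; the domain separator and input are illustrative):

package shake_example

import "core:crypto/shake"

main :: proc() {
	ds_str := "Email Signature" // cSHAKE domain separator.
	in_str := "some input"

	ctx: shake.Context
	shake.init_cshake_256(&ctx, transmute([]byte)(ds_str))
	shake.write(&ctx, transmute([]byte)(in_str))

	out: [64]byte // XOF: read as much output as needed.
	shake.read(&ctx, out[:])
}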

@@ -0,0 +1,66 @@
/*
package tuplehash implements the TupleHash and TupleHashXOF algorithms.
See:
- https://nvlpubs.nist.gov/nistpubs/specialpublications/nist.sp.800-185.pdf
*/
package tuplehash
import "../_sha3"
// Context is a TupleHash or TupleHashXOF instance.
Context :: distinct _sha3.Context
// init_128 initializes a Context for TupleHash128 or TupleHashXOF128.
init_128 :: proc(ctx: ^Context, domain_sep: []byte) {
_sha3.init_cshake(transmute(^_sha3.Context)(ctx), N_TUPLEHASH, domain_sep, 128)
}
// init_256 initializes a Context for TupleHash256 or TupleHashXOF256.
init_256 :: proc(ctx: ^Context, domain_sep: []byte) {
_sha3.init_cshake(transmute(^_sha3.Context)(ctx), N_TUPLEHASH, domain_sep, 256)
}
// write_element writes a tuple element into the TupleHash or TupleHashXOF
// instance. This MUST not be called after any reads have been done, and
// any attempts to do so will panic.
write_element :: proc(ctx: ^Context, data: []byte) {
_, _ = _sha3.encode_string(transmute(^_sha3.Context)(ctx), data)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_sha3.final_cshake(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}
// read reads output from the TupleHashXOF instance. There is no practical
// upper limit to the amount of data that can be read from TupleHashXOF.
// After read has been called one or more times, further calls to
// write_element will panic.
read :: proc(ctx: ^Context, dst: []byte) {
ctx_ := transmute(^_sha3.Context)(ctx)
if !ctx.is_finalized {
_sha3.encode_byte_len(ctx_, 0, false) // right_encode
_sha3.shake_xof(ctx_)
}
_sha3.shake_out(ctx_, dst)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}
@(private)
N_TUPLEHASH := []byte{'T', 'u', 'p', 'l', 'e', 'H', 'a', 's', 'h'}

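A minimal TupleHash128 sketch for the API above (not part of this commit; the domain separator and tuple elements are illustrative):

package tuplehash_example

import "core:crypto/tuplehash"

main :: proc() {
	ds_str := "My Tuple App"

	ctx: tuplehash.Context
	tuplehash.init_128(&ctx, transmute([]byte)(ds_str))
	tuplehash.write_element(&ctx, []byte{0x00, 0x01, 0x02})
	tuplehash.write_element(&ctx, []byte{0x10, 0x11, 0x12, 0x13, 0x14, 0x15})

	digest: [32]byte
	tuplehash.final(&ctx, digest[:]) // Or tuplehash.read for the XOF variant.
}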

@@ -1,9 +1,18 @@
/*
package x25519 implements the X25519 (aka curve25519) Elliptic-Curve
Diffie-Hellman key exchange protocol.
See:
- https://www.rfc-editor.org/rfc/rfc7748
*/
package x25519
import field "core:crypto/_fiat/field_curve25519"
import "core:mem"
// SCALAR_SIZE is the size of a X25519 scalar (private key) in bytes.
SCALAR_SIZE :: 32
// POINT_SIZE is the size of a X25519 point (public key/shared secret) in bytes.
POINT_SIZE :: 32
@(private)
@@ -14,11 +23,11 @@ _scalar_bit :: #force_inline proc "contextless" (s: ^[32]byte, i: int) -> u8 {
if i < 0 {
return 0
}
return (s[i>>3] >> uint(i&7)) & 1
return (s[i >> 3] >> uint(i & 7)) & 1
}
@(private)
_scalarmult :: proc (out, scalar, point: ^[32]byte) {
_scalarmult :: proc(out, scalar, point: ^[32]byte) {
// Montgomery pseudo-multiplication taken from Monocypher.
// computes the scalar product
@@ -26,7 +35,7 @@ _scalarmult :: proc (out, scalar, point: ^[32]byte) {
field.fe_from_bytes(&x1, point)
// computes the actual scalar product (the result is in x2 and z2)
x2, x3, z2, z3: field.Tight_Field_Element = ---, ---, ---, ---
x2, x3, z2, z3: field.Tight_Field_Element = ---, ---, ---, ---
t0, t1: field.Loose_Field_Element = ---, ---
// Montgomery ladder
@@ -38,7 +47,7 @@ _scalarmult :: proc (out, scalar, point: ^[32]byte) {
field.fe_one(&z3)
swap: int
for pos := 255-1; pos >= 0; pos = pos - 1 {
for pos := 255 - 1; pos >= 0; pos = pos - 1 {
// constant time conditional swap before ladder step
b := int(_scalar_bit(scalar, pos))
swap ~= b // xor trick avoids swapping at the end of the loop
@@ -94,7 +103,9 @@ _scalarmult :: proc (out, scalar, point: ^[32]byte) {
mem.zero_explicit(&t1, size_of(t1))
}
scalarmult :: proc (dst, scalar, point: []byte) {
// scalarmult "multiplies" the provided scalar and point, and writes the
// resulting point to dst.
scalarmult :: proc(dst, scalar, point: []byte) {
if len(scalar) != SCALAR_SIZE {
panic("crypto/x25519: invalid scalar size")
}
@@ -123,7 +134,9 @@ scalarmult :: proc (dst, scalar, point: []byte) {
mem.zero_explicit(&d, size_of(d))
}
scalarmult_basepoint :: proc (dst, scalar: []byte) {
// scalarmult_basepoint "multiplies" the provided scalar with the X25519
// base point and writes the resulting point to dst.
scalarmult_basepoint :: proc(dst, scalar: []byte) {
// TODO/perf: Switch to using a precomputed table.
scalarmult(dst, scalar, _BASE_POINT[:])
}

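A minimal Diffie-Hellman sketch for the documented API (not part of this commit; the zero-valued scalars are placeholders that MUST come from a CSPRNG in practice):

package x25519_example

import "core:crypto/x25519"

main :: proc() {
	// Private keys; placeholders for randomly generated scalars.
	alice_priv, bob_priv: [x25519.SCALAR_SIZE]byte

	// Each party derives their public key from the base point.
	alice_pub, bob_pub: [x25519.POINT_SIZE]byte
	x25519.scalarmult_basepoint(alice_pub[:], alice_priv[:])
	x25519.scalarmult_basepoint(bob_pub[:], bob_priv[:])

	// Both sides arrive at the same shared secret.
	shared_a, shared_b: [x25519.POINT_SIZE]byte
	x25519.scalarmult(shared_a[:], alice_priv[:], bob_pub[:])
	x25519.scalarmult(shared_b[:], bob_priv[:], alice_pub[:])
}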

@@ -29,15 +29,19 @@ import blake2s "core:crypto/blake2s"
import chacha20 "core:crypto/chacha20"
import chacha20poly1305 "core:crypto/chacha20poly1305"
import crypto_hash "core:crypto/hash"
import hkdf "core:crypto/hkdf"
import hmac "core:crypto/hmac"
import kmac "core:crypto/kmac"
import keccak "core:crypto/legacy/keccak"
import md5 "core:crypto/legacy/md5"
import sha1 "core:crypto/legacy/sha1"
import pbkdf2 "core:crypto/pbkdf2"
import poly1305 "core:crypto/poly1305"
import sha2 "core:crypto/sha2"
import sha3 "core:crypto/sha3"
import shake "core:crypto/shake"
import sm3 "core:crypto/sm3"
import tuplehash "core:crypto/tuplehash"
import x25519 "core:crypto/x25519"
import pe "core:debug/pe"
@@ -147,14 +151,18 @@ _ :: blake2s
_ :: chacha20
_ :: chacha20poly1305
_ :: hmac
_ :: hkdf
_ :: kmac
_ :: keccak
_ :: md5
_ :: pbkdf2
_ :: poly1305
_ :: sha1
_ :: sha2
_ :: sha3
_ :: shake
_ :: sm3
_ :: tuplehash
_ :: x25519
_ :: pe
_ :: dynlib


@@ -1,16 +1,5 @@
package all
import botan_bindings "vendor:botan/bindings"
import botan_blake2b "vendor:botan/blake2b"
import keccak "vendor:botan/legacy/keccak"
import md5 "vendor:botan/legacy/md5"
import sha1 "vendor:botan/legacy/sha1"
import sha2 "vendor:botan/sha2"
import sha3 "vendor:botan/sha3"
import shake "vendor:botan/shake"
import siphash "vendor:botan/siphash"
import sm3 "vendor:botan/sm3"
import cgltf "vendor:cgltf"
// import commonmark "vendor:commonmark"
import ENet "vendor:ENet"
@@ -41,18 +30,6 @@ import fontstash "vendor:fontstash"
import xlib "vendor:x11/xlib"
_ :: botan_bindings
_ :: botan_blake2b
_ :: keccak
_ :: md5
_ :: sha1
_ :: sha2
_ :: sha3
_ :: shake
_ :: siphash
_ :: sm3
_ :: cgltf
// _ :: commonmark
_ :: ENet


@@ -45,7 +45,7 @@ hash_test:
$(ODIN) run hash $(COMMON) -o:speed -no-bounds-check -out:test_hash
crypto_test:
$(ODIN) run crypto $(COMMON) -o:speed -no-bounds-check -out:test_crypto
$(ODIN) run crypto $(COMMON) $(COLLECTION) -o:speed -no-bounds-check -out:test_crypto
noise_test:
$(ODIN) run math/noise $(COMMON) -out:test_noise


@@ -31,7 +31,7 @@ echo ---
echo ---
echo Running core:crypto tests
echo ---
%PATH_TO_ODIN% run crypto %COMMON% -out:test_crypto.exe || exit /b
%PATH_TO_ODIN% run crypto %COMMON% %COLLECTION% -out:test_crypto.exe || exit /b
echo ---
echo Running core:encoding tests


@@ -15,36 +15,14 @@ package test_core_crypto
import "core:encoding/hex"
import "core:fmt"
import "core:mem"
import "core:os"
import "core:testing"
import "core:crypto"
import "core:crypto/chacha20"
import "core:crypto/chacha20poly1305"
import "core:crypto/shake"
import "core:crypto/x25519"
TEST_count := 0
TEST_fail := 0
when ODIN_TEST {
expect :: testing.expect
log :: testing.log
} else {
expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) {
TEST_count += 1
if !condition {
TEST_fail += 1
fmt.printf("[%v] %v\n", loc, message)
return
}
}
log :: proc(t: ^testing.T, v: any, loc := #caller_location) {
fmt.printf("[%v] ", loc)
fmt.printf("log: %v\n", v)
}
}
import tc "tests:common"
main :: proc() {
t := testing.T{}
@@ -53,25 +31,23 @@ main :: proc() {
test_hash(&t)
test_mac(&t)
test_kdf(&t) // After hash/mac tests because those should pass first.
test_chacha20(&t)
test_chacha20poly1305(&t)
test_shake(&t)
test_x25519(&t)
test_sha3_variants(&t)
bench_crypto(&t)
fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count)
if TEST_fail > 0 {
os.exit(1)
}
tc.report(&t)
}
_PLAINTEXT_SUNSCREEN_STR := "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it."
@(test)
test_chacha20 :: proc(t: ^testing.T) {
log(t, "Testing (X)ChaCha20")
tc.log(t, "Testing (X)ChaCha20")
// Test cases taken from RFC 8439, and draft-irtf-cfrg-xchacha-03
plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)
@@ -114,7 +90,7 @@ test_chacha20 :: proc(t: ^testing.T) {
chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])
derived_ciphertext_str := string(hex.encode(derived_ciphertext[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_ciphertext_str == ciphertext_str,
fmt.tprintf(
@@ -161,7 +137,7 @@ test_chacha20 :: proc(t: ^testing.T) {
chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])
derived_ciphertext_str = string(hex.encode(derived_ciphertext[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_ciphertext_str == xciphertext_str,
fmt.tprintf(
@@ -174,7 +150,7 @@ test_chacha20 :: proc(t: ^testing.T) {
@(test)
test_chacha20poly1305 :: proc(t: ^testing.T) {
log(t, "Testing chacha20poly1205")
tc.log(t, "Testing chacha20poly1205")
plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)
@@ -233,7 +209,7 @@ test_chacha20poly1305 :: proc(t: ^testing.T) {
)
derived_ciphertext_str := string(hex.encode(derived_ciphertext[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_ciphertext_str == ciphertext_str,
fmt.tprintf(
@@ -244,7 +220,7 @@ test_chacha20poly1305 :: proc(t: ^testing.T) {
)
derived_tag_str := string(hex.encode(derived_tag[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_tag_str == tag_str,
fmt.tprintf(
@@ -264,8 +240,8 @@ test_chacha20poly1305 :: proc(t: ^testing.T) {
ciphertext[:],
)
derived_plaintext_str := string(derived_plaintext[:])
expect(t, ok, "Expected true for decrypt(tag, aad, ciphertext)")
expect(
tc.expect(t, ok, "Expected true for decrypt(tag, aad, ciphertext)")
tc.expect(
t,
derived_plaintext_str == _PLAINTEXT_SUNSCREEN_STR,
fmt.tprintf(
@@ -284,7 +260,7 @@ test_chacha20poly1305 :: proc(t: ^testing.T) {
aad[:],
derived_ciphertext[:],
)
expect(t, !ok, "Expected false for decrypt(tag, aad, corrupted_ciphertext)")
tc.expect(t, !ok, "Expected false for decrypt(tag, aad, corrupted_ciphertext)")
aad[0] ~= 0xa5
ok = chacha20poly1305.decrypt(
@@ -295,18 +271,12 @@ test_chacha20poly1305 :: proc(t: ^testing.T) {
aad[:],
ciphertext[:],
)
expect(t, !ok, "Expected false for decrypt(tag, corrupted_aad, ciphertext)")
}
TestECDH :: struct {
scalar: string,
point: string,
product: string,
tc.expect(t, !ok, "Expected false for decrypt(tag, corrupted_aad, ciphertext)")
}
@(test)
test_x25519 :: proc(t: ^testing.T) {
log(t, "Testing X25519")
tc.log(t, "Testing X25519")
// Local copy of this so that the base point doesn't need to be exported.
_BASE_POINT: [32]byte = {
@@ -314,7 +284,11 @@ test_x25519 :: proc(t: ^testing.T) {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
}
test_vectors := [?]TestECDH {
test_vectors := []struct{
scalar: string,
point: string,
product: string,
} {
// Test vectors from RFC 7748
{
"a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
@@ -335,7 +309,7 @@ test_x25519 :: proc(t: ^testing.T) {
x25519.scalarmult(derived_point[:], scalar[:], point[:])
derived_point_str := string(hex.encode(derived_point[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_point_str == v.product,
fmt.tprintf(
@@ -353,7 +327,7 @@ test_x25519 :: proc(t: ^testing.T) {
x25519.scalarmult(p2[:], scalar[:], _BASE_POINT[:])
p1_str := string(hex.encode(p1[:], context.temp_allocator))
p2_str := string(hex.encode(p2[:], context.temp_allocator))
expect(
tc.expect(
t,
p1_str == p2_str,
fmt.tprintf(
@@ -371,16 +345,14 @@ test_x25519 :: proc(t: ^testing.T) {
@(test)
test_rand_bytes :: proc(t: ^testing.T) {
log(t, "Testing rand_bytes")
tc.log(t, "Testing rand_bytes")
if ODIN_OS != .Linux {
log(t, "rand_bytes not supported - skipping")
tc.log(t, "rand_bytes not supported - skipping")
return
}
allocator := context.allocator
buf := make([]byte, 1 << 25, allocator)
buf := make([]byte, 1 << 25, context.allocator)
defer delete(buf)
// Testing a CSPRNG for correctness is incredibly involved and
@@ -405,84 +377,9 @@ test_rand_bytes :: proc(t: ^testing.T) {
}
}
expect(
tc.expect(
t,
seems_ok,
"Expected to randomize the head and tail of the buffer within a handful of attempts",
)
}
TestXOF :: struct {
sec_strength: int,
output: string,
str: string,
}
@(test)
test_shake :: proc(t: ^testing.T) {
test_vectors := [?]TestXOF {
// SHAKE128
{
128,
"7f9c2ba4e88f827d616045507605853e",
"",
},
{
128,
"f4202e3c5852f9182a0430fd8144f0a7",
"The quick brown fox jumps over the lazy dog",
},
{
128,
"853f4538be0db9621a6cea659a06c110",
"The quick brown fox jumps over the lazy dof",
},
// SHAKE256
{
256,
"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f",
"",
},
{
256,
"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca",
"The quick brown fox jumps over the lazy dog",
},
{
256,
"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401",
"The quick brown fox jumps over the lazy dof",
},
}
for v in test_vectors {
dst := make([]byte, len(v.output)/2, context.temp_allocator)
data := transmute([]byte)(v.str)
ctx: shake.Context
switch v.sec_strength {
case 128:
shake.init_128(&ctx)
case 256:
shake.init_256(&ctx)
}
shake.write(&ctx, data)
shake.read(&ctx, dst)
dst_str := string(hex.encode(dst, context.temp_allocator))
expect(
t,
dst_str == v.output,
fmt.tprintf(
"SHAKE%d: Expected: %s for input of %s, but got %s instead",
v.sec_strength,
v.output,
v.str,
dst_str,
),
)
}
}


@@ -8,22 +8,22 @@ import "core:testing"
import "core:crypto/hash"
TestHash :: struct {
algo: hash.Algorithm,
hash: string,
str: string,
}
import tc "tests:common"
@(test)
test_hash :: proc(t: ^testing.T) {
log(t, "Testing Hashes")
tc.log(t, "Testing Hashes")
// TODO:
// - Stick the test vectors in a JSON file or something.
data_1_000_000_a := strings.repeat("a", 1_000_000)
digest: [64]byte // 512-bits is enough for every digest for now.
test_vectors := [?]TestHash {
digest: [hash.MAX_DIGEST_SIZE]byte
test_vectors := []struct{
algo: hash.Algorithm,
hash: string,
str: string,
} {
// BLAKE2b
{
hash.Algorithm.BLAKE2B,
@@ -424,9 +424,9 @@ test_hash :: proc(t: ^testing.T) {
// MD5 (Insecure)
// - https://datatracker.ietf.org/doc/html/rfc1321
TestHash{hash.Algorithm.Insecure_MD5, "d41d8cd98f00b204e9800998ecf8427e", ""},
TestHash{hash.Algorithm.Insecure_MD5, "0cc175b9c0f1b6a831c399e269772661", "a"},
TestHash{hash.Algorithm.Insecure_MD5, "900150983cd24fb0d6963f7d28e17f72", "abc"},
{hash.Algorithm.Insecure_MD5, "d41d8cd98f00b204e9800998ecf8427e", ""},
{hash.Algorithm.Insecure_MD5, "0cc175b9c0f1b6a831c399e269772661", "a"},
{hash.Algorithm.Insecure_MD5, "900150983cd24fb0d6963f7d28e17f72", "abc"},
{
hash.Algorithm.Insecure_MD5,
"f96b697d7cb7938d525a2f31aaf161d0",
@@ -451,8 +451,8 @@ test_hash :: proc(t: ^testing.T) {
// SHA-1 (Insecure)
// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// - https://www.di-mgt.com.au/sha_testvectors.html
TestHash{hash.Algorithm.Insecure_SHA1, "da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
TestHash{hash.Algorithm.Insecure_SHA1, "a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
{hash.Algorithm.Insecure_SHA1, "da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
{hash.Algorithm.Insecure_SHA1, "a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
{
hash.Algorithm.Insecure_SHA1,
"f9537c23893d2014f365adf8ffe33b8eb0297ed1",
@@ -463,7 +463,7 @@ test_hash :: proc(t: ^testing.T) {
"346fb528a24b48f563cb061470bcfd23740427ad",
"jkijkljklmklmnlmnomnopnopq",
},
TestHash{hash.Algorithm.Insecure_SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
{hash.Algorithm.Insecure_SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
{
hash.Algorithm.Insecure_SHA1,
"c729c8996ee0a6f74f4f3248e8957edf704fb624",
@@ -493,7 +493,7 @@ test_hash :: proc(t: ^testing.T) {
dst_str := string(hex.encode(dst, context.temp_allocator))
expect(
tc.expect(
t,
dst_str == v.hash,
fmt.tprintf(
@@ -518,7 +518,7 @@ test_hash :: proc(t: ^testing.T) {
// still correct.
digest_sz := hash.DIGEST_SIZES[algo]
block_sz := hash.BLOCK_SIZES[algo]
expect(
tc.expect(
t,
digest_sz <= hash.MAX_DIGEST_SIZE,
fmt.tprintf(
@@ -528,7 +528,7 @@ test_hash :: proc(t: ^testing.T) {
hash.MAX_DIGEST_SIZE,
),
)
expect(
tc.expect(
t,
block_sz <= hash.MAX_BLOCK_SIZE,
fmt.tprintf(
@@ -550,7 +550,7 @@ test_hash :: proc(t: ^testing.T) {
a_str := string(hex.encode(digest_a, context.temp_allocator))
b_str := string(hex.encode(digest_b, context.temp_allocator))
expect(
tc.expect(
t,
a_str == b_str,
fmt.tprintf(
@@ -568,7 +568,7 @@ test_hash :: proc(t: ^testing.T) {
api_algo := hash.algorithm(&ctx)
api_digest_size := hash.digest_size(&ctx)
expect(
tc.expect(
t,
algo == api_algo,
fmt.tprintf(
@@ -578,7 +578,7 @@ test_hash :: proc(t: ^testing.T) {
api_algo,
),
)
expect(
tc.expect(
t,
hash.DIGEST_SIZES[algo] == api_digest_size,
fmt.tprintf(
@@ -601,7 +601,7 @@ test_hash :: proc(t: ^testing.T) {
b_str = string(hex.encode(digest_b, context.temp_allocator))
c_str := string(hex.encode(digest_c, context.temp_allocator))
expect(
tc.expect(
t,
a_str == b_str && b_str == c_str,
fmt.tprintf(


@@ -0,0 +1,188 @@
package test_core_crypto
import "core:encoding/hex"
import "core:fmt"
import "core:testing"
import "core:crypto/hash"
import "core:crypto/hkdf"
import "core:crypto/pbkdf2"
import tc "tests:common"
@(test)
test_kdf :: proc(t: ^testing.T) {
tc.log(t, "Testing KDFs")
test_hkdf(t)
test_pbkdf2(t)
}
@(test)
test_hkdf :: proc(t: ^testing.T) {
tc.log(t, "Testing HKDF")
tmp: [128]byte // Good enough.
test_vectors := []struct {
algo: hash.Algorithm,
ikm: string,
salt: string,
info: string,
okm: string,
} {
// SHA-256
// - https://www.rfc-editor.org/rfc/rfc5869
{
hash.Algorithm.SHA256,
"0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
"000102030405060708090a0b0c",
"f0f1f2f3f4f5f6f7f8f9",
"3cb25f25faacd57a90434f64d0362f2a2d2d0a90cf1a5a4c5db02d56ecc4c5bf34007208d5b887185865",
},
{
hash.Algorithm.SHA256,
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f",
"606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeaf",
"b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
"b11e398dc80327a1c8e7f78c596a49344f012eda2d4efad8a050cc4c19afa97c59045a99cac7827271cb41c65e590e09da3275600c2f09b8367793a9aca3db71cc30c58179ec3e87c14c01d5c1f3434f1d87",
},
{
hash.Algorithm.SHA256,
"0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
"",
"",
"8da4e775a563c18f715f802a063c5a31b8a11f5c5ee1879ec3454e5f3c738d2d9d201395faa4b61a96c8",
},
}
for v, _ in test_vectors {
algo_name := hash.ALGORITHM_NAMES[v.algo]
dst := tmp[:len(v.okm) / 2]
ikm, _ := hex.decode(transmute([]byte)(v.ikm), context.temp_allocator)
salt, _ := hex.decode(transmute([]byte)(v.salt), context.temp_allocator)
info, _ := hex.decode(transmute([]byte)(v.info), context.temp_allocator)
hkdf.extract_and_expand(v.algo, salt, ikm, info, dst)
dst_str := string(hex.encode(dst, context.temp_allocator))
tc.expect(
t,
dst_str == v.okm,
fmt.tprintf(
"HKDF-%s: Expected: %s for input of (%s, %s, %s), but got %s instead",
algo_name,
v.okm,
v.ikm,
v.salt,
v.info,
dst_str,
),
)
}
}
@(test)
test_pbkdf2 :: proc(t: ^testing.T) {
tc.log(t, "Testing PBKDF2")
tmp: [64]byte // 512-bits is enough for every output for now.
test_vectors := []struct {
algo: hash.Algorithm,
password: string,
salt: string,
iterations: u32,
dk: string,
} {
// SHA-1
// - https://www.rfc-editor.org/rfc/rfc2898
{
hash.Algorithm.Insecure_SHA1,
"password",
"salt",
1,
"0c60c80f961f0e71f3a9b524af6012062fe037a6",
},
{
hash.Algorithm.Insecure_SHA1,
"password",
"salt",
2,
"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957",
},
{
hash.Algorithm.Insecure_SHA1,
"password",
"salt",
4096,
"4b007901b765489abead49d926f721d065a429c1",
},
// This passes but takes about 8 seconds on a modern-ish system.
//
// {
// hash.Algorithm.Insecure_SHA1,
// "password",
// "salt",
// 16777216,
// "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984",
// },
{
hash.Algorithm.Insecure_SHA1,
"passwordPASSWORDpassword",
"saltSALTsaltSALTsaltSALTsaltSALTsalt",
4096,
"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038",
},
{
hash.Algorithm.Insecure_SHA1,
"pass\x00word",
"sa\x00lt",
4096,
"56fa6aa75548099dcc37d7f03425e0c3",
},
// SHA-256
// - https://www.rfc-editor.org/rfc/rfc7914
{
hash.Algorithm.SHA256,
"passwd",
"salt",
1,
"55ac046e56e3089fec1691c22544b605f94185216dde0465e68b9d57c20dacbc49ca9cccf179b645991664b39d77ef317c71b845b1e30bd509112041d3a19783",
},
{
hash.Algorithm.SHA256,
"Password",
"NaCl",
80000,
"4ddcd8f60b98be21830cee5ef22701f9641a4418d04c0414aeff08876b34ab56a1d425a1225833549adb841b51c9b3176a272bdebba1d078478f62b397f33c8d",
},
}
for v, _ in test_vectors {
algo_name := hash.ALGORITHM_NAMES[v.algo]
dst := tmp[:len(v.dk) / 2]
password := transmute([]byte)(v.password)
salt := transmute([]byte)(v.salt)
pbkdf2.derive(v.algo, password, salt, v.iterations, dst)
dst_str := string(hex.encode(dst, context.temp_allocator))
tc.expect(
t,
dst_str == v.dk,
fmt.tprintf(
"HMAC-%s: Expected: %s for input of (%s, %s, %d), but got %s instead",
algo_name,
v.dk,
v.password,
v.salt,
v.iterations,
dst_str,
),
)
}
}


@@ -10,9 +10,11 @@ import "core:crypto/hmac"
import "core:crypto/poly1305"
import "core:crypto/siphash"
import tc "tests:common"
@(test)
test_mac :: proc(t: ^testing.T) {
log(t, "Testing MACs")
tc.log(t, "Testing MACs")
test_hmac(t)
test_poly1305(t)
@@ -81,7 +83,7 @@ test_hmac :: proc(t: ^testing.T) {
msg_str := string(hex.encode(msg, context.temp_allocator))
dst_str := string(hex.encode(dst[:tag_len], context.temp_allocator))
expect(
tc.expect(
t,
dst_str == expected_str,
fmt.tprintf(
@@ -97,7 +99,7 @@ test_hmac :: proc(t: ^testing.T) {
hmac.sum(algo, dst, msg, key)
oneshot_str := string(hex.encode(dst[:tag_len], context.temp_allocator))
expect(
tc.expect(
t,
oneshot_str == expected_str,
fmt.tprintf(
@@ -114,7 +116,7 @@ test_hmac :: proc(t: ^testing.T) {
@(test)
test_poly1305 :: proc(t: ^testing.T) {
log(t, "Testing poly1305")
tc.log(t, "Testing poly1305")
// Test cases taken from poly1305-donna.
key := [poly1305.KEY_SIZE]byte {
@@ -152,13 +154,13 @@ test_poly1305 :: proc(t: ^testing.T) {
// Verify - oneshot + compare
ok := poly1305.verify(tag[:], msg[:], key[:])
expect(t, ok, "oneshot verify call failed")
tc.expect(t, ok, "oneshot verify call failed")
// Sum - oneshot
derived_tag: [poly1305.TAG_SIZE]byte
poly1305.sum(derived_tag[:], msg[:], key[:])
derived_tag_str := string(hex.encode(derived_tag[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_tag_str == tag_str,
fmt.tprintf("Expected %s for sum(msg, key), but got %s instead", tag_str, derived_tag_str),
@@ -177,7 +179,7 @@ test_poly1305 :: proc(t: ^testing.T) {
}
poly1305.final(&ctx, derived_tag[:])
derived_tag_str = string(hex.encode(derived_tag[:], context.temp_allocator))
expect(
tc.expect(
t,
derived_tag_str == tag_str,
fmt.tprintf(
@@ -190,7 +192,7 @@ test_poly1305 :: proc(t: ^testing.T) {
@(test)
test_siphash_2_4 :: proc(t: ^testing.T) {
log(t, "Testing SipHash-2-4")
tc.log(t, "Testing SipHash-2-4")
// Test vectors from
// https://github.com/veorq/SipHash/blob/master/vectors.h
@@ -227,7 +229,7 @@ test_siphash_2_4 :: proc(t: ^testing.T) {
vector := test_vectors[i]
computed := siphash.sum_2_4(data[:], key[:])
expect(
tc.expect(
t,
computed == vector,
fmt.tprintf(


@@ -0,0 +1,441 @@
package test_core_crypto
import "core:encoding/hex"
import "core:fmt"
import "core:testing"
import "core:crypto/kmac"
import "core:crypto/shake"
import "core:crypto/tuplehash"
import tc "tests:common"
@(test)
test_sha3_variants :: proc(t: ^testing.T) {
tc.log(t, "Testing SHA3 derived functions")
test_shake(t)
test_cshake(t)
test_tuplehash(t)
test_kmac(t)
}
@(test)
test_shake :: proc(t: ^testing.T) {
tc.log(t, "Testing SHAKE")
test_vectors := []struct {
sec_strength: int,
output: string,
str: string,
} {
// SHAKE128
{128, "7f9c2ba4e88f827d616045507605853e", ""},
{128, "f4202e3c5852f9182a0430fd8144f0a7", "The quick brown fox jumps over the lazy dog"},
{128, "853f4538be0db9621a6cea659a06c110", "The quick brown fox jumps over the lazy dof"},
// SHAKE256
{256, "46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f", ""},
{
256,
"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca",
"The quick brown fox jumps over the lazy dog",
},
{
256,
"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401",
"The quick brown fox jumps over the lazy dof",
},
}
for v in test_vectors {
dst := make([]byte, len(v.output) / 2, context.temp_allocator)
ctx: shake.Context
switch v.sec_strength {
case 128:
shake.init_128(&ctx)
case 256:
shake.init_256(&ctx)
}
shake.write(&ctx, transmute([]byte)(v.str))
shake.read(&ctx, dst)
dst_str := string(hex.encode(dst, context.temp_allocator))
tc.expect(
t,
dst_str == v.output,
fmt.tprintf(
"SHAKE%d: Expected: %s for input of %s, but got %s instead",
v.sec_strength,
v.output,
v.str,
dst_str,
),
)
}
}
@(test)
test_cshake :: proc(t: ^testing.T) {
tc.log(t, "Testing cSHAKE")
test_vectors := []struct {
sec_strength: int,
domainsep: string,
output: string,
str: string,
} {
// cSHAKE128
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/cSHAKE_samples.pdf
{
128,
"Email Signature",
"c1c36925b6409a04f1b504fcbca9d82b4017277cb5ed2b2065fc1d3814d5aaf5",
"00010203",
},
{
128,
"Email Signature",
"c5221d50e4f822d96a2e8881a961420f294b7b24fe3d2094baed2c6524cc166b",
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7",
},
// cSHAKE256
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/cSHAKE_samples.pdf
{
256,
"Email Signature",
"d008828e2b80ac9d2218ffee1d070c48b8e4c87bff32c9699d5b6896eee0edd164020e2be0560858d9c00c037e34a96937c561a74c412bb4c746469527281c8c",
"00010203",
},
{
256,
"Email Signature",
"07dc27b11e51fbac75bc7b3c1d983e8b4b85fb1defaf218912ac86430273091727f42b17ed1df63e8ec118f04b23633c1dfb1574c8fb55cb45da8e25afb092bb",
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7",
},
}
for v in test_vectors {
dst := make([]byte, len(v.output) / 2, context.temp_allocator)
domainsep := transmute([]byte)(v.domainsep)
ctx: shake.Context
switch v.sec_strength {
case 128:
shake.init_cshake_128(&ctx, domainsep)
case 256:
shake.init_cshake_256(&ctx, domainsep)
}
data, _ := hex.decode(transmute([]byte)(v.str))
shake.write(&ctx, data)
shake.read(&ctx, dst)
dst_str := string(hex.encode(dst, context.temp_allocator))
tc.expect(
t,
dst_str == v.output,
fmt.tprintf(
"cSHAKE%d: Expected: %s for input of %s, but got %s instead",
v.sec_strength,
v.output,
v.str,
dst_str,
),
)
}
}
@(test)
test_tuplehash :: proc(t: ^testing.T) {
tc.log(t, "Testing TupleHash(XOF)")
test_vectors := []struct {
sec_strength: int,
domainsep: string,
output: string,
tuple: []string,
is_xof: bool,
} {
// TupleHash128
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/TupleHash_samples.pdf
{
128,
"",
"c5d8786c1afb9b82111ab34b65b2c0048fa64e6d48e263264ce1707d3ffc8ed1",
[]string{
"000102",
"101112131415",
},
false,
},
{
128,
"My Tuple App",
"75cdb20ff4db1154e841d758e24160c54bae86eb8c13e7f5f40eb35588e96dfb",
[]string{
"000102",
"101112131415",
},
false,
},
{
128,
"My Tuple App",
"e60f202c89a2631eda8d4c588ca5fd07f39e5151998deccf973adb3804bb6e84",
[]string{
"000102",
"101112131415",
"202122232425262728",
},
false,
},
// TupleHash256
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/TupleHash_samples.pdf
{
256,
"",
"cfb7058caca5e668f81a12a20a2195ce97a925f1dba3e7449a56f82201ec607311ac2696b1ab5ea2352df1423bde7bd4bb78c9aed1a853c78672f9eb23bbe194",
[]string{
"000102",
"101112131415",
},
false,
},
{
256,
"My Tuple App",
"147c2191d5ed7efd98dbd96d7ab5a11692576f5fe2a5065f3e33de6bba9f3aa1c4e9a068a289c61c95aab30aee1e410b0b607de3620e24a4e3bf9852a1d4367e",
[]string{
"000102",
"101112131415",
},
false,
},
{
256,
"My Tuple App",
"45000be63f9b6bfd89f54717670f69a9bc763591a4f05c50d68891a744bcc6e7d6d5b5e82c018da999ed35b0bb49c9678e526abd8e85c13ed254021db9e790ce",
[]string{
"000102",
"101112131415",
"202122232425262728",
},
false,
},
// TupleHashXOF128
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/TupleHashXOF_samples.pdf
{
128,
"",
"2f103cd7c32320353495c68de1a8129245c6325f6f2a3d608d92179c96e68488",
[]string{
"000102",
"101112131415",
},
true,
},
{
128,
"My Tuple App",
"3fc8ad69453128292859a18b6c67d7ad85f01b32815e22ce839c49ec374e9b9a",
[]string{
"000102",
"101112131415",
},
true,
},
{
128,
"My Tuple App",
"900fe16cad098d28e74d632ed852f99daab7f7df4d99e775657885b4bf76d6f8",
[]string{
"000102",
"101112131415",
"202122232425262728",
},
true,
},
// TupleHashXOF256
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/TupleHashXOF_samples.pdf
{
256,
"",
"03ded4610ed6450a1e3f8bc44951d14fbc384ab0efe57b000df6b6df5aae7cd568e77377daf13f37ec75cf5fc598b6841d51dd207c991cd45d210ba60ac52eb9",
[]string{
"000102",
"101112131415",
},
true,
},
{
256,
"My Tuple App",
"6483cb3c9952eb20e830af4785851fc597ee3bf93bb7602c0ef6a65d741aeca7e63c3b128981aa05c6d27438c79d2754bb1b7191f125d6620fca12ce658b2442",
[]string{
"000102",
"101112131415",
},
true,
},
{
256,
"My Tuple App",
"0c59b11464f2336c34663ed51b2b950bec743610856f36c28d1d088d8a2446284dd09830a6a178dc752376199fae935d86cfdee5913d4922dfd369b66a53c897",
[]string{
"000102",
"101112131415",
"202122232425262728",
},
true,
},
}
for v in test_vectors {
dst := make([]byte, len(v.output) / 2, context.temp_allocator)
domainsep := transmute([]byte)(v.domainsep)
ctx: tuplehash.Context
switch v.sec_strength {
case 128:
tuplehash.init_128(&ctx, domainsep)
case 256:
tuplehash.init_256(&ctx, domainsep)
}
for e in v.tuple {
data, _ := hex.decode(transmute([]byte)(e))
tuplehash.write_element(&ctx, data)
}
suffix: string
switch v.is_xof {
case true:
suffix = "XOF"
tuplehash.read(&ctx, dst)
case false:
tuplehash.final(&ctx, dst)
}
dst_str := string(hex.encode(dst, context.temp_allocator))
tc.expect(
t,
dst_str == v.output,
fmt.tprintf(
"TupleHash%s%d: Expected: %s for input of %v, but got %s instead",
suffix,
v.sec_strength,
v.output,
v.tuple,
dst_str,
),
)
}
}
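What distinguishes TupleHash from simply concatenating the inputs is visible in the loop above: each `write_element` call frames its input unambiguously, so, for example, ("0001", "02") and ("00", "0102") produce different digests. A minimal sketch of the fixed-output path, again assuming the `core:crypto/tuplehash` import path:
```odin
package tuplehash_example

import "core:crypto/tuplehash"
import "core:fmt"

main :: proc() {
	ctx: tuplehash.Context
	dom_sep := "My Tuple App"
	tuplehash.init_128(&ctx, transmute([]byte)(dom_sep))

	// Each element is length-framed, so element boundaries affect the digest.
	tuplehash.write_element(&ctx, []byte{0x00, 0x01, 0x02})
	tuplehash.write_element(&ctx, []byte{0x10, 0x11, 0x12, 0x13, 0x14, 0x15})

	dst: [32]byte
	tuplehash.final(&ctx, dst[:]) // tuplehash.read would select the XOF variant instead
	fmt.printf("%x\n", dst)
}
```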
@(test)
test_kmac :: proc(t: ^testing.T) {
tc.log(t, "Testing KMAC")
test_vectors := []struct {
sec_strength: int,
key: string,
domainsep: string,
msg: string,
output: string,
} {
// KMAC128
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/KMAC_samples.pdf
{
128,
"404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f",
"",
"00010203",
"e5780b0d3ea6f7d3a429c5706aa43a00fadbd7d49628839e3187243f456ee14e",
},
{
128,
"404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f",
"My Tagged Application",
"00010203",
"3b1fba963cd8b0b59e8c1a6d71888b7143651af8ba0a7070c0979e2811324aa5",
},
{
128,
"404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f",
"My Tagged Application",
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7",
"1f5b4e6cca02209e0dcb5ca635b89a15e271ecc760071dfd805faa38f9729230",
},
// KMAC256
// - https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/KMAC_samples.pdf
{
256,
"404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f",
"My Tagged Application",
"00010203",
"20c570c31346f703c9ac36c61c03cb64c3970d0cfc787e9b79599d273a68d2f7f69d4cc3de9d104a351689f27cf6f5951f0103f33f4f24871024d9c27773a8dd",
},
{
256,
"404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f",
"",
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7",
"75358cf39e41494e949707927cee0af20a3ff553904c86b08f21cc414bcfd691589d27cf5e15369cbbff8b9a4c2eb17800855d0235ff635da82533ec6b759b69",
},
{
256,
"404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f",
"My Tagged Application",
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7",
"b58618f71f92e1d56c1b8c55ddd7cd188b97b4ca4d99831eb2699a837da2e4d970fbacfde50033aea585f1a2708510c32d07880801bd182898fe476876fc8965",
},
}
for v in test_vectors {
dst := make([]byte, len(v.output) / 2, context.temp_allocator)
key, _ := hex.decode(transmute([]byte)(v.key))
domainsep := transmute([]byte)(v.domainsep)
ctx: kmac.Context
switch v.sec_strength {
case 128:
kmac.init_128(&ctx, key, domainsep)
case 256:
kmac.init_256(&ctx, key, domainsep)
}
data, _ := hex.decode(transmute([]byte)(v.msg))
kmac.update(&ctx, data)
kmac.final(&ctx, dst)
dst_str := string(hex.encode(dst, context.temp_allocator))
tc.expect(
t,
dst_str == v.output,
fmt.tprintf(
"KMAC%d: Expected: %s for input of (%s, %s, %s), but got %s instead",
v.sec_strength,
v.output,
v.key,
v.domainsep,
v.msg,
dst_str,
),
)
}
}
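KMAC, per SP 800-185, is essentially cSHAKE with the key absorbed in a padded block, so the calling pattern above mirrors any conventional MAC: key plus optional customization string at init, then update/final. A minimal sketch, assuming the `core:crypto/kmac` import path; the 32-byte `0x40, 0x41, …` key mirrors the NIST sample vectors:
```odin
package kmac_example

import "core:crypto/kmac"
import "core:fmt"

main :: proc() {
	// 32-byte key 0x40..0x5f, as in the NIST sample vectors above.
	key: [32]byte
	for i in 0 ..< len(key) {
		key[i] = byte(0x40 + i)
	}

	ctx: kmac.Context
	dom_sep := "My Tagged Application"
	kmac.init_128(&ctx, key[:], transmute([]byte)(dom_sep))

	kmac.update(&ctx, []byte{0x00, 0x01, 0x02, 0x03}) // message
	tag: [32]byte
	kmac.final(&ctx, tag[:])
	fmt.printf("%x\n", tag)
}
```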

View File

@@ -10,6 +10,8 @@ import "core:crypto/chacha20poly1305"
import "core:crypto/poly1305"
import "core:crypto/x25519"
import tc "tests:common"
// Cryptographic primitive benchmarks.
@(test)
@@ -150,19 +152,19 @@ bench_chacha20 :: proc(t: ^testing.T) {
}
err := time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
name = "ChaCha20 1024 bytes"
options.bytes = 1024
err = time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
name = "ChaCha20 65536 bytes"
options.bytes = 65536
err = time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
}
@@ -177,13 +179,13 @@ bench_poly1305 :: proc(t: ^testing.T) {
}
err := time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
name = "Poly1305 1024 zero bytes"
options.bytes = 1024
err = time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
}
@@ -198,19 +200,19 @@ bench_chacha20poly1305 :: proc(t: ^testing.T) {
}
err := time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
name = "chacha20poly1305 1024 bytes"
options.bytes = 1024
err = time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
name = "chacha20poly1305 65536 bytes"
options.bytes = 65536
err = time.benchmark(options, context.allocator)
expect(t, err == nil, name)
tc.expect(t, err == nil, name)
benchmark_print(name, options)
}
@@ -229,7 +231,7 @@ bench_x25519 :: proc(t: ^testing.T) {
}
elapsed := time.since(start)
log(
tc.log(
t,
fmt.tprintf("x25519.scalarmult: ~%f us/op", time.duration_microseconds(elapsed) / iters),
)

View File

@@ -7,7 +7,4 @@ ifeq ($(OS), OpenBSD)
ODINFLAGS:=$(ODINFLAGS) -extra-linker-flags:-L/usr/local/lib
endif
all: botan_test
botan_test:
$(ODIN) run botan -o:speed -no-bounds-check $(ODINFLAGS) -out=vendor_botan
all:

Binary file not shown.

View File

@@ -1,409 +0,0 @@
package test_vendor_botan
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial implementation.
Jeroen van Rijn: Test runner setup.
Tests for the hashing algorithms within the Botan library.
Where possible, the official test vectors are used to validate the implementation.
*/
import "core:testing"
import "core:fmt"
import "core:os"
import "core:strings"
import "vendor:botan/legacy/md5"
import "vendor:botan/legacy/sha1"
import "vendor:botan/sha2"
import "vendor:botan/sha3"
import "vendor:botan/legacy/keccak"
import "vendor:botan/shake"
import "vendor:botan/blake2b"
import "vendor:botan/sm3"
import "vendor:botan/siphash"
TEST_count := 0
TEST_fail := 0
when ODIN_TEST {
expect :: testing.expect
log :: testing.log
} else {
expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) {
fmt.printf("[%v] ", loc)
TEST_count += 1
if !condition {
TEST_fail += 1
fmt.println(message)
return
}
fmt.println(" PASS")
}
log :: proc(t: ^testing.T, v: any, loc := #caller_location) {
fmt.printf("[%v] ", loc)
fmt.printf("log: %v\n", v)
}
}
main :: proc() {
t := testing.T{}
test_md5(&t)
test_sha1(&t)
test_sha224(&t)
test_sha256(&t)
test_sha384(&t)
test_sha512(&t)
test_sha3_224(&t)
test_sha3_256(&t)
test_sha3_384(&t)
test_sha3_512(&t)
// test_shake_128(&t)
// test_shake_256(&t)
test_keccak_512(&t)
test_blake2b(&t)
test_sm3(&t)
test_siphash_2_4(&t)
fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count)
if TEST_fail > 0 {
os.exit(1)
}
}
TestHash :: struct {
hash: string,
str: string,
}
hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string {
lut: [16]byte = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}
buf := make([]byte, len(bytes) * 2, allocator)
for i := 0; i < len(bytes); i += 1 {
buf[i * 2 + 0] = lut[bytes[i] >> 4 & 0xf]
buf[i * 2 + 1] = lut[bytes[i] & 0xf]
}
return string(buf)
}
@(test)
test_md5 :: proc(t: ^testing.T) {
// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
test_vectors := [?]TestHash {
TestHash{"d41d8cd98f00b204e9800998ecf8427e", ""},
TestHash{"0cc175b9c0f1b6a831c399e269772661", "a"},
TestHash{"900150983cd24fb0d6963f7d28e17f72", "abc"},
TestHash{"f96b697d7cb7938d525a2f31aaf161d0", "message digest"},
TestHash{"c3fcd3d76192e4007dfb496cca67e13b", "abcdefghijklmnopqrstuvwxyz"},
TestHash{"d174ab98d277d9f5a5611c2c9f419d9f", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
TestHash{"57edf4a22be3c955ac49da2e2107b67a", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
}
for v, _ in test_vectors {
computed := md5.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha1 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
TestHash{"a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
TestHash{"f9537c23893d2014f365adf8ffe33b8eb0297ed1", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"346fb528a24b48f563cb061470bcfd23740427ad", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
TestHash{"c729c8996ee0a6f74f4f3248e8957edf704fb624", "01234567012345670123456701234567"},
TestHash{"84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"a49b2446a02c645bf419f995b67091253a04a259", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha1.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha224 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
// https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
data_1_000_000_a := strings.repeat("a", 1_000_000)
test_vectors := [?]TestHash {
TestHash{"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f", ""},
TestHash{"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7", "abc"},
TestHash{"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
TestHash{"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67", data_1_000_000_a},
}
for v, _ in test_vectors {
computed := sha2.hash_224(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha256 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", ""},
TestHash{"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc"},
TestHash{"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha384 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", ""},
TestHash{"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7", "abc"},
TestHash{"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_384(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha512 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", ""},
TestHash{"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", "abc"},
TestHash{"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha2.hash_512(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_224 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7", ""},
TestHash{"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf", "abc"},
TestHash{"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b", "a"},
TestHash{"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112", "01234567012345670123456701234567"},
TestHash{"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_224(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_256 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a", ""},
TestHash{"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532", "abc"},
TestHash{"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b", "a"},
TestHash{"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767", "01234567012345670123456701234567"},
TestHash{"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_384 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004", ""},
TestHash{"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25", "abc"},
TestHash{"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9", "a"},
TestHash{"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be", "01234567012345670123456701234567"},
TestHash{"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_384(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sha3_512 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26", ""},
TestHash{"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0", "abc"},
TestHash{"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e", "abcdbcdecdefdefgefghfghighijhi"},
TestHash{"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891", "jkijkljklmklmnlmnomnopnopq"},
TestHash{"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a", "a"},
TestHash{"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1", "01234567012345670123456701234567"},
TestHash{"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
TestHash{"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
}
for v, _ in test_vectors {
computed := sha3.hash_512(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_shake_128 :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"7f9c2ba4e88f827d616045507605853e", ""},
TestHash{"f4202e3c5852f9182a0430fd8144f0a7", "The quick brown fox jumps over the lazy dog"},
TestHash{"853f4538be0db9621a6cea659a06c110", "The quick brown fox jumps over the lazy dof"},
}
for v, _ in test_vectors {
computed := shake.hash_128(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_shake_256 :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f", ""},
TestHash{"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca", "The quick brown fox jumps over the lazy dog"},
TestHash{"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401", "The quick brown fox jumps over the lazy dof"},
}
for v, _ in test_vectors {
computed := shake.hash_256(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_keccak_512 :: proc(t: ^testing.T) {
// Test vectors from
// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
// https://www.di-mgt.com.au/sha_testvectors.html
test_vectors := [?]TestHash {
TestHash{"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e", ""},
TestHash{"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96", "abc"},
}
for v, _ in test_vectors {
computed := keccak.hash_512(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_blake2b :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""},
TestHash{"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918", "The quick brown fox jumps over the lazy dog"},
}
for v, _ in test_vectors {
computed := blake2b.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_sm3 :: proc(t: ^testing.T) {
test_vectors := [?]TestHash {
TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""},
TestHash{"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0", "abc"},
TestHash{"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732", "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"},
TestHash{"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea", "The quick brown fox jumps over the lazy dog"},
TestHash{"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098", "The quick brown fox jumps over the lazy cog"},
}
for v, _ in test_vectors {
computed := sm3.hash(v.str)
computed_str := hex_string(computed[:])
expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
}
}
@(test)
test_siphash_2_4 :: proc(t: ^testing.T) {
// Test vectors from
// https://github.com/veorq/SipHash/blob/master/vectors.h
test_vectors := [?]u64 {
0x726fdb47dd0e0e31, 0x74f839c593dc67fd, 0x0d6c8009d9a94f5a, 0x85676696d7fb7e2d,
0xcf2794e0277187b7, 0x18765564cd99a68d, 0xcbc9466e58fee3ce, 0xab0200f58b01d137,
0x93f5f5799a932462, 0x9e0082df0ba9e4b0, 0x7a5dbbc594ddb9f3, 0xf4b32f46226bada7,
0x751e8fbc860ee5fb, 0x14ea5627c0843d90, 0xf723ca908e7af2ee, 0xa129ca6149be45e5,
0x3f2acc7f57c29bdb, 0x699ae9f52cbe4794, 0x4bc1b3f0968dd39c, 0xbb6dc91da77961bd,
0xbed65cf21aa2ee98, 0xd0f2cbb02e3b67c7, 0x93536795e3a33e88, 0xa80c038ccd5ccec8,
0xb8ad50c6f649af94, 0xbce192de8a85b8ea, 0x17d835b85bbb15f3, 0x2f2e6163076bcfad,
0xde4daaaca71dc9a5, 0xa6a2506687956571, 0xad87a3535c49ef28, 0x32d892fad841c342,
0x7127512f72f27cce, 0xa7f32346f95978e3, 0x12e0b01abb051238, 0x15e034d40fa197ae,
0x314dffbe0815a3b4, 0x027990f029623981, 0xcadcd4e59ef40c4d, 0x9abfd8766a33735c,
0x0e3ea96b5304a7d0, 0xad0c42d6fc585992, 0x187306c89bc215a9, 0xd4a60abcf3792b95,
0xf935451de4f21df2, 0xa9538f0419755787, 0xdb9acddff56ca510, 0xd06c98cd5c0975eb,
0xe612a3cb9ecba951, 0xc766e62cfcadaf96, 0xee64435a9752fe72, 0xa192d576b245165a,
0x0a8787bf8ecb74b2, 0x81b3e73d20b49b6f, 0x7fa8220ba3b2ecea, 0x245731c13ca42499,
0xb78dbfaf3a8d83bd, 0xea1ad565322a1a0b, 0x60e61c23a3795013, 0x6606d7e446282b93,
0x6ca4ecb15c5f91e1, 0x9f626da15c9625f3, 0xe51b38608ef25f57, 0x958a324ceb064572,
}
key: [16]byte
for i in 0..<16 {
key[i] = byte(i)
}
for i in 0..<len(test_vectors) {
data := make([]byte, i)
for j in 0..<i {
data[j] = byte(j)
}
vector := test_vectors[i]
computed := siphash.sum_2_4(data[:], key[:])
expect(t, computed == vector, fmt.tprintf("Expected: 0x%x for input of %v, but got 0x%x instead", vector, data, computed))
}
}

View File

@@ -1,13 +1,8 @@
@echo off
set COMMON=-show-timings -no-bounds-check -vet -strict-style
set PATH_TO_ODIN==..\..\odin
echo ---
echo Running vendor:botan tests
echo ---
%PATH_TO_ODIN% run botan %COMMON% -out:vendor_botan.exe || exit /b
echo ---
echo Running vendor:glfw tests
echo ---
@echo off
set COMMON=-show-timings -no-bounds-check -vet -strict-style
set PATH_TO_ODIN==..\..\odin
echo ---
echo Running vendor:glfw tests
echo ---
%PATH_TO_ODIN% run glfw %COMMON% -out:vendor_glfw.exe || exit /b

vendor/README.md vendored
View File

@@ -127,15 +127,6 @@ Zero-input latency networking library for peer-to-peer games.
See also LICENSE in the `GGPO` directory itself.
## Botan
[Botan](https://botan.randombit.net/) Crypto and TLS library.
`botan.lib` is available under Botan's [BSD](https://botan.randombit.net/license.txt) license.
See also LICENSE in the `botan` directory itself.
Includes full bindings as well as wrappers to match the `core:crypto` API.
## CommonMark
[CMark](https://github.com/commonmark/cmark) CommonMark parsing library.

View File

@@ -1,69 +0,0 @@
# botan
A wrapper for the Botan cryptography library
## Supported
This library offers full bindings for everything exposed by Botan's FFI.
Wrappers for hashing algorithms have been added to match the API within
the Odin `core:crypto` library.
## Hashing algorithms
| Algorithm                                                                                                      | Supported        |
|:-------------------------------------------------------------------------------------------------------------|:-----------------|
| [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693)                                                       | ✔️               |
| [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf)   | ✔️               |
| [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                              | ✔️               |
| [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                              | ✔️               |
| [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02)                                             | ✔️               |
| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                      | ✔️               |
| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                    | ✔️               |
| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                  | ✔️               |
#### High level API
Each hash algorithm contains a procedure group named `hash`, or, if the algorithm provides more than one digest size, `hash_<size>`.
Included in these groups are six procedures.
- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
- `hash_bytes` - Hash a given byte slice and return the computed hash
- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
- `hash_stream` - Takes an io.Stream and returns the computed hash from it
- `hash_file` - Takes a file handle and returns the computed hash from it. An optional second boolean parameter controls whether the file is streamed (the default) or read all at once (pass true)
#### Low level API
The above-mentioned procedures internally call three procedures: `init`, `update`, and `final`.
You may also call them directly, if you wish.
#### Example
```odin
package crypto_example
// Import the desired package
import "vendor:botan/blake2b"
main :: proc() {
input := "foo"
// Compute the hash, using the high level API
computed_hash := blake2b.hash(input)
// Variant that takes a destination buffer, instead of returning the computed hash
hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
blake2b.hash(input, hash[:])
// Compute the hash, using the low level API
// @note: Botan's structs are opaque by design, they don't expose any fields
ctx: blake2b.Context
computed_hash_low: [blake2b.DIGEST_SIZE]byte
blake2b.init(&ctx)
blake2b.update(&ctx, transmute([]byte)input)
blake2b.final(&ctx, computed_hash_low[:])
}
```
For example uses of all available algorithms, please see the tests within `tests/vendor/botan`.
### License
This library is made available under the BSD-3 license.

Binary file not shown.

View File

@@ -1,460 +0,0 @@
package vendor_botan
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial creation and testing of the bindings.
Bindings for the Botan crypto library.
Created for version 2.18.1, using the provided FFI header within Botan.
The "botan_" prefix has been stripped from the identifiers to remove redundancy,
since the package is already named botan.
*/
import "core:c"
FFI_ERROR :: #type c.int
FFI_SUCCESS :: FFI_ERROR(0)
FFI_INVALID_VERIFIER :: FFI_ERROR(1)
FFI_ERROR_INVALID_INPUT :: FFI_ERROR(-1)
FFI_ERROR_BAD_MAC :: FFI_ERROR(-2)
FFI_ERROR_INSUFFICIENT_BUFFER_SPACE :: FFI_ERROR(-10)
FFI_ERROR_EXCEPTION_THROWN :: FFI_ERROR(-20)
FFI_ERROR_OUT_OF_MEMORY :: FFI_ERROR(-21)
FFI_ERROR_BAD_FLAG :: FFI_ERROR(-30)
FFI_ERROR_NULL_POINTER :: FFI_ERROR(-31)
FFI_ERROR_BAD_PARAMETER :: FFI_ERROR(-32)
FFI_ERROR_KEY_NOT_SET :: FFI_ERROR(-33)
FFI_ERROR_INVALID_KEY_LENGTH :: FFI_ERROR(-34)
FFI_ERROR_NOT_IMPLEMENTED :: FFI_ERROR(-40)
FFI_ERROR_INVALID_OBJECT :: FFI_ERROR(-50)
FFI_ERROR_UNKNOWN_ERROR :: FFI_ERROR(-100)
FFI_HEX_LOWER_CASE :: 1
CIPHER_INIT_FLAG_MASK_DIRECTION :: 1
CIPHER_INIT_FLAG_ENCRYPT :: 0
CIPHER_INIT_FLAG_DECRYPT :: 1
CIPHER_UPDATE_FLAG_FINAL :: 1 << 0
CHECK_KEY_EXPENSIVE_TESTS :: 1
PRIVKEY_EXPORT_FLAG_DER :: 0
PRIVKEY_EXPORT_FLAG_PEM :: 1
PUBKEY_DER_FORMAT_SIGNATURE :: 1
FPE_FLAG_FE1_COMPAT_MODE :: 1
x509_cert_key_constraints :: #type c.int
NO_CONSTRAINTS :: x509_cert_key_constraints(0)
DIGITAL_SIGNATURE :: x509_cert_key_constraints(32768)
NON_REPUDIATION :: x509_cert_key_constraints(16384)
KEY_ENCIPHERMENT :: x509_cert_key_constraints(8192)
DATA_ENCIPHERMENT :: x509_cert_key_constraints(4096)
KEY_AGREEMENT :: x509_cert_key_constraints(2048)
KEY_CERT_SIGN :: x509_cert_key_constraints(1024)
CRL_SIGN :: x509_cert_key_constraints(512)
ENCIPHER_ONLY :: x509_cert_key_constraints(256)
DECIPHER_ONLY :: x509_cert_key_constraints(128)
HASH_SHA1 :: "SHA-1"
HASH_SHA_224 :: "SHA-224"
HASH_SHA_256 :: "SHA-256"
HASH_SHA_384 :: "SHA-384"
HASH_SHA_512 :: "SHA-512"
HASH_SHA3_224 :: "SHA-3(224)"
HASH_SHA3_256 :: "SHA-3(256)"
HASH_SHA3_384 :: "SHA-3(384)"
HASH_SHA3_512 :: "SHA-3(512)"
HASH_SHAKE_128 :: "SHAKE-128"
HASH_SHAKE_256 :: "SHAKE-256"
HASH_KECCAK_512 :: "Keccak-1600"
HASH_BLAKE2B :: "BLAKE2b"
HASH_MD5 :: "MD5"
HASH_SM3 :: "SM3"
MAC_HMAC_SHA1 :: "HMAC(SHA1)"
MAC_HMAC_SHA_224 :: "HMAC(SHA-224)"
MAC_HMAC_SHA_256 :: "HMAC(SHA-256)"
MAC_HMAC_SHA_384 :: "HMAC(SHA-384)"
MAC_HMAC_SHA_512 :: "HMAC(SHA-512)"
MAC_HMAC_MD5 :: "HMAC(MD5)"
MAC_SIPHASH_1_3 :: "SipHash(1,3)"
MAC_SIPHASH_2_4 :: "SipHash(2,4)"
MAC_SIPHASH_4_8 :: "SipHash(4,8)"
hash_struct :: struct{}
hash_t :: ^hash_struct
rng_struct :: struct{}
rng_t :: ^rng_struct
mac_struct :: struct{}
mac_t :: ^mac_struct
cipher_struct :: struct{}
cipher_t :: ^cipher_struct
block_cipher_struct :: struct{}
block_cipher_t :: ^block_cipher_struct
mp_struct :: struct{}
mp_t :: ^mp_struct
privkey_struct :: struct{}
privkey_t :: ^privkey_struct
pubkey_struct :: struct{}
pubkey_t :: ^pubkey_struct
pk_op_encrypt_struct :: struct{}
pk_op_encrypt_t :: ^pk_op_encrypt_struct
pk_op_decrypt_struct :: struct{}
pk_op_decrypt_t :: ^pk_op_decrypt_struct
pk_op_sign_struct :: struct{}
pk_op_sign_t :: ^pk_op_sign_struct
pk_op_verify_struct :: struct{}
pk_op_verify_t :: ^pk_op_verify_struct
pk_op_ka_struct :: struct{}
pk_op_ka_t :: ^pk_op_ka_struct
x509_cert_struct :: struct{}
x509_cert_t :: ^x509_cert_struct
x509_crl_struct :: struct{}
x509_crl_t :: ^x509_crl_struct
hotp_struct :: struct{}
hotp_t :: ^hotp_struct
totp_struct :: struct{}
totp_t :: ^totp_struct
fpe_struct :: struct{}
fpe_t :: ^fpe_struct
when ODIN_OS == .Windows {
foreign import botan_lib "botan-3.lib"
} else when ODIN_OS == .Darwin {
foreign import botan_lib "system:botan-3"
} else {
foreign import botan_lib "system:botan-2"
}
@(default_calling_convention="c")
@(link_prefix="botan_")
foreign botan_lib {
error_description :: proc(err: c.int) -> cstring ---
ffi_api_version :: proc() -> c.int ---
ffi_supports_api :: proc(api_version: c.int) -> c.int ---
version_string :: proc() -> cstring ---
version_major :: proc() -> c.int ---
version_minor :: proc() -> c.int ---
version_patch :: proc() -> c.int ---
version_datestamp :: proc() -> c.int ---
constant_time_compare :: proc(x, y: ^c.char, length: c.size_t) -> c.int ---
same_mem :: proc(x, y: ^c.char, length: c.size_t) -> c.int ---
scrub_mem :: proc(mem: rawptr, bytes: c.size_t) -> c.int ---
hex_encode :: proc(x: ^c.char, length: c.size_t, out: ^c.char, flags: c.uint) -> c.int ---
hex_decode :: proc(hex_str: cstring, in_len: c.size_t, out: ^c.char, out_len: c.size_t) -> c.int ---
base64_encode :: proc(x: ^c.char, length: c.size_t, out: ^c.char, out_len: c.size_t) -> c.int ---
base64_decode :: proc(base64_str: cstring, in_len: c.size_t, out: ^c.char, out_len: c.size_t) -> c.int ---
rng_init :: proc(rng: ^rng_t, rng_type: cstring) -> c.int ---
rng_init_custom :: proc(rng_out: ^rng_t, rng_name: cstring, ctx: rawptr,
get_cb: proc(ctx: rawptr, out: ^c.char, out_len: c.size_t) -> ^c.int,
add_entropy_cb: proc(ctx: rawptr, input: ^c.char, length: c.size_t) -> ^c.int,
destroy_cb: proc(ctx: rawptr) -> rawptr) -> c.int ---
rng_get :: proc(rng: rng_t, out: ^c.char, out_len: c.size_t) -> c.int ---
rng_reseed :: proc(rng: rng_t, bits: c.size_t) -> c.int ---
rng_reseed_from_rng :: proc(rng, source_rng: rng_t, bits: c.size_t) -> c.int ---
rng_add_entropy :: proc(rng: rng_t, entropy: ^c.char, entropy_len: c.size_t) -> c.int ---
rng_destroy :: proc(rng: rng_t) -> c.int ---
hash_init :: proc(hash: ^hash_t, hash_name: cstring, flags: c.uint) -> c.int ---
hash_copy_state :: proc(dest: ^hash_t, source: hash_t) -> c.int ---
hash_output_length :: proc(hash: hash_t, output_length: ^c.size_t) -> c.int ---
hash_block_size :: proc(hash: hash_t, block_size: ^c.size_t) -> c.int ---
hash_update :: proc(hash: hash_t, input: ^c.char, input_len: c.size_t) -> c.int ---
hash_final :: proc(hash: hash_t, out: ^c.char) -> c.int ---
hash_clear :: proc(hash: hash_t) -> c.int ---
hash_destroy :: proc(hash: hash_t) -> c.int ---
hash_name :: proc(hash: hash_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
mac_init :: proc(mac: ^mac_t, hash_name: cstring, flags: c.uint) -> c.int ---
mac_output_length :: proc(mac: mac_t, output_length: ^c.size_t) -> c.int ---
mac_set_key :: proc(mac: mac_t, key: ^c.char, key_len: c.size_t) -> c.int ---
mac_update :: proc(mac: mac_t, buf: ^c.char, length: c.size_t) -> c.int ---
mac_final :: proc(mac: mac_t, out: ^c.char) -> c.int ---
mac_clear :: proc(mac: mac_t) -> c.int ---
mac_name :: proc(mac: mac_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
mac_get_keyspec :: proc(mac: mac_t, out_minimum_keylength, out_maximum_keylength, out_keylength_modulo: ^c.size_t) -> c.int ---
mac_destroy :: proc(mac: mac_t) -> c.int ---
cipher_init :: proc(cipher: ^cipher_t, name: cstring, flags: c.uint) -> c.int ---
cipher_name :: proc(cipher: cipher_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
cipher_output_length :: proc(cipher: cipher_t, output_length: ^c.size_t) -> c.int ---
cipher_valid_nonce_length :: proc(cipher: cipher_t, nl: c.size_t) -> c.int ---
cipher_get_tag_length :: proc(cipher: cipher_t, tag_size: ^c.size_t) -> c.int ---
cipher_get_default_nonce_length :: proc(cipher: cipher_t, nl: ^c.size_t) -> c.int ---
cipher_get_update_granularity :: proc(cipher: cipher_t, ug: ^c.size_t) -> c.int ---
cipher_query_keylen :: proc(cipher: cipher_t, out_minimum_keylength, out_maximum_keylength: ^c.size_t) -> c.int ---
cipher_get_keyspec :: proc(cipher: cipher_t, min_keylen, max_keylen, mod_keylen: ^c.size_t) -> c.int ---
cipher_set_key :: proc(cipher: cipher_t, key: ^c.char, key_len: c.size_t) -> c.int ---
cipher_reset :: proc(cipher: cipher_t) -> c.int ---
cipher_set_associated_data :: proc(cipher: cipher_t, ad: ^c.char, ad_len: c.size_t) -> c.int ---
cipher_start :: proc(cipher: cipher_t, nonce: ^c.char, nonce_len: c.size_t) -> c.int ---
cipher_update :: proc(cipher: cipher_t, flags: c.uint, output: ^c.char, output_size: c.size_t, output_written: ^c.size_t,
input_bytes: ^c.char, input_size: c.size_t, input_consumed: ^c.size_t) -> c.int ---
cipher_clear :: proc(hash: cipher_t) -> c.int ---
cipher_destroy :: proc(cipher: cipher_t) -> c.int ---
@(deprecated="Use botan.pwdhash")
pbkdf :: proc(pbkdf_algo: cstring, out: ^c.char, out_len: c.size_t, passphrase: cstring, salt: ^c.char,
salt_len, iterations: c.size_t) -> c.int ---
@(deprecated="Use botan.pwdhash_timed")
pbkdf_timed :: proc(pbkdf_algo: cstring, out: ^c.char, out_len: c.size_t, passphrase: cstring, salt: ^c.char,
salt_len, milliseconds_to_run: c.size_t, out_iterations_used: ^c.size_t) -> c.int ---
pwdhash :: proc(algo: cstring, param1, param2, param3: c.size_t, out: ^c.char, out_len: c.size_t, passphrase: cstring,
passphrase_len: c.size_t, salt: ^c.char, salt_len: c.size_t) -> c.int ---
pwdhash_timed :: proc(algo: cstring, msec: c.uint, param1, param2, param3: c.size_t, out: ^c.char, out_len: c.size_t,
passphrase: cstring, passphrase_len: c.size_t, salt: ^c.char, salt_len: c.size_t) -> c.int ---
@(deprecated="Use botan.pwdhash")
scrypt :: proc(out: ^c.char, out_len: c.size_t, passphrase: cstring, salt: ^c.char, salt_len, N, r, p: c.size_t) -> c.int ---
kdf :: proc(kdf_algo: cstring, out: ^c.char, out_len: c.size_t, secret: ^c.char, secret_len: c.size_t, salt: ^c.char,
salt_len: c.size_t, label: ^c.char, label_len: c.size_t) -> c.int ---
block_cipher_init :: proc(bc: ^block_cipher_t, name: cstring) -> c.int ---
block_cipher_destroy :: proc(bc: block_cipher_t) -> c.int ---
block_cipher_clear :: proc(bc: block_cipher_t) -> c.int ---
block_cipher_set_key :: proc(bc: block_cipher_t, key: ^c.char, key_len: c.size_t) -> c.int ---
block_cipher_block_size :: proc(bc: block_cipher_t) -> c.int ---
block_cipher_encrypt_blocks :: proc(bc: block_cipher_t, input, out: ^c.char, blocks: c.size_t) -> c.int ---
block_cipher_decrypt_blocks :: proc(bc: block_cipher_t, input, out: ^c.char, blocks: c.size_t) -> c.int ---
block_cipher_name :: proc(bc: block_cipher_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
block_cipher_get_keyspec :: proc(bc: block_cipher_t, out_minimum_keylength, out_maximum_keylength, out_keylength_modulo: ^c.size_t) -> c.int ---
mp_init :: proc(mp: ^mp_t) -> c.int ---
mp_destroy :: proc(mp: mp_t) -> c.int ---
mp_to_hex :: proc(mp: mp_t, out: ^c.char) -> c.int ---
mp_to_str :: proc(mp: mp_t, base: c.char, out: ^c.char, out_len: ^c.size_t) -> c.int ---
mp_clear :: proc(mp: mp_t) -> c.int ---
mp_set_from_int :: proc(mp: mp_t, initial_value: c.int) -> c.int ---
mp_set_from_mp :: proc(dest, source: mp_t) -> c.int ---
mp_set_from_str :: proc(dest: mp_t, str: cstring) -> c.int ---
mp_set_from_radix_str :: proc(mp: mp_t, str: cstring, radix: c.size_t) -> c.int ---
mp_num_bits :: proc(n: mp_t, bits: ^c.size_t) -> c.int ---
mp_num_bytes :: proc(n: mp_t, bytes: ^c.size_t) -> c.int ---
mp_to_bin :: proc(mp: mp_t, vec: ^c.char) -> c.int ---
mp_from_bin :: proc(mp: mp_t, vec: ^c.char, vec_len: c.size_t) -> c.int ---
mp_to_uint32 :: proc(mp: mp_t, val: ^c.uint) -> c.int ---
mp_is_positive :: proc(mp: mp_t) -> c.int ---
mp_is_negative :: proc(mp: mp_t) -> c.int ---
mp_flip_sign :: proc(mp: mp_t) -> c.int ---
mp_is_zero :: proc(mp: mp_t) -> c.int ---
@(deprecated="Use botan.mp_get_bit(0)")
mp_is_odd :: proc(mp: mp_t) -> c.int ---
@(deprecated="Use botan.mp_get_bit(0)")
mp_is_even :: proc(mp: mp_t) -> c.int ---
mp_add_u32 :: proc(result, x: mp_t, y: c.uint) -> c.int ---
mp_sub_u32 :: proc(result, x: mp_t, y: c.uint) -> c.int ---
mp_add :: proc(result, x, y: mp_t) -> c.int ---
mp_sub :: proc(result, x, y: mp_t) -> c.int ---
mp_mul :: proc(result, x, y: mp_t) -> c.int ---
mp_div :: proc(quotient, remainder, x, y: mp_t) -> c.int ---
mp_mod_mul :: proc(result, x, y, mod: mp_t) -> c.int ---
mp_equal :: proc(x, y: mp_t) -> c.int ---
mp_cmp :: proc(result: ^c.int, x, y: mp_t) -> c.int ---
mp_swap :: proc(x, y: mp_t) -> c.int ---
mp_powmod :: proc(out, base, exponent, modulus: mp_t) -> c.int ---
mp_lshift :: proc(out, input: mp_t, shift: c.size_t) -> c.int ---
mp_rshift :: proc(out, input: mp_t, shift: c.size_t) -> c.int ---
mp_mod_inverse :: proc(out, input, modulus: mp_t) -> c.int ---
mp_rand_bits :: proc(rand_out: mp_t, rng: rng_t, bits: c.size_t) -> c.int ---
mp_rand_range :: proc(rand_out: mp_t, rng: rng_t, lower_bound, upper_bound: mp_t) -> c.int ---
mp_gcd :: proc(out, x, y: mp_t) -> c.int ---
mp_is_prime :: proc(n: mp_t, rng: rng_t, test_prob: c.size_t) -> c.int ---
mp_get_bit :: proc(n: mp_t, bit: c.size_t) -> c.int ---
mp_set_bit :: proc(n: mp_t, bit: c.size_t) -> c.int ---
mp_clear_bit :: proc(n: mp_t, bit: c.size_t) -> c.int ---
bcrypt_generate :: proc(out: ^c.char, out_len: ^c.size_t, password: cstring, rng: rng_t, work_factor: c.size_t, flags: c.uint) -> c.int ---
bcrypt_is_valid :: proc(pass, hash: cstring) -> c.int ---
privkey_create :: proc(key: ^privkey_t, algo_name, algo_params: cstring, rng: rng_t) -> c.int ---
@(deprecated="Use botan.privkey_create")
privkey_check_key :: proc(key: privkey_t, rng: rng_t, flags: c.uint) -> c.int ---
@(deprecated="Use botan.privkey_create")
privkey_create_rsa :: proc(key: ^privkey_t, rng: rng_t, bits: c.size_t) -> c.int ---
@(deprecated="Use botan.privkey_create")
privkey_create_ecdsa :: proc(key: ^privkey_t, rng: rng_t, params: cstring) -> c.int ---
@(deprecated="Use botan.privkey_create")
privkey_create_ecdh :: proc(key: ^privkey_t, rng: rng_t, params: cstring) -> c.int ---
@(deprecated="Use botan.privkey_create")
privkey_create_mceliece :: proc(key: ^privkey_t, rng: rng_t, n, t: c.size_t) -> c.int ---
@(deprecated="Use botan.privkey_create")
privkey_create_dh :: proc(key: ^privkey_t, rng: rng_t, param: cstring) -> c.int ---
privkey_create_dsa :: proc(key: ^privkey_t, rng: rng_t, pbits, qbits: c.size_t) -> c.int ---
privkey_create_elgamal :: proc(key: ^privkey_t, rng: rng_t, pbits, qbits: c.size_t) -> c.int ---
privkey_load :: proc(key: ^privkey_t, rng: rng_t, bits: ^c.char, length: c.size_t, password: cstring) -> c.int ---
privkey_destroy :: proc(key: privkey_t) -> c.int ---
privkey_export :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, flags: c.uint) -> c.int ---
privkey_algo_name :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
@(deprecated="Use botan.privkey_export_encrypted_pbkdf_{msec,iter}")
privkey_export_encrypted :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, rng: rng_t, passphrase, encryption_algo: cstring, flags: c.uint) -> c.int ---
privkey_export_encrypted_pbkdf_msec :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, rng: rng_t, passphrase: cstring, pbkdf_msec_runtime: c.uint,
pbkdf_iterations_out: ^c.size_t, cipher_algo, pbkdf_algo: cstring, flags: c.uint) -> c.int ---
privkey_export_encrypted_pbkdf_iter :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, rng: rng_t, passphrase: cstring, pbkdf_iterations: c.size_t,
cipher_algo, pbkdf_algo: cstring, flags: c.uint) -> c.int ---
pubkey_load :: proc(key: ^pubkey_t, bits: ^c.char, length: c.size_t) -> c.int ---
privkey_export_pubkey :: proc(out: ^pubkey_t, input: privkey_t) -> c.int ---
pubkey_export :: proc(key: pubkey_t, out: ^c.char, out_len: ^c.size_t, flags: c.uint) -> c.int ---
pubkey_algo_name :: proc(key: pubkey_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
pubkey_check_key :: proc(key: pubkey_t, rng: rng_t, flags: c.uint) -> c.int ---
pubkey_estimated_strength :: proc(key: pubkey_t, estimate: ^c.size_t) -> c.int ---
pubkey_fingerprint :: proc(key: pubkey_t, hash: cstring, out: ^c.char, out_len: ^c.size_t) -> c.int ---
pubkey_destroy :: proc(key: pubkey_t) -> c.int ---
pubkey_get_field :: proc(output: mp_t, key: pubkey_t, field_name: cstring) -> c.int ---
privkey_get_field :: proc(output: mp_t, key: privkey_t, field_name: cstring) -> c.int ---
privkey_load_rsa :: proc(key: ^privkey_t, p, q, e: mp_t) -> c.int ---
privkey_load_rsa_pkcs1 :: proc(key: ^privkey_t, bits: ^c.char, length: c.size_t) -> c.int ---
@(deprecated="Use botan.privkey_get_field")
privkey_rsa_get_p :: proc(p: mp_t, rsa_key: privkey_t) -> c.int ---
@(deprecated="Use botan.privkey_get_field")
privkey_rsa_get_q :: proc(q: mp_t, rsa_key: privkey_t) -> c.int ---
@(deprecated="Use botan.privkey_get_field")
privkey_rsa_get_d :: proc(d: mp_t, rsa_key: privkey_t) -> c.int ---
@(deprecated="Use botan.privkey_get_field")
privkey_rsa_get_n :: proc(n: mp_t, rsa_key: privkey_t) -> c.int ---
@(deprecated="Use botan.privkey_get_field")
privkey_rsa_get_e :: proc(e: mp_t, rsa_key: privkey_t) -> c.int ---
privkey_rsa_get_privkey :: proc(rsa_key: privkey_t, out: ^c.char, out_len: ^c.size_t, flags: c.uint) -> c.int ---
pubkey_load_rsa :: proc(key: ^pubkey_t, n, e: mp_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
pubkey_rsa_get_e :: proc(e: mp_t, rsa_key: pubkey_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
pubkey_rsa_get_n :: proc(n: mp_t, rsa_key: pubkey_t) -> c.int ---
privkey_load_dsa :: proc(key: ^privkey_t, p, q, g, x: mp_t) -> c.int ---
pubkey_load_dsa :: proc(key: ^pubkey_t, p, q, g, y: mp_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
privkey_dsa_get_x :: proc(n: mp_t, key: privkey_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
pubkey_dsa_get_p :: proc(p: mp_t, key: pubkey_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
pubkey_dsa_get_q :: proc(q: mp_t, key: pubkey_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
pubkey_dsa_get_g :: proc(d: mp_t, key: pubkey_t) -> c.int ---
@(deprecated="Use botan.pubkey_get_field")
pubkey_dsa_get_y :: proc(y: mp_t, key: pubkey_t) -> c.int ---
privkey_load_dh :: proc(key: ^privkey_t, p, g, y: mp_t) -> c.int ---
pubkey_load_dh :: proc(key: ^pubkey_t, p, g, x: mp_t) -> c.int ---
privkey_load_elgamal :: proc(key: ^privkey_t, p, g, y: mp_t) -> c.int ---
pubkey_load_elgamal :: proc(key: ^pubkey_t, p, g, x: mp_t) -> c.int ---
privkey_load_ed25519 :: proc(key: ^privkey_t, privkey: [32]c.char) -> c.int ---
pubkey_load_ed25519 :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---
privkey_ed25519_get_privkey :: proc(key: ^privkey_t, output: [64]c.char) -> c.int ---
pubkey_ed25519_get_pubkey :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---
privkey_load_x25519 :: proc(key: ^privkey_t, privkey: [32]c.char) -> c.int ---
pubkey_load_x25519 :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---
privkey_x25519_get_privkey :: proc(key: ^privkey_t, output: [32]c.char) -> c.int ---
pubkey_x25519_get_pubkey :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---
privkey_load_ecdsa :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
pubkey_load_ecdsa :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
pubkey_load_ecdh :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
privkey_load_ecdh :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
pubkey_load_sm2 :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
privkey_load_sm2 :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
@(deprecated="Use botan.pubkey_load_sm2")
pubkey_load_sm2_enc :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
@(deprecated="Use botan.privkey_load_sm2")
privkey_load_sm2_enc :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
pubkey_sm2_compute_za :: proc(out: ^c.char, out_len: ^c.size_t, ident, hash_algo: cstring, key: pubkey_t) -> c.int ---
pk_op_encrypt_create :: proc(op: ^pk_op_encrypt_t, key: pubkey_t, padding: cstring, flags: c.uint) -> c.int ---
pk_op_encrypt_destroy :: proc(op: pk_op_encrypt_t) -> c.int ---
pk_op_encrypt_output_length :: proc(op: pk_op_encrypt_t, ptext_len: c.size_t, ctext_len: ^c.size_t) -> c.int ---
pk_op_encrypt :: proc(op: pk_op_encrypt_t, rng: rng_t, out: ^c.char, out_len: ^c.size_t, plaintext: cstring, plaintext_len: c.size_t) -> c.int ---
pk_op_decrypt_create :: proc(op: ^pk_op_decrypt_t, key: privkey_t, padding: cstring, flags: c.uint) -> c.int ---
pk_op_decrypt_destroy :: proc(op: pk_op_decrypt_t) -> c.int ---
pk_op_decrypt_output_length :: proc(op: pk_op_decrypt_t, ptext_len: c.size_t, ctext_len: ^c.size_t) -> c.int ---
pk_op_decrypt :: proc(op: pk_op_decrypt_t, rng: rng_t, out: ^c.char, out_len: ^c.size_t, ciphertext: cstring, ciphertext_len: c.size_t) -> c.int ---
pk_op_sign_create :: proc(op: ^pk_op_sign_t, key: privkey_t, hash_and_padding: cstring, flags: c.uint) -> c.int ---
pk_op_sign_destroy :: proc(op: pk_op_sign_t) -> c.int ---
pk_op_sign_output_length :: proc(op: pk_op_sign_t, olen: ^c.size_t) -> c.int ---
pk_op_sign_update :: proc(op: pk_op_sign_t, input: ^c.char, input_len: c.size_t) -> c.int ---
pk_op_sign_finish :: proc(op: pk_op_sign_t, rng: rng_t, sig: ^c.char, sig_len: ^c.size_t) -> c.int ---
pk_op_verify_create :: proc(op: ^pk_op_verify_t, hash_and_padding: cstring, flags: c.uint) -> c.int ---
pk_op_verify_destroy :: proc(op: pk_op_verify_t) -> c.int ---
pk_op_verify_update :: proc(op: pk_op_verify_t, input: ^c.char, input_len: c.size_t) -> c.int ---
pk_op_verify_finish :: proc(op: pk_op_verify_t, sig: ^c.char, sig_len: c.size_t) -> c.int ---
pk_op_key_agreement_create :: proc(op: ^pk_op_ka_t, kdf: cstring, flags: c.uint) -> c.int ---
pk_op_key_agreement_destroy :: proc(op: pk_op_ka_t) -> c.int ---
pk_op_key_agreement_export_public :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
pk_op_key_agreement_size :: proc(op: pk_op_ka_t, out_len: ^c.size_t) -> c.int ---
pk_op_key_agreement :: proc(op: pk_op_ka_t, out: ^c.char, out_len: ^c.size_t, other_key: ^c.char, other_key_len: c.size_t, salt: ^c.char,
salt_len: c.size_t) -> c.int ---
pkcs_hash_id :: proc(hash_name: cstring, pkcs_id: ^c.char, pkcs_id_len: ^c.size_t) -> c.int ---
@(deprecated="Poorly specified, avoid in new code")
mceies_encrypt :: proc(mce_key: pubkey_t, rng: rng_t, aead: cstring, pt: ^c.char, pt_len: c.size_t, ad: ^c.char, ad_len: c.size_t,
ct: ^c.char, ct_len: ^c.size_t) -> c.int ---
@(deprecated="Poorly specified, avoid in new code")
mceies_decrypt :: proc(mce_key: privkey_t, aead: cstring, ct: ^c.char, ct_len: c.size_t, ad: ^c.char, ad_len: c.size_t, pt: ^c.char,
pt_len: ^c.size_t) -> c.int ---
x509_cert_load :: proc(cert_obj: ^x509_cert_t, cert: ^c.char, cert_len: c.size_t) -> c.int ---
x509_cert_load_file :: proc(cert_obj: ^x509_cert_t, filename: cstring) -> c.int ---
x509_cert_destroy :: proc(cert: x509_cert_t) -> c.int ---
x509_cert_dup :: proc(new_cert: ^x509_cert_t, cert: x509_cert_t) -> c.int ---
x509_cert_get_time_starts :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_time_expires :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_not_before :: proc(cert: x509_cert_t, time_since_epoch: ^c.ulonglong) -> c.int ---
x509_cert_not_after :: proc(cert: x509_cert_t, time_since_epoch: ^c.ulonglong) -> c.int ---
x509_cert_get_fingerprint :: proc(cert: x509_cert_t, hash: cstring, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_serial_number :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_authority_key_id :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_subject_key_id :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_public_key_bits :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_public_key :: proc(cert: x509_cert_t, key: ^pubkey_t) -> c.int ---
x509_cert_get_issuer_dn :: proc(cert: x509_cert_t, key: ^c.char, index: c.size_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_get_subject_dn :: proc(cert: x509_cert_t, key: ^c.char, index: c.size_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_to_string :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
x509_cert_allowed_usage :: proc(cert: x509_cert_t, key_usage: c.uint) -> c.int ---
x509_cert_hostname_match :: proc(cert: x509_cert_t, hostname: cstring) -> c.int ---
x509_cert_verify :: proc(validation_result: ^c.int, cert: x509_cert_t, intermediates: ^x509_cert_t, intermediates_len: c.size_t, trusted: ^x509_cert_t,
trusted_len: c.size_t, trusted_path: cstring, required_strength: c.size_t, hostname: cstring, reference_time: c.ulonglong) -> c.int ---
x509_cert_validation_status :: proc(code: c.int) -> cstring ---
x509_crl_load_file :: proc(crl_obj: ^x509_crl_t, crl_path: cstring) -> c.int ---
x509_crl_load :: proc(crl_obj: ^x509_crl_t, crl_bits: ^c.char, crl_bits_len: c.size_t) -> c.int ---
x509_crl_destroy :: proc(crl: x509_crl_t) -> c.int ---
x509_is_revoked :: proc(crl: x509_crl_t, cert: x509_cert_t) -> c.int ---
x509_cert_verify_with_crl :: proc(validation_result: ^c.int, cert: x509_cert_t, intermediates: ^x509_cert_t, intermediates_len: c.size_t, trusted: ^x509_cert_t,
trusted_len: c.size_t, crls: ^x509_crl_t, crls_len: c.size_t, trusted_path: cstring, required_strength: c.size_t,
hostname: cstring, reference_time: c.ulonglong) -> c.int ---
key_wrap3394 :: proc(key: ^c.char, key_len: c.size_t, kek: ^c.char, kek_len: c.size_t, wrapped_key: ^c.char, wrapped_key_len: ^c.size_t) -> c.int ---
key_unwrap3394 :: proc(wrapped_key: ^c.char, wrapped_key_len: c.size_t, kek: ^c.char, kek_len: c.size_t, key: ^c.char, key_len: ^c.size_t) -> c.int ---
hotp_init :: proc(hotp: ^hotp_t, key: ^c.char, key_len: c.size_t, hash_algo: cstring, digits: c.size_t) -> c.int ---
hotp_destroy :: proc(hotp: hotp_t) -> c.int ---
hotp_generate :: proc(hotp: hotp_t, hotp_code: ^c.uint, hotp_counter: c.ulonglong) -> c.int ---
hotp_check :: proc(hotp: hotp_t, next_hotp_counter: ^c.ulonglong, hotp_code: c.uint, hotp_counter: c.ulonglong, resync_range: c.size_t) -> c.int ---
totp_init :: proc(totp: ^totp_t, key: ^c.char, key_len: c.size_t, hash_algo: cstring, digits, time_step: c.size_t) -> c.int ---
totp_destroy :: proc(totp: totp_t) -> c.int ---
totp_generate :: proc(totp: totp_t, totp_code: ^c.uint, timestamp: c.ulonglong) -> c.int ---
totp_check :: proc(totp: totp_t, totp_code: ^c.uint, timestamp: c.ulonglong, acceptable_clock_drift: c.size_t) -> c.int ---
fpe_fe1_init :: proc(fpe: ^fpe_t, n: mp_t, key: ^c.char, key_len, rounds: c.size_t, flags: c.uint) -> c.int ---
fpe_destroy :: proc(fpe: fpe_t) -> c.int ---
fpe_encrypt :: proc(fpe: fpe_t, x: mp_t, tweak: ^c.char, tweak_len: c.size_t) -> c.int ---
fpe_decrypt :: proc(fpe: fpe_t, x: mp_t, tweak: ^c.char, tweak_len: c.size_t) -> c.int ---
}
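
For orientation, a minimal sketch of driving the one-time-password bindings above from a client package. The import path and the RFC 4226 test key are assumptions for illustration; only the hotp_* signatures come from this file.

package botan_hotp_example

import "core:c"
import botan "vendor:botan/bindings" // assumed import path

generate_hotp :: proc() -> (code: c.uint, ok: bool) {
	// 20-byte ASCII key "12345678901234567890" from the RFC 4226 test vectors.
	key := [20]u8{
		'1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
		'1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
	}
	hotp: botan.hotp_t
	if botan.hotp_init(&hotp, cast(^c.char)&key[0], c.size_t(len(key)), "SHA-1", 6) != 0 {
		return
	}
	defer botan.hotp_destroy(hotp)
	// Derive the 6-digit code for counter value 0.
	if botan.hotp_generate(hotp, &code, 0) != 0 {
		return
	}
	return code, true
}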

View File

@@ -1,24 +0,0 @@
Copyright (C) 1999-2023 The Botan Authors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions, and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

View File

@@ -1,118 +0,0 @@
package vendor_botan_blake2b
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial implementation.
Interface for the BLAKE2B hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../bindings"
/*
High level API
*/
DIGEST_SIZE :: 64
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context) {
botan.hash_init(ctx, botan.HASH_BLAKE2B, 0)
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}
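
For reference, a minimal sketch of how a client would have exercised both API levels of this package; the import path is an assumption for illustration.

package blake2b_example

import blake2b "vendor:botan/blake2b" // assumed import path

main :: proc() {
	msg := "hello world"
	// One-shot, high level.
	digest := blake2b.hash_string(msg)
	// Streaming, low level; note that final also destroys the context.
	ctx: blake2b.Context
	blake2b.init(&ctx)
	blake2b.update(&ctx, transmute([]byte)(msg))
	out: [blake2b.DIGEST_SIZE]byte
	blake2b.final(&ctx, out[:])
	assert(digest == out)
}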

View File

@@ -1,10 +0,0 @@
# crypto/legacy
These algorithms are shipped solely for interoperability with legacy
systems. Using these packages in any other capacity is discouraged,
especially those that are known to be broken. A minimal usage sketch
follows the list.
- keccak - The draft version of the algorithm that became SHA-3
- MD5 - Broken (https://eprint.iacr.org/2005/075)
- SHA-1 - Broken (https://eprint.iacr.org/2017/190)
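
A minimal usage sketch, assuming these packages keep the init/update/final shape used throughout core:crypto:

package legacy_example

import "core:crypto/legacy/md5"

main :: proc() {
	msg := "abc"
	ctx: md5.Context
	md5.init(&ctx)
	md5.update(&ctx, transmute([]byte)(msg))
	digest: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, digest[:])
}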

View File

@@ -1,118 +0,0 @@
package vendor_keccak
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the Keccak hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../../bindings"
/*
High level API
*/
DIGEST_SIZE_512 :: 64
// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
return hash_bytes_512(transmute([]byte)(data))
}
// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
if !load_at_once {
return hash_stream_512(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512(buf[:]), ok
}
}
return [DIGEST_SIZE_512]byte{}, false
}
hash_512 :: proc {
hash_stream_512,
hash_file_512,
hash_bytes_512,
hash_string_512,
hash_bytes_to_buffer_512,
hash_string_to_buffer_512,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context) {
botan.hash_init(ctx, botan.HASH_KECCAK_512, 0)
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}

View File

@@ -1,118 +0,0 @@
package vendor_md5
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial implementation.
Interface for the MD5 hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../../bindings"
/*
High level API
*/
DIGEST_SIZE :: 16
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context) {
botan.hash_init(ctx, botan.HASH_MD5, 0)
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}

View File

@@ -1,118 +0,0 @@
package vendor_sha1
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial implementation.
Interface for the SHA-1 hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../../bindings"
/*
High level API
*/
DIGEST_SIZE :: 20
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context) {
botan.hash_init(ctx, botan.HASH_SHA1, 0)
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}

View File

@@ -1,354 +0,0 @@
package vendor_sha2
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the SHA-2 hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../bindings"
/*
High level API
*/
DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
return hash_bytes_224(transmute([]byte)(data))
}
// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
hash: [DIGEST_SIZE_224]byte
ctx: Context
init(&ctx, hash_size = 224)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 224)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
hash: [DIGEST_SIZE_224]byte
ctx: Context
init(&ctx, hash_size = 224)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
if !load_at_once {
return hash_stream_224(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_224(buf[:]), ok
}
}
return [DIGEST_SIZE_224]byte{}, false
}
hash_224 :: proc {
hash_stream_224,
hash_file_224,
hash_bytes_224,
hash_string_224,
hash_bytes_to_buffer_224,
hash_string_to_buffer_224,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init(&ctx, hash_size = 256)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 256)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init(&ctx, hash_size = 256)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
return hash_bytes_384(transmute([]byte)(data))
}
// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
hash: [DIGEST_SIZE_384]byte
ctx: Context
init(&ctx, hash_size = 384)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 384)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
hash: [DIGEST_SIZE_384]byte
ctx: Context
init(&ctx, hash_size = 384)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
if !load_at_once {
return hash_stream_384(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_384(buf[:]), ok
}
}
return [DIGEST_SIZE_384]byte{}, false
}
hash_384 :: proc {
hash_stream_384,
hash_file_384,
hash_bytes_384,
hash_string_384,
hash_bytes_to_buffer_384,
hash_string_to_buffer_384,
}
// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
return hash_bytes_512(transmute([]byte)(data))
}
// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init(&ctx, hash_size = 512)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 512)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init(&ctx, hash_size = 512)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
if !load_at_once {
return hash_stream_512(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512(buf[:]), ok
}
}
return [DIGEST_SIZE_512]byte{}, false
}
hash_512 :: proc {
hash_stream_512,
hash_file_512,
hash_bytes_512,
hash_string_512,
hash_bytes_to_buffer_512,
hash_string_to_buffer_512,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
switch hash_size {
case 224: botan.hash_init(ctx, botan.HASH_SHA_224, 0)
case 256: botan.hash_init(ctx, botan.HASH_SHA_256, 0)
case 384: botan.hash_init(ctx, botan.HASH_SHA_384, 0)
case 512: botan.hash_init(ctx, botan.HASH_SHA_512, 0)
}
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}
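
A minimal sketch of selecting the digest size through the low-level API via the hash_size named argument; the import path is an assumption for illustration.

package sha2_example

import sha2 "vendor:botan/sha2" // assumed import path

main :: proc() {
	msg := "abc"
	ctx: sha2.Context
	sha2.init(&ctx, hash_size = 384)
	sha2.update(&ctx, transmute([]byte)(msg))
	digest: [sha2.DIGEST_SIZE_384]byte
	sha2.final(&ctx, digest[:])
}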

View File

@@ -1,354 +0,0 @@
package vendor_sha3
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the SHA-3 hashing algorithm. Variants for Keccak and SHAKE can be found in the appropriate packages.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../bindings"
/*
High level API
*/
DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
return hash_bytes_224(transmute([]byte)(data))
}
// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
hash: [DIGEST_SIZE_224]byte
ctx: Context
init(&ctx, hash_size = 224)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 224)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
hash: [DIGEST_SIZE_224]byte
ctx: Context
init(&ctx, hash_size = 224)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
if !load_at_once {
return hash_stream_224(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_224(buf[:]), ok
}
}
return [DIGEST_SIZE_224]byte{}, false
}
hash_224 :: proc {
hash_stream_224,
hash_file_224,
hash_bytes_224,
hash_string_224,
hash_bytes_to_buffer_224,
hash_string_to_buffer_224,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init(&ctx, hash_size = 256)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 256)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init(&ctx, hash_size = 256)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
return hash_bytes_384(transmute([]byte)(data))
}
// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
hash: [DIGEST_SIZE_384]byte
ctx: Context
init(&ctx, hash_size = 384)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 384)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
hash: [DIGEST_SIZE_384]byte
ctx: Context
init(&ctx, hash_size = 384)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
if !load_at_once {
return hash_stream_384(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_384(buf[:]), ok
}
}
return [DIGEST_SIZE_384]byte{}, false
}
hash_384 :: proc {
hash_stream_384,
hash_file_384,
hash_bytes_384,
hash_string_384,
hash_bytes_to_buffer_384,
hash_string_to_buffer_384,
}
// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
return hash_bytes_512(transmute([]byte)(data))
}
// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init(&ctx, hash_size = 512)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 512)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
hash: [DIGEST_SIZE_512]byte
ctx: Context
init(&ctx, hash_size = 512)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
if !load_at_once {
return hash_stream_512(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_512(buf[:]), ok
}
}
return [DIGEST_SIZE_512]byte{}, false
}
hash_512 :: proc {
hash_stream_512,
hash_file_512,
hash_bytes_512,
hash_string_512,
hash_bytes_to_buffer_512,
hash_string_to_buffer_512,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
switch hash_size {
case 224: botan.hash_init(ctx, botan.HASH_SHA3_224, 0)
case 256: botan.hash_init(ctx, botan.HASH_SHA3_256, 0)
case 384: botan.hash_init(ctx, botan.HASH_SHA3_384, 0)
case 512: botan.hash_init(ctx, botan.HASH_SHA3_512, 0)
}
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}

View File

@@ -1,198 +0,0 @@
package vendor_shake
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the SHAKE hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../bindings"
/*
High level API
*/
DIGEST_SIZE_128 :: 16
DIGEST_SIZE_256 :: 32
// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
return hash_bytes_128(transmute([]byte)(data))
}
// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
hash: [DIGEST_SIZE_128]byte
ctx: Context
init(&ctx, hash_size = 128)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 128)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
hash: [DIGEST_SIZE_128]byte
ctx: Context
init(&ctx, hash_size = 128)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
if !load_at_once {
return hash_stream_128(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_128(buf[:]), ok
}
}
return [DIGEST_SIZE_128]byte{}, false
}
hash_128 :: proc {
hash_stream_128,
hash_file_128,
hash_bytes_128,
hash_string_128,
hash_bytes_to_buffer_128,
hash_string_to_buffer_128,
}
// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
return hash_bytes_256(transmute([]byte)(data))
}
// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init(&ctx, hash_size = 256)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx, hash_size = 256)
update(&ctx, data)
final(&ctx, hash)
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
ctx: Context
init(&ctx, hash_size = 256)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
if !load_at_once {
return hash_stream_256(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes_256(buf[:]), ok
}
}
return [DIGEST_SIZE_256]byte{}, false
}
hash_256 :: proc {
hash_stream_256,
hash_file_256,
hash_bytes_256,
hash_string_256,
hash_bytes_to_buffer_256,
hash_string_to_buffer_256,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context, hash_size := 256) {
switch hash_size {
case 128: botan.hash_init(ctx, botan.HASH_SHAKE_128, 0)
case 256: botan.hash_init(ctx, botan.HASH_SHAKE_256, 0)
}
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}

View File

@@ -1,253 +0,0 @@
package vendor_siphash
/*
Copyright 2022 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial implementation.
Interface for the SipHash hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
Use the specific procedures for a given round configuration; the generic procedures default to SipHash-2-4.
*/
import "core:crypto"
import "core:encoding/endian"
import botan "../bindings"
KEY_SIZE :: 16
DIGEST_SIZE :: 8
// sum_string_1_3 will hash the given message with the key and return
// the computed hash as a u64
sum_string_1_3 :: proc(msg, key: string) -> u64 {
return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key))
}
// sum_bytes_1_3 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 {
dst: [8]byte
ctx: botan.mac_t
init(&ctx, key[:], 1, 3)
update(&ctx, msg[:])
final(&ctx, dst[:])
return endian.unchecked_get_u64le(dst[:])
}
// sum_string_to_buffer_1_3 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_1_3 :: proc(msg, key: string, dst: []byte) {
sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst)
}
// sum_bytes_to_buffer_1_3 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_1_3 :: proc(msg, key, dst: []byte) {
assert(len(dst) >= DIGEST_SIZE, "vendor/botan: Destination buffer needs to be at least of size 8")
ctx: botan.mac_t
init(&ctx, key[:], 1, 3)
update(&ctx, msg[:])
final(&ctx, dst[:])
}
sum_1_3 :: proc {
sum_string_1_3,
sum_bytes_1_3,
sum_string_to_buffer_1_3,
sum_bytes_to_buffer_1_3,
}
// verify_u64_1_3 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_u64_1_3 :: proc (tag: u64, msg, key: []byte) -> bool {
return sum_bytes_1_3(msg, key) == tag
}
// verify_bytes_1_3 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_bytes_1_3 :: proc (tag, msg, key: []byte) -> bool {
derived_tag: [8]byte
sum_bytes_to_buffer_1_3(msg, key, derived_tag[:])
return crypto.compare_constant_time(derived_tag[:], tag) == 1
}
verify_1_3 :: proc {
verify_bytes_1_3,
verify_u64_1_3,
}
// sum_string_2_4 will hash the given message with the key and return
// the computed hash as a u64
sum_string_2_4 :: proc(msg, key: string) -> u64 {
return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key))
}
// sum_bytes_2_4 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_2_4 :: proc (msg, key: []byte) -> u64 {
dst: [8]byte
ctx: botan.mac_t
init(&ctx, key[:])
update(&ctx, msg[:])
final(&ctx, dst[:])
return endian.unchecked_get_u64le(dst[:])
}
// sum_string_to_buffer_2_4 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_2_4 :: proc(msg, key: string, dst: []byte) {
sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst)
}
// sum_bytes_to_buffer_2_4 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_2_4 :: proc(msg, key, dst: []byte) {
assert(len(dst) >= DIGEST_SIZE, "vendor/botan: Destination buffer needs to be at least of size 8")
ctx: botan.mac_t
init(&ctx, key[:])
update(&ctx, msg[:])
final(&ctx, dst[:])
}
sum_2_4 :: proc {
sum_string_2_4,
sum_bytes_2_4,
sum_string_to_buffer_2_4,
sum_bytes_to_buffer_2_4,
}
sum_string :: sum_string_2_4
sum_bytes :: sum_bytes_2_4
sum_string_to_buffer :: sum_string_to_buffer_2_4
sum_bytes_to_buffer :: sum_bytes_to_buffer_2_4
sum :: proc {
sum_string,
sum_bytes,
sum_string_to_buffer,
sum_bytes_to_buffer,
}
// verify_u64_2_4 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_u64_2_4 :: proc (tag: u64, msg, key: []byte) -> bool {
return sum_bytes_2_4(msg, key) == tag
}
// verify_bytes_2_4 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_bytes_2_4 :: proc (tag, msg, key: []byte) -> bool {
derived_tag: [8]byte
sum_bytes_to_buffer_2_4(msg, key, derived_tag[:])
return crypto.compare_constant_time(derived_tag[:], tag) == 1
}
verify_2_4 :: proc {
verify_bytes_2_4,
verify_u64_2_4,
}
verify_bytes :: verify_bytes_2_4
verify_u64 :: verify_u64_2_4
verify :: proc {
verify_bytes,
verify_u64,
}
// sum_string_4_8 will hash the given message with the key and return
// the computed hash as a u64
sum_string_4_8 :: proc(msg, key: string) -> u64 {
return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key))
}
// sum_bytes_4_8 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_4_8 :: proc (msg, key: []byte) -> u64 {
dst: [8]byte
ctx: botan.mac_t
init(&ctx, key[:], 4, 8)
update(&ctx, msg[:])
final(&ctx, dst[:])
return endian.unchecked_get_u64le(dst[:])
}
// sum_string_to_buffer_4_8 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_4_8 :: proc(msg, key: string, dst: []byte) {
sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst)
}
// sum_bytes_to_buffer_4_8 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_4_8 :: proc(msg, key, dst: []byte) {
assert(len(dst) >= DIGEST_SIZE, "vendor/botan: Destination buffer needs to be at least of size 8")
ctx: botan.mac_t
init(&ctx, key[:], 4, 8)
update(&ctx, msg[:])
final(&ctx, dst[:])
}
sum_4_8 :: proc {
sum_string_4_8,
sum_bytes_4_8,
sum_string_to_buffer_4_8,
sum_bytes_to_buffer_4_8,
}
// verify_u64_4_8 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_u64_4_8 :: proc (tag: u64, msg, key: []byte) -> bool {
return sum_bytes_4_8(msg, key) == tag
}
// verify_bytes_4_8 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_bytes_4_8 :: proc (tag, msg, key: []byte) -> bool {
derived_tag: [8]byte
sum_bytes_to_buffer_4_8(msg, key, derived_tag[:])
return crypto.compare_constant_time(derived_tag[:], tag) == 1
}
verify_4_8 :: proc {
verify_bytes_4_8,
verify_u64_4_8,
}
/*
Low level API
*/
Context :: botan.mac_t
init :: proc(ctx: ^botan.mac_t, key: []byte, c_rounds := 2, d_rounds := 4) {
assert(len(key) == KEY_SIZE, "vendor/botan: Invalid key size, want 16")
is_valid_setting := (c_rounds == 1 && d_rounds == 3) ||
(c_rounds == 2 && d_rounds == 4) ||
(c_rounds == 4 && d_rounds == 8)
assert(is_valid_setting, "vendor/botan: Invalid round configuration. Valid pairs are (1,3), (2,4) and (4,8)")
if c_rounds == 1 && d_rounds == 3 {
botan.mac_init(ctx, botan.MAC_SIPHASH_1_3, 0)
} else if c_rounds == 2 && d_rounds == 4 {
botan.mac_init(ctx, botan.MAC_SIPHASH_2_4, 0)
} else if c_rounds == 4 && d_rounds == 8 {
botan.mac_init(ctx, botan.MAC_SIPHASH_4_8, 0)
}
botan.mac_set_key(ctx^, len(key) == 0 ? nil : &key[0], uint(len(key)))
}
update :: proc "contextless" (ctx: ^botan.mac_t, data: []byte) {
botan.mac_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc(ctx: ^botan.mac_t, dst: []byte) {
botan.mac_final(ctx^, &dst[0])
reset(ctx)
}
reset :: proc(ctx: ^botan.mac_t) {
botan.mac_destroy(ctx^)
}
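
For reference, a minimal sketch of a SipHash-2-4 round-trip through this package; the import path and the all-zero key are assumptions for illustration (real code should use a random 16-byte key).

package siphash_example

import siphash "vendor:botan/siphash" // assumed import path

main :: proc() {
	key: [siphash.KEY_SIZE]byte // all-zero placeholder key
	msg := "a short message"
	tag := siphash.sum_string_2_4(msg, string(key[:]))
	assert(siphash.verify_u64_2_4(tag, transmute([]byte)(msg), key[:]))
}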

View File

@@ -1,118 +0,0 @@
package vendor_sm3
/*
Copyright 2021 zhibog
Made available under the BSD-3 license.
List of contributors:
zhibog: Initial implementation.
Interface for the SM3 hashing algorithm.
The hash will be computed via bindings to the Botan crypto library
*/
import "core:os"
import "core:io"
import botan "../bindings"
/*
High level API
*/
DIGEST_SIZE :: 32
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
return hash_bytes(transmute([]byte)(data))
}
// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
return hash
}
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
hash_bytes_to_buffer(transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
ctx: Context
init(&ctx)
update(&ctx, data)
final(&ctx, hash[:])
}
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
hash: [DIGEST_SIZE]byte
ctx: Context
init(&ctx)
buf := make([]byte, 512)
defer delete(buf)
i := 1
for i > 0 {
i, _ = io.read(s, buf)
if i > 0 {
update(&ctx, buf[:i])
}
}
final(&ctx, hash[:])
return hash, true
}
// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
if !load_at_once {
return hash_stream(os.stream_from_handle(hd))
} else {
if buf, ok := os.read_entire_file(hd); ok {
return hash_bytes(buf[:]), ok
}
}
return [DIGEST_SIZE]byte{}, false
}
hash :: proc {
hash_stream,
hash_file,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
/*
Low level API
*/
Context :: botan.hash_t
init :: proc "contextless" (ctx: ^Context) {
botan.hash_init(ctx, botan.HASH_SM3, 0)
}
update :: proc "contextless" (ctx: ^Context, data: []byte) {
botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}
final :: proc "contextless" (ctx: ^Context, hash: []byte) {
botan.hash_final(ctx^, &hash[0])
botan.hash_destroy(ctx^)
}