vendor/botan: Remove

This is infrequently maintained, and has been a strict subset of what is available in `core:crypto` for a while. Instead of improving the bindings, it is better to spend resources improving `core:crypto`.
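Since the commit message claims `core:crypto` supersedes the removed wrappers, a minimal sketch of the replacement path may help; it assumes the `core:crypto/legacy/md5` package keeps the `Context`/`init`/`update`/`final` shape that the removed vendor wrappers were designed to mirror (check the current `core:crypto` docs for drift):

```odin
package crypto_example

// Hedged sketch only: the package path and procedure shapes are
// assumptions based on the wrapper API the removed vendor:botan
// README (below) says was matched to core:crypto.
import "core:crypto/legacy/md5"
import "core:fmt"

main :: proc() {
	input := "foo"

	ctx: md5.Context
	digest: [md5.DIGEST_SIZE]byte
	md5.init(&ctx)
	md5.update(&ctx, transmute([]byte)input)
	md5.final(&ctx, digest[:])

	fmt.printf("%x\n", digest)
}
```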
12  .github/workflows/ci.yml  (vendored)
@@ -7,8 +7,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v1
-      - name: Download LLVM, botan
-        run: sudo apt-get install llvm-11 clang-11 libbotan-2-dev botan
+      - name: Download LLVM
+        run: sudo apt-get install llvm-11 clang-11
       - name: build odin
         run: ./build_odin.sh release
       - name: Odin version
@@ -61,9 +61,9 @@ jobs:
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@v1
-      - name: Download LLVM, botan and setup PATH
+      - name: Download LLVM, and setup PATH
         run: |
-          brew install llvm@13 botan
+          brew install llvm@13
           echo "/usr/local/opt/llvm@13/bin" >> $GITHUB_PATH
           TMP_PATH=$(xcrun --show-sdk-path)/user/include
           echo "CPATH=$TMP_PATH" >> $GITHUB_ENV
@@ -102,9 +102,9 @@ jobs:
     runs-on: macos-14 # This is an arm/m1 runner.
     steps:
       - uses: actions/checkout@v1
-      - name: Download LLVM, botan and setup PATH
+      - name: Download LLVM and setup PATH
         run: |
-          brew install llvm@13 botan
+          brew install llvm@13
           echo "/opt/homebrew/opt/llvm@13/bin" >> $GITHUB_PATH
           TMP_PATH=$(xcrun --show-sdk-path)/user/include
           echo "CPATH=$TMP_PATH" >> $GITHUB_ENV
@@ -1,16 +1,5 @@
 package all
 
-import botan_bindings "vendor:botan/bindings"
-import botan_blake2b "vendor:botan/blake2b"
-import keccak "vendor:botan/legacy/keccak"
-import md5 "vendor:botan/legacy/md5"
-import sha1 "vendor:botan/legacy/sha1"
-import sha2 "vendor:botan/sha2"
-import sha3 "vendor:botan/sha3"
-import shake "vendor:botan/shake"
-import siphash "vendor:botan/siphash"
-import sm3 "vendor:botan/sm3"
-
 import cgltf "vendor:cgltf"
 // import commonmark "vendor:commonmark"
 import ENet "vendor:ENet"
@@ -41,18 +30,6 @@ import fontstash "vendor:fontstash"
 import xlib "vendor:x11/xlib"
 
-_ :: botan_bindings
-_ :: botan_blake2b
-_ :: keccak
-_ :: md5
-_ :: sha1
-_ :: sha2
-_ :: sha3
-_ :: shake
-_ :: siphash
-_ :: sm3
-
-
 _ :: cgltf
 // _ :: commonmark
 _ :: ENet
 
5  tests/vendor/Makefile  (vendored)
@@ -7,7 +7,4 @@ ifeq ($(OS), OpenBSD)
 ODINFLAGS:=$(ODINFLAGS) -extra-linker-flags:-L/usr/local/lib
 endif
 
-all: botan_test
-
-botan_test:
-	$(ODIN) run botan -o:speed -no-bounds-check $(ODINFLAGS) -out=vendor_botan
+all:
BIN  tests/vendor/botan-3.dll  (vendored)
Binary file not shown.
409  tests/vendor/botan/test_vendor_botan.odin  (vendored)
@@ -1,409 +0,0 @@
package test_vendor_botan

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog:          Initial implementation.
		Jeroen van Rijn: Test runner setup.

	Tests for the hashing algorithms within the Botan library.
	Where possible, the official test vectors are used to validate the implementation.
*/

import "core:testing"
import "core:fmt"
import "core:os"
import "core:strings"

import "vendor:botan/legacy/md5"
import "vendor:botan/legacy/sha1"
import "vendor:botan/sha2"
import "vendor:botan/sha3"
import "vendor:botan/legacy/keccak"
import "vendor:botan/shake"
import "vendor:botan/blake2b"
import "vendor:botan/sm3"
import "vendor:botan/siphash"

TEST_count := 0
TEST_fail  := 0

when ODIN_TEST {
	expect :: testing.expect
	log    :: testing.log
} else {
	expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) {
		fmt.printf("[%v] ", loc)
		TEST_count += 1
		if !condition {
			TEST_fail += 1
			fmt.println(message)
			return
		}
		fmt.println(" PASS")
	}
	log :: proc(t: ^testing.T, v: any, loc := #caller_location) {
		fmt.printf("[%v] ", loc)
		fmt.printf("log: %v\n", v)
	}
}

main :: proc() {
	t := testing.T{}
	test_md5(&t)
	test_sha1(&t)
	test_sha224(&t)
	test_sha256(&t)
	test_sha384(&t)
	test_sha512(&t)
	test_sha3_224(&t)
	test_sha3_256(&t)
	test_sha3_384(&t)
	test_sha3_512(&t)
	// test_shake_128(&t)
	// test_shake_256(&t)
	test_keccak_512(&t)
	test_blake2b(&t)
	test_sm3(&t)
	test_siphash_2_4(&t)

	fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count)
	if TEST_fail > 0 {
		os.exit(1)
	}
}

TestHash :: struct {
	hash: string,
	str:  string,
}

hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string {
	lut: [16]byte = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}
	buf := make([]byte, len(bytes) * 2, allocator)
	for i := 0; i < len(bytes); i += 1 {
		buf[i * 2 + 0] = lut[bytes[i] >> 4 & 0xf]
		buf[i * 2 + 1] = lut[bytes[i] & 0xf]
	}
	return string(buf)
}

@(test)
test_md5 :: proc(t: ^testing.T) {
	// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
	test_vectors := [?]TestHash {
		TestHash{"d41d8cd98f00b204e9800998ecf8427e", ""},
		TestHash{"0cc175b9c0f1b6a831c399e269772661", "a"},
		TestHash{"900150983cd24fb0d6963f7d28e17f72", "abc"},
		TestHash{"f96b697d7cb7938d525a2f31aaf161d0", "message digest"},
		TestHash{"c3fcd3d76192e4007dfb496cca67e13b", "abcdefghijklmnopqrstuvwxyz"},
		TestHash{"d174ab98d277d9f5a5611c2c9f419d9f", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
		TestHash{"57edf4a22be3c955ac49da2e2107b67a", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
	}
	for v, _ in test_vectors {
		computed := md5.hash(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha1 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
		TestHash{"a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
		TestHash{"f9537c23893d2014f365adf8ffe33b8eb0297ed1", "abcdbcdecdefdefgefghfghighijhi"},
		TestHash{"346fb528a24b48f563cb061470bcfd23740427ad", "jkijkljklmklmnlmnomnopnopq"},
		TestHash{"86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
		TestHash{"c729c8996ee0a6f74f4f3248e8957edf704fb624", "01234567012345670123456701234567"},
		TestHash{"84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"a49b2446a02c645bf419f995b67091253a04a259", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha1.hash(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha224 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	// https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
	data_1_000_000_a := strings.repeat("a", 1_000_000)
	test_vectors := [?]TestHash {
		TestHash{"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f", ""},
		TestHash{"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7", "abc"},
		TestHash{"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
		TestHash{"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67", data_1_000_000_a},
	}
	for v, _ in test_vectors {
		computed := sha2.hash_224(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha256 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", ""},
		TestHash{"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc"},
		TestHash{"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha2.hash_256(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha384 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", ""},
		TestHash{"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7", "abc"},
		TestHash{"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha2.hash_384(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha512 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", ""},
		TestHash{"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", "abc"},
		TestHash{"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha2.hash_512(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha3_224 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7", ""},
		TestHash{"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf", "abc"},
		TestHash{"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e", "abcdbcdecdefdefgefghfghighijhi"},
		TestHash{"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a", "jkijkljklmklmnlmnomnopnopq"},
		TestHash{"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b", "a"},
		TestHash{"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112", "01234567012345670123456701234567"},
		TestHash{"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha3.hash_224(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha3_256 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a", ""},
		TestHash{"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532", "abc"},
		TestHash{"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a", "abcdbcdecdefdefgefghfghighijhi"},
		TestHash{"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98", "jkijkljklmklmnlmnomnopnopq"},
		TestHash{"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b", "a"},
		TestHash{"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767", "01234567012345670123456701234567"},
		TestHash{"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha3.hash_256(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha3_384 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004", ""},
		TestHash{"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25", "abc"},
		TestHash{"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a", "abcdbcdecdefdefgefghfghighijhi"},
		TestHash{"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282", "jkijkljklmklmnlmnomnopnopq"},
		TestHash{"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9", "a"},
		TestHash{"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be", "01234567012345670123456701234567"},
		TestHash{"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha3.hash_384(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sha3_512 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26", ""},
		TestHash{"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0", "abc"},
		TestHash{"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e", "abcdbcdecdefdefgefghfghighijhi"},
		TestHash{"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891", "jkijkljklmklmnlmnomnopnopq"},
		TestHash{"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a", "a"},
		TestHash{"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1", "01234567012345670123456701234567"},
		TestHash{"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
		TestHash{"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
	}
	for v, _ in test_vectors {
		computed := sha3.hash_512(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_shake_128 :: proc(t: ^testing.T) {
	test_vectors := [?]TestHash {
		TestHash{"7f9c2ba4e88f827d616045507605853e", ""},
		TestHash{"f4202e3c5852f9182a0430fd8144f0a7", "The quick brown fox jumps over the lazy dog"},
		TestHash{"853f4538be0db9621a6cea659a06c110", "The quick brown fox jumps over the lazy dof"},
	}
	for v, _ in test_vectors {
		computed := shake.hash_128(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_shake_256 :: proc(t: ^testing.T) {
	test_vectors := [?]TestHash {
		TestHash{"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f", ""},
		TestHash{"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca", "The quick brown fox jumps over the lazy dog"},
		TestHash{"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401", "The quick brown fox jumps over the lazy dof"},
	}
	for v, _ in test_vectors {
		computed := shake.hash_256(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_keccak_512 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
	// https://www.di-mgt.com.au/sha_testvectors.html
	test_vectors := [?]TestHash {
		TestHash{"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e", ""},
		TestHash{"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96", "abc"},
	}
	for v, _ in test_vectors {
		computed := keccak.hash_512(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_blake2b :: proc(t: ^testing.T) {
	test_vectors := [?]TestHash {
		TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""},
		TestHash{"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918", "The quick brown fox jumps over the lazy dog"},
	}
	for v, _ in test_vectors {
		computed := blake2b.hash(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_sm3 :: proc(t: ^testing.T) {
	test_vectors := [?]TestHash {
		TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""},
		TestHash{"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0", "abc"},
		TestHash{"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732", "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"},
		TestHash{"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea", "The quick brown fox jumps over the lazy dog"},
		TestHash{"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098", "The quick brown fox jumps over the lazy cog"},
	}
	for v, _ in test_vectors {
		computed := sm3.hash(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}

@(test)
test_siphash_2_4 :: proc(t: ^testing.T) {
	// Test vectors from
	// https://github.com/veorq/SipHash/blob/master/vectors.h
	test_vectors := [?]u64 {
		0x726fdb47dd0e0e31, 0x74f839c593dc67fd, 0x0d6c8009d9a94f5a, 0x85676696d7fb7e2d,
		0xcf2794e0277187b7, 0x18765564cd99a68d, 0xcbc9466e58fee3ce, 0xab0200f58b01d137,
		0x93f5f5799a932462, 0x9e0082df0ba9e4b0, 0x7a5dbbc594ddb9f3, 0xf4b32f46226bada7,
		0x751e8fbc860ee5fb, 0x14ea5627c0843d90, 0xf723ca908e7af2ee, 0xa129ca6149be45e5,
		0x3f2acc7f57c29bdb, 0x699ae9f52cbe4794, 0x4bc1b3f0968dd39c, 0xbb6dc91da77961bd,
		0xbed65cf21aa2ee98, 0xd0f2cbb02e3b67c7, 0x93536795e3a33e88, 0xa80c038ccd5ccec8,
		0xb8ad50c6f649af94, 0xbce192de8a85b8ea, 0x17d835b85bbb15f3, 0x2f2e6163076bcfad,
		0xde4daaaca71dc9a5, 0xa6a2506687956571, 0xad87a3535c49ef28, 0x32d892fad841c342,
		0x7127512f72f27cce, 0xa7f32346f95978e3, 0x12e0b01abb051238, 0x15e034d40fa197ae,
		0x314dffbe0815a3b4, 0x027990f029623981, 0xcadcd4e59ef40c4d, 0x9abfd8766a33735c,
		0x0e3ea96b5304a7d0, 0xad0c42d6fc585992, 0x187306c89bc215a9, 0xd4a60abcf3792b95,
		0xf935451de4f21df2, 0xa9538f0419755787, 0xdb9acddff56ca510, 0xd06c98cd5c0975eb,
		0xe612a3cb9ecba951, 0xc766e62cfcadaf96, 0xee64435a9752fe72, 0xa192d576b245165a,
		0x0a8787bf8ecb74b2, 0x81b3e73d20b49b6f, 0x7fa8220ba3b2ecea, 0x245731c13ca42499,
		0xb78dbfaf3a8d83bd, 0xea1ad565322a1a0b, 0x60e61c23a3795013, 0x6606d7e446282b93,
		0x6ca4ecb15c5f91e1, 0x9f626da15c9625f3, 0xe51b38608ef25f57, 0x958a324ceb064572,
	}

	key: [16]byte
	for i in 0..<16 {
		key[i] = byte(i)
	}

	for i in 0..<len(test_vectors) {
		data := make([]byte, i)
		for j in 0..<i {
			data[j] = byte(j)
		}

		vector := test_vectors[i]
		computed := siphash.sum_2_4(data[:], key[:])

		expect(t, computed == vector, fmt.tprintf("Expected: 0x%x for input of %v, but got 0x%x instead", vector, data, computed))
	}
}
19  tests/vendor/build.bat  (vendored)
@@ -1,13 +1,8 @@
-@echo off
-set COMMON=-show-timings -no-bounds-check -vet -strict-style
-set PATH_TO_ODIN==..\..\odin
-
-echo ---
-echo Running vendor:botan tests
-echo ---
-%PATH_TO_ODIN% run botan %COMMON% -out:vendor_botan.exe || exit /b
-
-echo ---
-echo Running vendor:glfw tests
-echo ---
+@echo off
+set COMMON=-show-timings -no-bounds-check -vet -strict-style
+set PATH_TO_ODIN==..\..\odin
+
+echo ---
+echo Running vendor:glfw tests
+echo ---
 %PATH_TO_ODIN% run glfw %COMMON% -out:vendor_glfw.exe || exit /b
9  vendor/README.md  (vendored)
@@ -127,15 +127,6 @@ Zero-input latency networking library for peer-to-peer games.
 
 See also LICENSE in the `GGPO` directory itself.
 
-## Botan
-
-[Botan](https://botan.randombit.net/) Crypto and TLS library.
-
-`botan.lib` is available under Botan's [BSD](https://botan.randombit.net/license.txt) license.
-
-See also LICENSE in the `botan` directory itself.
-Includes full bindings as well as wrappers to match the `core:crypto` API.
-
 ## CommonMark
 
 [CMark](https://github.com/commonmark/cmark) CommonMark parsing library.
69  vendor/botan/README.md  (vendored)
@@ -1,69 +0,0 @@
# botan

A wrapper for the Botan cryptography library

## Supported
This library offers full bindings for everything exposed by Botan's FFI.
Wrappers for hashing algorithms have been added to match the API within
the Odin `core:crypto` library.

## Hashing algorithms

| Algorithm                                                                 |     |
|:--------------------------------------------------------------------------|:----|
| [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693)                  | ✔️ |
| [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | ✔️ |
| [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)         | ✔️ |
| [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)         | ✔️ |
| [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02)        | ✔️ |
| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321)               | ✔️ |
| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)             | ✔️ |

#### High level API

Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`.
Included in these groups are six procedures.
- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
- `hash_bytes` - Hash a given byte slice and return the computed hash
- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
- `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
- `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true)

#### Low level API

The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
You may also directly call them, if you wish.

#### Example
```odin
package crypto_example

// Import the desired package
import "vendor:botan/blake2b"

main :: proc() {
	input := "foo"

	// Compute the hash, using the high level API
	computed_hash := blake2b.hash(input)

	// Variant that takes a destination buffer, instead of returning the computed hash
	hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
	blake2b.hash(input, hash[:])

	// Compute the hash, using the low level API
	// @note: Botan's structs are opaque by design, they don't expose any fields
	ctx: blake2b.Context
	computed_hash_low: [blake2b.DIGEST_SIZE]byte
	blake2b.init(&ctx)
	blake2b.update(&ctx, transmute([]byte)input)
	blake2b.final(&ctx, computed_hash_low[:])
}
```
For example uses of all available algorithms, please see the tests within `tests/vendor/botan`.

### License

This library is made available under the BSD-3 license.
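The six-procedure `hash` pattern this README describes is the same shape `core:crypto` exposes; a hedged sketch of its generic, runtime-algorithm variant follows (the `core:crypto/hash` package name, the `hash_string` signature, and the `.SHA256` enum member are assumptions about the then-current core library, not taken from this commit):

```odin
package crypto_example

// Hedged sketch of the core:crypto counterpart to the wrapper API above.
// Assumes core:crypto/hash provides a runtime Algorithm enum and a
// hash_string procedure that allocates and returns the digest.
import "core:crypto/hash"
import "core:fmt"

main :: proc() {
	digest := hash.hash_string(.SHA256, "foo")
	defer delete(digest)
	fmt.printf("%x\n", digest)
}
```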
BIN  vendor/botan/bindings/botan-3.lib  (vendored)
Binary file not shown.
460  vendor/botan/bindings/botan.odin  (vendored)
@@ -1,460 +0,0 @@
package vendor_botan

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog: Initial creation and testing of the bindings.

	Bindings for the Botan crypto library.
	Created for version 2.18.1, using the provided FFI header within Botan.

	The "botan_" prefix has been stripped from the identifiers to remove redundancy,
	since the package is already named botan.
*/

import "core:c"

FFI_ERROR                           :: #type c.int
FFI_SUCCESS                         :: FFI_ERROR(0)
FFI_INVALID_VERIFIER                :: FFI_ERROR(1)
FFI_ERROR_INVALID_INPUT             :: FFI_ERROR(-1)
FFI_ERROR_BAD_MAC                   :: FFI_ERROR(-2)
FFI_ERROR_INSUFFICIENT_BUFFER_SPACE :: FFI_ERROR(-10)
FFI_ERROR_EXCEPTION_THROWN          :: FFI_ERROR(-20)
FFI_ERROR_OUT_OF_MEMORY             :: FFI_ERROR(-21)
FFI_ERROR_BAD_FLAG                  :: FFI_ERROR(-30)
FFI_ERROR_NULL_POINTER              :: FFI_ERROR(-31)
FFI_ERROR_BAD_PARAMETER             :: FFI_ERROR(-32)
FFI_ERROR_KEY_NOT_SET               :: FFI_ERROR(-33)
FFI_ERROR_INVALID_KEY_LENGTH        :: FFI_ERROR(-34)
FFI_ERROR_NOT_IMPLEMENTED           :: FFI_ERROR(-40)
FFI_ERROR_INVALID_OBJECT            :: FFI_ERROR(-50)
FFI_ERROR_UNKNOWN_ERROR             :: FFI_ERROR(-100)

FFI_HEX_LOWER_CASE :: 1

CIPHER_INIT_FLAG_MASK_DIRECTION :: 1
CIPHER_INIT_FLAG_ENCRYPT        :: 0
CIPHER_INIT_FLAG_DECRYPT        :: 1

CIPHER_UPDATE_FLAG_FINAL :: 1 << 0

CHECK_KEY_EXPENSIVE_TESTS :: 1

PRIVKEY_EXPORT_FLAG_DER :: 0
PRIVKEY_EXPORT_FLAG_PEM :: 1

PUBKEY_DER_FORMAT_SIGNATURE :: 1

FPE_FLAG_FE1_COMPAT_MODE :: 1

x509_cert_key_constraints :: #type c.int
NO_CONSTRAINTS    :: x509_cert_key_constraints(0)
DIGITAL_SIGNATURE :: x509_cert_key_constraints(32768)
NON_REPUDIATION   :: x509_cert_key_constraints(16384)
KEY_ENCIPHERMENT  :: x509_cert_key_constraints(8192)
DATA_ENCIPHERMENT :: x509_cert_key_constraints(4096)
KEY_AGREEMENT     :: x509_cert_key_constraints(2048)
KEY_CERT_SIGN     :: x509_cert_key_constraints(1024)
CRL_SIGN          :: x509_cert_key_constraints(512)
ENCIPHER_ONLY     :: x509_cert_key_constraints(256)
DECIPHER_ONLY     :: x509_cert_key_constraints(128)

HASH_SHA1       :: "SHA-1"
HASH_SHA_224    :: "SHA-224"
HASH_SHA_256    :: "SHA-256"
HASH_SHA_384    :: "SHA-384"
HASH_SHA_512    :: "SHA-512"
HASH_SHA3_224   :: "SHA-3(224)"
HASH_SHA3_256   :: "SHA-3(256)"
HASH_SHA3_384   :: "SHA-3(384)"
HASH_SHA3_512   :: "SHA-3(512)"
HASH_SHAKE_128  :: "SHAKE-128"
HASH_SHAKE_256  :: "SHAKE-256"
HASH_KECCAK_512 :: "Keccak-1600"
HASH_BLAKE2B    :: "BLAKE2b"
HASH_MD5        :: "MD5"
HASH_SM3        :: "SM3"

MAC_HMAC_SHA1    :: "HMAC(SHA1)"
MAC_HMAC_SHA_224 :: "HMAC(SHA-224)"
MAC_HMAC_SHA_256 :: "HMAC(SHA-256)"
MAC_HMAC_SHA_384 :: "HMAC(SHA-384)"
MAC_HMAC_SHA_512 :: "HMAC(SHA-512)"
MAC_HMAC_MD5     :: "HMAC(MD5)"

MAC_SIPHASH_1_3 :: "SipHash(1,3)"
MAC_SIPHASH_2_4 :: "SipHash(2,4)"
MAC_SIPHASH_4_8 :: "SipHash(4,8)"

hash_struct          :: struct{}
hash_t               :: ^hash_struct
rng_struct           :: struct{}
rng_t                :: ^rng_struct
mac_struct           :: struct{}
mac_t                :: ^mac_struct
cipher_struct        :: struct{}
cipher_t             :: ^cipher_struct
block_cipher_struct  :: struct{}
block_cipher_t       :: ^block_cipher_struct
mp_struct            :: struct{}
mp_t                 :: ^mp_struct
privkey_struct       :: struct{}
privkey_t            :: ^privkey_struct
pubkey_struct        :: struct{}
pubkey_t             :: ^pubkey_struct
pk_op_encrypt_struct :: struct{}
pk_op_encrypt_t      :: ^pk_op_encrypt_struct
pk_op_decrypt_struct :: struct{}
pk_op_decrypt_t      :: ^pk_op_decrypt_struct
pk_op_sign_struct    :: struct{}
pk_op_sign_t         :: ^pk_op_sign_struct
pk_op_verify_struct  :: struct{}
pk_op_verify_t       :: ^pk_op_verify_struct
pk_op_ka_struct      :: struct{}
pk_op_ka_t           :: ^pk_op_ka_struct
x509_cert_struct     :: struct{}
x509_cert_t          :: ^x509_cert_struct
x509_crl_struct      :: struct{}
x509_crl_t           :: ^x509_crl_struct
hotp_struct          :: struct{}
hotp_t               :: ^hotp_struct
totp_struct          :: struct{}
totp_t               :: ^totp_struct
fpe_struct           :: struct{}
fpe_t                :: ^fpe_struct

when ODIN_OS == .Windows {
	foreign import botan_lib "botan-3.lib"
} else when ODIN_OS == .Darwin {
	foreign import botan_lib "system:botan-3"
} else {
	foreign import botan_lib "system:botan-2"
}

@(default_calling_convention="c")
@(link_prefix="botan_")
foreign botan_lib {
	error_description :: proc(err: c.int) -> cstring ---
	ffi_api_version :: proc() -> c.int ---
	ffi_supports_api :: proc(api_version: c.int) -> c.int ---
	version_string :: proc() -> cstring ---
	version_major :: proc() -> c.int ---
	version_minor :: proc() -> c.int ---
	version_patch :: proc() -> c.int ---
	version_datestamp :: proc() -> c.int ---

	constant_time_compare :: proc(x, y: ^c.char, length: c.size_t) -> c.int ---
	same_mem :: proc(x, y: ^c.char, length: c.size_t) -> c.int ---
	scrub_mem :: proc(mem: rawptr, bytes: c.size_t) -> c.int ---

	hex_encode :: proc(x: ^c.char, length: c.size_t, out: ^c.char, flags: c.uint) -> c.int ---
	hex_decode :: proc(hex_str: cstring, in_len: c.size_t, out: ^c.char, out_len: c.size_t) -> c.int ---

	base64_encode :: proc(x: ^c.char, length: c.size_t, out: ^c.char, out_len: c.size_t) -> c.int ---
	base64_decode :: proc(base64_str: cstring, in_len: c.size_t, out: ^c.char, out_len: c.size_t) -> c.int ---

	rng_init :: proc(rng: ^rng_t, rng_type: cstring) -> c.int ---
	rng_init_custom :: proc(rng_out: ^rng_t, rng_name: cstring, ctx: rawptr,
	                        get_cb: proc(ctx: rawptr, out: ^c.char, out_len: c.size_t) -> ^c.int,
	                        add_entropy_cb: proc(ctx: rawptr, input: ^c.char, length: c.size_t) -> ^c.int,
	                        destroy_cb: proc(ctx: rawptr) -> rawptr) -> c.int ---
	rng_get :: proc(rng: rng_t, out: ^c.char, out_len: c.size_t) -> c.int ---
	rng_reseed :: proc(rng: rng_t, bits: c.size_t) -> c.int ---
	rng_reseed_from_rng :: proc(rng, source_rng: rng_t, bits: c.size_t) -> c.int ---
	rng_add_entropy :: proc(rng: rng_t, entropy: ^c.char, entropy_len: c.size_t) -> c.int ---
	rng_destroy :: proc(rng: rng_t) -> c.int ---

	hash_init :: proc(hash: ^hash_t, hash_name: cstring, flags: c.uint) -> c.int ---
	hash_copy_state :: proc(dest: ^hash_t, source: hash_t) -> c.int ---
	hash_output_length :: proc(hash: hash_t, output_length: ^c.size_t) -> c.int ---
	hash_block_size :: proc(hash: hash_t, block_size: ^c.size_t) -> c.int ---
	hash_update :: proc(hash: hash_t, input: ^c.char, input_len: c.size_t) -> c.int ---
	hash_final :: proc(hash: hash_t, out: ^c.char) -> c.int ---
	hash_clear :: proc(hash: hash_t) -> c.int ---
	hash_destroy :: proc(hash: hash_t) -> c.int ---
	hash_name :: proc(hash: hash_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---

	mac_init :: proc(mac: ^mac_t, hash_name: cstring, flags: c.uint) -> c.int ---
	mac_output_length :: proc(mac: mac_t, output_length: ^c.size_t) -> c.int ---
	mac_set_key :: proc(mac: mac_t, key: ^c.char, key_len: c.size_t) -> c.int ---
	mac_update :: proc(mac: mac_t, buf: ^c.char, length: c.size_t) -> c.int ---
	mac_final :: proc(mac: mac_t, out: ^c.char) -> c.int ---
	mac_clear :: proc(mac: mac_t) -> c.int ---
	mac_name :: proc(mac: mac_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
	mac_get_keyspec :: proc(mac: mac_t, out_minimum_keylength, out_maximum_keylength, out_keylength_modulo: ^c.size_t) -> c.int ---
	mac_destroy :: proc(mac: mac_t) -> c.int ---

	cipher_init :: proc(cipher: ^cipher_t, name: cstring, flags: c.uint) -> c.int ---
	cipher_name :: proc(cipher: cipher_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
	cipher_output_length :: proc(cipher: cipher_t, output_length: ^c.size_t) -> c.int ---
	cipher_valid_nonce_length :: proc(cipher: cipher_t, nl: c.size_t) -> c.int ---
	cipher_get_tag_length :: proc(cipher: cipher_t, tag_size: ^c.size_t) -> c.int ---
	cipher_get_default_nonce_length :: proc(cipher: cipher_t, nl: ^c.size_t) -> c.int ---
	cipher_get_update_granularity :: proc(cipher: cipher_t, ug: ^c.size_t) -> c.int ---
	cipher_query_keylen :: proc(cipher: cipher_t, out_minimum_keylength, out_maximum_keylength: ^c.size_t) -> c.int ---
	cipher_get_keyspec :: proc(cipher: cipher_t, min_keylen, max_keylen, mod_keylen: ^c.size_t) -> c.int ---
	cipher_set_key :: proc(cipher: cipher_t, key: ^c.char, key_len: c.size_t) -> c.int ---
	cipher_reset :: proc(cipher: cipher_t) -> c.int ---
	cipher_set_associated_data :: proc(cipher: cipher_t, ad: ^c.char, ad_len: c.size_t) -> c.int ---
	cipher_start :: proc(cipher: cipher_t, nonce: ^c.char, nonce_len: c.size_t) -> c.int ---
	cipher_update :: proc(cipher: cipher_t, flags: c.uint, output: ^c.char, output_size: c.size_t, output_written: ^c.size_t,
	                      input_bytes: ^c.char, input_size: c.size_t, input_consumed: ^c.size_t) -> c.int ---
	cipher_clear :: proc(hash: cipher_t) -> c.int ---
	cipher_destroy :: proc(cipher: cipher_t) -> c.int ---

	@(deprecated="Use botan.pwdhash")
	pbkdf :: proc(pbkdf_algo: cstring, out: ^c.char, out_len: c.size_t, passphrase: cstring, salt: ^c.char,
	              salt_len, iterations: c.size_t) -> c.int ---
	@(deprecated="Use botan.pwdhash_timed")
	pbkdf_timed :: proc(pbkdf_algo: cstring, out: ^c.char, out_len: c.size_t, passphrase: cstring, salt: ^c.char,
	                    salt_len, milliseconds_to_run: c.size_t, out_iterations_used: ^c.size_t) -> c.int ---
	pwdhash :: proc(algo: cstring, param1, param2, param3: c.size_t, out: ^c.char, out_len: c.size_t, passphrase: cstring,
	                passphrase_len: c.size_t, salt: ^c.char, salt_len: c.size_t) -> c.int ---
	pwdhash_timed :: proc(algo: cstring, msec: c.uint, param1, param2, param3: c.size_t, out: ^c.char, out_len: c.size_t,
	                      passphrase: cstring, passphrase_len: c.size_t, salt: ^c.char, salt_len: c.size_t) -> c.int ---
	@(deprecated="Use botan.pwdhash")
	scrypt :: proc(out: ^c.char, out_len: c.size_t, passphrase: cstring, salt: ^c.char, salt_len, N, r, p: c.size_t) -> c.int ---
	kdf :: proc(kdf_algo: cstring, out: ^c.char, out_len: c.size_t, secret: ^c.char, secret_lent: c.size_t, salt: ^c.char,
	            salt_len: c.size_t, label: ^c.char, label_len: c.size_t) -> c.int ---

	block_cipher_init :: proc(bc: ^block_cipher_t, name: cstring) -> c.int ---
	block_cipher_destroy :: proc(bc: block_cipher_t) -> c.int ---
	block_cipher_clear :: proc(bc: block_cipher_t) -> c.int ---
	block_cipher_set_key :: proc(bc: block_cipher_t, key: ^c.char, key_len: c.size_t) -> c.int ---
	block_cipher_block_size :: proc(bc: block_cipher_t) -> c.int ---
	block_cipher_encrypt_blocks :: proc(bc: block_cipher_t, input, out: ^c.char, blocks: c.size_t) -> c.int ---
	block_cipher_decrypt_blocks :: proc(bc: block_cipher_t, input, out: ^c.char, blocks: c.size_t) -> c.int ---
	block_cipher_name :: proc(bc: block_cipher_t, name: ^c.char, name_len: ^c.size_t) -> c.int ---
	block_cipher_get_keyspec :: proc(bc: block_cipher_t, out_minimum_keylength, out_maximum_keylength, out_keylength_modulo: ^c.size_t) -> c.int ---

	mp_init :: proc(mp: ^mp_t) -> c.int ---
	mp_destroy :: proc(mp: mp_t) -> c.int ---
	mp_to_hex :: proc(mp: mp_t, out: ^c.char) -> c.int ---
	mp_to_str :: proc(mp: mp_t, base: c.char, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	mp_clear :: proc(mp: mp_t) -> c.int ---
	mp_set_from_int :: proc(mp: mp_t, initial_value: c.int) -> c.int ---
	mp_set_from_mp :: proc(dest, source: mp_t) -> c.int ---
	mp_set_from_str :: proc(dest: mp_t, str: cstring) -> c.int ---
	mp_set_from_radix_str :: proc(mp: mp_t, str: cstring, radix: c.size_t) -> c.int ---
	mp_num_bits :: proc(n: mp_t, bits: ^c.size_t) -> c.int ---
	mp_num_bytes :: proc(n: mp_t, bytes: ^c.size_t) -> c.int ---
	mp_to_bin :: proc(mp: mp_t, vec: ^c.char) -> c.int ---
	mp_from_bin :: proc(mp: mp_t, vec: ^c.char, vec_len: c.size_t) -> c.int ---
	mp_to_uint32 :: proc(mp: mp_t, val: ^c.uint) -> c.int ---
	mp_is_positive :: proc(mp: mp_t) -> c.int ---
	mp_is_negative :: proc(mp: mp_t) -> c.int ---
	mp_flip_sign :: proc(mp: mp_t) -> c.int ---
	mp_is_zero :: proc(mp: mp_t) -> c.int ---
	@(deprecated="Use botan.mp_get_bit(0)")
	mp_is_odd :: proc(mp: mp_t) -> c.int ---
	@(deprecated="Use botan.mp_get_bit(0)")
	mp_is_even :: proc(mp: mp_t) -> c.int ---
	mp_add_u32 :: proc(result, x: mp_t, y: c.uint) -> c.int ---
	mp_sub_u32 :: proc(result, x: mp_t, y: c.uint) -> c.int ---
	mp_add :: proc(result, x, y: mp_t) -> c.int ---
	mp_sub :: proc(result, x, y: mp_t) -> c.int ---
	mp_mul :: proc(result, x, y: mp_t) -> c.int ---
	mp_div :: proc(quotient, remainder, x, y: mp_t) -> c.int ---
	mp_mod_mul :: proc(result, x, y, mod: mp_t) -> c.int ---
	mp_equal :: proc(x, y: mp_t) -> c.int ---
	mp_cmp :: proc(result: ^c.int, x, y: mp_t) -> c.int ---
	mp_swap :: proc(x, y: mp_t) -> c.int ---
	mp_powmod :: proc(out, base, exponent, modulus: mp_t) -> c.int ---
	mp_lshift :: proc(out, input: mp_t, shift: c.size_t) -> c.int ---
	mp_rshift :: proc(out, input: mp_t, shift: c.size_t) -> c.int ---
	mp_mod_inverse :: proc(out, input, modulus: mp_t) -> c.int ---
	mp_rand_bits :: proc(rand_out: mp_t, rng: rng_t, bits: c.size_t) -> c.int ---
	mp_rand_range :: proc(rand_out: mp_t, rng: rng_t, lower_bound, upper_bound: mp_t) -> c.int ---
	mp_gcd :: proc(out, x, y: mp_t) -> c.int ---
	mp_is_prime :: proc(n: mp_t, rng: rng_t, test_prob: c.size_t) -> c.int ---
	mp_get_bit :: proc(n: mp_t, bit: c.size_t) -> c.int ---
	mp_set_bit :: proc(n: mp_t, bit: c.size_t) -> c.int ---
	mp_clear_bit :: proc(n: mp_t, bit: c.size_t) -> c.int ---

	bcrypt_generate :: proc(out: ^c.char, out_len: ^c.size_t, password: cstring, rng: rng_t, work_factor: c.size_t, flags: c.uint) -> c.int ---
	bcrypt_is_valid :: proc(pass, hash: cstring) -> c.int ---

	privkey_create :: proc(key: ^privkey_t, algo_name, algo_params: cstring, rng: rng_t) -> c.int ---
	@(deprecated="Use botan.privkey_create")
	privkey_check_key :: proc(key: privkey_t, rng: rng_t, flags: c.uint) -> c.int ---
	@(deprecated="Use botan.privkey_create")
	privkey_create_rsa :: proc(key: ^privkey_t, rng: rng_t, bits: c.size_t) -> c.int ---
	@(deprecated="Use botan.privkey_create")
	privkey_create_ecdsa :: proc(key: ^privkey_t, rng: rng_t, params: cstring) -> c.int ---
	@(deprecated="Use botan.privkey_create")
	privkey_create_ecdh :: proc(key: ^privkey_t, rng: rng_t, params: cstring) -> c.int ---
	@(deprecated="Use botan.privkey_create")
	privkey_create_mceliece :: proc(key: ^privkey_t, rng: rng_t, n, t: c.size_t) -> c.int ---
	@(deprecated="Use botan.privkey_create")
	privkey_create_dh :: proc(key: ^privkey_t, rng: rng_t, param: cstring) -> c.int ---
	privkey_create_dsa :: proc(key: ^privkey_t, rng: rng_t, pbits, qbits: c.size_t) -> c.int ---
	privkey_create_elgamal :: proc(key: ^privkey_t, rng: rng_t, pbits, qbits: c.size_t) -> c.int ---
	privkey_load :: proc(key: ^privkey_t, rng: rng_t, bits: ^c.char, length: c.size_t, password: cstring) -> c.int ---
	privkey_destroy :: proc(key: privkey_t) -> c.int ---
	privkey_export :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, flags: c.uint) -> c.int ---
	privkey_algo_name :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	@(deprecated="Use botan.privkey_export_encrypted_pbkdf_{msec,iter}")
	privkey_export_encrypted :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, rng: rng_t, passphrase, encryption_algo: cstring, flags: c.uint) -> c.int ---
	privkey_export_encrypted_pbkdf_msec :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, rng: rng_t, passphrase: cstring, pbkdf_msec_runtime: c.uint,
	                                            pbkdf_iterations_out: ^c.size_t, cipher_algo, pbkdf_algo: cstring, flags: c.uint) -> c.int ---
	privkey_export_encrypted_pbkdf_iter :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t, rng: rng_t, passphrase: cstring, pbkdf_iterations: c.size_t,
	                                            cipher_algo, pbkdf_algo: cstring, flags: c.uint) -> c.int ---
	pubkey_load :: proc(key: ^pubkey_t, bits: ^c.char, length: c.size_t) -> c.int ---
	privkey_export_pubkey :: proc(out: ^pubkey_t, input: privkey_t) -> c.int ---
	pubkey_export :: proc(key: pubkey_t, out: ^c.char, out_len: ^c.size_t, flags: c.uint) -> c.int ---
	pubkey_algo_name :: proc(key: pubkey_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	pubkey_check_key :: proc(key: pubkey_t, rng: rng_t, flags: c.uint) -> c.int ---
	pubkey_estimated_strength :: proc(key: pubkey_t, estimate: ^c.size_t) -> c.int ---
	pubkey_fingerprint :: proc(key: pubkey_t, hash: cstring, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	pubkey_destroy :: proc(key: pubkey_t) -> c.int ---
	pubkey_get_field :: proc(output: mp_t, key: pubkey_t, field_name: cstring) -> c.int ---
	privkey_get_field :: proc(output: mp_t, key: privkey_t, field_name: cstring) -> c.int ---

	privkey_load_rsa :: proc(key: ^privkey_t, p, q, e: mp_t) -> c.int ---
	privkey_load_rsa_pkcs1 :: proc(key: ^privkey_t, bits: ^c.char, length: c.size_t) -> c.int ---
	@(deprecated="Use botan.privkey_get_field")
	privkey_rsa_get_p :: proc(p: mp_t, rsa_key: privkey_t) -> c.int ---
	@(deprecated="Use botan.privkey_get_field")
	privkey_rsa_get_q :: proc(q: mp_t, rsa_key: privkey_t) -> c.int ---
	@(deprecated="Use botan.privkey_get_field")
	privkey_rsa_get_d :: proc(d: mp_t, rsa_key: privkey_t) -> c.int ---
	@(deprecated="Use botan.privkey_get_field")
	privkey_rsa_get_n :: proc(n: mp_t, rsa_key: privkey_t) -> c.int ---
	@(deprecated="Use botan.privkey_get_field")
	privkey_rsa_get_e :: proc(e: mp_t, rsa_key: privkey_t) -> c.int ---
	privkey_rsa_get_privkey :: proc(rsa_key: privkey_t, out: ^c.char, out_len: ^c.size_t, flags: c.uint) -> c.int ---
	pubkey_load_rsa :: proc(key: ^pubkey_t, n, e: mp_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	pubkey_rsa_get_e :: proc(e: mp_t, rsa_key: pubkey_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	pubkey_rsa_get_n :: proc(n: mp_t, rsa_key: pubkey_t) -> c.int ---

	privkey_load_dsa :: proc(key: ^privkey_t, p, q, g, x: mp_t) -> c.int ---
	pubkey_load_dsa :: proc(key: ^pubkey_t, p, q, g, y: mp_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	privkey_dsa_get_x :: proc(n: mp_t, key: privkey_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	pubkey_dsa_get_p :: proc(p: mp_t, key: pubkey_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	pubkey_dsa_get_q :: proc(q: mp_t, key: pubkey_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	pubkey_dsa_get_g :: proc(d: mp_t, key: pubkey_t) -> c.int ---
	@(deprecated="Use botan.pubkey_get_field")
	pubkey_dsa_get_y :: proc(y: mp_t, key: pubkey_t) -> c.int ---

	privkey_load_dh :: proc(key: ^privkey_t, p, g, y: mp_t) -> c.int ---
	pubkey_load_dh :: proc(key: ^pubkey_t, p, g, x: mp_t) -> c.int ---

	privkey_load_elgamal :: proc(key: ^privkey_t, p, g, y: mp_t) -> c.int ---
	pubkey_load_elgamal :: proc(key: ^pubkey_t, p, g, x: mp_t) -> c.int ---

	privkey_load_ed25519 :: proc(key: ^privkey_t, privkey: [32]c.char) -> c.int ---
	pubkey_load_ed25519 :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---
	privkey_ed25519_get_privkey :: proc(key: ^privkey_t, output: [64]c.char) -> c.int ---
	pubkey_ed25519_get_pubkey :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---

	privkey_load_x25519 :: proc(key: ^privkey_t, privkey: [32]c.char) -> c.int ---
	pubkey_load_x25519 :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---
	privkey_x25519_get_privkey :: proc(key: ^privkey_t, output: [32]c.char) -> c.int ---
	pubkey_x25519_get_pubkey :: proc(key: ^pubkey_t, pubkey: [32]c.char) -> c.int ---

	privkey_load_ecdsa :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
	pubkey_load_ecdsa :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
	pubkey_load_ecdh :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
	privkey_load_ecdh :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
	pubkey_load_sm2 :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
	privkey_load_sm2 :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
	@(deprecated="Use botan.pubkey_load_sm2")
	pubkey_load_sm2_enc :: proc(key: ^pubkey_t, public_x, public_y: mp_t, curve_name: cstring) -> c.int ---
	@(deprecated="Use botan.privkey_load_sm2")
	privkey_load_sm2_enc :: proc(key: ^privkey_t, scalar: mp_t, curve_name: cstring) -> c.int ---
	pubkey_sm2_compute_za :: proc(out: ^c.char, out_len: ^c.size_t, ident, hash_algo: cstring, key: pubkey_t) -> c.int ---

	pk_op_encrypt_create :: proc(op: ^pk_op_encrypt_t, key: pubkey_t, padding: cstring, flags: c.uint) -> c.int ---
	pk_op_encrypt_destroy :: proc(op: pk_op_encrypt_t) -> c.int ---
	pk_op_encrypt_output_length :: proc(op: pk_op_encrypt_t, ptext_len: c.size_t, ctext_len: ^c.size_t) -> c.int ---
	pk_op_encrypt :: proc(op: pk_op_encrypt_t, rng: rng_t, out: ^c.char, out_len: ^c.size_t, plaintext: cstring, plaintext_len: c.size_t) -> c.int ---

	pk_op_decrypt_create :: proc(op: ^pk_op_decrypt_t, key: privkey_t, padding: cstring, flags: c.uint) -> c.int ---
	pk_op_decrypt_destroy :: proc(op: pk_op_decrypt_t) -> c.int ---
	pk_op_decrypt_output_length :: proc(op: pk_op_decrypt_t, ptext_len: c.size_t, ctext_len: ^c.size_t) -> c.int ---
	pk_op_decrypt :: proc(op: pk_op_decrypt_t, rng: rng_t, out: ^c.char, out_len: ^c.size_t, ciphertext: cstring, ciphertext_len: c.size_t) -> c.int ---

	pk_op_sign_create :: proc(op: ^pk_op_sign_t, key: privkey_t, hash_and_padding: cstring, flags: c.uint) -> c.int ---
	pk_op_sign_destroy :: proc(op: pk_op_sign_t) -> c.int ---
	pk_op_sign_output_length :: proc(op: pk_op_sign_t, olen: ^c.size_t) -> c.int ---
	pk_op_sign_update :: proc(op: pk_op_sign_t, input: ^c.char, input_len: c.size_t) -> c.int ---
	pk_op_sign_finish :: proc(op: pk_op_sign_t, rng: rng_t, sig: ^c.char, sig_len: ^c.size_t) -> c.int ---

	pk_op_verify_create :: proc(op: ^pk_op_verify_t, hash_and_padding: cstring, flags: c.uint) -> c.int ---
	pk_op_verify_destroy :: proc(op: pk_op_verify_t) -> c.int ---
	pk_op_verify_update :: proc(op: pk_op_verify_t, input: ^c.char, input_len: c.size_t) -> c.int ---
	pk_op_verify_finish :: proc(op: pk_op_verify_t, sig: ^c.char, sig_len: c.size_t) -> c.int ---

	pk_op_key_agreement_create :: proc(op: ^pk_op_ka_t, kdf: cstring, flags: c.uint) -> c.int ---
	pk_op_key_agreement_destroy :: proc(op: pk_op_ka_t) -> c.int ---
	pk_op_key_agreement_export_public :: proc(key: privkey_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	pk_op_key_agreement_size :: proc(op: pk_op_ka_t, out_len: ^c.size_t) -> c.int ---
	pk_op_key_agreement :: proc(op: pk_op_ka_t, out: ^c.char, out_len: ^c.size_t, other_key: ^c.char, other_key_len: c.size_t, salt: ^c.char,
	                            salt_len: c.size_t) -> c.int ---

	pkcs_hash_id :: proc(hash_name: cstring, pkcs_id: ^c.char, pkcs_id_len: ^c.size_t) -> c.int ---

	@(deprecated="Poorly specified, avoid in new code")
	mceies_encrypt :: proc(mce_key: pubkey_t, rng: rng_t, aead: cstring, pt: ^c.char, pt_len: c.size_t, ad: ^c.char, ad_len: c.size_t,
	                       ct: ^c.char, ct_len: ^c.size_t) -> c.int ---
	@(deprecated="Poorly specified, avoid in new code")
	mceies_decrypt :: proc(mce_key: privkey_t, aead: cstring, ct: ^c.char, ct_len: c.size_t, ad: ^c.char, ad_len: c.size_t, pt: ^c.char,
	                       pt_len: ^c.size_t) -> c.int ---

	x509_cert_load :: proc(cert_obj: ^x509_cert_t, cert: ^c.char, cert_len: c.size_t) -> c.int ---
	x509_cert_load_file :: proc(cert_obj: ^x509_cert_t, filename: cstring) -> c.int ---
	x509_cert_destroy :: proc(cert: x509_cert_t) -> c.int ---
	x509_cert_dup :: proc(new_cert: ^x509_cert_t, cert: x509_cert_t) -> c.int ---
	x509_cert_get_time_starts :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_time_expires :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_not_before :: proc(cert: x509_cert_t, time_since_epoch: ^c.ulonglong) -> c.int ---
	x509_cert_not_after :: proc(cert: x509_cert_t, time_since_epoch: ^c.ulonglong) -> c.int ---
	x509_cert_get_fingerprint :: proc(cert: x509_cert_t, hash: cstring, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_serial_number :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_authority_key_id :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_subject_key_id :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_public_key_bits :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_public_key :: proc(cert: x509_cert_t, key: ^pubkey_t) -> c.int ---
	x509_cert_get_issuer_dn :: proc(cert: x509_cert_t, key: ^c.char, index: c.size_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_get_subject_dn :: proc(cert: x509_cert_t, key: ^c.char, index: c.size_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_to_string :: proc(cert: x509_cert_t, out: ^c.char, out_len: ^c.size_t) -> c.int ---
	x509_cert_allowed_usage :: proc(cert: x509_cert_t, key_usage: c.uint) -> c.int ---
	x509_cert_hostname_match :: proc(cert: x509_cert_t, hostname: cstring) -> c.int ---
	x509_cert_verify :: proc(validation_result: ^c.int, cert: x509_cert_t, intermediates: ^x509_cert_t, intermediates_len: c.size_t, trusted: ^x509_cert_t,
	                         trusted_len: c.size_t, trusted_path: cstring, required_strength: c.size_t, hostname: cstring, reference_time: c.ulonglong) -> c.int ---
	x509_cert_validation_status :: proc(code: c.int) -> cstring ---
	x509_crl_load_file :: proc(crl_obj: ^x509_crl_t, crl_path: cstring) -> c.int ---
	x509_crl_load :: proc(crl_obj: ^x509_crl_t, crl_bits: ^c.char, crl_bits_len: c.size_t) -> c.int ---
	x509_crl_destroy :: proc(crl: x509_crl_t) -> c.int ---
	x509_is_revoked :: proc(crl: x509_crl_t, cert: x509_cert_t) -> c.int ---
	x509_cert_verify_with_crl :: proc(validation_result: ^c.int, cert: x509_cert_t, intermediates: ^x509_cert_t, intermediates_len: c.size_t, trusted: ^x509_cert_t,
	                                  trusted_len: c.size_t, crls: ^x509_crl_t, crls_len: c.size_t, trusted_path: cstring, required_strength: c.size_t,
	                                  hostname: cstring, reference_time: c.ulonglong) -> c.int ---

	key_wrap3394 :: proc(key: ^c.char, key_len: c.size_t, kek: ^c.char, kek_len: c.size_t, wrapped_key: ^c.char, wrapped_key_len: ^c.size_t) -> c.int ---
	key_unwrap3394 :: proc(wrapped_key: ^c.char, wrapped_key_len: c.size_t, kek: ^c.char, kek_len: c.size_t, key: ^c.char, key_len: ^c.size_t) -> c.int ---

	hotp_init :: proc(hotp: ^hotp_t, key: ^c.char, key_len: c.size_t, hash_algo: cstring, digits: c.size_t) -> c.int ---
	hotp_destroy :: proc(hotp: hotp_t) -> c.int ---
	hotp_generate :: proc(hotp: hotp_t, hotp_code: ^c.uint, hotp_counter: c.ulonglong) -> c.int ---
	hotp_check :: proc(hotp: hotp_t, next_hotp_counter: ^c.ulonglong, hotp_code: c.uint, hotp_counter: c.ulonglong, resync_range: c.size_t) -> c.int ---
|
||||
|
||||
totp_init :: proc(totp: ^totp_t, key: ^c.char, key_len: c.size_t, hash_algo: cstring, digits, time_step: c.size_t) -> c.int ---
|
||||
totp_destroy :: proc(totp: totp_t) -> c.int ---
|
||||
totp_generate :: proc(totp: totp_t, totp_code: ^c.uint, timestamp: c.ulonglong) -> c.int ---
|
||||
totp_check :: proc(totp: totp_t, totp_code: ^c.uint, timestamp: c.ulonglong, acceptable_clock_drift: c.size_t) -> c.int ---
|
||||
|
||||
fpe_fe1_init :: proc(fpe: ^fpe_t, n: mp_t, key: ^c.char, key_len, rounds: c.size_t, flags: c.uint) -> c.int ---
|
||||
fpe_destroy :: proc(fpe: fpe_t) -> c.int ---
|
||||
fpe_encrypt :: proc(fpe: fpe_t, x: mp_t, tweak: ^c.char, tweak_len: c.size_t) -> c.int ---
|
||||
fpe_decrypt :: proc(fpe: fpe_t, x: mp_t, tweak: ^c.char, tweak_len: c.size_t) -> c.int ---
|
||||
}
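
For reference, a minimal sketch of driving the one-time-password bindings above from Odin before this removal. The import path and signatures are taken from the deleted file; the key value is the RFC 4226 test vector and everything else is a hypothetical example, not code from this repository:

package hotp_example

import "core:c"
import "core:fmt"
import botan "vendor:botan/bindings"

main :: proc() {
	// RFC 4226 test key: ASCII "12345678901234567890".
	key: [20]byte
	copy(key[:], "12345678901234567890")

	hotp: botan.hotp_t
	// 6-digit codes derived with SHA-1, as in RFC 4226.
	if botan.hotp_init(&hotp, cast(^c.char)&key[0], c.size_t(len(key)), "SHA-1", 6) != 0 {
		return
	}
	defer botan.hotp_destroy(hotp)

	code: c.uint
	if botan.hotp_generate(hotp, &code, 0) == 0 {
		fmt.printf("HOTP(counter = 0) = %06d\n", code)
	}
}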
24
vendor/botan/bindings/license.txt
vendored
@@ -1,24 +0,0 @@
Copyright (C) 1999-2023 The Botan Authors
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
this list of conditions, and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions, and the following disclaimer in the
documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
118
vendor/botan/blake2b/blake2b.odin
vendored
@@ -1,118 +0,0 @@
package vendor_botan_blake2b

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog: Initial implementation.

	Interface for the BLAKE2B hashing algorithm.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../bindings"

/*
	High level API
*/

DIGEST_SIZE :: 64

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context) {
	botan.hash_init(ctx, botan.HASH_BLAKE2B, 0)
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
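
A short usage sketch of the high-level API above, as it looked before this removal; a hypothetical example, with `core:crypto` now providing the equivalent functionality:

package blake2b_example

import "core:fmt"
import blake2b "vendor:botan/blake2b"

main :: proc() {
	data := "hello world"
	// The proc group dispatches on argument types; a lone string
	// selects hash_string and returns a [DIGEST_SIZE]byte array.
	digest := blake2b.hash(data)
	fmt.printf("BLAKE2B: %x\n", digest)
}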
10
vendor/botan/legacy/README.md
vendored
@@ -1,10 +0,0 @@
# crypto/legacy

These are algorithms that are shipped solely for the purpose of
interoperability with legacy systems. The use of these packages in
any other capacity is discouraged, especially those that are known
to be broken.

- keccak - The draft version of the algorithm that became SHA-3
- MD5 - Broken (https://eprint.iacr.org/2005/075)
- SHA-1 - Broken (https://eprint.iacr.org/2017/190)
118
vendor/botan/legacy/keccak/keccak.odin
vendored
@@ -1,118 +0,0 @@
package vendor_keccak

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog, dotbmp: Initial implementation.

	Interface for the Keccak hashing algorithm.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../../bindings"

/*
	High level API
*/

DIGEST_SIZE_512 :: 64

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context) {
	botan.hash_init(ctx, botan.HASH_KECCAK_512, 0)
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
118
vendor/botan/legacy/md5/md5.odin
vendored
@@ -1,118 +0,0 @@
package vendor_md5

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog: Initial implementation.

	Interface for the MD5 hashing algorithm.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../../bindings"

/*
	High level API
*/

DIGEST_SIZE :: 16

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context) {
	botan.hash_init(ctx, botan.HASH_MD5, 0)
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
118
vendor/botan/legacy/sha1/sha1.odin
vendored
@@ -1,118 +0,0 @@
package vendor_sha1

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog: Initial implementation.

	Interface for the SHA-1 hashing algorithm.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../../bindings"

/*
	High level API
*/

DIGEST_SIZE :: 20

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context) {
	botan.hash_init(ctx, botan.HASH_SHA1, 0)
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
354
vendor/botan/sha2/sha2.odin
vendored
@@ -1,354 +0,0 @@
package vendor_sha2

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog, dotbmp: Initial implementation.

	Interface for the SHA-2 hashing algorithm.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../bindings"

/*
	High level API
*/

DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	init(&ctx, hash_size = 224)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 224)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	init(&ctx, hash_size = 224)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	init(&ctx, hash_size = 256)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 256)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	init(&ctx, hash_size = 256)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	init(&ctx, hash_size = 384)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 384)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	init(&ctx, hash_size = 384)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	init(&ctx, hash_size = 512)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 512)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	init(&ctx, hash_size = 512)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
	switch hash_size {
	case 224: botan.hash_init(ctx, botan.HASH_SHA_224, 0)
	case 256: botan.hash_init(ctx, botan.HASH_SHA_256, 0)
	case 384: botan.hash_init(ctx, botan.HASH_SHA_384, 0)
	case 512: botan.hash_init(ctx, botan.HASH_SHA_512, 0)
	}
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
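
A sketch of the size-parameterised low-level API above: the same init/update/final sequence served all four digest widths, selected through the hash_size argument. Hypothetical usage matching the deleted signatures:

package sha2_example

import "core:fmt"
import sha2 "vendor:botan/sha2"

main :: proc() {
	data := "abc"

	ctx: sha2.Context
	sha2.init(&ctx, hash_size = 256)         // selects botan.HASH_SHA_256
	sha2.update(&ctx, transmute([]byte)data)
	digest: [sha2.DIGEST_SIZE_256]byte
	sha2.final(&ctx, digest[:])              // also destroys the context

	fmt.printf("SHA-256: %x\n", digest)
}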
354
vendor/botan/sha3/sha3.odin
vendored
@@ -1,354 +0,0 @@
package vendor_sha3

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog, dotbmp: Initial implementation.

	Interface for the SHA-3 hashing algorithm. Variants for Keccak and SHAKE can be found in the appropriate packages.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../bindings"

/*
	High level API
*/

DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	init(&ctx, hash_size = 224)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 224)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	init(&ctx, hash_size = 224)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	init(&ctx, hash_size = 256)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 256)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	init(&ctx, hash_size = 256)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	init(&ctx, hash_size = 384)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 384)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	init(&ctx, hash_size = 384)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	init(&ctx, hash_size = 512)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 512)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	init(&ctx, hash_size = 512)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
	switch hash_size {
	case 224: botan.hash_init(ctx, botan.HASH_SHA3_224, 0)
	case 256: botan.hash_init(ctx, botan.HASH_SHA3_256, 0)
	case 384: botan.hash_init(ctx, botan.HASH_SHA3_384, 0)
	case 512: botan.hash_init(ctx, botan.HASH_SHA3_512, 0)
	}
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
198
vendor/botan/shake/shake.odin
vendored
@@ -1,198 +0,0 @@
package vendor_shake

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog, dotbmp: Initial implementation.

	Interface for the SHAKE hashing algorithm.
	The hash will be computed via bindings to the Botan crypto library
*/

import "core:os"
import "core:io"

import botan "../bindings"

/*
	High level API
*/

DIGEST_SIZE_128 :: 16
DIGEST_SIZE_256 :: 32

// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
	return hash_bytes_128(transmute([]byte)(data))
}

// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
	hash: [DIGEST_SIZE_128]byte
	ctx: Context
	init(&ctx, hash_size = 128)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 128)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
	hash: [DIGEST_SIZE_128]byte
	ctx: Context
	init(&ctx, hash_size = 128)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
	if !load_at_once {
		return hash_stream_128(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_128(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_128]byte{}, false
}

hash_128 :: proc {
	hash_stream_128,
	hash_file_128,
	hash_bytes_128,
	hash_string_128,
	hash_bytes_to_buffer_128,
	hash_string_to_buffer_128,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	init(&ctx, hash_size = 256)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: Context
	init(&ctx, hash_size = 256)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	init(&ctx, hash_size = 256)
	buf := make([]byte, 512)
	defer delete(buf)
	i := 1
	for i > 0 {
		i, _ = io.read(s, buf)
		if i > 0 {
			update(&ctx, buf[:i])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

/*
	Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context, hash_size := 256) {
	switch hash_size {
	case 128: botan.hash_init(ctx, botan.HASH_SHAKE_128, 0)
	case 256: botan.hash_init(ctx, botan.HASH_SHAKE_256, 0)
	}
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
	botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
	botan.hash_final(ctx^, &hash[0])
	botan.hash_destroy(ctx^)
}
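
The SHAKE package above followed the same pattern with two output widths; a hypothetical call through the overloaded proc groups, using only names from the deleted file:

package shake_example

import "core:fmt"
import shake "vendor:botan/shake"

main :: proc() {
	data := "The quick brown fox jumps over the lazy dog"
	d128 := shake.hash_128(data) // [DIGEST_SIZE_128]byte, SHAKE-128
	d256 := shake.hash_256(data) // [DIGEST_SIZE_256]byte, SHAKE-256
	fmt.printf("SHAKE-128: %x\nSHAKE-256: %x\n", d128, d256)
}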
253
vendor/botan/siphash/siphash.odin
vendored
@@ -1,253 +0,0 @@
|
||||
package vendor_siphash
|
||||
|
||||
/*
|
||||
Copyright 2022 zhibog
|
||||
Made available under the BSD-3 license.
|
||||
|
||||
List of contributors:
|
||||
zhibog: Initial implementation.
|
||||
|
||||
Interface for the SipHash hashing algorithm.
|
||||
The hash will be computed via bindings to the Botan crypto library
|
||||
|
||||
Use the specific procedures for a certain setup. The generic procdedures will default to Siphash 2-4
|
||||
*/
|
||||
|
||||
import "core:crypto"
|
||||
import "core:encoding/endian"
|
||||
|
||||
import botan "../bindings"
|
||||
|
||||
KEY_SIZE :: 16
|
||||
DIGEST_SIZE :: 8
|
||||
|
||||
// sum_string_1_3 will hash the given message with the key and return
|
||||
// the computed hash as a u64
|
||||
sum_string_1_3 :: proc(msg, key: string) -> u64 {
|
||||
return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key))
|
||||
}
|
||||
|
||||
// sum_bytes_1_3 will hash the given message with the key and return
|
||||
// the computed hash as a u64
|
||||
sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 {
|
||||
dst: [8]byte
|
||||
ctx: botan.mac_t
|
||||
init(&ctx, key[:], 1, 3)
|
||||
update(&ctx, msg[:])
|
||||
final(&ctx, dst[:])
|
||||
return endian.unchecked_get_u64le(dst[:])
|
||||
}
|
||||
|
||||
// sum_string_to_buffer_1_3 will hash the given message with the key and write
|
||||
// the computed hash into the provided destination buffer
|
||||
sum_string_to_buffer_1_3 :: proc(msg, key: string, dst: []byte) {
|
||||
sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst)
|
||||
}
|
||||
|
||||
// sum_bytes_to_buffer_1_3 will hash the given message with the key and write
|
||||
// the computed hash into the provided destination buffer
|
||||
sum_bytes_to_buffer_1_3 :: proc(msg, key, dst: []byte) {
|
||||
assert(len(dst) >= DIGEST_SIZE, "vendor/botan: Destination buffer needs to be at least of size 8")
|
||||
ctx: botan.mac_t
|
||||
init(&ctx, key[:], 1, 3)
|
||||
update(&ctx, msg[:])
|
||||
final(&ctx, dst[:])
|
||||
}
|
||||
|
||||
sum_1_3 :: proc {
|
||||
sum_string_1_3,
|
||||
sum_bytes_1_3,
|
||||
sum_string_to_buffer_1_3,
|
||||
sum_bytes_to_buffer_1_3,
|
||||
}
|
||||
|
||||
// verify_u64_1_3 will check if the supplied tag matches with the output you
|
||||
// will get from the provided message and key
|
||||
verify_u64_1_3 :: proc (tag: u64 msg, key: []byte) -> bool {
|
||||
return sum_bytes_1_3(msg, key) == tag
|
||||
}
|
||||
|
||||
// verify_bytes_1_3 will check if the supplied tag matches with the output you
|
||||
// will get from the provided message and key
|
||||
verify_bytes_1_3 :: proc (tag, msg, key: []byte) -> bool {
|
||||
derived_tag: [8]byte
|
||||
sum_bytes_to_buffer_1_3(msg, key, derived_tag[:])
|
||||
return crypto.compare_constant_time(derived_tag[:], tag) == 1
|
||||
}
|
||||
|
||||
verify_1_3 :: proc {
|
||||
verify_bytes_1_3,
|
||||
verify_u64_1_3,
|
||||
}

// sum_string_2_4 will hash the given message with the key and return
// the computed hash as a u64
sum_string_2_4 :: proc(msg, key: string) -> u64 {
    return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key))
}

// sum_bytes_2_4 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_2_4 :: proc(msg, key: []byte) -> u64 {
    dst: [8]byte
    ctx: botan.mac_t
    init(&ctx, key[:])
    update(&ctx, msg[:])
    final(&ctx, dst[:])
    return endian.unchecked_get_u64le(dst[:])
}

// sum_string_to_buffer_2_4 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_2_4 :: proc(msg, key: string, dst: []byte) {
    sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst)
}

// sum_bytes_to_buffer_2_4 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_2_4 :: proc(msg, key, dst: []byte) {
    assert(len(dst) >= DIGEST_SIZE, "vendor/botan: Destination buffer needs to be at least 8 bytes")
    ctx: botan.mac_t
    init(&ctx, key[:])
    update(&ctx, msg[:])
    final(&ctx, dst[:])
}

sum_2_4 :: proc {
    sum_string_2_4,
    sum_bytes_2_4,
    sum_string_to_buffer_2_4,
    sum_bytes_to_buffer_2_4,
}

sum_string           :: sum_string_2_4
sum_bytes            :: sum_bytes_2_4
sum_string_to_buffer :: sum_string_to_buffer_2_4
sum_bytes_to_buffer  :: sum_bytes_to_buffer_2_4

sum :: proc {
    sum_string,
    sum_bytes,
    sum_string_to_buffer,
    sum_bytes_to_buffer,
}

// verify_u64_2_4 will check whether the supplied tag matches the output
// computed from the provided message and key
verify_u64_2_4 :: proc(tag: u64, msg, key: []byte) -> bool {
    return sum_bytes_2_4(msg, key) == tag
}

// verify_bytes_2_4 will check whether the supplied tag matches the output
// computed from the provided message and key
verify_bytes_2_4 :: proc(tag, msg, key: []byte) -> bool {
    derived_tag: [8]byte
    sum_bytes_to_buffer_2_4(msg, key, derived_tag[:])
    return crypto.compare_constant_time(derived_tag[:], tag) == 1
}

verify_2_4 :: proc {
    verify_bytes_2_4,
    verify_u64_2_4,
}

verify_bytes :: verify_bytes_2_4
verify_u64   :: verify_u64_2_4

verify :: proc {
    verify_bytes,
    verify_u64,
}
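
// A minimal sketch of the default aliases, which resolve to the 2-4 round
// variant; the key and message values are illustrative only.
@(private)
_example_sum_and_verify_default :: proc() -> bool {
    example_key: [KEY_SIZE]byte // illustrative; use a random 16-byte key in real code
    example_msg := "attack at dawn"
    tag := sum_bytes(transmute([]byte)(example_msg), example_key[:])
    return verify_u64(tag, transmute([]byte)(example_msg), example_key[:])
}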

// sum_string_4_8 will hash the given message with the key and return
// the computed hash as a u64
sum_string_4_8 :: proc(msg, key: string) -> u64 {
    return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key))
}

// sum_bytes_4_8 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_4_8 :: proc(msg, key: []byte) -> u64 {
    dst: [8]byte
    ctx: botan.mac_t
    init(&ctx, key[:], 4, 8)
    update(&ctx, msg[:])
    final(&ctx, dst[:])
    return endian.unchecked_get_u64le(dst[:])
}

// sum_string_to_buffer_4_8 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_4_8 :: proc(msg, key: string, dst: []byte) {
    sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst)
}

// sum_bytes_to_buffer_4_8 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_4_8 :: proc(msg, key, dst: []byte) {
    assert(len(dst) >= DIGEST_SIZE, "vendor/botan: Destination buffer needs to be at least 8 bytes")
    ctx: botan.mac_t
    init(&ctx, key[:], 4, 8)
    update(&ctx, msg[:])
    final(&ctx, dst[:])
}

sum_4_8 :: proc {
    sum_string_4_8,
    sum_bytes_4_8,
    sum_string_to_buffer_4_8,
    sum_bytes_to_buffer_4_8,
}

// verify_u64_4_8 will check whether the supplied tag matches the output
// computed from the provided message and key
verify_u64_4_8 :: proc(tag: u64, msg, key: []byte) -> bool {
    return sum_bytes_4_8(msg, key) == tag
}

// verify_bytes_4_8 will check whether the supplied tag matches the output
// computed from the provided message and key
verify_bytes_4_8 :: proc(tag, msg, key: []byte) -> bool {
    derived_tag: [8]byte
    sum_bytes_to_buffer_4_8(msg, key, derived_tag[:])
    return crypto.compare_constant_time(derived_tag[:], tag) == 1
}

verify_4_8 :: proc {
    verify_bytes_4_8,
    verify_u64_4_8,
}
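
// A minimal sketch of the buffer-based 4-8 round variant paired with the
// constant-time byte-slice verifier; the key and message are illustrative only.
@(private)
_example_buffer_and_verify_4_8 :: proc() -> bool {
    example_key: [KEY_SIZE]byte // illustrative zero key
    example_msg := "example"
    tag: [8]byte
    sum_bytes_to_buffer_4_8(transmute([]byte)(example_msg), example_key[:], tag[:])
    return verify_bytes_4_8(tag[:], transmute([]byte)(example_msg), example_key[:])
}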

/*
    Low level API
*/

Context :: botan.mac_t

init :: proc(ctx: ^botan.mac_t, key: []byte, c_rounds := 2, d_rounds := 4) {
    assert(len(key) == KEY_SIZE, "vendor/botan: Invalid key size, want 16")
    is_valid_setting := (c_rounds == 1 && d_rounds == 3) ||
                        (c_rounds == 2 && d_rounds == 4) ||
                        (c_rounds == 4 && d_rounds == 8)
    assert(is_valid_setting, "vendor/botan: Incorrect rounds setup. Valid pairs are (1,3), (2,4) and (4,8)")
    if c_rounds == 1 && d_rounds == 3 {
        botan.mac_init(ctx, botan.MAC_SIPHASH_1_3, 0)
    } else if c_rounds == 2 && d_rounds == 4 {
        botan.mac_init(ctx, botan.MAC_SIPHASH_2_4, 0)
    } else if c_rounds == 4 && d_rounds == 8 {
        botan.mac_init(ctx, botan.MAC_SIPHASH_4_8, 0)
    }
    botan.mac_set_key(ctx^, len(key) == 0 ? nil : &key[0], uint(len(key)))
}

update :: proc "contextless" (ctx: ^botan.mac_t, data: []byte) {
    botan.mac_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc(ctx: ^botan.mac_t, dst: []byte) {
    botan.mac_final(ctx^, &dst[0])
    reset(ctx)
}

reset :: proc(ctx: ^botan.mac_t) {
    botan.mac_destroy(ctx^)
}
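
// A minimal sketch of streaming use of the low level API: update may be
// called repeatedly before final, and final also destroys the context via
// reset. The key and chunked input are illustrative only.
@(private)
_example_low_level_streaming :: proc() -> u64 {
    example_key: [KEY_SIZE]byte // illustrative zero key
    dst: [8]byte
    ctx: botan.mac_t
    init(&ctx, example_key[:]) // defaults to SipHash-2-4
    part_one := "part one, "
    part_two := "part two"
    update(&ctx, transmute([]byte)(part_one))
    update(&ctx, transmute([]byte)(part_two))
    final(&ctx, dst[:]) // writes the tag, then destroys the context
    return endian.unchecked_get_u64le(dst[:])
}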

118
vendor/botan/sm3/sm3.odin
vendored
@@ -1,118 +0,0 @@
package vendor_sm3

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog: Initial implementation.

    Interface for the SM3 hashing algorithm.
    The hash will be computed via bindings to the Botan crypto library.
*/

import "core:os"
import "core:io"

import botan "../bindings"

/*
    High level API
*/

DIGEST_SIZE :: 32

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    i := 1
    for i > 0 {
        i, _ = io.read(s, buf)
        if i > 0 {
            update(&ctx, buf[:i])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}
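
// A minimal sketch of the high level API: the one-shot and buffer variants
// produce the same digest. The literal input is illustrative only.
@(private)
_example_hash :: proc() -> [DIGEST_SIZE]byte {
    digest := hash_string("abc")         // one-shot, returns the digest by value
    buf: [DIGEST_SIZE]byte
    hash_string_to_buffer("abc", buf[:]) // buffer variant writes in place
    assert(digest == buf)
    return digest
}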

/*
    Low level API
*/

Context :: botan.hash_t

init :: proc "contextless" (ctx: ^Context) {
    botan.hash_init(ctx, botan.HASH_SM3, 0)
}

update :: proc "contextless" (ctx: ^Context, data: []byte) {
    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
}

final :: proc "contextless" (ctx: ^Context, hash: []byte) {
    botan.hash_final(ctx^, &hash[0])
    botan.hash_destroy(ctx^)
}
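
// A minimal sketch of incremental hashing with the low level API; note that
// final also destroys the underlying Botan hash object, so the context must
// be re-initialized before reuse. The chunked input is illustrative only.
@(private)
_example_hash_incremental :: proc "contextless" () -> [DIGEST_SIZE]byte {
    digest: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    part_one := "ab"
    part_two := "c"
    update(&ctx, transmute([]byte)(part_one))
    update(&ctx, transmute([]byte)(part_two))
    final(&ctx, digest[:]) // finalizes and destroys the context
    return digest
}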