mirror of https://github.com/odin-lang/Odin.git, synced 2026-01-02 11:12:31 +00:00
Merge branch 'master' of https://github.com/FourteenBrush/Odin
.gitattributes (vendored): 1 change

@@ -1 +1,2 @@
*.odin linguist-language=Odin
+* text=auto
.github/workflows/ci.yml (vendored): 2 changes

@@ -163,7 +163,7 @@ jobs:
  run: |
    call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
    cd tests\documentation
-   call build.bat
+   rem call build.bat
  timeout-minutes: 10
- name: core:math/big tests
  shell: cmd
.gitignore (vendored): 27 changes

@@ -25,7 +25,31 @@ bld/
tests/documentation/verify/
tests/documentation/all.odin-doc
tests/internal/test_map
tests/internal/test_pow
tests/internal/test_rtti
tests/core/test_core_compress
tests/core/test_core_filepath
tests/core/test_core_fmt
tests/core/test_core_i18n
tests/core/test_core_image
tests/core/test_core_libc
tests/core/test_core_match
tests/core/test_core_math
tests/core/test_core_net
tests/core/test_core_os_exit
tests/core/test_core_reflect
tests/core/test_core_strings
tests/core/test_crypto_hash
tests/core/test_hash
tests/core/test_hxa
tests/core/test_json
tests/core/test_linalg_glsl_math
tests/core/test_noise
tests/core/test_varint
tests/core/test_xml
tests/core/test_core_slice
tests/core/test_core_thread
tests/vendor/vendor_botan
# Visual Studio 2015 cache/options directory
.vs/
# Visual Studio Code options directory

@@ -290,3 +314,6 @@ shared/
examples/bug/
build.sh
!core/debug/
+
+# RAD debugger project file
+*.raddbg
bin/lld-link.exe: new binary file (not shown)
bin/wasm-ld.exe: new binary file (not shown)
build.bat:

@@ -110,7 +110,8 @@ if %errorlevel% neq 0 goto end_of_build
call build_vendor.bat
if %errorlevel% neq 0 goto end_of_build

-if %release_mode% EQU 0 odin run examples/demo
+rem If the demo doesn't run for you and your CPU is more than a decade old, try -microarch:native
+if %release_mode% EQU 0 odin run examples/demo -- Hellope World

del *.obj > NUL 2> NUL
build_odin.sh:

@@ -27,11 +27,13 @@ error() {
if [ -z "$LLVM_CONFIG" ]; then
	# darwin, linux, openbsd
+	if   [ -n "$(command -v llvm-config-17)" ]; then LLVM_CONFIG="llvm-config-17"
	elif [ -n "$(command -v llvm-config-14)" ]; then LLVM_CONFIG="llvm-config-14"
	elif [ -n "$(command -v llvm-config-13)" ]; then LLVM_CONFIG="llvm-config-13"
	elif [ -n "$(command -v llvm-config-12)" ]; then LLVM_CONFIG="llvm-config-12"
	elif [ -n "$(command -v llvm-config-11)" ]; then LLVM_CONFIG="llvm-config-11"
	# freebsd
+	elif [ -n "$(command -v llvm-config17)" ]; then LLVM_CONFIG="llvm-config-17"
	elif [ -n "$(command -v llvm-config14)" ]; then LLVM_CONFIG="llvm-config-14"
	elif [ -n "$(command -v llvm-config13)" ]; then LLVM_CONFIG="llvm-config-13"
	elif [ -n "$(command -v llvm-config12)" ]; then LLVM_CONFIG="llvm-config-12"
	elif [ -n "$(command -v llvm-config11)" ]; then LLVM_CONFIG="llvm-config-11"

@@ -117,7 +119,7 @@ build_odin() {
}

run_demo() {
-	./odin run examples/demo/demo.odin -file
+	./odin run examples/demo/demo.odin -file -- Hellope World
}

if [ $# -eq 0 ]; then
@@ -110,7 +110,7 @@ typeid_of :: proc($T: typeid) -> typeid ---
swizzle :: proc(x: [N]T, indices: ..int) -> [len(indices)]T ---

complex    :: proc(real, imag: Float) -> Complex_Type ---
-quaternion :: proc(real, imag, jmag, kmag: Float) -> Quaternion_Type ---
+quaternion :: proc(imag, jmag, kmag, real: Float) -> Quaternion_Type --- // fields must be named
real       :: proc(value: Complex_Or_Quaternion) -> Float ---
imag       :: proc(value: Complex_Or_Quaternion) -> Float ---
jmag       :: proc(value: Quaternion) -> Float ---
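The changed `quaternion` signature above requires named arguments at the call site. A minimal sketch of what that looks like, written against the parameter names shown in this hunk (the variable names are illustrative):

```odin
package quaternion_example

import "core:fmt"

main :: proc() {
	// The parameters are declared in imag, jmag, kmag, real order,
	// but since the arguments must be named, any order works here.
	q: quaternion128 = quaternion(real = 1, imag = 0, jmag = 0, kmag = 0)
	fmt.println(real(q), imag(q), jmag(q), kmag(q))
}
```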
@@ -895,7 +895,7 @@ split_multi_iterator :: proc(s: ^[]byte, substrs: [][]byte, skip_empty := false)

-// scrub scruvs invalid utf-8 characters and replaces them with the replacement string
+// Scrubs invalid utf-8 characters and replaces them with the replacement string
// Adjacent invalid bytes are only replaced once
scrub :: proc(s: []byte, replacement: []byte, allocator := context.allocator) -> []byte {
	str := s
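A short usage sketch for the documented behaviour of `scrub` (hypothetical input; assumes the `core:strings` signature shown in this hunk):

```odin
package scrub_example

import "core:fmt"
import "core:strings"

main :: proc() {
	// 0xff can never appear in valid UTF-8; the two adjacent
	// invalid bytes are replaced by a single "?".
	input := []byte{'a', 0xff, 0xff, 'b'}
	cleaned := strings.scrub(input, []byte{'?'})
	defer delete(cleaned)
	fmt.println(string(cleaned)) // "a?b"
}
```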
@@ -1,5 +1,7 @@
package libc

+import "core:io"
+
when ODIN_OS == .Windows {
	foreign import libc {
		"system:libucrt.lib",
@@ -218,3 +220,102 @@ foreign libc {
	ferror :: proc(stream: ^FILE) -> int ---
	perror :: proc(s: cstring) ---
}

to_stream :: proc(file: ^FILE) -> io.Stream {
	stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, offset: i64, whence: io.Seek_From) -> (n: i64, err: io.Error) {
		unknown_or_eof :: proc(f: ^FILE) -> io.Error {
			switch {
			case ferror(f) != 0:
				return .Unknown
			case feof(f) != 0:
				return .EOF
			case:
				return nil
			}
		}

		file := (^FILE)(stream_data)
		switch mode {
		case .Close:
			if fclose(file) != 0 {
				return 0, unknown_or_eof(file)
			}

		case .Flush:
			if fflush(file) != 0 {
				return 0, unknown_or_eof(file)
			}

		case .Read:
			n = i64(fread(raw_data(p), size_of(byte), len(p), file))
			if n == 0 { err = unknown_or_eof(file) }

		case .Read_At:
			curr := ftell(file)
			if curr == -1 {
				return 0, unknown_or_eof(file)
			}

			if fseek(file, long(offset), SEEK_SET) != 0 {
				return 0, unknown_or_eof(file)
			}

			defer fseek(file, long(curr), SEEK_SET)

			n = i64(fread(raw_data(p), size_of(byte), len(p), file))
			if n == 0 { err = unknown_or_eof(file) }

		case .Write:
			n = i64(fwrite(raw_data(p), size_of(byte), len(p), file))
			if n == 0 { err = unknown_or_eof(file) }

		case .Write_At:
			curr := ftell(file)
			if curr == -1 {
				return 0, unknown_or_eof(file)
			}

			if fseek(file, long(offset), SEEK_SET) != 0 {
				return 0, unknown_or_eof(file)
			}

			defer fseek(file, long(curr), SEEK_SET)

			n = i64(fwrite(raw_data(p), size_of(byte), len(p), file))
			if n == 0 { err = unknown_or_eof(file) }

		case .Seek:
			if fseek(file, long(offset), int(whence)) != 0 {
				return 0, unknown_or_eof(file)
			}

		case .Size:
			curr := ftell(file)
			if curr == -1 {
				return 0, unknown_or_eof(file)
			}
			defer fseek(file, curr, SEEK_SET)

			if fseek(file, 0, SEEK_END) != 0 {
				return 0, unknown_or_eof(file)
			}

			n = i64(ftell(file))
			if n == -1 {
				return 0, unknown_or_eof(file)
			}

		case .Destroy:
			return 0, .Empty

		case .Query:
			return io.query_utility({ .Close, .Flush, .Read, .Read_At, .Write, .Write_At, .Seek, .Size })
		}
		return
	}

	return {
		data      = file,
		procedure = stream_proc,
	}
}
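As a usage sketch, the new `to_stream` lets CRT `FILE` handles participate in `core:io` generically (the file name is hypothetical; `fopen`/`fclose` are the bindings from this same package):

```odin
package to_stream_example

import "core:c/libc"
import "core:io"

main :: proc() {
	f := libc.fopen("example.txt", "rb") // hypothetical path
	if f == nil {
		return
	}
	defer libc.fclose(f)

	// Wrap the FILE in a generic io.Stream.
	s := libc.to_stream(f)

	buf: [64]byte
	n, err := io.read(s, buf[:])
	if err == .None || err == .EOF {
		_ = buf[:n] // use the bytes read
	}
}
```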
@@ -2,6 +2,8 @@ package libc

import "core:c"

+#assert(!ODIN_NO_CRT, `"core:c/libc" cannot be imported when '-no-crt' is used`)
+
char :: c.char // assuming -funsigned-char

schar :: c.schar
@@ -20,10 +20,9 @@ import "core:runtime"

*/

-/*
-	When a decompression routine doesn't stream its output, but writes to a buffer,
-	we pre-allocate an output buffer to speed up decompression. The default is 1 MiB.
-*/
+// When a decompression routine doesn't stream its output, but writes to a buffer,
+// we pre-allocate an output buffer to speed up decompression. The default is 1 MiB.
COMPRESS_OUTPUT_ALLOCATE_MIN :: int(#config(COMPRESS_OUTPUT_ALLOCATE_MIN, 1 << 20))

/*

@@ -34,16 +33,14 @@ COMPRESS_OUTPUT_ALLOCATE_MIN :: int(#config(COMPRESS_OUTPUT_ALLOCATE_MIN, 1 << 2

*/
when size_of(uintptr) == 8 {
-	/*
-		For 64-bit platforms, we set the default max buffer size to 4 GiB,
-		which is GZIP and PKZIP's max payload size.
-	*/
+	// For 64-bit platforms, we set the default max buffer size to 4 GiB,
+	// which is GZIP and PKZIP's max payload size.
	COMPRESS_OUTPUT_ALLOCATE_MAX :: int(#config(COMPRESS_OUTPUT_ALLOCATE_MAX, 1 << 32))
} else {
-	/*
-		For 32-bit platforms, we set the default max buffer size to 512 MiB.
-	*/
-	COMPRESS_OUTPUT_ALLOCATE_MAX :: int(#config(COMPRESS_OUTPUT_ALLOCATE_MAX, 1 << 29))
+	// For 32-bit platforms, we set the default max buffer size to 512 MiB.
+	COMPRESS_OUTPUT_ALLOCATE_MAX :: int(#config(COMPRESS_OUTPUT_ALLOCATE_MAX, 1 << 29))
}
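Both limits are plain `#config` values, so they can be inspected from user code and overridden at build time. A minimal sketch (the override value is only an example):

```odin
package compress_config_example

import "core:compress"
import "core:fmt"

// Build with, for example:
//   odin run . -define:COMPRESS_OUTPUT_ALLOCATE_MAX=268435456
// and the MAX constant below becomes 256 MiB instead of the default.
main :: proc() {
	fmt.println("output prealloc min:", compress.COMPRESS_OUTPUT_ALLOCATE_MIN)
	fmt.println("output prealloc max:", compress.COMPRESS_OUTPUT_ALLOCATE_MAX)
}
```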
@@ -69,9 +66,8 @@ General_Error :: enum {
	Incompatible_Options,
	Unimplemented,

-	/*
-		Memory errors
-	*/
+	// Memory errors

	Allocation_Failed,
	Resize_Failed,
}
@@ -86,17 +82,16 @@ GZIP_Error :: enum {
	Payload_Length_Invalid,
	Payload_CRC_Invalid,

-	/*
-		GZIP's payload can be a maximum of max(u32le), or 4 GiB.
-		If you tell it you expect it to contain more, that's obviously an error.
-	*/
-	Payload_Size_Exceeds_Max_Payload,
-	/*
-		For buffered instead of streamed output, the payload size can't exceed
-		the max set by the `COMPRESS_OUTPUT_ALLOCATE_MAX` switch in compress/common.odin.
-
-		You can tweak this setting using `-define:COMPRESS_OUTPUT_ALLOCATE_MAX=size_in_bytes`
-	*/
+	// GZIP's payload can be a maximum of max(u32le), or 4 GiB.
+	// If you tell it you expect it to contain more, that's obviously an error.
+	Payload_Size_Exceeds_Max_Payload,
+
+	// For buffered instead of streamed output, the payload size can't exceed
+	// the max set by the `COMPRESS_OUTPUT_ALLOCATE_MAX` switch in compress/common.odin.
+	//
+	// You can tweak this setting using `-define:COMPRESS_OUTPUT_ALLOCATE_MAX=size_in_bytes`
	Output_Exceeds_COMPRESS_OUTPUT_ALLOCATE_MAX,
}
@@ -137,9 +132,8 @@ Context_Memory_Input :: struct #packed {
	code_buffer: u64,
	num_bits:    u64,

-	/*
-		If we know the data size, we can optimize the reads and writes.
-	*/
+	// If we know the data size, we can optimize the reads and writes.
	size_packed:   i64,
	size_unpacked: i64,
}
@@ -159,18 +153,16 @@ Context_Stream_Input :: struct #packed {
	code_buffer: u64,
	num_bits:    u64,

-	/*
-		If we know the data size, we can optimize the reads and writes.
-	*/
+	// If we know the data size, we can optimize the reads and writes.
	size_packed:   i64,
	size_unpacked: i64,

-	/*
-		Flags:
-			`input_fully_in_memory`
-				true  = This tells us we read input from `input_data` exclusively. [] = EOF.
-				false = Try to refill `input_data` from the `input` stream.
-	*/
+	// Flags:
+	//	`input_fully_in_memory`
+	//		true  = This tells us we read input from `input_data` exclusively. [] = EOF.
+	//		false = Try to refill `input_data` from the `input` stream.
	input_fully_in_memory: b8,

	padding: [1]u8,
@@ -214,7 +206,7 @@ read_slice_from_memory :: #force_inline proc(z: ^Context_Memory_Input, size: int
@(optimization_mode="speed")
read_slice_from_stream :: #force_inline proc(z: ^Context_Stream_Input, size: int) -> (res: []u8, err: io.Error) {
	// TODO: REMOVE ALL USE OF context.temp_allocator here
-	// the is literally no need for it
+	// there is literally no need for it
	b := make([]u8, size, context.temp_allocator)
	_ = io.read(z.input, b[:]) or_return
	return b, nil
@@ -248,10 +240,8 @@ read_u8_from_stream :: #force_inline proc(z: ^Context_Stream_Input) -> (res: u8,

read_u8 :: proc{read_u8_from_memory, read_u8_from_stream}

-/*
-	You would typically only use this at the end of Inflate, to drain bits from the code buffer
-	preferentially.
-*/
+// You would typically only use this at the end of Inflate, to drain bits from the code buffer
+// preferentially.
@(optimization_mode="speed")
read_u8_prefer_code_buffer_lsb :: #force_inline proc(z: ^$C) -> (res: u8, err: io.Error) {
	if z.num_bits >= 8 {
@@ -140,3 +140,18 @@ remove :: proc(pq: ^$Q/Priority_Queue($T), i: int) -> (value: T, ok: bool) {
	return
}

peek_safe :: proc(pq: $Q/Priority_Queue($T), loc := #caller_location) -> (res: T, ok: bool) {
	if builtin.len(pq.queue) > 0 {
		return pq.queue[0], true
	}
	return
}

peek :: proc(pq: $Q/Priority_Queue($T), loc := #caller_location) -> (res: T) {
	assert(condition=builtin.len(pq.queue)>0, loc=loc)

	if builtin.len(pq.queue) > 0 {
		return pq.queue[0]
	}
	return
}
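A sketch of the difference between the two new procedures (this assumes the package's existing `init`/`push` signatures; `less` and `swap` are user-supplied here):

```odin
package pq_example

import "core:container/priority_queue"
import "core:fmt"

less :: proc(a, b: int) -> bool { return a < b }
swap :: proc(q: []int, i, j: int) { q[i], q[j] = q[j], q[i] }

main :: proc() {
	pq: priority_queue.Priority_Queue(int)
	priority_queue.init(&pq, less, swap)
	defer priority_queue.destroy(&pq)

	priority_queue.push(&pq, 3)
	priority_queue.push(&pq, 1)

	// peek_safe reports emptiness instead of asserting.
	if top, ok := priority_queue.peek_safe(pq); ok {
		fmt.println(top) // 1, with a min-heap `less`
	}

	// peek asserts that the queue is non-empty.
	fmt.println(priority_queue.peek(pq))
}
```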
@@ -22,7 +22,9 @@ init :: proc(q: ^$Q/Queue($T), capacity := DEFAULT_CAPACITY, allocator := contex
	return reserve(q, capacity)
}

-// Procedure to initialize a queue from a fixed backing slice
+// Procedure to initialize a queue from a fixed backing slice.
+// The contents of the `backing` will be overwritten as items are pushed onto the `Queue`.
+// Any previous contents are not available.
init_from_slice :: proc(q: ^$Q/Queue($T), backing: []T) -> bool {
	clear(q)
	q.data = transmute([dynamic]T)runtime.Raw_Dynamic_Array{
@@ -34,6 +36,21 @@ init_from_slice :: proc(q: ^$Q/Queue($T), backing: []T) -> bool {
	return true
}

// Procedure to initialize a queue from a fixed backing slice.
// Existing contents are preserved and available on the queue.
init_with_contents :: proc(q: ^$Q/Queue($T), backing: []T) -> bool {
	clear(q)
	q.data = transmute([dynamic]T)runtime.Raw_Dynamic_Array{
		data      = raw_data(backing),
		len       = builtin.len(backing),
		cap       = builtin.len(backing),
		allocator = {procedure=runtime.nil_allocator_proc, data=nil},
	}
	q.len = len(backing)
	q.offset = len(backing)
	return true
}

// Procedure to destroy a queue
destroy :: proc(q: ^$Q/Queue($T)) {
	delete(q.data)
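A sketch contrasting the two initializers (per the doc comments above, `init_from_slice` treats the backing purely as storage, while the new `init_with_contents` starts with the slice's values already enqueued):

```odin
package queue_example

import "core:container/queue"
import "core:fmt"

main :: proc() {
	backing := [4]int{10, 20, 30, 40}

	// Backing slice used as scratch storage only.
	q1: queue.Queue(int)
	queue.init_from_slice(&q1, backing[:])
	fmt.println(queue.len(q1)) // 0

	// Existing values are already on the queue.
	q2: queue.Queue(int)
	queue.init_with_contents(&q2, backing[:])
	fmt.println(queue.len(q2))        // 4
	fmt.println(queue.pop_front(&q2)) // 10
}
```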
@@ -1,95 +1,86 @@
# crypto
-A crypto library for the Odin language
+A cryptography library for the Odin language

## Supported

This library offers various algorithms implemented in Odin.
-Please see the chart below for the options.
+Please see the chart below for some of the options.

## Hashing algorithms

| Algorithm | |
|:----------|:-|
| [BLAKE](https://web.archive.org/web/20190915215948/https://131002.net/blake) | ✔️ |
| [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ |
| [BLAKE2S](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ |
| [GOST](https://datatracker.ietf.org/doc/html/rfc5831) | ✔️ |
| [Grøstl](http://www.groestl.info/Groestl.zip) | ✔️ |
| [HAVAL](https://web.archive.org/web/20150111210116/http://labs.calyptix.com/haval.php) | ✔️ |
| [JH](https://www3.ntu.edu.sg/home/wuhj/research/jh/index.html) | ✔️ |
| [Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| [MD2](https://datatracker.ietf.org/doc/html/rfc1319) | ✔️ |
| [MD4](https://datatracker.ietf.org/doc/html/rfc1320) | ✔️ |
| [MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ |
| [RIPEMD](https://homes.esat.kuleuven.be/~bosselae/ripemd160.html) | ✔️ |
| [SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ |
| [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | ✔️ |
| [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02) | ✔️ |
| [Streebog](https://datatracker.ietf.org/doc/html/rfc6986) | ✔️ |
| [Tiger](https://www.cs.technion.ac.il/~biham/Reports/Tiger/) | ✔️ |
| [Tiger2](https://www.cs.technion.ac.il/~biham/Reports/Tiger/) | ✔️ |
| [Whirlpool](https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html) | ✔️ |
| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ |
| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ |
| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ |

#### High level API
-Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`\*.
-Included in these groups are six procedures.
-* `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
-* `hash_bytes` - Hash a given byte slice and return the computed hash
-* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
-* `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
-* `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
-* `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true)
-
-\* On some algorithms there is another part to the name, since they might offer control about additional parameters.
-For instance, `HAVAL` offers different sizes as well as three different round amounts.
-Computing a 256-bit hash with 3 rounds is therefore achieved by calling `haval.hash_256_3(...)`.
+Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`\*.
+Included in these groups are six procedures.
+- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
+- `hash_bytes` - Hash a given byte slice and return the computed hash
+- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
+- `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
+- `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
+- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true)
+
+\* On some algorithms there is another part to the name, since they might offer control over additional parameters.
+For instance, `SHA-2` offers different sizes.
+Computing a 512-bit hash is therefore achieved by calling `sha2.hash_512(...)`.

#### Low level API

The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
You may also directly call them, if you wish.

#### Example

```odin
package crypto_example

// Import the desired package
-import "core:crypto/md4"
+import "core:crypto/blake2b"

main :: proc() {
    input := "foo"

    // Compute the hash, using the high level API
-    computed_hash := md4.hash(input)
+    computed_hash := blake2b.hash(input)

    // Variant that takes a destination buffer, instead of returning the computed hash
-    hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
-    md4.hash(input, hash[:])
+    hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
+    blake2b.hash(input, hash[:])

    // Compute the hash, using the low level API
-    ctx: md4.Md4_Context
-    computed_hash_low: [16]byte
-    md4.init(&ctx)
-    md4.update(&ctx, transmute([]byte)input)
-    md4.final(&ctx, computed_hash_low[:])
+    ctx: blake2b.Context
+    computed_hash_low: [blake2b.DIGEST_SIZE]byte
+    blake2b.init(&ctx)
+    blake2b.update(&ctx, transmute([]byte)input)
+    blake2b.final(&ctx, computed_hash_low[:])
}
```
For example uses of all available algorithms, please see the tests within `tests/core/crypto`.

-#### Thread safety
-The crypto package is not thread-safe at the moment. This may change in the future.
-
-### Disclaimer
-The algorithms were ported out of curiosity and due to interest in the field.
-We have not had any of the code verified by a third party or tested/fuzzed by any automatic means.
-Wherever we were able to find official test vectors, those were used to verify the implementation.
-We do not recommend using them in a production environment, without any additional testing and/or verification.
+## Implementation considerations
+
+- The crypto packages are not thread-safe.
+- Best-effort is made to mitigate timing side-channels on reasonable
+  architectures. Architectures that are known to be unreasonable include
+  but are not limited to i386, i486, and WebAssembly.
+- Some but not all of the packages attempt to sanitize sensitive data,
+  however this is not done consistently through the library at the moment.
+  As Thomas Pornin puts it "In general, such memory cleansing is a fool's
+  quest."
+- All of these packages have not received independent third party review.

-### ToDo
-* Ciphers (Symmetric, Asymmetric)
-* MACs (Message Authentication Code)
-* CSPRNGs (Cryptographically Secure PseudoRandom Number Generator)
-* KDFs (Key Derivation Function)
-* KEAs (Key Exchange Algorithm)
-
-### License
+## License
+
This library is made available under the BSD-3 license.
@@ -10,12 +10,12 @@ package _blake2
	Implementation of the BLAKE2 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/rfc7693> and <https://www.blake2.net/>
*/

-import "../util"
+import "core:encoding/endian"

-BLAKE2S_BLOCK_SIZE :: 64
-BLAKE2S_SIZE :: 32
-BLAKE2B_BLOCK_SIZE :: 128
-BLAKE2B_SIZE :: 64
+BLAKE2S_BLOCK_SIZE :: 64
+BLAKE2S_SIZE       :: 32
+BLAKE2B_BLOCK_SIZE :: 128
+BLAKE2B_SIZE       :: 64

Blake2s_Context :: struct {
	h: [8]u32,
@@ -28,7 +28,9 @@ Blake2s_Context :: struct {
	is_keyed:     bool,
	size:         byte,
	is_last_node: bool,
	cfg:          Blake2_Config,
+
+	is_initialized: bool,
}

Blake2b_Context :: struct {
@@ -42,15 +44,19 @@ Blake2b_Context :: struct {
	is_keyed:     bool,
	size:         byte,
	is_last_node: bool,
	cfg:          Blake2_Config,
+
+	is_initialized: bool,
}

Blake2_Config :: struct {
-	size: byte,
-	key: []byte,
-	salt: []byte,
+	size:   byte,
+	key:    []byte,
+	salt:   []byte,
	person: []byte,
-	tree: union{Blake2_Tree},
+	tree: union {
+		Blake2_Tree,
+	},
}

Blake2_Tree :: struct {
@@ -63,11 +69,13 @@ Blake2_Tree :: struct {
	is_last_node: bool,
}

@(private)
BLAKE2S_IV := [8]u32 {
	0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
	0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
}

@(private)
BLAKE2B_IV := [8]u64 {
	0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
	0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
@@ -78,8 +86,14 @@ BLAKE2B_IV := [8]u64 {
init :: proc(ctx: ^$T) {
	when T == Blake2s_Context {
		block_size :: BLAKE2S_BLOCK_SIZE
+		max_size :: BLAKE2S_SIZE
	} else when T == Blake2b_Context {
		block_size :: BLAKE2B_BLOCK_SIZE
+		max_size :: BLAKE2B_SIZE
	}

+	if ctx.cfg.size > max_size {
+		panic("blake2: requested output size exceeds algorithm max")
+	}
+
	p := make([]byte, block_size)
@@ -106,10 +120,10 @@ init :: proc(ctx: ^$T) {
	if ctx.cfg.tree != nil {
		p[2] = ctx.cfg.tree.(Blake2_Tree).fanout
		p[3] = ctx.cfg.tree.(Blake2_Tree).max_depth
-		util.PUT_U32_LE(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
+		endian.unchecked_put_u32le(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
		when T == Blake2s_Context {
-			p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
-			p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
+			p[8]  = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
+			p[9]  = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
			p[10] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 16)
			p[11] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 24)
			p[12] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 32)
@@ -117,7 +131,7 @@ init :: proc(ctx: ^$T) {
			p[14] = ctx.cfg.tree.(Blake2_Tree).node_depth
			p[15] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
		} else when T == Blake2b_Context {
-			util.PUT_U64_LE(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
+			endian.unchecked_put_u64le(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
			p[16] = ctx.cfg.tree.(Blake2_Tree).node_depth
			p[17] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
		}
@@ -127,10 +141,10 @@ init :: proc(ctx: ^$T) {
	ctx.size = ctx.cfg.size
	for i := 0; i < 8; i += 1 {
		when T == Blake2s_Context {
-			ctx.h[i] = BLAKE2S_IV[i] ~ util.U32_LE(p[i * 4:])
+			ctx.h[i] = BLAKE2S_IV[i] ~ endian.unchecked_get_u32le(p[i * 4:])
		}
		when T == Blake2b_Context {
-			ctx.h[i] = BLAKE2B_IV[i] ~ util.U64_LE(p[i * 8:])
+			ctx.h[i] = BLAKE2B_IV[i] ~ endian.unchecked_get_u64le(p[i * 8:])
		}
	}
	if ctx.cfg.tree != nil && ctx.cfg.tree.(Blake2_Tree).is_last_node {
@@ -142,13 +156,19 @@ init :: proc(ctx: ^$T) {
		ctx.is_keyed = true
	}
	copy(ctx.ih[:], ctx.h[:])
	copy(ctx.h[:], ctx.ih[:])
	if ctx.is_keyed {
		update(ctx, ctx.padded_key[:])
	}
+
+	ctx.nx = 0
+
+	ctx.is_initialized = true
}

-update :: proc "contextless" (ctx: ^$T, p: []byte) {
+update :: proc(ctx: ^$T, p: []byte) {
+	assert(ctx.is_initialized)
+
	p := p
	when T == Blake2s_Context {
		block_size :: BLAKE2S_BLOCK_SIZE
@@ -174,15 +194,25 @@ update :: proc(ctx: ^$T, p: []byte) {
	ctx.nx += copy(ctx.x[ctx.nx:], p)
}

-final :: proc "contextless" (ctx: ^$T, hash: []byte) {
+final :: proc(ctx: ^$T, hash: []byte) {
+	assert(ctx.is_initialized)
+
	when T == Blake2s_Context {
+		if len(hash) < int(ctx.cfg.size) {
+			panic("crypto/blake2s: invalid destination digest size")
+		}
		blake2s_final(ctx, hash)
-	}
-	when T == Blake2b_Context {
+	} else when T == Blake2b_Context {
+		if len(hash) < int(ctx.cfg.size) {
+			panic("crypto/blake2b: invalid destination digest size")
+		}
		blake2b_final(ctx, hash)
	}
+
+	ctx.is_initialized = false
}

@(private)
blake2s_final :: proc "contextless" (ctx: ^Blake2s_Context, hash: []byte) {
	if ctx.is_keyed {
		for i := 0; i < len(ctx.padded_key); i += 1 {
@@ -203,16 +233,14 @@ blake2s_final :: proc "contextless" (ctx: ^Blake2s_Context, hash: []byte) {

	blocks(ctx, ctx.x[:])

-	j := 0
-	for s, _ in ctx.h[:(ctx.size - 1) / 4 + 1] {
-		hash[j + 0] = byte(s >> 0)
-		hash[j + 1] = byte(s >> 8)
-		hash[j + 2] = byte(s >> 16)
-		hash[j + 3] = byte(s >> 24)
-		j += 4
-	}
+	dst: [BLAKE2S_SIZE]byte
+	for i := 0; i < BLAKE2S_SIZE / 4; i += 1 {
+		endian.unchecked_put_u32le(dst[i * 4:], ctx.h[i])
+	}
+	copy(hash, dst[:])
}

@(private)
blake2b_final :: proc "contextless" (ctx: ^Blake2b_Context, hash: []byte) {
	if ctx.is_keyed {
		for i := 0; i < len(ctx.padded_key); i += 1 {
@@ -229,56 +257,52 @@ blake2b_final :: proc "contextless" (ctx: ^Blake2b_Context, hash: []byte) {
		ctx.f[0] = 0xffffffffffffffff
		if ctx.is_last_node {
			ctx.f[1] = 0xffffffffffffffff
		}
	}

	blocks(ctx, ctx.x[:])

-	j := 0
-	for s, _ in ctx.h[:(ctx.size - 1) / 8 + 1] {
-		hash[j + 0] = byte(s >> 0)
-		hash[j + 1] = byte(s >> 8)
-		hash[j + 2] = byte(s >> 16)
-		hash[j + 3] = byte(s >> 24)
-		hash[j + 4] = byte(s >> 32)
-		hash[j + 5] = byte(s >> 40)
-		hash[j + 6] = byte(s >> 48)
-		hash[j + 7] = byte(s >> 56)
-		j += 8
-	}
+	dst: [BLAKE2B_SIZE]byte
+	for i := 0; i < BLAKE2B_SIZE / 8; i += 1 {
+		endian.unchecked_put_u64le(dst[i * 8:], ctx.h[i])
+	}
+	copy(hash, dst[:])
}

@(private)
blocks :: proc "contextless" (ctx: ^$T, p: []byte) {
	when T == Blake2s_Context {
		blake2s_blocks(ctx, p)
-	}
-	when T == Blake2b_Context {
+	} else when T == Blake2b_Context {
		blake2b_blocks(ctx, p)
	}
}

@(private)
blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []byte) {
-	h0, h1, h2, h3, h4, h5, h6, h7 := ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
+	h0, h1, h2, h3, h4, h5, h6, h7 :=
+		ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
	p := p
	for len(p) >= BLAKE2S_BLOCK_SIZE {
		ctx.t[0] += BLAKE2S_BLOCK_SIZE
		if ctx.t[0] < BLAKE2S_BLOCK_SIZE {
			ctx.t[1] += 1
		}
		v0, v1, v2, v3, v4, v5, v6, v7 := h0, h1, h2, h3, h4, h5, h6, h7
-		v8 := BLAKE2S_IV[0]
-		v9 := BLAKE2S_IV[1]
+		v8  := BLAKE2S_IV[0]
+		v9  := BLAKE2S_IV[1]
		v10 := BLAKE2S_IV[2]
		v11 := BLAKE2S_IV[3]
		v12 := BLAKE2S_IV[4] ~ ctx.t[0]
		v13 := BLAKE2S_IV[5] ~ ctx.t[1]
		v14 := BLAKE2S_IV[6] ~ ctx.f[0]
		v15 := BLAKE2S_IV[7] ~ ctx.f[1]
-		m: [16]u32
-		j := 0
+
+		m: [16]u32 = ---
		for i := 0; i < 16; i += 1 {
-			m[i] = u32(p[j]) | u32(p[j + 1]) << 8 | u32(p[j + 2]) << 16 | u32(p[j + 3]) << 24
-			j += 4
+			m[i] = endian.unchecked_get_u32le(p[i * 4:])
		}

		// Round 1
		v0 += m[0]
		v0 += v4
		v12 ~= v0
@@ -391,6 +415,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 2
		v0 += m[14]
		v0 += v4
		v12 ~= v0

@@ -503,6 +529,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 3
		v0 += m[11]
		v0 += v4
		v12 ~= v0

@@ -615,6 +643,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 4
		v0 += m[7]
		v0 += v4
		v12 ~= v0

@@ -727,6 +757,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 5
		v0 += m[9]
		v0 += v4
		v12 ~= v0

@@ -839,6 +871,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 6
		v0 += m[2]
		v0 += v4
		v12 ~= v0

@@ -951,6 +985,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 7
		v0 += m[12]
		v0 += v4
		v12 ~= v0

@@ -1063,6 +1099,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 8
		v0 += m[13]
		v0 += v4
		v12 ~= v0

@@ -1175,6 +1213,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 9
		v0 += m[6]
		v0 += v4
		v12 ~= v0

@@ -1287,6 +1327,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		// Round 10
		v0 += m[10]
		v0 += v4
		v12 ~= v0

@@ -1399,6 +1441,7 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (32 - 7) | v5 >> 7
+
		h0 ~= v0 ~ v8
		h1 ~= v1 ~ v9
		h2 ~= v2 ~ v10

@@ -1407,19 +1450,23 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
		h5 ~= v5 ~ v13
		h6 ~= v6 ~ v14
		h7 ~= v7 ~ v15

		p = p[BLAKE2S_BLOCK_SIZE:]
	}
-	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = h0, h1, h2, h3, h4, h5, h6, h7
+	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] =
+		h0, h1, h2, h3, h4, h5, h6, h7
}

@(private)
blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []byte) {
-	h0, h1, h2, h3, h4, h5, h6, h7 := ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
+	h0, h1, h2, h3, h4, h5, h6, h7 :=
+		ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
	p := p
	for len(p) >= BLAKE2B_BLOCK_SIZE {
		ctx.t[0] += BLAKE2B_BLOCK_SIZE
		if ctx.t[0] < BLAKE2B_BLOCK_SIZE {
-			ctx.t[1]+=1
+			ctx.t[1] += 1
		}
		v0, v1, v2, v3, v4, v5, v6, v7 := h0, h1, h2, h3, h4, h5, h6, h7
		v8 := BLAKE2B_IV[0]
		v9 := BLAKE2B_IV[1]

@@ -1429,13 +1476,13 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v13 := BLAKE2B_IV[5] ~ ctx.t[1]
		v14 := BLAKE2B_IV[6] ~ ctx.f[0]
		v15 := BLAKE2B_IV[7] ~ ctx.f[1]

		m: [16]u64 = ---
-		j := 0
-		for i := 0; i < 16; i+=1 {
-			m[i] = u64(p[j]) | u64(p[j + 1]) << 8 | u64(p[j + 2]) << 16 | u64(p[j + 3]) << 24 |
-				u64(p[j + 4]) << 32 | u64(p[j + 5]) << 40 | u64(p[j + 6]) << 48 | u64(p[j + 7]) << 56
-			j += 8
+		for i := 0; i < 16; i += 1 {
+			m[i] = endian.unchecked_get_u64le(p[i * 8:])
		}

		// Round 1
		v0 += m[0]
		v0 += v4
		v12 ~= v0

@@ -1548,6 +1595,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 2
		v0 += m[14]
		v0 += v4
		v12 ~= v0

@@ -1660,6 +1709,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 3
		v0 += m[11]
		v0 += v4
		v12 ~= v0

@@ -1772,6 +1823,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 4
		v0 += m[7]
		v0 += v4
		v12 ~= v0

@@ -1884,6 +1937,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 5
		v0 += m[9]
		v0 += v4
		v12 ~= v0

@@ -1996,6 +2051,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 6
		v0 += m[2]
		v0 += v4
		v12 ~= v0

@@ -2108,6 +2165,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 7
		v0 += m[12]
		v0 += v4
		v12 ~= v0

@@ -2220,6 +2279,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 8
		v0 += m[13]
		v0 += v4
		v12 ~= v0

@@ -2332,6 +2393,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 9
		v0 += m[6]
		v0 += v4
		v12 ~= v0

@@ -2444,6 +2507,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 10
		v0 += m[10]
		v0 += v4
		v12 ~= v0

@@ -2556,6 +2621,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 11
		v0 += m[0]
		v0 += v4
		v12 ~= v0

@@ -2668,6 +2735,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		// Round 12
		v0 += m[14]
		v0 += v4
		v12 ~= v0

@@ -2780,6 +2849,7 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		v10 += v15
		v5 ~= v10
		v5 = v5 << (64 - 63) | v5 >> 63
+
		h0 ~= v0 ~ v8
		h1 ~= v1 ~ v9
		h2 ~= v2 ~ v10

@@ -2788,7 +2858,9 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
		h5 ~= v5 ~ v13
		h6 ~= v6 ~ v14
		h7 ~= v7 ~ v15

		p = p[BLAKE2B_BLOCK_SIZE:]
	}
-	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = h0, h1, h2, h3, h4, h5, h6, h7
+	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] =
+		h0, h1, h2, h3, h4, h5, h6, h7
}
@@ -1,6 +1,6 @@
package field_poly1305

-import "core:crypto/util"
+import "core:encoding/endian"
import "core:mem"

fe_relax_cast :: #force_inline proc "contextless" (arg1: ^Tight_Field_Element) -> ^Loose_Field_Element {

@@ -11,7 +11,7 @@ fe_tighten_cast :: #force_inline proc "contextless" (arg1: ^Loose_Field_Element)
	return transmute(^Tight_Field_Element)(arg1)
}

-fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, arg2: byte, sanitize: bool = true) {
+fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, arg2: byte) {
	// fiat-crypto's deserialization routine effectively processes a
	// single byte at a time, and wants 256-bits of input for a value
	// that will be 128-bits or 129-bits.

@@ -22,42 +22,29 @@ fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, a

	assert(len(arg1) == 16)

-	when ODIN_ARCH == .i386 || ODIN_ARCH == .amd64 {
-		// While it may be unwise to do deserialization here on our
-		// own when fiat-crypto provides equivalent functionality,
-		// doing it this way provides a little under 3x performance
-		// improvement when optimization is enabled.
-		src_p := transmute(^[2]u64)(&arg1[0])
-		lo := src_p[0]
-		hi := src_p[1]
-
-		// This is inspired by poly1305-donna, though adjustments were
-		// made since a Tight_Field_Element's limbs are 44-bits, 43-bits,
-		// and 43-bits wide.
-		//
-		// Note: This could be transplanted into fe_from_u64s, but that
-		// code is called once per MAC, and is non-critical path.
-		hibit := u64(arg2) << 41 // arg2 << 128
-		out1[0] = lo & 0xfffffffffff
-		out1[1] = ((lo >> 44) | (hi << 20)) & 0x7ffffffffff
-		out1[2] = ((hi >> 23) & 0x7ffffffffff) | hibit
-	} else {
-		tmp: [32]byte
-		copy_slice(tmp[0:16], arg1[:])
-		tmp[16] = arg2
-
-		_fe_from_bytes(out1, &tmp)
-		if sanitize {
-			// This is used to deserialize `s` which is confidential.
-			mem.zero_explicit(&tmp, size_of(tmp))
-		}
-	}
+	// While it may be unwise to do deserialization here on our
+	// own when fiat-crypto provides equivalent functionality,
+	// doing it this way provides a little under 3x performance
+	// improvement when optimization is enabled.
+	lo := endian.unchecked_get_u64le(arg1[0:])
+	hi := endian.unchecked_get_u64le(arg1[8:])
+
+	// This is inspired by poly1305-donna, though adjustments were
+	// made since a Tight_Field_Element's limbs are 44-bits, 43-bits,
+	// and 43-bits wide.
+	//
+	// Note: This could be transplanted into fe_from_u64s, but that
+	// code is called once per MAC, and is non-critical path.
+	hibit := u64(arg2) << 41 // arg2 << 128
+	out1[0] = lo & 0xfffffffffff
+	out1[1] = ((lo >> 44) | (hi << 20)) & 0x7ffffffffff
+	out1[2] = ((hi >> 23) & 0x7ffffffffff) | hibit
}

fe_from_u64s :: proc "contextless" (out1: ^Tight_Field_Element, lo, hi: u64) {
	tmp: [32]byte
-	util.PUT_U64_LE(tmp[0:8], lo)
-	util.PUT_U64_LE(tmp[8:16], hi)
+	endian.unchecked_put_u64le(tmp[0:], lo)
+	endian.unchecked_put_u64le(tmp[8:], hi)

	_fe_from_bytes(out1, &tmp)
@@ -11,159 +11,173 @@ package _sha3
	To use the original Keccak padding, set the is_keccak bool to true, otherwise it will use SHA3 padding.
*/

-import "../util"
+import "core:math/bits"

ROUNDS :: 24

Sha3_Context :: struct {
	st: struct #raw_union {
		b: [200]u8,
		q: [25]u64,
	},
	pt:        int,
	rsiz:      int,
	mdlen:     int,
	is_keccak: bool,
+
+	is_initialized: bool,
+	is_finalized:   bool, // For SHAKE (unlimited squeeze is allowed)
}

keccakf :: proc "contextless" (st: ^[25]u64) {
	keccakf_rndc := [?]u64 {
		0x0000000000000001, 0x0000000000008082, 0x800000000000808a,
		0x8000000080008000, 0x000000000000808b, 0x0000000080000001,
		0x8000000080008081, 0x8000000000008009, 0x000000000000008a,
		0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
		0x000000008000808b, 0x800000000000008b, 0x8000000000008089,
		0x8000000000008003, 0x8000000000008002, 0x8000000000000080,
		0x000000000000800a, 0x800000008000000a, 0x8000000080008081,
		0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
	}

-	keccakf_rotc := [?]i32 {
+	keccakf_rotc := [?]int {
		1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
		27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
	}

	keccakf_piln := [?]i32 {
		10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
		15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
	}

	i, j, r: i32 = ---, ---, ---
	t: u64 = ---
	bc: [5]u64 = ---

	when ODIN_ENDIAN != .Little {
-		v: uintptr = ---
		for i = 0; i < 25; i += 1 {
-			v := uintptr(&st[i])
-			st[i] = u64((^u8)(v + 0)^ << 0) | u64((^u8)(v + 1)^ << 8) |
-				u64((^u8)(v + 2)^ << 16) | u64((^u8)(v + 3)^ << 24) |
-				u64((^u8)(v + 4)^ << 32) | u64((^u8)(v + 5)^ << 40) |
-				u64((^u8)(v + 6)^ << 48) | u64((^u8)(v + 7)^ << 56)
+			st[i] = bits.byte_swap(st[i])
		}
	}

	for r = 0; r < ROUNDS; r += 1 {
		// theta
		for i = 0; i < 5; i += 1 {
			bc[i] = st[i] ~ st[i + 5] ~ st[i + 10] ~ st[i + 15] ~ st[i + 20]
		}

		for i = 0; i < 5; i += 1 {
-			t = bc[(i + 4) % 5] ~ util.ROTL64(bc[(i + 1) % 5], 1)
+			t = bc[(i + 4) % 5] ~ bits.rotate_left64(bc[(i + 1) % 5], 1)
			for j = 0; j < 25; j += 5 {
				st[j + i] ~= t
			}
		}

		// rho pi
		t = st[1]
		for i = 0; i < 24; i += 1 {
			j = keccakf_piln[i]
			bc[0] = st[j]
-			st[j] = util.ROTL64(t, u64(keccakf_rotc[i]))
+			st[j] = bits.rotate_left64(t, keccakf_rotc[i])
			t = bc[0]
		}

		// chi
		for j = 0; j < 25; j += 5 {
			for i = 0; i < 5; i += 1 {
				bc[i] = st[j + i]
			}
			for i = 0; i < 5; i += 1 {
				st[j + i] ~= ~bc[(i + 1) % 5] & bc[(i + 2) % 5]
			}
		}

		st[0] ~= keccakf_rndc[r]
	}

	when ODIN_ENDIAN != .Little {
		for i = 0; i < 25; i += 1 {
-			v = uintptr(&st[i])
-			t = st[i]
-			(^u8)(v + 0)^ = (t >> 0) & 0xff
-			(^u8)(v + 1)^ = (t >> 8) & 0xff
-			(^u8)(v + 2)^ = (t >> 16) & 0xff
-			(^u8)(v + 3)^ = (t >> 24) & 0xff
-			(^u8)(v + 4)^ = (t >> 32) & 0xff
-			(^u8)(v + 5)^ = (t >> 40) & 0xff
-			(^u8)(v + 6)^ = (t >> 48) & 0xff
-			(^u8)(v + 7)^ = (t >> 56) & 0xff
+			st[i] = bits.byte_swap(st[i])
		}
	}
}

-init :: proc "contextless" (c: ^Sha3_Context) {
+init :: proc(c: ^Sha3_Context) {
	for i := 0; i < 25; i += 1 {
		c.st.q[i] = 0
	}
	c.rsiz = 200 - 2 * c.mdlen
	c.pt = 0
+
+	c.is_initialized = true
+	c.is_finalized = false
}

-update :: proc "contextless" (c: ^Sha3_Context, data: []byte) {
+update :: proc(c: ^Sha3_Context, data: []byte) {
+	assert(c.is_initialized)
+	assert(!c.is_finalized)
+
	j := c.pt
	for i := 0; i < len(data); i += 1 {
		c.st.b[j] ~= data[i]
		j += 1
		if j >= c.rsiz {
			keccakf(&c.st.q)
			j = 0
		}
	}
	c.pt = j
}

-final :: proc "contextless" (c: ^Sha3_Context, hash: []byte) {
+final :: proc(c: ^Sha3_Context, hash: []byte) {
+	assert(c.is_initialized)
+
+	if len(hash) < c.mdlen {
+		if c.is_keccak {
+			panic("crypto/keccak: invalid destination digest size")
+		}
+		panic("crypto/sha3: invalid destination digest size")
+	}
	if c.is_keccak {
		c.st.b[c.pt] ~= 0x01
	} else {
		c.st.b[c.pt] ~= 0x06
	}

	c.st.b[c.rsiz - 1] ~= 0x80
	keccakf(&c.st.q)
	for i := 0; i < c.mdlen; i += 1 {
		hash[i] = c.st.b[i]
	}
+
+	c.is_initialized = false // No more absorb, no more squeeze.
}

-shake_xof :: proc "contextless" (c: ^Sha3_Context) {
+shake_xof :: proc(c: ^Sha3_Context) {
+	assert(c.is_initialized)
+	assert(!c.is_finalized)
+
	c.st.b[c.pt] ~= 0x1F
	c.st.b[c.rsiz - 1] ~= 0x80
	keccakf(&c.st.q)
	c.pt = 0
+
+	c.is_finalized = true // No more absorb, unlimited squeeze.
}

-shake_out :: proc "contextless" (c: ^Sha3_Context, hash: []byte) {
+shake_out :: proc(c: ^Sha3_Context, hash: []byte) {
+	assert(c.is_initialized)
+	assert(c.is_finalized)
+
	j := c.pt
	for i := 0; i < len(hash); i += 1 {
		if j >= c.rsiz {
			keccakf(&c.st.q)
			j = 0
		}
		hash[i] = c.st.b[j]
		j += 1
	}
	c.pt = j
}
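The new flags enforce the sponge state machine: absorb with `update`, finalize with `shake_xof`, then squeeze with `shake_out` as often as needed. An illustrative sketch (the import path for this internal package is an assumption; in-tree it is consumed by wrapper packages rather than directly):

```odin
package shake_example

import "core:fmt"
import _sha3 "core:crypto/_sha3" // hypothetical direct import of the internal package

main :: proc() {
	ctx: _sha3.Sha3_Context
	ctx.mdlen = 16 // SHAKE-128: rsiz becomes 200 - 2*16 = 168

	_sha3.init(&ctx)

	msg := "Hellope"
	_sha3.update(&ctx, transmute([]byte)msg)

	// Finalize absorption; a further update() would now assert.
	_sha3.shake_xof(&ctx)

	// Unlimited squeeze: shake_out may be called repeatedly.
	out1, out2: [16]byte
	_sha3.shake_out(&ctx, out1[:])
	_sha3.shake_out(&ctx, out2[:])
	fmt.println(out1, out2)
}
```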
Deleted file (entire contents removed; the capture below is truncated mid-table, as in the source):

@@ -1,410 +0,0 @@
package _tiger

/*
	Copyright 2021 zhibog
	Made available under the BSD-3 license.

	List of contributors:
		zhibog, dotbmp: Initial implementation.

	Implementation of the Tiger hashing algorithm, as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/>
*/

import "../util"

T1 := [?]u64 {
	0x02aab17cf7e90c5e, 0xac424b03e243a8ec, 0x72cd5be30dd5fcd3, 0x6d019b93f6f97f3a,
	0xcd9978ffd21f9193, 0x7573a1c9708029e2, 0xb164326b922a83c3, 0x46883eee04915870,
	0xeaace3057103ece6, 0xc54169b808a3535c, 0x4ce754918ddec47c, 0x0aa2f4dfdc0df40c,
	0x10b76f18a74dbefa, 0xc6ccb6235ad1ab6a, 0x13726121572fe2ff, 0x1a488c6f199d921e,
	0x4bc9f9f4da0007ca, 0x26f5e6f6e85241c7, 0x859079dbea5947b6, 0x4f1885c5c99e8c92,
	0xd78e761ea96f864b, 0x8e36428c52b5c17d, 0x69cf6827373063c1, 0xb607c93d9bb4c56e,
	0x7d820e760e76b5ea, 0x645c9cc6f07fdc42, 0xbf38a078243342e0, 0x5f6b343c9d2e7d04,
	0xf2c28aeb600b0ec6, 0x6c0ed85f7254bcac, 0x71592281a4db4fe5, 0x1967fa69ce0fed9f,
	0xfd5293f8b96545db, 0xc879e9d7f2a7600b, 0x860248920193194e, 0xa4f9533b2d9cc0b3,
	0x9053836c15957613, 0xdb6dcf8afc357bf1, 0x18beea7a7a370f57, 0x037117ca50b99066,
	0x6ab30a9774424a35, 0xf4e92f02e325249b, 0x7739db07061ccae1, 0xd8f3b49ceca42a05,
	0xbd56be3f51382f73, 0x45faed5843b0bb28, 0x1c813d5c11bf1f83, 0x8af0e4b6d75fa169,
	0x33ee18a487ad9999, 0x3c26e8eab1c94410, 0xb510102bc0a822f9, 0x141eef310ce6123b,
	0xfc65b90059ddb154, 0xe0158640c5e0e607, 0x884e079826c3a3cf, 0x930d0d9523c535fd,
	0x35638d754e9a2b00, 0x4085fccf40469dd5, 0xc4b17ad28be23a4c, 0xcab2f0fc6a3e6a2e,
	0x2860971a6b943fcd, 0x3dde6ee212e30446, 0x6222f32ae01765ae, 0x5d550bb5478308fe,
	0xa9efa98da0eda22a, 0xc351a71686c40da7, 0x1105586d9c867c84, 0xdcffee85fda22853,
	0xccfbd0262c5eef76, 0xbaf294cb8990d201, 0xe69464f52afad975, 0x94b013afdf133e14,
	0x06a7d1a32823c958, 0x6f95fe5130f61119, 0xd92ab34e462c06c0, 0xed7bde33887c71d2,
	0x79746d6e6518393e, 0x5ba419385d713329, 0x7c1ba6b948a97564, 0x31987c197bfdac67,
	0xde6c23c44b053d02, 0x581c49fed002d64d, 0xdd474d6338261571, 0xaa4546c3e473d062,
	0x928fce349455f860, 0x48161bbacaab94d9, 0x63912430770e6f68, 0x6ec8a5e602c6641c,
	0x87282515337ddd2b, 0x2cda6b42034b701b, 0xb03d37c181cb096d, 0xe108438266c71c6f,
	0x2b3180c7eb51b255, 0xdf92b82f96c08bbc, 0x5c68c8c0a632f3ba, 0x5504cc861c3d0556,
	0xabbfa4e55fb26b8f, 0x41848b0ab3baceb4, 0xb334a273aa445d32, 0xbca696f0a85ad881,
	0x24f6ec65b528d56c, 0x0ce1512e90f4524a, 0x4e9dd79d5506d35a, 0x258905fac6ce9779,
	0x2019295b3e109b33, 0xf8a9478b73a054cc, 0x2924f2f934417eb0, 0x3993357d536d1bc4,
	0x38a81ac21db6ff8b, 0x47c4fbf17d6016bf, 0x1e0faadd7667e3f5, 0x7abcff62938beb96,
	0xa78dad948fc179c9, 0x8f1f98b72911e50d, 0x61e48eae27121a91, 0x4d62f7ad31859808,
	0xeceba345ef5ceaeb, 0xf5ceb25ebc9684ce, 0xf633e20cb7f76221, 0xa32cdf06ab8293e4,
	0x985a202ca5ee2ca4, 0xcf0b8447cc8a8fb1, 0x9f765244979859a3, 0xa8d516b1a1240017,
	0x0bd7ba3ebb5dc726, 0xe54bca55b86adb39, 0x1d7a3afd6c478063, 0x519ec608e7669edd,
	0x0e5715a2d149aa23, 0x177d4571848ff194, 0xeeb55f3241014c22, 0x0f5e5ca13a6e2ec2,
	0x8029927b75f5c361, 0xad139fabc3d6e436, 0x0d5df1a94ccf402f, 0x3e8bd948bea5dfc8,
	0xa5a0d357bd3ff77e, 0xa2d12e251f74f645, 0x66fd9e525e81a082, 0x2e0c90ce7f687a49,
	0xc2e8bcbeba973bc5, 0x000001bce509745f, 0x423777bbe6dab3d6, 0xd1661c7eaef06eb5,
	0xa1781f354daacfd8, 0x2d11284a2b16affc, 0xf1fc4f67fa891d1f, 0x73ecc25dcb920ada,
	0xae610c22c2a12651, 0x96e0a810d356b78a, 0x5a9a381f2fe7870f, 0xd5ad62ede94e5530,
	0xd225e5e8368d1427, 0x65977b70c7af4631, 0x99f889b2de39d74f, 0x233f30bf54e1d143,
	0x9a9675d3d9a63c97, 0x5470554ff334f9a8, 0x166acb744a4f5688, 0x70c74caab2e4aead,
	0xf0d091646f294d12, 0x57b82a89684031d1, 0xefd95a5a61be0b6b, 0x2fbd12e969f2f29a,
	0x9bd37013feff9fe8, 0x3f9b0404d6085a06, 0x4940c1f3166cfe15, 0x09542c4dcdf3defb,
	0xb4c5218385cd5ce3, 0xc935b7dc4462a641, 0x3417f8a68ed3b63f, 0xb80959295b215b40,
	0xf99cdaef3b8c8572, 0x018c0614f8fcb95d, 0x1b14accd1a3acdf3, 0x84d471f200bb732d,
	0xc1a3110e95e8da16, 0x430a7220bf1a82b8, 0xb77e090d39df210e, 0x5ef4bd9f3cd05e9d,
	0x9d4ff6da7e57a444, 0xda1d60e183d4a5f8, 0xb287c38417998e47, 0xfe3edc121bb31886,
	0xc7fe3ccc980ccbef, 0xe46fb590189bfd03, 0x3732fd469a4c57dc, 0x7ef700a07cf1ad65,
	0x59c64468a31d8859, 0x762fb0b4d45b61f6, 0x155baed099047718, 0x68755e4c3d50baa6,
	0xe9214e7f22d8b4df, 0x2addbf532eac95f4, 0x32ae3909b4bd0109, 0x834df537b08e3450,
	0xfa209da84220728d, 0x9e691d9b9efe23f7, 0x0446d288c4ae8d7f, 0x7b4cc524e169785b,
	0x21d87f0135ca1385, 0xcebb400f137b8aa5, 0x272e2b66580796be, 0x3612264125c2b0de,
	0x057702bdad1efbb2, 0xd4babb8eacf84be9, 0x91583139641bc67b, 0x8bdc2de08036e024,
	0x603c8156f49f68ed, 0xf7d236f7dbef5111, 0x9727c4598ad21e80, 0xa08a0896670a5fd7,
	0xcb4a8f4309eba9cb, 0x81af564b0f7036a1, 0xc0b99aa778199abd, 0x959f1ec83fc8e952,
	0x8c505077794a81b9, 0x3acaaf8f056338f0, 0x07b43f50627a6778, 0x4a44ab49f5eccc77,
	0x3bc3d6e4b679ee98, 0x9cc0d4d1cf14108c, 0x4406c00b206bc8a0, 0x82a18854c8d72d89,
	0x67e366b35c3c432c, 0xb923dd61102b37f2, 0x56ab2779d884271d, 0xbe83e1b0ff1525af,
	0xfb7c65d4217e49a9, 0x6bdbe0e76d48e7d4, 0x08df828745d9179e, 0x22ea6a9add53bd34,
	0xe36e141c5622200a, 0x7f805d1b8cb750ee, 0xafe5c7a59f58e837, 0xe27f996a4fb1c23c,
	0xd3867dfb0775f0d0, 0xd0e673de6e88891a, 0x123aeb9eafb86c25, 0x30f1d5d5c145b895,
	0xbb434a2dee7269e7, 0x78cb67ecf931fa38, 0xf33b0372323bbf9c, 0x52d66336fb279c74,
	0x505f33ac0afb4eaa, 0xe8a5cd99a2cce187, 0x534974801e2d30bb, 0x8d2d5711d5876d90,
	0x1f1a412891bc038e, 0xd6e2e71d82e56648, 0x74036c3a497732b7, 0x89b67ed96361f5ab,
	0xffed95d8f1ea02a2, 0xe72b3bd61464d43d, 0xa6300f170bdc4820, 0xebc18760ed78a77a,
}

T2 := [?]u64 {
	0xe6a6be5a05a12138, 0xb5a122a5b4f87c98, 0x563c6089140b6990, 0x4c46cb2e391f5dd5,
	0xd932addbc9b79434, 0x08ea70e42015aff5, 0xd765a6673e478cf1, 0xc4fb757eab278d99,
	0xdf11c6862d6e0692, 0xddeb84f10d7f3b16, 0x6f2ef604a665ea04, 0x4a8e0f0ff0e0dfb3,
	0xa5edeef83dbcba51, 0xfc4f0a2a0ea4371e, 0xe83e1da85cb38429, 0xdc8ff882ba1b1ce2,
	0xcd45505e8353e80d, 0x18d19a00d4db0717, 0x34a0cfeda5f38101, 0x0be77e518887caf2,
	0x1e341438b3c45136, 0xe05797f49089ccf9, 0xffd23f9df2591d14, 0x543dda228595c5cd,
	0x661f81fd99052a33, 0x8736e641db0f7b76, 0x15227725418e5307, 0xe25f7f46162eb2fa,
	0x48a8b2126c13d9fe, 0xafdc541792e76eea, 0x03d912bfc6d1898f, 0x31b1aafa1b83f51b,
	0xf1ac2796e42ab7d9, 0x40a3a7d7fcd2ebac, 0x1056136d0afbbcc5, 0x7889e1dd9a6d0c85,
	0xd33525782a7974aa, 0xa7e25d09078ac09b, 0xbd4138b3eac6edd0, 0x920abfbe71eb9e70,
	0xa2a5d0f54fc2625c, 0xc054e36b0b1290a3, 0xf6dd59ff62fe932b, 0x3537354511a8ac7d,
	0xca845e9172fadcd4, 0x84f82b60329d20dc, 0x79c62ce1cd672f18, 0x8b09a2add124642c,
	0xd0c1e96a19d9e726, 0x5a786a9b4ba9500c, 0x0e020336634c43f3, 0xc17b474aeb66d822,
	0x6a731ae3ec9baac2, 0x8226667ae0840258, 0x67d4567691caeca5, 0x1d94155c4875adb5,
	0x6d00fd985b813fdf, 0x51286efcb774cd06, 0x5e8834471fa744af, 0xf72ca0aee761ae2e,
	0xbe40e4cdaee8e09a, 0xe9970bbb5118f665, 0x726e4beb33df1964, 0x703b000729199762,
	0x4631d816f5ef30a7, 0xb880b5b51504a6be, 0x641793c37ed84b6c, 0x7b21ed77f6e97d96,
	0x776306312ef96b73, 0xae528948e86ff3f4, 0x53dbd7f286a3f8f8, 0x16cadce74cfc1063,
	0x005c19bdfa52c6dd, 0x68868f5d64d46ad3, 0x3a9d512ccf1e186a, 0x367e62c2385660ae,
	0xe359e7ea77dcb1d7, 0x526c0773749abe6e, 0x735ae5f9d09f734b, 0x493fc7cc8a558ba8,
	0xb0b9c1533041ab45, 0x321958ba470a59bd, 0x852db00b5f46c393, 0x91209b2bd336b0e5,
	0x6e604f7d659ef19f, 0xb99a8ae2782ccb24, 0xccf52ab6c814c4c7, 0x4727d9afbe11727b,
	0x7e950d0c0121b34d, 0x756f435670ad471f, 0xf5add442615a6849, 0x4e87e09980b9957a,
	0x2acfa1df50aee355, 0xd898263afd2fd556, 0xc8f4924dd80c8fd6, 0xcf99ca3d754a173a,
	0xfe477bacaf91bf3c, 0xed5371f6d690c12d, 0x831a5c285e687094, 0xc5d3c90a3708a0a4,
	0x0f7f903717d06580, 0x19f9bb13b8fdf27f, 0xb1bd6f1b4d502843, 0x1c761ba38fff4012,
0x0f7f903717d06580, 0x19f9bb13b8fdf27f, 0xb1bd6f1b4d502843, 0x1c761ba38fff4012,
|
||||
0x0d1530c4e2e21f3b, 0x8943ce69a7372c8a, 0xe5184e11feb5ce66, 0x618bdb80bd736621,
|
||||
0x7d29bad68b574d0b, 0x81bb613e25e6fe5b, 0x071c9c10bc07913f, 0xc7beeb7909ac2d97,
|
||||
0xc3e58d353bc5d757, 0xeb017892f38f61e8, 0xd4effb9c9b1cc21a, 0x99727d26f494f7ab,
|
||||
0xa3e063a2956b3e03, 0x9d4a8b9a4aa09c30, 0x3f6ab7d500090fb4, 0x9cc0f2a057268ac0,
|
||||
0x3dee9d2dedbf42d1, 0x330f49c87960a972, 0xc6b2720287421b41, 0x0ac59ec07c00369c,
|
||||
0xef4eac49cb353425, 0xf450244eef0129d8, 0x8acc46e5caf4deb6, 0x2ffeab63989263f7,
|
||||
0x8f7cb9fe5d7a4578, 0x5bd8f7644e634635, 0x427a7315bf2dc900, 0x17d0c4aa2125261c,
|
||||
0x3992486c93518e50, 0xb4cbfee0a2d7d4c3, 0x7c75d6202c5ddd8d, 0xdbc295d8e35b6c61,
|
||||
0x60b369d302032b19, 0xce42685fdce44132, 0x06f3ddb9ddf65610, 0x8ea4d21db5e148f0,
|
||||
0x20b0fce62fcd496f, 0x2c1b912358b0ee31, 0xb28317b818f5a308, 0xa89c1e189ca6d2cf,
|
||||
0x0c6b18576aaadbc8, 0xb65deaa91299fae3, 0xfb2b794b7f1027e7, 0x04e4317f443b5beb,
|
||||
0x4b852d325939d0a6, 0xd5ae6beefb207ffc, 0x309682b281c7d374, 0xbae309a194c3b475,
|
||||
0x8cc3f97b13b49f05, 0x98a9422ff8293967, 0x244b16b01076ff7c, 0xf8bf571c663d67ee,
|
||||
0x1f0d6758eee30da1, 0xc9b611d97adeb9b7, 0xb7afd5887b6c57a2, 0x6290ae846b984fe1,
|
||||
0x94df4cdeacc1a5fd, 0x058a5bd1c5483aff, 0x63166cc142ba3c37, 0x8db8526eb2f76f40,
|
||||
0xe10880036f0d6d4e, 0x9e0523c9971d311d, 0x45ec2824cc7cd691, 0x575b8359e62382c9,
|
||||
0xfa9e400dc4889995, 0xd1823ecb45721568, 0xdafd983b8206082f, 0xaa7d29082386a8cb,
|
||||
0x269fcd4403b87588, 0x1b91f5f728bdd1e0, 0xe4669f39040201f6, 0x7a1d7c218cf04ade,
|
||||
0x65623c29d79ce5ce, 0x2368449096c00bb1, 0xab9bf1879da503ba, 0xbc23ecb1a458058e,
|
||||
0x9a58df01bb401ecc, 0xa070e868a85f143d, 0x4ff188307df2239e, 0x14d565b41a641183,
|
||||
0xee13337452701602, 0x950e3dcf3f285e09, 0x59930254b9c80953, 0x3bf299408930da6d,
|
||||
0xa955943f53691387, 0xa15edecaa9cb8784, 0x29142127352be9a0, 0x76f0371fff4e7afb,
|
||||
0x0239f450274f2228, 0xbb073af01d5e868b, 0xbfc80571c10e96c1, 0xd267088568222e23,
|
||||
0x9671a3d48e80b5b0, 0x55b5d38ae193bb81, 0x693ae2d0a18b04b8, 0x5c48b4ecadd5335f,
|
||||
0xfd743b194916a1ca, 0x2577018134be98c4, 0xe77987e83c54a4ad, 0x28e11014da33e1b9,
|
||||
0x270cc59e226aa213, 0x71495f756d1a5f60, 0x9be853fb60afef77, 0xadc786a7f7443dbf,
|
||||
0x0904456173b29a82, 0x58bc7a66c232bd5e, 0xf306558c673ac8b2, 0x41f639c6b6c9772a,
|
||||
0x216defe99fda35da, 0x11640cc71c7be615, 0x93c43694565c5527, 0xea038e6246777839,
|
||||
0xf9abf3ce5a3e2469, 0x741e768d0fd312d2, 0x0144b883ced652c6, 0xc20b5a5ba33f8552,
|
||||
0x1ae69633c3435a9d, 0x97a28ca4088cfdec, 0x8824a43c1e96f420, 0x37612fa66eeea746,
|
||||
0x6b4cb165f9cf0e5a, 0x43aa1c06a0abfb4a, 0x7f4dc26ff162796b, 0x6cbacc8e54ed9b0f,
|
||||
0xa6b7ffefd2bb253e, 0x2e25bc95b0a29d4f, 0x86d6a58bdef1388c, 0xded74ac576b6f054,
|
||||
0x8030bdbc2b45805d, 0x3c81af70e94d9289, 0x3eff6dda9e3100db, 0xb38dc39fdfcc8847,
|
||||
0x123885528d17b87e, 0xf2da0ed240b1b642, 0x44cefadcd54bf9a9, 0x1312200e433c7ee6,
|
||||
0x9ffcc84f3a78c748, 0xf0cd1f72248576bb, 0xec6974053638cfe4, 0x2ba7b67c0cec4e4c,
|
||||
0xac2f4df3e5ce32ed, 0xcb33d14326ea4c11, 0xa4e9044cc77e58bc, 0x5f513293d934fcef,
|
||||
0x5dc9645506e55444, 0x50de418f317de40a, 0x388cb31a69dde259, 0x2db4a83455820a86,
|
||||
0x9010a91e84711ae9, 0x4df7f0b7b1498371, 0xd62a2eabc0977179, 0x22fac097aa8d5c0e,
|
||||
}
|
||||
|
||||
T3 := [?]u64 {
|
||||
0xf49fcc2ff1daf39b, 0x487fd5c66ff29281, 0xe8a30667fcdca83f, 0x2c9b4be3d2fcce63,
|
||||
0xda3ff74b93fbbbc2, 0x2fa165d2fe70ba66, 0xa103e279970e93d4, 0xbecdec77b0e45e71,
|
||||
0xcfb41e723985e497, 0xb70aaa025ef75017, 0xd42309f03840b8e0, 0x8efc1ad035898579,
|
||||
0x96c6920be2b2abc5, 0x66af4163375a9172, 0x2174abdcca7127fb, 0xb33ccea64a72ff41,
|
||||
0xf04a4933083066a5, 0x8d970acdd7289af5, 0x8f96e8e031c8c25e, 0xf3fec02276875d47,
|
||||
0xec7bf310056190dd, 0xf5adb0aebb0f1491, 0x9b50f8850fd58892, 0x4975488358b74de8,
|
||||
0xa3354ff691531c61, 0x0702bbe481d2c6ee, 0x89fb24057deded98, 0xac3075138596e902,
|
||||
0x1d2d3580172772ed, 0xeb738fc28e6bc30d, 0x5854ef8f63044326, 0x9e5c52325add3bbe,
|
||||
0x90aa53cf325c4623, 0xc1d24d51349dd067, 0x2051cfeea69ea624, 0x13220f0a862e7e4f,
|
||||
0xce39399404e04864, 0xd9c42ca47086fcb7, 0x685ad2238a03e7cc, 0x066484b2ab2ff1db,
|
||||
0xfe9d5d70efbf79ec, 0x5b13b9dd9c481854, 0x15f0d475ed1509ad, 0x0bebcd060ec79851,
|
||||
0xd58c6791183ab7f8, 0xd1187c5052f3eee4, 0xc95d1192e54e82ff, 0x86eea14cb9ac6ca2,
|
||||
0x3485beb153677d5d, 0xdd191d781f8c492a, 0xf60866baa784ebf9, 0x518f643ba2d08c74,
|
||||
0x8852e956e1087c22, 0xa768cb8dc410ae8d, 0x38047726bfec8e1a, 0xa67738b4cd3b45aa,
|
||||
0xad16691cec0dde19, 0xc6d4319380462e07, 0xc5a5876d0ba61938, 0x16b9fa1fa58fd840,
|
||||
0x188ab1173ca74f18, 0xabda2f98c99c021f, 0x3e0580ab134ae816, 0x5f3b05b773645abb,
|
||||
0x2501a2be5575f2f6, 0x1b2f74004e7e8ba9, 0x1cd7580371e8d953, 0x7f6ed89562764e30,
|
||||
0xb15926ff596f003d, 0x9f65293da8c5d6b9, 0x6ecef04dd690f84c, 0x4782275fff33af88,
|
||||
0xe41433083f820801, 0xfd0dfe409a1af9b5, 0x4325a3342cdb396b, 0x8ae77e62b301b252,
|
||||
0xc36f9e9f6655615a, 0x85455a2d92d32c09, 0xf2c7dea949477485, 0x63cfb4c133a39eba,
|
||||
0x83b040cc6ebc5462, 0x3b9454c8fdb326b0, 0x56f56a9e87ffd78c, 0x2dc2940d99f42bc6,
|
||||
0x98f7df096b096e2d, 0x19a6e01e3ad852bf, 0x42a99ccbdbd4b40b, 0xa59998af45e9c559,
|
||||
0x366295e807d93186, 0x6b48181bfaa1f773, 0x1fec57e2157a0a1d, 0x4667446af6201ad5,
|
||||
0xe615ebcacfb0f075, 0xb8f31f4f68290778, 0x22713ed6ce22d11e, 0x3057c1a72ec3c93b,
|
||||
0xcb46acc37c3f1f2f, 0xdbb893fd02aaf50e, 0x331fd92e600b9fcf, 0xa498f96148ea3ad6,
|
||||
0xa8d8426e8b6a83ea, 0xa089b274b7735cdc, 0x87f6b3731e524a11, 0x118808e5cbc96749,
|
||||
0x9906e4c7b19bd394, 0xafed7f7e9b24a20c, 0x6509eadeeb3644a7, 0x6c1ef1d3e8ef0ede,
|
||||
0xb9c97d43e9798fb4, 0xa2f2d784740c28a3, 0x7b8496476197566f, 0x7a5be3e6b65f069d,
|
||||
0xf96330ed78be6f10, 0xeee60de77a076a15, 0x2b4bee4aa08b9bd0, 0x6a56a63ec7b8894e,
|
||||
0x02121359ba34fef4, 0x4cbf99f8283703fc, 0x398071350caf30c8, 0xd0a77a89f017687a,
|
||||
0xf1c1a9eb9e423569, 0x8c7976282dee8199, 0x5d1737a5dd1f7abd, 0x4f53433c09a9fa80,
|
||||
0xfa8b0c53df7ca1d9, 0x3fd9dcbc886ccb77, 0xc040917ca91b4720, 0x7dd00142f9d1dcdf,
|
||||
0x8476fc1d4f387b58, 0x23f8e7c5f3316503, 0x032a2244e7e37339, 0x5c87a5d750f5a74b,
|
||||
0x082b4cc43698992e, 0xdf917becb858f63c, 0x3270b8fc5bf86dda, 0x10ae72bb29b5dd76,
|
||||
0x576ac94e7700362b, 0x1ad112dac61efb8f, 0x691bc30ec5faa427, 0xff246311cc327143,
|
||||
0x3142368e30e53206, 0x71380e31e02ca396, 0x958d5c960aad76f1, 0xf8d6f430c16da536,
|
||||
0xc8ffd13f1be7e1d2, 0x7578ae66004ddbe1, 0x05833f01067be646, 0xbb34b5ad3bfe586d,
|
||||
0x095f34c9a12b97f0, 0x247ab64525d60ca8, 0xdcdbc6f3017477d1, 0x4a2e14d4decad24d,
|
||||
0xbdb5e6d9be0a1eeb, 0x2a7e70f7794301ab, 0xdef42d8a270540fd, 0x01078ec0a34c22c1,
|
||||
0xe5de511af4c16387, 0x7ebb3a52bd9a330a, 0x77697857aa7d6435, 0x004e831603ae4c32,
|
||||
0xe7a21020ad78e312, 0x9d41a70c6ab420f2, 0x28e06c18ea1141e6, 0xd2b28cbd984f6b28,
|
||||
0x26b75f6c446e9d83, 0xba47568c4d418d7f, 0xd80badbfe6183d8e, 0x0e206d7f5f166044,
|
||||
0xe258a43911cbca3e, 0x723a1746b21dc0bc, 0xc7caa854f5d7cdd3, 0x7cac32883d261d9c,
|
||||
0x7690c26423ba942c, 0x17e55524478042b8, 0xe0be477656a2389f, 0x4d289b5e67ab2da0,
|
||||
0x44862b9c8fbbfd31, 0xb47cc8049d141365, 0x822c1b362b91c793, 0x4eb14655fb13dfd8,
|
||||
0x1ecbba0714e2a97b, 0x6143459d5cde5f14, 0x53a8fbf1d5f0ac89, 0x97ea04d81c5e5b00,
|
||||
0x622181a8d4fdb3f3, 0xe9bcd341572a1208, 0x1411258643cce58a, 0x9144c5fea4c6e0a4,
|
||||
0x0d33d06565cf620f, 0x54a48d489f219ca1, 0xc43e5eac6d63c821, 0xa9728b3a72770daf,
|
||||
0xd7934e7b20df87ef, 0xe35503b61a3e86e5, 0xcae321fbc819d504, 0x129a50b3ac60bfa6,
|
||||
0xcd5e68ea7e9fb6c3, 0xb01c90199483b1c7, 0x3de93cd5c295376c, 0xaed52edf2ab9ad13,
|
||||
0x2e60f512c0a07884, 0xbc3d86a3e36210c9, 0x35269d9b163951ce, 0x0c7d6e2ad0cdb5fa,
|
||||
0x59e86297d87f5733, 0x298ef221898db0e7, 0x55000029d1a5aa7e, 0x8bc08ae1b5061b45,
|
||||
0xc2c31c2b6c92703a, 0x94cc596baf25ef42, 0x0a1d73db22540456, 0x04b6a0f9d9c4179a,
|
||||
0xeffdafa2ae3d3c60, 0xf7c8075bb49496c4, 0x9cc5c7141d1cd4e3, 0x78bd1638218e5534,
|
||||
0xb2f11568f850246a, 0xedfabcfa9502bc29, 0x796ce5f2da23051b, 0xaae128b0dc93537c,
|
||||
0x3a493da0ee4b29ae, 0xb5df6b2c416895d7, 0xfcabbd25122d7f37, 0x70810b58105dc4b1,
|
||||
0xe10fdd37f7882a90, 0x524dcab5518a3f5c, 0x3c9e85878451255b, 0x4029828119bd34e2,
|
||||
0x74a05b6f5d3ceccb, 0xb610021542e13eca, 0x0ff979d12f59e2ac, 0x6037da27e4f9cc50,
|
||||
0x5e92975a0df1847d, 0xd66de190d3e623fe, 0x5032d6b87b568048, 0x9a36b7ce8235216e,
|
||||
0x80272a7a24f64b4a, 0x93efed8b8c6916f7, 0x37ddbff44cce1555, 0x4b95db5d4b99bd25,
|
||||
0x92d3fda169812fc0, 0xfb1a4a9a90660bb6, 0x730c196946a4b9b2, 0x81e289aa7f49da68,
|
||||
0x64669a0f83b1a05f, 0x27b3ff7d9644f48b, 0xcc6b615c8db675b3, 0x674f20b9bcebbe95,
|
||||
0x6f31238275655982, 0x5ae488713e45cf05, 0xbf619f9954c21157, 0xeabac46040a8eae9,
|
||||
0x454c6fe9f2c0c1cd, 0x419cf6496412691c, 0xd3dc3bef265b0f70, 0x6d0e60f5c3578a9e,
|
||||
}
|
||||
|
||||
T4 := [?]u64 {
|
||||
0x5b0e608526323c55, 0x1a46c1a9fa1b59f5, 0xa9e245a17c4c8ffa, 0x65ca5159db2955d7,
|
||||
0x05db0a76ce35afc2, 0x81eac77ea9113d45, 0x528ef88ab6ac0a0d, 0xa09ea253597be3ff,
|
||||
0x430ddfb3ac48cd56, 0xc4b3a67af45ce46f, 0x4ececfd8fbe2d05e, 0x3ef56f10b39935f0,
|
||||
0x0b22d6829cd619c6, 0x17fd460a74df2069, 0x6cf8cc8e8510ed40, 0xd6c824bf3a6ecaa7,
|
||||
0x61243d581a817049, 0x048bacb6bbc163a2, 0xd9a38ac27d44cc32, 0x7fddff5baaf410ab,
|
||||
0xad6d495aa804824b, 0xe1a6a74f2d8c9f94, 0xd4f7851235dee8e3, 0xfd4b7f886540d893,
|
||||
0x247c20042aa4bfda, 0x096ea1c517d1327c, 0xd56966b4361a6685, 0x277da5c31221057d,
|
||||
0x94d59893a43acff7, 0x64f0c51ccdc02281, 0x3d33bcc4ff6189db, 0xe005cb184ce66af1,
|
||||
0xff5ccd1d1db99bea, 0xb0b854a7fe42980f, 0x7bd46a6a718d4b9f, 0xd10fa8cc22a5fd8c,
|
||||
0xd31484952be4bd31, 0xc7fa975fcb243847, 0x4886ed1e5846c407, 0x28cddb791eb70b04,
|
||||
0xc2b00be2f573417f, 0x5c9590452180f877, 0x7a6bddfff370eb00, 0xce509e38d6d9d6a4,
|
||||
0xebeb0f00647fa702, 0x1dcc06cf76606f06, 0xe4d9f28ba286ff0a, 0xd85a305dc918c262,
|
||||
0x475b1d8732225f54, 0x2d4fb51668ccb5fe, 0xa679b9d9d72bba20, 0x53841c0d912d43a5,
|
||||
0x3b7eaa48bf12a4e8, 0x781e0e47f22f1ddf, 0xeff20ce60ab50973, 0x20d261d19dffb742,
|
||||
0x16a12b03062a2e39, 0x1960eb2239650495, 0x251c16fed50eb8b8, 0x9ac0c330f826016e,
|
||||
0xed152665953e7671, 0x02d63194a6369570, 0x5074f08394b1c987, 0x70ba598c90b25ce1,
|
||||
0x794a15810b9742f6, 0x0d5925e9fcaf8c6c, 0x3067716cd868744e, 0x910ab077e8d7731b,
|
||||
0x6a61bbdb5ac42f61, 0x93513efbf0851567, 0xf494724b9e83e9d5, 0xe887e1985c09648d,
|
||||
0x34b1d3c675370cfd, 0xdc35e433bc0d255d, 0xd0aab84234131be0, 0x08042a50b48b7eaf,
|
||||
0x9997c4ee44a3ab35, 0x829a7b49201799d0, 0x263b8307b7c54441, 0x752f95f4fd6a6ca6,
|
||||
0x927217402c08c6e5, 0x2a8ab754a795d9ee, 0xa442f7552f72943d, 0x2c31334e19781208,
|
||||
0x4fa98d7ceaee6291, 0x55c3862f665db309, 0xbd0610175d53b1f3, 0x46fe6cb840413f27,
|
||||
0x3fe03792df0cfa59, 0xcfe700372eb85e8f, 0xa7be29e7adbce118, 0xe544ee5cde8431dd,
|
||||
0x8a781b1b41f1873e, 0xa5c94c78a0d2f0e7, 0x39412e2877b60728, 0xa1265ef3afc9a62c,
|
||||
0xbcc2770c6a2506c5, 0x3ab66dd5dce1ce12, 0xe65499d04a675b37, 0x7d8f523481bfd216,
|
||||
0x0f6f64fcec15f389, 0x74efbe618b5b13c8, 0xacdc82b714273e1d, 0xdd40bfe003199d17,
|
||||
0x37e99257e7e061f8, 0xfa52626904775aaa, 0x8bbbf63a463d56f9, 0xf0013f1543a26e64,
|
||||
0xa8307e9f879ec898, 0xcc4c27a4150177cc, 0x1b432f2cca1d3348, 0xde1d1f8f9f6fa013,
|
||||
0x606602a047a7ddd6, 0xd237ab64cc1cb2c7, 0x9b938e7225fcd1d3, 0xec4e03708e0ff476,
|
||||
0xfeb2fbda3d03c12d, 0xae0bced2ee43889a, 0x22cb8923ebfb4f43, 0x69360d013cf7396d,
|
||||
0x855e3602d2d4e022, 0x073805bad01f784c, 0x33e17a133852f546, 0xdf4874058ac7b638,
|
||||
0xba92b29c678aa14a, 0x0ce89fc76cfaadcd, 0x5f9d4e0908339e34, 0xf1afe9291f5923b9,
|
||||
0x6e3480f60f4a265f, 0xeebf3a2ab29b841c, 0xe21938a88f91b4ad, 0x57dfeff845c6d3c3,
|
||||
0x2f006b0bf62caaf2, 0x62f479ef6f75ee78, 0x11a55ad41c8916a9, 0xf229d29084fed453,
|
||||
0x42f1c27b16b000e6, 0x2b1f76749823c074, 0x4b76eca3c2745360, 0x8c98f463b91691bd,
|
||||
0x14bcc93cf1ade66a, 0x8885213e6d458397, 0x8e177df0274d4711, 0xb49b73b5503f2951,
|
||||
0x10168168c3f96b6b, 0x0e3d963b63cab0ae, 0x8dfc4b5655a1db14, 0xf789f1356e14de5c,
|
||||
0x683e68af4e51dac1, 0xc9a84f9d8d4b0fd9, 0x3691e03f52a0f9d1, 0x5ed86e46e1878e80,
|
||||
0x3c711a0e99d07150, 0x5a0865b20c4e9310, 0x56fbfc1fe4f0682e, 0xea8d5de3105edf9b,
|
||||
0x71abfdb12379187a, 0x2eb99de1bee77b9c, 0x21ecc0ea33cf4523, 0x59a4d7521805c7a1,
|
||||
0x3896f5eb56ae7c72, 0xaa638f3db18f75dc, 0x9f39358dabe9808e, 0xb7defa91c00b72ac,
|
||||
0x6b5541fd62492d92, 0x6dc6dee8f92e4d5b, 0x353f57abc4beea7e, 0x735769d6da5690ce,
|
||||
0x0a234aa642391484, 0xf6f9508028f80d9d, 0xb8e319a27ab3f215, 0x31ad9c1151341a4d,
|
||||
0x773c22a57bef5805, 0x45c7561a07968633, 0xf913da9e249dbe36, 0xda652d9b78a64c68,
|
||||
0x4c27a97f3bc334ef, 0x76621220e66b17f4, 0x967743899acd7d0b, 0xf3ee5bcae0ed6782,
|
||||
0x409f753600c879fc, 0x06d09a39b5926db6, 0x6f83aeb0317ac588, 0x01e6ca4a86381f21,
|
||||
0x66ff3462d19f3025, 0x72207c24ddfd3bfb, 0x4af6b6d3e2ece2eb, 0x9c994dbec7ea08de,
|
||||
0x49ace597b09a8bc4, 0xb38c4766cf0797ba, 0x131b9373c57c2a75, 0xb1822cce61931e58,
|
||||
0x9d7555b909ba1c0c, 0x127fafdd937d11d2, 0x29da3badc66d92e4, 0xa2c1d57154c2ecbc,
|
||||
0x58c5134d82f6fe24, 0x1c3ae3515b62274f, 0xe907c82e01cb8126, 0xf8ed091913e37fcb,
|
||||
0x3249d8f9c80046c9, 0x80cf9bede388fb63, 0x1881539a116cf19e, 0x5103f3f76bd52457,
|
||||
0x15b7e6f5ae47f7a8, 0xdbd7c6ded47e9ccf, 0x44e55c410228bb1a, 0xb647d4255edb4e99,
|
||||
0x5d11882bb8aafc30, 0xf5098bbb29d3212a, 0x8fb5ea14e90296b3, 0x677b942157dd025a,
|
||||
0xfb58e7c0a390acb5, 0x89d3674c83bd4a01, 0x9e2da4df4bf3b93b, 0xfcc41e328cab4829,
|
||||
0x03f38c96ba582c52, 0xcad1bdbd7fd85db2, 0xbbb442c16082ae83, 0xb95fe86ba5da9ab0,
|
||||
0xb22e04673771a93f, 0x845358c9493152d8, 0xbe2a488697b4541e, 0x95a2dc2dd38e6966,
|
||||
0xc02c11ac923c852b, 0x2388b1990df2a87b, 0x7c8008fa1b4f37be, 0x1f70d0c84d54e503,
|
||||
0x5490adec7ece57d4, 0x002b3c27d9063a3a, 0x7eaea3848030a2bf, 0xc602326ded2003c0,
|
||||
0x83a7287d69a94086, 0xc57a5fcb30f57a8a, 0xb56844e479ebe779, 0xa373b40f05dcbce9,
|
||||
0xd71a786e88570ee2, 0x879cbacdbde8f6a0, 0x976ad1bcc164a32f, 0xab21e25e9666d78b,
|
||||
0x901063aae5e5c33c, 0x9818b34448698d90, 0xe36487ae3e1e8abb, 0xafbdf931893bdcb4,
|
||||
0x6345a0dc5fbbd519, 0x8628fe269b9465ca, 0x1e5d01603f9c51ec, 0x4de44006a15049b7,
|
||||
0xbf6c70e5f776cbb1, 0x411218f2ef552bed, 0xcb0c0708705a36a3, 0xe74d14754f986044,
|
||||
0xcd56d9430ea8280e, 0xc12591d7535f5065, 0xc83223f1720aef96, 0xc3a0396f7363a51f,
|
||||
}

Tiger_Context :: struct {
	a:      u64,      // state word A
	b:      u64,      // state word B
	c:      u64,      // state word C
	x:      [64]byte, // buffered partial block
	nx:     int,      // number of bytes currently buffered in x
	length: u64,      // total message length in bytes
	ver:    int,      // 1 selects Tiger (0x01 padding byte), otherwise Tiger2 (0x80)
}

// round is a single Tiger round: the message word is mixed into c, then
// a and b are updated through S-box lookups into the four tables above.
round :: #force_inline proc "contextless" (a, b, c, x, mul: u64) -> (u64, u64, u64) {
	a, b, c := a, b, c
	c ~= x
	a -= T1[c & 0xff] ~ T2[(c >> 16) & 0xff] ~ T3[(c >> 32) & 0xff] ~ T4[(c >> 48) & 0xff]
	b += T4[(c >> 8) & 0xff] ~ T3[(c >> 24) & 0xff] ~ T2[(c >> 40) & 0xff] ~ T1[(c >> 56) & 0xff]
	b *= mul
	return a, b, c
}

// pass applies eight rounds, rotating the roles of the three state words.
pass :: #force_inline proc "contextless" (a, b, c: u64, d: []u64, mul: u64) -> (x, y, z: u64) {
	x, y, z = round(a, b, c, d[0], mul)
	y, z, x = round(y, z, x, d[1], mul)
	z, x, y = round(z, x, y, d[2], mul)
	x, y, z = round(x, y, z, d[3], mul)
	y, z, x = round(y, z, x, d[4], mul)
	z, x, y = round(z, x, y, d[5], mul)
	x, y, z = round(x, y, z, d[6], mul)
	y, z, x = round(y, z, x, d[7], mul)
	return
}

// key_schedule expands the message block in place between passes.
key_schedule :: #force_inline proc "contextless" (x: []u64) {
	x[0] -= x[7] ~ 0xa5a5a5a5a5a5a5a5
	x[1] ~= x[0]
	x[2] += x[1]
	x[3] -= x[2] ~ ((~x[1]) << 19)
	x[4] ~= x[3]
	x[5] += x[4]
	x[6] -= x[5] ~ ((~x[4]) >> 23)
	x[7] ~= x[6]
	x[0] += x[7]
	x[1] -= x[0] ~ ((~x[7]) << 19)
	x[2] ~= x[1]
	x[3] += x[2]
	x[4] -= x[3] ~ ((~x[2]) >> 23)
	x[5] ~= x[4]
	x[6] += x[5]
	x[7] -= x[6] ~ 0x0123456789abcdef
}

// compress runs the three passes (multipliers 5, 7 and 9) with the key
// schedule in between, then folds the previous state back in. Note that
// key_schedule rewrites the message block in place, so callers must not
// pass data they still need intact.
compress :: #force_inline proc "contextless" (ctx: ^Tiger_Context, data: []byte) {
	a := ctx.a
	b := ctx.b
	c := ctx.c
	x := util.cast_slice([]u64, data)
	ctx.a, ctx.b, ctx.c = pass(ctx.a, ctx.b, ctx.c, x, 5)
	key_schedule(x)
	ctx.c, ctx.a, ctx.b = pass(ctx.c, ctx.a, ctx.b, x, 7)
	key_schedule(x)
	ctx.b, ctx.c, ctx.a = pass(ctx.b, ctx.c, ctx.a, x, 9)
	ctx.a ~= a
	ctx.b -= b
	ctx.c += c
}

init :: proc "contextless" (ctx: ^Tiger_Context) {
	ctx.a = 0x0123456789abcdef
	ctx.b = 0xfedcba9876543210
	ctx.c = 0xf096a5b4c3b2e187
}

update :: proc(ctx: ^Tiger_Context, input: []byte) {
	// compress mutates the block it is given (via key_schedule), so work on
	// a private copy rather than the caller's slice; the copy is freed on
	// return (the original allocated it without ever deleting it).
	buf := make([]byte, len(input))
	defer delete(buf)
	copy(buf, input)
	p := buf

	length := len(p)
	ctx.length += u64(length)
	if ctx.nx > 0 {
		n := len(p)
		if n > 64 - ctx.nx {
			n = 64 - ctx.nx
		}
		copy(ctx.x[ctx.nx:ctx.nx + n], p[:n])
		ctx.nx += n
		if ctx.nx == 64 {
			// Compress the full 64-byte block (the original sliced
			// `ctx.x[:64 - 1]`, which under-feeds the u64 view by one word).
			compress(ctx, ctx.x[:64])
			ctx.nx = 0
		}
		p = p[n:]
	}
	for len(p) >= 64 {
		compress(ctx, p[:64])
		p = p[64:]
	}
	if len(p) > 0 {
		ctx.nx = copy(ctx.x[:], p)
	}
}

final :: proc(ctx: ^Tiger_Context, hash: []byte) {
	length := ctx.length
	tmp: [64]byte
	if ctx.ver == 1 {
		tmp[0] = 0x01
	} else {
		tmp[0] = 0x80
	}

	size := length & 0x3f
	if size < 56 {
		update(ctx, tmp[:56 - size])
	} else {
		update(ctx, tmp[:64 + 56 - size])
	}

	length <<= 3
	for i := uint(0); i < 8; i += 1 {
		tmp[i] = byte(length >> (8 * i))
	}
	update(ctx, tmp[:8])

	for i := uint(0); i < 8; i += 1 {
		tmp[i] = byte(ctx.a >> (8 * i))
		tmp[i + 8] = byte(ctx.b >> (8 * i))
		tmp[i + 16] = byte(ctx.c >> (8 * i))
	}
	copy(hash[:], tmp[:len(hash)])
}
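
// Usage sketch (illustrative only, not part of the original file): computing
// a Tiger/192 digest with the procedures above. The 24-byte output is the
// classic Tiger digest size (the three u64 state words serialized by final).
_example_tiger_digest :: proc(msg: []byte) -> (digest: [24]byte) {
	ctx: Tiger_Context
	init(&ctx)       // ver defaults to 0, i.e. Tiger2-style 0x80 padding
	update(&ctx, msg)
	final(&ctx, digest[:])
	return
}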
@@ -1,726 +0,0 @@
package blake

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the BLAKE hashing algorithm, as defined in <https://web.archive.org/web/20190915215948/https://131002.net/blake>
*/

import "core:os"
import "core:io"

/*
    High level API
*/

DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc "contextless" (data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: Blake256_Context
	ctx.is224 = true
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
	ctx: Blake256_Context
	ctx.is224 = true
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: Blake256_Context
	ctx.is224 = true
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc "contextless" (data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Blake256_Context
	ctx.is224 = false
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: Blake256_Context
	ctx.is224 = false
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Blake256_Context
	ctx.is224 = false
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc "contextless" (data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: Blake512_Context
	ctx.is384 = true
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
	ctx: Blake512_Context
	ctx.is384 = true
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: Blake512_Context
	ctx.is384 = true
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc "contextless" (data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Blake512_Context
	ctx.is384 = false
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: Blake512_Context
	ctx.is384 = false
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Blake512_Context
	ctx.is384 = false
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}
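
// Usage sketch (illustrative only, not part of the original file): the
// overloaded groups above dispatch on argument type, so a caller importing
// this package as `blake` can write, for example:
//
//	digest256 := blake.hash_256("some input")      // resolves to hash_string_256
//	digest512, ok := blake.hash_512(file_handle)   // resolves to hash_file_512
//
// where `file_handle` is a hypothetical os.Handle owned by the caller.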

/*
    Low level API
*/

init :: proc "contextless" (ctx: ^$T) {
	when T == Blake256_Context {
		if ctx.is224 {
			ctx.h[0] = 0xc1059ed8
			ctx.h[1] = 0x367cd507
			ctx.h[2] = 0x3070dd17
			ctx.h[3] = 0xf70e5939
			ctx.h[4] = 0xffc00b31
			ctx.h[5] = 0x68581511
			ctx.h[6] = 0x64f98fa7
			ctx.h[7] = 0xbefa4fa4
		} else {
			ctx.h[0] = 0x6a09e667
			ctx.h[1] = 0xbb67ae85
			ctx.h[2] = 0x3c6ef372
			ctx.h[3] = 0xa54ff53a
			ctx.h[4] = 0x510e527f
			ctx.h[5] = 0x9b05688c
			ctx.h[6] = 0x1f83d9ab
			ctx.h[7] = 0x5be0cd19
		}
	} else when T == Blake512_Context {
		if ctx.is384 {
			ctx.h[0] = 0xcbbb9d5dc1059ed8
			ctx.h[1] = 0x629a292a367cd507
			ctx.h[2] = 0x9159015a3070dd17
			ctx.h[3] = 0x152fecd8f70e5939
			ctx.h[4] = 0x67332667ffc00b31
			ctx.h[5] = 0x8eb44a8768581511
			ctx.h[6] = 0xdb0c2e0d64f98fa7
			ctx.h[7] = 0x47b5481dbefa4fa4
		} else {
			ctx.h[0] = 0x6a09e667f3bcc908
			ctx.h[1] = 0xbb67ae8584caa73b
			ctx.h[2] = 0x3c6ef372fe94f82b
			ctx.h[3] = 0xa54ff53a5f1d36f1
			ctx.h[4] = 0x510e527fade682d1
			ctx.h[5] = 0x9b05688c2b3e6c1f
			ctx.h[6] = 0x1f83d9abfb41bd6b
			ctx.h[7] = 0x5be0cd19137e2179
		}
	}
}

update :: proc "contextless" (ctx: ^$T, data: []byte) {
	data := data
	when T == Blake256_Context {
		if ctx.nx > 0 {
			n := copy(ctx.x[ctx.nx:], data)
			ctx.nx += n
			if ctx.nx == BLOCKSIZE_256 {
				block256(ctx, ctx.x[:])
				ctx.nx = 0
			}
			data = data[n:]
		}
		if len(data) >= BLOCKSIZE_256 {
			n := len(data) &~ (BLOCKSIZE_256 - 1)
			block256(ctx, data[:n])
			data = data[n:]
		}
		if len(data) > 0 {
			ctx.nx = copy(ctx.x[:], data)
		}
	} else when T == Blake512_Context {
		if ctx.nx > 0 {
			n := copy(ctx.x[ctx.nx:], data)
			ctx.nx += n
			if ctx.nx == BLOCKSIZE_512 {
				block512(ctx, ctx.x[:])
				ctx.nx = 0
			}
			data = data[n:]
		}
		if len(data) >= BLOCKSIZE_512 {
			n := len(data) &~ (BLOCKSIZE_512 - 1)
			block512(ctx, data[:n])
			data = data[n:]
		}
		if len(data) > 0 {
			ctx.nx = copy(ctx.x[:], data)
		}
	}
}

final :: proc "contextless" (ctx: ^$T, hash: []byte) {
	when T == Blake256_Context {
		tmp: [65]byte
	} else when T == Blake512_Context {
		tmp: [129]byte
	}
	nx := u64(ctx.nx)
	tmp[0] = 0x80
	// ctx.t already counts processed bits (block256/block512 add a full
	// block of bits per block), so only the buffered bytes need scaling.
	// The original computed (ctx.t + nx) << 3, which over-counts once any
	// full block has been compressed.
	length := ctx.t + (nx << 3)

	when T == Blake256_Context {
		if nx == 55 {
			// Exactly one padding byte fits: its high bit is the padding
			// marker and its low bit is the final "1" that distinguishes
			// BLAKE-256 (0x81) from BLAKE-224 (0x80).
			if ctx.is224 {
				write_additional(ctx, {0x80})
			} else {
				write_additional(ctx, {0x81})
			}
		} else {
			if nx < 55 {
				if nx == 0 {
					// The padding block carries no message bits, so the
					// counter must not be folded into it (see block256).
					ctx.nullt = true
				}
				write_additional(ctx, tmp[0 : 55 - nx])
			} else {
				write_additional(ctx, tmp[0 : 64 - nx])
				write_additional(ctx, tmp[1:56])
				ctx.nullt = true
			}
			if ctx.is224 {
				write_additional(ctx, {0x00})
			} else {
				write_additional(ctx, {0x01})
			}
		}

		for i : uint = 0; i < 8; i += 1 {
			tmp[i] = byte(length >> (56 - 8 * i))
		}
		write_additional(ctx, tmp[0:8])

		h := ctx.h[:]
		if ctx.is224 {
			h = h[0:7]
		}
		for s, i in h {
			hash[i * 4] = byte(s >> 24)
			hash[i * 4 + 1] = byte(s >> 16)
			hash[i * 4 + 2] = byte(s >> 8)
			hash[i * 4 + 3] = byte(s)
		}
	} else when T == Blake512_Context {
		if nx == 111 {
			if ctx.is384 {
				write_additional(ctx, {0x80})
			} else {
				write_additional(ctx, {0x81})
			}
		} else {
			if nx < 111 {
				if nx == 0 {
					ctx.nullt = true
				}
				write_additional(ctx, tmp[0 : 111 - nx])
			} else {
				write_additional(ctx, tmp[0 : 128 - nx])
				write_additional(ctx, tmp[1:112])
				ctx.nullt = true
			}
			if ctx.is384 {
				write_additional(ctx, {0x00})
			} else {
				write_additional(ctx, {0x01})
			}
		}

		for i : uint = 0; i < 16; i += 1 {
			tmp[i] = byte(length >> (120 - 8 * i))
		}
		write_additional(ctx, tmp[0:16])

		h := ctx.h[:]
		if ctx.is384 {
			h = h[0:6]
		}
		for s, i in h {
			hash[i * 8] = byte(s >> 56)
			hash[i * 8 + 1] = byte(s >> 48)
			hash[i * 8 + 2] = byte(s >> 40)
			hash[i * 8 + 3] = byte(s >> 32)
			hash[i * 8 + 4] = byte(s >> 24)
			hash[i * 8 + 5] = byte(s >> 16)
			hash[i * 8 + 6] = byte(s >> 8)
			hash[i * 8 + 7] = byte(s)
		}
	}
}

SIZE_224 :: 28
SIZE_256 :: 32
SIZE_384 :: 48
SIZE_512 :: 64
BLOCKSIZE_256 :: 64
BLOCKSIZE_512 :: 128

Blake256_Context :: struct {
	h: [8]u32,
	s: [4]u32,
	t: u64,
	x: [64]byte,
	nx: int,
	is224: bool,
	nullt: bool,
}

Blake512_Context :: struct {
	h: [8]u64,
	s: [4]u64,
	t: u64,
	x: [128]byte,
	nx: int,
	is384: bool,
	nullt: bool,
}

SIGMA := [?]int {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
	14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
	11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
	7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
	9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
	2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
	12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11,
	13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10,
	6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5,
	10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0,
}

U256 := [16]u32 {
	0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
	0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
	0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
	0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
}

U512 := [16]u64 {
	0x243f6a8885a308d3, 0x13198a2e03707344, 0xa4093822299f31d0, 0x082efa98ec4e6c89,
	0x452821e638d01377, 0xbe5466cf34e90c6c, 0xc0ac29b7c97c50dd, 0x3f84d5b5b5470917,
	0x9216d5d98979fb1b, 0xd1310ba698dfb5ac, 0x2ffd72dbd01adfb7, 0xb8e1afed6a267e96,
	0xba7c9045f12c7f99, 0x24a19947b3916cf7, 0x0801f2e2858efc16, 0x636920d871574e69,
}

// G256 is the BLAKE G function on 32-bit words; the specification's right
// rotations are written out as shift pairs (e.g. d << (32 - 16) | d >> 16
// rotates d right by 16). The SIGMA permutation selects which message word
// and which U256 constant feed each half-step.
G256 :: #force_inline proc "contextless" (a, b, c, d: u32, m: [16]u32, i, j: int) -> (u32, u32, u32, u32) {
	a, b, c, d := a, b, c, d
	a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j + 1)]]
	a += b
	d ~= a
	d = d << (32 - 16) | d >> 16
	c += d
	b ~= c
	b = b << (32 - 12) | b >> 12
	a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j)]]
	a += b
	d ~= a
	d = d << (32 - 8) | d >> 8
	c += d
	b ~= c
	b = b << (32 - 7) | b >> 7
	return a, b, c, d
}

// G512 is the 64-bit counterpart, with right-rotation amounts 32, 25, 16, 11.
G512 :: #force_inline proc "contextless" (a, b, c, d: u64, m: [16]u64, i, j: int) -> (u64, u64, u64, u64) {
	a, b, c, d := a, b, c, d
	a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j + 1)]]
	a += b
	d ~= a
	d = d << (64 - 32) | d >> 32
	c += d
	b ~= c
	b = b << (64 - 25) | b >> 25
	a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j)]]
	a += b
	d ~= a
	d = d << (64 - 16) | d >> 16
	c += d
	b ~= c
	b = b << (64 - 11) | b >> 11
	return a, b, c, d
}

block256 :: proc "contextless" (ctx: ^Blake256_Context, p: []byte) #no_bounds_check {
	i, j: int = ---, ---
	v, m: [16]u32 = ---, ---
	p := p
	for len(p) >= BLOCKSIZE_256 {
		v[0] = ctx.h[0]
		v[1] = ctx.h[1]
		v[2] = ctx.h[2]
		v[3] = ctx.h[3]
		v[4] = ctx.h[4]
		v[5] = ctx.h[5]
		v[6] = ctx.h[6]
		v[7] = ctx.h[7]
		v[8] = ctx.s[0] ~ U256[0]
		v[9] = ctx.s[1] ~ U256[1]
		v[10] = ctx.s[2] ~ U256[2]
		v[11] = ctx.s[3] ~ U256[3]
		v[12] = U256[4]
		v[13] = U256[5]
		v[14] = U256[6]
		v[15] = U256[7]

		ctx.t += 512
		if !ctx.nullt {
			v[12] ~= u32(ctx.t)
			v[13] ~= u32(ctx.t)
			v[14] ~= u32(ctx.t >> 32)
			v[15] ~= u32(ctx.t >> 32)
		}

		for i, j = 0, 0; i < 16; i, j = i+1, j+4 {
			m[i] = u32(p[j]) << 24 | u32(p[j + 1]) << 16 | u32(p[j + 2]) << 8 | u32(p[j + 3])
		}

		for i = 0; i < 14; i += 1 {
			v[0], v[4], v[8], v[12] = G256(v[0], v[4], v[8], v[12], m, i, 0)
			v[1], v[5], v[9], v[13] = G256(v[1], v[5], v[9], v[13], m, i, 1)
			v[2], v[6], v[10], v[14] = G256(v[2], v[6], v[10], v[14], m, i, 2)
			v[3], v[7], v[11], v[15] = G256(v[3], v[7], v[11], v[15], m, i, 3)
			v[0], v[5], v[10], v[15] = G256(v[0], v[5], v[10], v[15], m, i, 4)
			v[1], v[6], v[11], v[12] = G256(v[1], v[6], v[11], v[12], m, i, 5)
			v[2], v[7], v[8], v[13] = G256(v[2], v[7], v[8], v[13], m, i, 6)
			v[3], v[4], v[9], v[14] = G256(v[3], v[4], v[9], v[14], m, i, 7)
		}

		for i = 0; i < 8; i += 1 {
			ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8]
		}
		p = p[BLOCKSIZE_256:]
	}
}

block512 :: proc "contextless" (ctx: ^Blake512_Context, p: []byte) #no_bounds_check {
	i, j: int = ---, ---
	v, m: [16]u64 = ---, ---
	p := p
	for len(p) >= BLOCKSIZE_512 {
		v[0] = ctx.h[0]
		v[1] = ctx.h[1]
		v[2] = ctx.h[2]
		v[3] = ctx.h[3]
		v[4] = ctx.h[4]
		v[5] = ctx.h[5]
		v[6] = ctx.h[6]
		v[7] = ctx.h[7]
		v[8] = ctx.s[0] ~ U512[0]
		v[9] = ctx.s[1] ~ U512[1]
		v[10] = ctx.s[2] ~ U512[2]
		v[11] = ctx.s[3] ~ U512[3]
		v[12] = U512[4]
		v[13] = U512[5]
		v[14] = U512[6]
		v[15] = U512[7]

		ctx.t += 1024
		if !ctx.nullt {
			v[12] ~= ctx.t
			v[13] ~= ctx.t
			v[14] ~= 0
			v[15] ~= 0
		}

		for i, j = 0, 0; i < 16; i, j = i + 1, j + 8 {
			m[i] = u64(p[j]) << 56 | u64(p[j + 1]) << 48 | u64(p[j + 2]) << 40 | u64(p[j + 3]) << 32 |
				u64(p[j + 4]) << 24 | u64(p[j + 5]) << 16 | u64(p[j + 6]) << 8 | u64(p[j + 7])
		}
		for i = 0; i < 16; i += 1 {
			v[0], v[4], v[8], v[12] = G512(v[0], v[4], v[8], v[12], m, i, 0)
			v[1], v[5], v[9], v[13] = G512(v[1], v[5], v[9], v[13], m, i, 1)
			v[2], v[6], v[10], v[14] = G512(v[2], v[6], v[10], v[14], m, i, 2)
			v[3], v[7], v[11], v[15] = G512(v[3], v[7], v[11], v[15], m, i, 3)
			v[0], v[5], v[10], v[15] = G512(v[0], v[5], v[10], v[15], m, i, 4)
			v[1], v[6], v[11], v[12] = G512(v[1], v[6], v[11], v[12], m, i, 5)
			v[2], v[7], v[8], v[13] = G512(v[2], v[7], v[8], v[13], m, i, 6)
			v[3], v[4], v[9], v[14] = G512(v[3], v[4], v[9], v[14], m, i, 7)
		}

		for i = 0; i < 8; i += 1 {
			ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8]
		}
		p = p[BLOCKSIZE_512:]
	}
}

// write_additional feeds padding through update without letting it count
// toward the message-bit counter: block256/block512 unconditionally add a
// full block's bits to ctx.t, so the padding's bits are subtracted first.
write_additional :: proc "contextless" (ctx: ^$T, data: []byte) {
	ctx.t -= u64(len(data)) << 3
	update(ctx, data)
}
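
// Usage sketch of the low level API (illustrative only, not part of the
// original file): hashing incrementally fed data into a BLAKE-512 digest.
_example_blake512_stream :: proc "contextless" (part1, part2: []byte) -> (digest: [SIZE_512]byte) {
	ctx: Blake512_Context
	ctx.is384 = false
	init(&ctx)
	update(&ctx, part1)
	update(&ctx, part2)
	final(&ctx, digest[:])
	return
}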
@@ -7,12 +7,12 @@ package blake2b
    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the BLAKE2B hashing algorithm.
    BLAKE2B and BLAKE2B share the implementation in the _blake2 package.
    Interface for the BLAKE2b hashing algorithm.
    BLAKE2b and BLAKE2s share the implementation in the _blake2 package.
*/

import "core:os"
import "core:io"
import "core:os"

import "../_blake2"

@@ -25,103 +25,103 @@ DIGEST_SIZE :: 64
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: _blake2.Blake2b_Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	_blake2.init(&ctx)
	_blake2.update(&ctx, data)
	_blake2.final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: _blake2.Blake2b_Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	_blake2.init(&ctx)
	_blake2.update(&ctx, data)
	_blake2.final(&ctx, hash)
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: _blake2.Blake2b_Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	_blake2.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_blake2.update(&ctx, buf[:read])
		}
	}
	_blake2.final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
	ctx.cfg = cfg
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
    Low level API
*/

Blake2b_Context :: _blake2.Blake2b_Context
Context :: _blake2.Blake2b_Context

init :: proc(ctx: ^_blake2.Blake2b_Context) {
	_blake2.init(ctx)
init :: proc(ctx: ^Context) {
	_blake2.init(ctx)
}

update :: proc "contextless" (ctx: ^_blake2.Blake2b_Context, data: []byte) {
	_blake2.update(ctx, data)
update :: proc(ctx: ^Context, data: []byte) {
	_blake2.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_blake2.Blake2b_Context, hash: []byte) {
	_blake2.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte) {
	_blake2.final(ctx, hash)
}
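
// Usage sketch for the renamed low level API above (illustrative only,
// assuming a caller that imports this package as `blake2b` and sets the
// digest size the way the high level helpers do via Blake2_Config):
//
//	ctx: blake2b.Context
//	ctx.cfg.size = 64               // assumed to match _blake2.BLAKE2B_SIZE
//	blake2b.init(&ctx)
//	blake2b.update(&ctx, data)
//	digest: [blake2b.DIGEST_SIZE]byte
//	blake2b.final(&ctx, digest[:])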
@@ -7,12 +7,12 @@ package blake2s
    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the BLAKE2S hashing algorithm.
    BLAKE2B and BLAKE2B share the implementation in the _blake2 package.
    Interface for the BLAKE2s hashing algorithm.
    BLAKE2s and BLAKE2b share the implementation in the _blake2 package.
*/

import "core:os"
import "core:io"
import "core:os"

import "../_blake2"

@@ -25,103 +25,103 @@ DIGEST_SIZE :: 32
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: _blake2.Blake2s_Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	_blake2.init(&ctx)
	_blake2.update(&ctx, data)
	_blake2.final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: _blake2.Blake2s_Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	_blake2.init(&ctx)
	_blake2.update(&ctx, data)
	_blake2.final(&ctx, hash)
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: _blake2.Blake2s_Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	_blake2.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_blake2.update(&ctx, buf[:read])
		}
	}
	_blake2.final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE]byte
	ctx: Context
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
	ctx.cfg = cfg
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

/*
    Low level API
*/

Blake2s_Context :: _blake2.Blake2b_Context
Context :: _blake2.Blake2s_Context

init :: proc(ctx: ^_blake2.Blake2s_Context) {
	_blake2.init(ctx)
init :: proc(ctx: ^Context) {
	_blake2.init(ctx)
}

update :: proc "contextless" (ctx: ^_blake2.Blake2s_Context, data: []byte) {
	_blake2.update(ctx, data)
update :: proc(ctx: ^Context, data: []byte) {
	_blake2.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_blake2.Blake2s_Context, hash: []byte) {
	_blake2.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte) {
	_blake2.final(ctx, hash)
}

@@ -1,6 +1,6 @@
package chacha20

import "core:crypto/util"
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"

@@ -60,23 +60,23 @@ init :: proc (ctx: ^Context, key, nonce: []byte) {
	ctx._s[1] = _SIGMA_1
	ctx._s[2] = _SIGMA_2
	ctx._s[3] = _SIGMA_3
	ctx._s[4] = util.U32_LE(k[0:4])
	ctx._s[5] = util.U32_LE(k[4:8])
	ctx._s[6] = util.U32_LE(k[8:12])
	ctx._s[7] = util.U32_LE(k[12:16])
	ctx._s[8] = util.U32_LE(k[16:20])
	ctx._s[9] = util.U32_LE(k[20:24])
	ctx._s[10] = util.U32_LE(k[24:28])
	ctx._s[11] = util.U32_LE(k[28:32])
	ctx._s[4] = endian.unchecked_get_u32le(k[0:4])
	ctx._s[5] = endian.unchecked_get_u32le(k[4:8])
	ctx._s[6] = endian.unchecked_get_u32le(k[8:12])
	ctx._s[7] = endian.unchecked_get_u32le(k[12:16])
	ctx._s[8] = endian.unchecked_get_u32le(k[16:20])
	ctx._s[9] = endian.unchecked_get_u32le(k[20:24])
	ctx._s[10] = endian.unchecked_get_u32le(k[24:28])
	ctx._s[11] = endian.unchecked_get_u32le(k[28:32])
	ctx._s[12] = 0
	if !is_xchacha {
		ctx._s[13] = util.U32_LE(n[0:4])
		ctx._s[14] = util.U32_LE(n[4:8])
		ctx._s[15] = util.U32_LE(n[8:12])
		ctx._s[13] = endian.unchecked_get_u32le(n[0:4])
		ctx._s[14] = endian.unchecked_get_u32le(n[4:8])
		ctx._s[15] = endian.unchecked_get_u32le(n[8:12])
	} else {
		ctx._s[13] = 0
		ctx._s[14] = util.U32_LE(n[0:4])
		ctx._s[15] = util.U32_LE(n[4:8])
		ctx._s[14] = endian.unchecked_get_u32le(n[0:4])
		ctx._s[15] = endian.unchecked_get_u32le(n[4:8])

		// The sub-key is stored in the keystream buffer. While
		// this will be overwritten in most circumstances, explicitly
|
||||
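Note: util.U32_LE and endian.unchecked_get_u32le are drop-in equivalents here; both decode four bytes as a little-endian u32 and leave length checking to the caller. A minimal sketch of the replacement call:

	package main

	import "core:fmt"
	import "core:encoding/endian"

	main :: proc() {
		b := []byte{0x01, 0x00, 0x00, 0x00}
		// Caller must guarantee len(b) >= 4; no bounds validation is done.
		fmt.println(endian.unchecked_get_u32le(b)) // 1
	}
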
@@ -221,114 +221,114 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
		// quarterround(x, 0, 4, 8, 12)
		x0 += x4
		x12 ~= x0
		x12 = util.ROTL32(x12, 16)
		x12 = bits.rotate_left32(x12, 16)
		x8 += x12
		x4 ~= x8
		x4 = util.ROTL32(x4, 12)
		x4 = bits.rotate_left32(x4, 12)
		x0 += x4
		x12 ~= x0
		x12 = util.ROTL32(x12, 8)
		x12 = bits.rotate_left32(x12, 8)
		x8 += x12
		x4 ~= x8
		x4 = util.ROTL32(x4, 7)
		x4 = bits.rotate_left32(x4, 7)

		// quarterround(x, 1, 5, 9, 13)
		x1 += x5
		x13 ~= x1
		x13 = util.ROTL32(x13, 16)
		x13 = bits.rotate_left32(x13, 16)
		x9 += x13
		x5 ~= x9
		x5 = util.ROTL32(x5, 12)
		x5 = bits.rotate_left32(x5, 12)
		x1 += x5
		x13 ~= x1
		x13 = util.ROTL32(x13, 8)
		x13 = bits.rotate_left32(x13, 8)
		x9 += x13
		x5 ~= x9
		x5 = util.ROTL32(x5, 7)
		x5 = bits.rotate_left32(x5, 7)

		// quarterround(x, 2, 6, 10, 14)
		x2 += x6
		x14 ~= x2
		x14 = util.ROTL32(x14, 16)
		x14 = bits.rotate_left32(x14, 16)
		x10 += x14
		x6 ~= x10
		x6 = util.ROTL32(x6, 12)
		x6 = bits.rotate_left32(x6, 12)
		x2 += x6
		x14 ~= x2
		x14 = util.ROTL32(x14, 8)
		x14 = bits.rotate_left32(x14, 8)
		x10 += x14
		x6 ~= x10
		x6 = util.ROTL32(x6, 7)
		x6 = bits.rotate_left32(x6, 7)

		// quarterround(x, 3, 7, 11, 15)
		x3 += x7
		x15 ~= x3
		x15 = util.ROTL32(x15, 16)
		x15 = bits.rotate_left32(x15, 16)
		x11 += x15
		x7 ~= x11
		x7 = util.ROTL32(x7, 12)
		x7 = bits.rotate_left32(x7, 12)
		x3 += x7
		x15 ~= x3
		x15 = util.ROTL32(x15, 8)
		x15 = bits.rotate_left32(x15, 8)
		x11 += x15
		x7 ~= x11
		x7 = util.ROTL32(x7, 7)
		x7 = bits.rotate_left32(x7, 7)

		// quarterround(x, 0, 5, 10, 15)
		x0 += x5
		x15 ~= x0
		x15 = util.ROTL32(x15, 16)
		x15 = bits.rotate_left32(x15, 16)
		x10 += x15
		x5 ~= x10
		x5 = util.ROTL32(x5, 12)
		x5 = bits.rotate_left32(x5, 12)
		x0 += x5
		x15 ~= x0
		x15 = util.ROTL32(x15, 8)
		x15 = bits.rotate_left32(x15, 8)
		x10 += x15
		x5 ~= x10
		x5 = util.ROTL32(x5, 7)
		x5 = bits.rotate_left32(x5, 7)

		// quarterround(x, 1, 6, 11, 12)
		x1 += x6
		x12 ~= x1
		x12 = util.ROTL32(x12, 16)
		x12 = bits.rotate_left32(x12, 16)
		x11 += x12
		x6 ~= x11
		x6 = util.ROTL32(x6, 12)
		x6 = bits.rotate_left32(x6, 12)
		x1 += x6
		x12 ~= x1
		x12 = util.ROTL32(x12, 8)
		x12 = bits.rotate_left32(x12, 8)
		x11 += x12
		x6 ~= x11
		x6 = util.ROTL32(x6, 7)
		x6 = bits.rotate_left32(x6, 7)

		// quarterround(x, 2, 7, 8, 13)
		x2 += x7
		x13 ~= x2
		x13 = util.ROTL32(x13, 16)
		x13 = bits.rotate_left32(x13, 16)
		x8 += x13
		x7 ~= x8
		x7 = util.ROTL32(x7, 12)
		x7 = bits.rotate_left32(x7, 12)
		x2 += x7
		x13 ~= x2
		x13 = util.ROTL32(x13, 8)
		x13 = bits.rotate_left32(x13, 8)
		x8 += x13
		x7 ~= x8
		x7 = util.ROTL32(x7, 7)
		x7 = bits.rotate_left32(x7, 7)

		// quarterround(x, 3, 4, 9, 14)
		x3 += x4
		x14 ~= x3
		x14 = util.ROTL32(x14, 16)
		x14 = bits.rotate_left32(x14, 16)
		x9 += x14
		x4 ~= x9
		x4 = util.ROTL32(x4, 12)
		x4 = bits.rotate_left32(x4, 12)
		x3 += x4
		x14 ~= x3
		x14 = util.ROTL32(x14, 8)
		x14 = bits.rotate_left32(x14, 8)
		x9 += x14
		x4 ~= x9
		x4 = util.ROTL32(x4, 7)
		x4 = bits.rotate_left32(x4, 7)
	}

	x0 += _SIGMA_0
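Note: each of the eight inlined blocks above is one ChaCha quarter-round; only the rotate primitive changed. A sketch of the same operation factored into a helper, using core:math/bits as imported above (the real code stays unrolled for speed):

	package main

	import "core:fmt"
	import "core:math/bits"

	quarter_round :: proc "contextless" (a, b, c, d: u32) -> (u32, u32, u32, u32) {
		a, b, c, d := a, b, c, d
		a += b; d ~= a; d = bits.rotate_left32(d, 16)
		c += d; b ~= c; b = bits.rotate_left32(b, 12)
		a += b; d ~= a; d = bits.rotate_left32(d, 8)
		c += d; b ~= c; b = bits.rotate_left32(b, 7)
		return a, b, c, d
	}

	main :: proc() {
		// Test vector from RFC 8439, Section 2.1.1.
		a, b, c, d := quarter_round(0x11111111, 0x01020304, 0x9b8d6f43, 0x01234567)
		fmt.printf("%08x %08x %08x %08x\n", a, b, c, d)
		// Expected: ea2a92f4 cb1cf8ce 4581472e 5881c4bb
	}
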
@@ -352,93 +352,48 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
	// this is "use vector operations", support for that is currently
	// a work in progress/to be designed.
	//
	// Until dedicated assembly can be written leverage the fact that
	// the callers of this routine ensure that src/dst are valid.
	// In the meantime:
	// - The caller(s) ensure that src/dst are valid.
	// - The compiler knows if the target is picky about alignment.

	when ODIN_ARCH == .i386 || ODIN_ARCH == .amd64 {
		// util.PUT_U32_LE/util.U32_LE are not required on little-endian
		// systems that also happen to not be strict about aligned
		// memory access.

		dst_p := transmute(^[16]u32)(&dst[0])
	#no_bounds_check {
		if src != nil {
			src_p := transmute(^[16]u32)(&src[0])
			dst_p[0] = src_p[0] ~ x0
			dst_p[1] = src_p[1] ~ x1
			dst_p[2] = src_p[2] ~ x2
			dst_p[3] = src_p[3] ~ x3
			dst_p[4] = src_p[4] ~ x4
			dst_p[5] = src_p[5] ~ x5
			dst_p[6] = src_p[6] ~ x6
			dst_p[7] = src_p[7] ~ x7
			dst_p[8] = src_p[8] ~ x8
			dst_p[9] = src_p[9] ~ x9
			dst_p[10] = src_p[10] ~ x10
			dst_p[11] = src_p[11] ~ x11
			dst_p[12] = src_p[12] ~ x12
			dst_p[13] = src_p[13] ~ x13
			dst_p[14] = src_p[14] ~ x14
			dst_p[15] = src_p[15] ~ x15
			endian.unchecked_put_u32le(dst[0:4], endian.unchecked_get_u32le(src[0:4]) ~ x0)
			endian.unchecked_put_u32le(dst[4:8], endian.unchecked_get_u32le(src[4:8]) ~ x1)
			endian.unchecked_put_u32le(dst[8:12], endian.unchecked_get_u32le(src[8:12]) ~ x2)
			endian.unchecked_put_u32le(dst[12:16], endian.unchecked_get_u32le(src[12:16]) ~ x3)
			endian.unchecked_put_u32le(dst[16:20], endian.unchecked_get_u32le(src[16:20]) ~ x4)
			endian.unchecked_put_u32le(dst[20:24], endian.unchecked_get_u32le(src[20:24]) ~ x5)
			endian.unchecked_put_u32le(dst[24:28], endian.unchecked_get_u32le(src[24:28]) ~ x6)
			endian.unchecked_put_u32le(dst[28:32], endian.unchecked_get_u32le(src[28:32]) ~ x7)
			endian.unchecked_put_u32le(dst[32:36], endian.unchecked_get_u32le(src[32:36]) ~ x8)
			endian.unchecked_put_u32le(dst[36:40], endian.unchecked_get_u32le(src[36:40]) ~ x9)
			endian.unchecked_put_u32le(dst[40:44], endian.unchecked_get_u32le(src[40:44]) ~ x10)
			endian.unchecked_put_u32le(dst[44:48], endian.unchecked_get_u32le(src[44:48]) ~ x11)
			endian.unchecked_put_u32le(dst[48:52], endian.unchecked_get_u32le(src[48:52]) ~ x12)
			endian.unchecked_put_u32le(dst[52:56], endian.unchecked_get_u32le(src[52:56]) ~ x13)
			endian.unchecked_put_u32le(dst[56:60], endian.unchecked_get_u32le(src[56:60]) ~ x14)
			endian.unchecked_put_u32le(dst[60:64], endian.unchecked_get_u32le(src[60:64]) ~ x15)
			src = src[_BLOCK_SIZE:]
		} else {
			dst_p[0] = x0
			dst_p[1] = x1
			dst_p[2] = x2
			dst_p[3] = x3
			dst_p[4] = x4
			dst_p[5] = x5
			dst_p[6] = x6
			dst_p[7] = x7
			dst_p[8] = x8
			dst_p[9] = x9
			dst_p[10] = x10
			dst_p[11] = x11
			dst_p[12] = x12
			dst_p[13] = x13
			dst_p[14] = x14
			dst_p[15] = x15
			endian.unchecked_put_u32le(dst[0:4], x0)
			endian.unchecked_put_u32le(dst[4:8], x1)
			endian.unchecked_put_u32le(dst[8:12], x2)
			endian.unchecked_put_u32le(dst[12:16], x3)
			endian.unchecked_put_u32le(dst[16:20], x4)
			endian.unchecked_put_u32le(dst[20:24], x5)
			endian.unchecked_put_u32le(dst[24:28], x6)
			endian.unchecked_put_u32le(dst[28:32], x7)
			endian.unchecked_put_u32le(dst[32:36], x8)
			endian.unchecked_put_u32le(dst[36:40], x9)
			endian.unchecked_put_u32le(dst[40:44], x10)
			endian.unchecked_put_u32le(dst[44:48], x11)
			endian.unchecked_put_u32le(dst[48:52], x12)
			endian.unchecked_put_u32le(dst[52:56], x13)
			endian.unchecked_put_u32le(dst[56:60], x14)
			endian.unchecked_put_u32le(dst[60:64], x15)
		}
		dst = dst[_BLOCK_SIZE:]
	} else {
		#no_bounds_check {
			if src != nil {
				util.PUT_U32_LE(dst[0:4], util.U32_LE(src[0:4]) ~ x0)
				util.PUT_U32_LE(dst[4:8], util.U32_LE(src[4:8]) ~ x1)
				util.PUT_U32_LE(dst[8:12], util.U32_LE(src[8:12]) ~ x2)
				util.PUT_U32_LE(dst[12:16], util.U32_LE(src[12:16]) ~ x3)
				util.PUT_U32_LE(dst[16:20], util.U32_LE(src[16:20]) ~ x4)
				util.PUT_U32_LE(dst[20:24], util.U32_LE(src[20:24]) ~ x5)
				util.PUT_U32_LE(dst[24:28], util.U32_LE(src[24:28]) ~ x6)
				util.PUT_U32_LE(dst[28:32], util.U32_LE(src[28:32]) ~ x7)
				util.PUT_U32_LE(dst[32:36], util.U32_LE(src[32:36]) ~ x8)
				util.PUT_U32_LE(dst[36:40], util.U32_LE(src[36:40]) ~ x9)
				util.PUT_U32_LE(dst[40:44], util.U32_LE(src[40:44]) ~ x10)
				util.PUT_U32_LE(dst[44:48], util.U32_LE(src[44:48]) ~ x11)
				util.PUT_U32_LE(dst[48:52], util.U32_LE(src[48:52]) ~ x12)
				util.PUT_U32_LE(dst[52:56], util.U32_LE(src[52:56]) ~ x13)
				util.PUT_U32_LE(dst[56:60], util.U32_LE(src[56:60]) ~ x14)
				util.PUT_U32_LE(dst[60:64], util.U32_LE(src[60:64]) ~ x15)
				src = src[_BLOCK_SIZE:]
			} else {
				util.PUT_U32_LE(dst[0:4], x0)
				util.PUT_U32_LE(dst[4:8], x1)
				util.PUT_U32_LE(dst[8:12], x2)
				util.PUT_U32_LE(dst[12:16], x3)
				util.PUT_U32_LE(dst[16:20], x4)
				util.PUT_U32_LE(dst[20:24], x5)
				util.PUT_U32_LE(dst[24:28], x6)
				util.PUT_U32_LE(dst[28:32], x7)
				util.PUT_U32_LE(dst[32:36], x8)
				util.PUT_U32_LE(dst[36:40], x9)
				util.PUT_U32_LE(dst[40:44], x10)
				util.PUT_U32_LE(dst[44:48], x11)
				util.PUT_U32_LE(dst[48:52], x12)
				util.PUT_U32_LE(dst[52:56], x13)
				util.PUT_U32_LE(dst[56:60], x14)
				util.PUT_U32_LE(dst[60:64], x15)
			}
			dst = dst[_BLOCK_SIZE:]
		}
	}

	// Increment the counter. Overflow checking is done upon
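Note: the new endian.unchecked_put_u32le form works for both branches because it makes no alignment assumptions. A compacted sketch of the same XOR-and-serialize step over one 64-byte ChaCha block (x stands in for the sixteen working words above):

	package chacha20_sketch

	import "core:encoding/endian"

	// Callers guarantee len(dst) >= 64, and len(src) >= 64 when src != nil.
	xor_block :: proc "contextless" (dst, src: []byte, x: [16]u32) {
		for w, i in x {
			off := i * 4
			v := w
			if src != nil {
				v ~= endian.unchecked_get_u32le(src[off:off + 4])
			}
			endian.unchecked_put_u32le(dst[off:off + 4], v)
		}
	}
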
@@ -451,141 +406,141 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
}

@(private)
_hchacha20 :: proc (dst, key, nonce: []byte) {
_hchacha20 :: proc "contextless" (dst, key, nonce: []byte) {
	x0, x1, x2, x3 := _SIGMA_0, _SIGMA_1, _SIGMA_2, _SIGMA_3
	x4 := util.U32_LE(key[0:4])
	x5 := util.U32_LE(key[4:8])
	x6 := util.U32_LE(key[8:12])
	x7 := util.U32_LE(key[12:16])
	x8 := util.U32_LE(key[16:20])
	x9 := util.U32_LE(key[20:24])
	x10 := util.U32_LE(key[24:28])
	x11 := util.U32_LE(key[28:32])
	x12 := util.U32_LE(nonce[0:4])
	x13 := util.U32_LE(nonce[4:8])
	x14 := util.U32_LE(nonce[8:12])
	x15 := util.U32_LE(nonce[12:16])
	x4 := endian.unchecked_get_u32le(key[0:4])
	x5 := endian.unchecked_get_u32le(key[4:8])
	x6 := endian.unchecked_get_u32le(key[8:12])
	x7 := endian.unchecked_get_u32le(key[12:16])
	x8 := endian.unchecked_get_u32le(key[16:20])
	x9 := endian.unchecked_get_u32le(key[20:24])
	x10 := endian.unchecked_get_u32le(key[24:28])
	x11 := endian.unchecked_get_u32le(key[28:32])
	x12 := endian.unchecked_get_u32le(nonce[0:4])
	x13 := endian.unchecked_get_u32le(nonce[4:8])
	x14 := endian.unchecked_get_u32le(nonce[8:12])
	x15 := endian.unchecked_get_u32le(nonce[12:16])

	for i := _ROUNDS; i > 0; i = i - 2 {
		// quarterround(x, 0, 4, 8, 12)
		x0 += x4
		x12 ~= x0
		x12 = util.ROTL32(x12, 16)
		x12 = bits.rotate_left32(x12, 16)
		x8 += x12
		x4 ~= x8
		x4 = util.ROTL32(x4, 12)
		x4 = bits.rotate_left32(x4, 12)
		x0 += x4
		x12 ~= x0
		x12 = util.ROTL32(x12, 8)
		x12 = bits.rotate_left32(x12, 8)
		x8 += x12
		x4 ~= x8
		x4 = util.ROTL32(x4, 7)
		x4 = bits.rotate_left32(x4, 7)

		// quarterround(x, 1, 5, 9, 13)
		x1 += x5
		x13 ~= x1
		x13 = util.ROTL32(x13, 16)
		x13 = bits.rotate_left32(x13, 16)
		x9 += x13
		x5 ~= x9
		x5 = util.ROTL32(x5, 12)
		x5 = bits.rotate_left32(x5, 12)
		x1 += x5
		x13 ~= x1
		x13 = util.ROTL32(x13, 8)
		x13 = bits.rotate_left32(x13, 8)
		x9 += x13
		x5 ~= x9
		x5 = util.ROTL32(x5, 7)
		x5 = bits.rotate_left32(x5, 7)

		// quarterround(x, 2, 6, 10, 14)
		x2 += x6
		x14 ~= x2
		x14 = util.ROTL32(x14, 16)
		x14 = bits.rotate_left32(x14, 16)
		x10 += x14
		x6 ~= x10
		x6 = util.ROTL32(x6, 12)
		x6 = bits.rotate_left32(x6, 12)
		x2 += x6
		x14 ~= x2
		x14 = util.ROTL32(x14, 8)
		x14 = bits.rotate_left32(x14, 8)
		x10 += x14
		x6 ~= x10
		x6 = util.ROTL32(x6, 7)
		x6 = bits.rotate_left32(x6, 7)

		// quarterround(x, 3, 7, 11, 15)
		x3 += x7
		x15 ~= x3
		x15 = util.ROTL32(x15, 16)
		x15 = bits.rotate_left32(x15, 16)
		x11 += x15
		x7 ~= x11
		x7 = util.ROTL32(x7, 12)
		x7 = bits.rotate_left32(x7, 12)
		x3 += x7
		x15 ~= x3
		x15 = util.ROTL32(x15, 8)
		x15 = bits.rotate_left32(x15, 8)
		x11 += x15
		x7 ~= x11
		x7 = util.ROTL32(x7, 7)
		x7 = bits.rotate_left32(x7, 7)

		// quarterround(x, 0, 5, 10, 15)
		x0 += x5
		x15 ~= x0
		x15 = util.ROTL32(x15, 16)
		x15 = bits.rotate_left32(x15, 16)
		x10 += x15
		x5 ~= x10
		x5 = util.ROTL32(x5, 12)
		x5 = bits.rotate_left32(x5, 12)
		x0 += x5
		x15 ~= x0
		x15 = util.ROTL32(x15, 8)
		x15 = bits.rotate_left32(x15, 8)
		x10 += x15
		x5 ~= x10
		x5 = util.ROTL32(x5, 7)
		x5 = bits.rotate_left32(x5, 7)

		// quarterround(x, 1, 6, 11, 12)
		x1 += x6
		x12 ~= x1
		x12 = util.ROTL32(x12, 16)
		x12 = bits.rotate_left32(x12, 16)
		x11 += x12
		x6 ~= x11
		x6 = util.ROTL32(x6, 12)
		x6 = bits.rotate_left32(x6, 12)
		x1 += x6
		x12 ~= x1
		x12 = util.ROTL32(x12, 8)
		x12 = bits.rotate_left32(x12, 8)
		x11 += x12
		x6 ~= x11
		x6 = util.ROTL32(x6, 7)
		x6 = bits.rotate_left32(x6, 7)

		// quarterround(x, 2, 7, 8, 13)
		x2 += x7
		x13 ~= x2
		x13 = util.ROTL32(x13, 16)
		x13 = bits.rotate_left32(x13, 16)
		x8 += x13
		x7 ~= x8
		x7 = util.ROTL32(x7, 12)
		x7 = bits.rotate_left32(x7, 12)
		x2 += x7
		x13 ~= x2
		x13 = util.ROTL32(x13, 8)
		x13 = bits.rotate_left32(x13, 8)
		x8 += x13
		x7 ~= x8
		x7 = util.ROTL32(x7, 7)
		x7 = bits.rotate_left32(x7, 7)

		// quarterround(x, 3, 4, 9, 14)
		x3 += x4
		x14 ~= x3
		x14 = util.ROTL32(x14, 16)
		x14 = bits.rotate_left32(x14, 16)
		x9 += x14
		x4 ~= x9
		x4 = util.ROTL32(x4, 12)
		x4 = bits.rotate_left32(x4, 12)
		x3 += x4
		x14 ~= x3
		x14 = util.ROTL32(x14, 8)
		x14 = bits.rotate_left32(x14, 8)
		x9 += x14
		x4 ~= x9
		x4 = util.ROTL32(x4, 7)
		x4 = bits.rotate_left32(x4, 7)
	}

	util.PUT_U32_LE(dst[0:4], x0)
	util.PUT_U32_LE(dst[4:8], x1)
	util.PUT_U32_LE(dst[8:12], x2)
	util.PUT_U32_LE(dst[12:16], x3)
	util.PUT_U32_LE(dst[16:20], x12)
	util.PUT_U32_LE(dst[20:24], x13)
	util.PUT_U32_LE(dst[24:28], x14)
	util.PUT_U32_LE(dst[28:32], x15)
	endian.unchecked_put_u32le(dst[0:4], x0)
	endian.unchecked_put_u32le(dst[4:8], x1)
	endian.unchecked_put_u32le(dst[8:12], x2)
	endian.unchecked_put_u32le(dst[12:16], x3)
	endian.unchecked_put_u32le(dst[16:20], x12)
	endian.unchecked_put_u32le(dst[20:24], x13)
	endian.unchecked_put_u32le(dst[24:28], x14)
	endian.unchecked_put_u32le(dst[28:32], x15)
}

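Note on the "contextless" change to _hchacha20: a proc with the "contextless" calling convention receives no implicit `context`, so it cannot touch the default allocator, logger, or other context state; that documents the routine as pure bit manipulation and lets other contextless code call it. A minimal sketch of the convention:

	package contextless_sketch

	import "core:math/bits"

	// No implicit context: no allocations, no logging, just arithmetic.
	rotl :: proc "contextless" (x: u32, k: int) -> u32 {
		return bits.rotate_left32(x, k)
	}
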
@@ -3,7 +3,7 @@ package chacha20poly1305
import "core:crypto"
import "core:crypto/chacha20"
import "core:crypto/poly1305"
import "core:crypto/util"
import "core:encoding/endian"
import "core:mem"

KEY_SIZE :: chacha20.KEY_SIZE
@@ -87,8 +87,8 @@ encrypt :: proc (ciphertext, tag, key, nonce, aad, plaintext: []byte) {
	// mac_data |= num_to_8_le_bytes(aad.length)
	// mac_data |= num_to_8_le_bytes(ciphertext.length)
	l_buf := otk[0:16] // Reuse the scratch buffer.
	util.PUT_U64_LE(l_buf[0:8], u64(aad_len))
	util.PUT_U64_LE(l_buf[8:16], u64(ciphertext_len))
	endian.unchecked_put_u64le(l_buf[0:8], u64(aad_len))
	endian.unchecked_put_u64le(l_buf[8:16], u64(ciphertext_len))
	poly1305.update(&mac_ctx, l_buf)

	// tag = poly1305_mac(mac_data, otk)
@@ -128,8 +128,8 @@ decrypt :: proc (plaintext, tag, key, nonce, aad, ciphertext: []byte) -> bool {
	poly1305.update(&mac_ctx, ciphertext)
	_update_mac_pad16(&mac_ctx, ciphertext_len)
	l_buf := otk[0:16] // Reuse the scratch buffer.
	util.PUT_U64_LE(l_buf[0:8], u64(aad_len))
	util.PUT_U64_LE(l_buf[8:16], u64(ciphertext_len))
	endian.unchecked_put_u64le(l_buf[0:8], u64(aad_len))
	endian.unchecked_put_u64le(l_buf[8:16], u64(ciphertext_len))
	poly1305.update(&mac_ctx, l_buf)

	// tag = poly1305_mac(mac_data, otk)

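Note: the two stores build the final Poly1305 length block from RFC 8439: eight little-endian bytes of the AAD length followed by eight of the ciphertext length. A standalone sketch of that block construction, using the same core:encoding/endian calls:

	package aead_sketch

	import "core:encoding/endian"

	// Builds the 16-byte Poly1305 length block: LE64(aad_len) || LE64(ct_len).
	length_block :: proc "contextless" (aad_len, ct_len: int) -> (blk: [16]byte) {
		endian.unchecked_put_u64le(blk[0:8], u64(aad_len))
		endian.unchecked_put_u64le(blk[8:16], u64(ct_len))
		return
	}
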
@@ -1,382 +0,0 @@
package gost

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the GOST hashing algorithm, as defined in RFC 5831 <https://datatracker.ietf.org/doc/html/rfc5831>
*/

import "core:mem"
import "core:os"
import "core:io"

/*
    High level API
*/

DIGEST_SIZE :: 32

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Gost_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Gost_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Gost_Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}

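Note: before its removal in this commit, the package followed the same high-level pattern as the other digests; a minimal usage sketch of the API above (the import path core:crypto/gost is an assumption, and only valid against the pre-removal tree):

	package main

	import "core:fmt"
	import "core:crypto/gost" // assumed import path, removed by this commit

	main :: proc() {
		msg := "The quick brown fox jumps over the lazy dog"
		digest := gost.hash(msg) // resolves to hash_string via the proc group
		fmt.printf("%x\n", digest)
	}
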
/*
    Low level API
*/

init :: proc "contextless" (ctx: ^Gost_Context) {
	sbox: [8][16]u32 = {
		{ 10, 4, 5, 6, 8, 1, 3, 7, 13, 12, 14, 0, 9, 2, 11, 15 },
		{ 5, 15, 4, 0, 2, 13, 11, 9, 1, 7, 6, 3, 12, 14, 10, 8 },
		{ 7, 15, 12, 14, 9, 4, 1, 0, 3, 11, 5, 2, 6, 10, 8, 13 },
		{ 4, 10, 7, 12, 0, 15, 2, 8, 14, 1, 6, 5, 13, 11, 9, 3 },
		{ 7, 6, 4, 11, 9, 12, 2, 10, 1, 8, 0, 14, 15, 13, 3, 5 },
		{ 7, 6, 2, 4, 13, 9, 15, 0, 10, 1, 5, 11, 8, 14, 12, 3 },
		{ 13, 14, 4, 1, 7, 0, 5, 10, 3, 12, 8, 15, 6, 2, 9, 11 },
		{ 1, 3, 10, 9, 5, 11, 4, 15, 8, 6, 7, 14, 13, 0, 2, 12 },
	}

	i := 0
	for a := 0; a < 16; a += 1 {
		ax := sbox[1][a] << 15
		bx := sbox[3][a] << 23
		cx := sbox[5][a]
		cx = (cx >> 1) | (cx << 31)
		dx := sbox[7][a] << 7
		for b := 0; b < 16; b, i = b + 1, i + 1 {
			SBOX_1[i] = ax | (sbox[0][b] << 11)
			SBOX_2[i] = bx | (sbox[2][b] << 19)
			SBOX_3[i] = cx | (sbox[4][b] << 27)
			SBOX_4[i] = dx | (sbox[6][b] << 3)
		}
	}
}

update :: proc(ctx: ^Gost_Context, data: []byte) {
	length := byte(len(data))
	j: byte

	i := ctx.partial_bytes
	for i < 32 && j < length {
		ctx.partial[i] = data[j]
		i, j = i + 1, j + 1
	}

	if i < 32 {
		ctx.partial_bytes = i
		return
	}
	bytes(ctx, ctx.partial[:], 256)

	for (j + 32) < length {
		bytes(ctx, data[j:], 256)
		j += 32
	}

	i = 0
	for j < length {
		ctx.partial[i] = data[j]
		i, j = i + 1, j + 1
	}
	ctx.partial_bytes = i
}

final :: proc(ctx: ^Gost_Context, hash: []byte) {
	if ctx.partial_bytes > 0 {
		mem.set(&ctx.partial[ctx.partial_bytes], 0, 32 - int(ctx.partial_bytes))
		bytes(ctx, ctx.partial[:], u32(ctx.partial_bytes) << 3)
	}

	compress(ctx.hash[:], ctx.len[:])
	compress(ctx.hash[:], ctx.sum[:])

	for i, j := 0, 0; i < 8; i, j = i + 1, j + 4 {
		hash[j] = byte(ctx.hash[i])
		hash[j + 1] = byte(ctx.hash[i] >> 8)
		hash[j + 2] = byte(ctx.hash[i] >> 16)
		hash[j + 3] = byte(ctx.hash[i] >> 24)
	}
}

/*
    GOST implementation
*/

Gost_Context :: struct {
	sum: [8]u32,
	hash: [8]u32,
	len: [8]u32,
	partial: [32]byte,
	partial_bytes: byte,
}

SBOX_1: [256]u32
SBOX_2: [256]u32
SBOX_3: [256]u32
SBOX_4: [256]u32

ENCRYPT_ROUND :: #force_inline proc "contextless" (l, r, t, k1, k2: u32) -> (u32, u32, u32) {
	l, r, t := l, r, t
	t = (k1) + r
	l ~= SBOX_1[t & 0xff] ~ SBOX_2[(t >> 8) & 0xff] ~ SBOX_3[(t >> 16) & 0xff] ~ SBOX_4[t >> 24]
	t = (k2) + l
	r ~= SBOX_1[t & 0xff] ~ SBOX_2[(t >> 8) & 0xff] ~ SBOX_3[(t >> 16) & 0xff] ~ SBOX_4[t >> 24]
	return l, r, t
}

ENCRYPT :: #force_inline proc "contextless" (a, b, c: u32, key: []u32) -> (l, r, t: u32) {
	l, r, t = ENCRYPT_ROUND(a, b, c, key[0], key[1])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[0], key[1])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[0], key[1])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[7], key[6])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[5], key[4])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[3], key[2])
	l, r, t = ENCRYPT_ROUND(l, r, t, key[1], key[0])
	t = r
	r = l
	l = t
	return
}

bytes :: proc(ctx: ^Gost_Context, buf: []byte, bits: u32) {
	a, c: u32
	m: [8]u32

	for i, j := 0, 0; i < 8; i += 1 {
		a = u32(buf[j]) | u32(buf[j + 1]) << 8 | u32(buf[j + 2]) << 16 | u32(buf[j + 3]) << 24
		j += 4
		m[i] = a
		c = a + c + ctx.sum[i]
		ctx.sum[i] = c
		c = c < a ? 1 : 0
	}

	compress(ctx.hash[:], m[:])
	ctx.len[0] += bits
	if ctx.len[0] < bits {
		ctx.len[1] += 1
	}
}

compress :: proc(h, m: []u32) {
	key, u, v, w, s: [8]u32

	copy(u[:], h)
	copy(v[:], m)

	for i := 0; i < 8; i += 2 {
		w[0] = u[0] ~ v[0]
		w[1] = u[1] ~ v[1]
		w[2] = u[2] ~ v[2]
		w[3] = u[3] ~ v[3]
		w[4] = u[4] ~ v[4]
		w[5] = u[5] ~ v[5]
		w[6] = u[6] ~ v[6]
		w[7] = u[7] ~ v[7]

		key[0] = (w[0] & 0x000000ff) | (w[2] & 0x000000ff) << 8 | (w[4] & 0x000000ff) << 16 | (w[6] & 0x000000ff) << 24
		key[1] = (w[0] & 0x0000ff00) >> 8 | (w[2] & 0x0000ff00) | (w[4] & 0x0000ff00) << 8 | (w[6] & 0x0000ff00) << 16
		key[2] = (w[0] & 0x00ff0000) >> 16 | (w[2] & 0x00ff0000) >> 8 | (w[4] & 0x00ff0000) | (w[6] & 0x00ff0000) << 8
		key[3] = (w[0] & 0xff000000) >> 24 | (w[2] & 0xff000000) >> 16 | (w[4] & 0xff000000) >> 8 | (w[6] & 0xff000000)
		key[4] = (w[1] & 0x000000ff) | (w[3] & 0x000000ff) << 8 | (w[5] & 0x000000ff) << 16 | (w[7] & 0x000000ff) << 24
		key[5] = (w[1] & 0x0000ff00) >> 8 | (w[3] & 0x0000ff00) | (w[5] & 0x0000ff00) << 8 | (w[7] & 0x0000ff00) << 16
		key[6] = (w[1] & 0x00ff0000) >> 16 | (w[3] & 0x00ff0000) >> 8 | (w[5] & 0x00ff0000) | (w[7] & 0x00ff0000) << 8
		key[7] = (w[1] & 0xff000000) >> 24 | (w[3] & 0xff000000) >> 16 | (w[5] & 0xff000000) >> 8 | (w[7] & 0xff000000)

		r := h[i]
		l := h[i + 1]
		t: u32
		l, r, t = ENCRYPT(l, r, 0, key[:])

		s[i] = r
		s[i + 1] = l

		if i == 6 {
			break
		}

		l = u[0] ~ u[2]
		r = u[1] ~ u[3]
		u[0] = u[2]
		u[1] = u[3]
		u[2] = u[4]
		u[3] = u[5]
		u[4] = u[6]
		u[5] = u[7]
		u[6] = l
		u[7] = r

		if i == 2 {
			u[0] ~= 0xff00ff00
			u[1] ~= 0xff00ff00
			u[2] ~= 0x00ff00ff
			u[3] ~= 0x00ff00ff
			u[4] ~= 0x00ffff00
			u[5] ~= 0xff0000ff
			u[6] ~= 0x000000ff
			u[7] ~= 0xff00ffff
		}

		l = v[0]
		r = v[2]
		v[0] = v[4]
		v[2] = v[6]
		v[4] = l ~ r
		v[6] = v[0] ~ r
		l = v[1]
		r = v[3]
		v[1] = v[5]
		v[3] = v[7]
		v[5] = l ~ r
		v[7] = v[1] ~ r
	}

	u[0] = m[0] ~ s[6]
	u[1] = m[1] ~ s[7]
	u[2] = m[2] ~ (s[0] << 16) ~ (s[0] >> 16) ~ (s[0] & 0xffff) ~
		(s[1] & 0xffff) ~ (s[1] >> 16) ~ (s[2] << 16) ~ s[6] ~ (s[6] << 16) ~
		(s[7] & 0xffff0000) ~ (s[7] >> 16)
	u[3] = m[3] ~ (s[0] & 0xffff) ~ (s[0] << 16) ~ (s[1] & 0xffff) ~
		(s[1] << 16) ~ (s[1] >> 16) ~ (s[2] << 16) ~ (s[2] >> 16) ~
		(s[3] << 16) ~ s[6] ~ (s[6] << 16) ~ (s[6] >> 16) ~ (s[7] & 0xffff) ~
		(s[7] << 16) ~ (s[7] >> 16)
	u[4] = m[4] ~
		(s[0] & 0xffff0000) ~ (s[0] << 16) ~ (s[0] >> 16) ~
		(s[1] & 0xffff0000) ~ (s[1] >> 16) ~ (s[2] << 16) ~ (s[2] >> 16) ~
		(s[3] << 16) ~ (s[3] >> 16) ~ (s[4] << 16) ~ (s[6] << 16) ~
		(s[6] >> 16) ~ (s[7] & 0xffff) ~ (s[7] << 16) ~ (s[7] >> 16)
	u[5] = m[5] ~ (s[0] << 16) ~ (s[0] >> 16) ~ (s[0] & 0xffff0000) ~
		(s[1] & 0xffff) ~ s[2] ~ (s[2] >> 16) ~ (s[3] << 16) ~ (s[3] >> 16) ~
		(s[4] << 16) ~ (s[4] >> 16) ~ (s[5] << 16) ~ (s[6] << 16) ~
		(s[6] >> 16) ~ (s[7] & 0xffff0000) ~ (s[7] << 16) ~ (s[7] >> 16)
	u[6] = m[6] ~ s[0] ~ (s[1] >> 16) ~ (s[2] << 16) ~ s[3] ~ (s[3] >> 16) ~
		(s[4] << 16) ~ (s[4] >> 16) ~ (s[5] << 16) ~ (s[5] >> 16) ~ s[6] ~
		(s[6] << 16) ~ (s[6] >> 16) ~ (s[7] << 16)
	u[7] = m[7] ~ (s[0] & 0xffff0000) ~ (s[0] << 16) ~ (s[1] & 0xffff) ~
		(s[1] << 16) ~ (s[2] >> 16) ~ (s[3] << 16) ~ s[4] ~ (s[4] >> 16) ~
		(s[5] << 16) ~ (s[5] >> 16) ~ (s[6] >> 16) ~ (s[7] & 0xffff) ~
		(s[7] << 16) ~ (s[7] >> 16)

	v[0] = h[0] ~ (u[1] << 16) ~ (u[0] >> 16)
	v[1] = h[1] ~ (u[2] << 16) ~ (u[1] >> 16)
	v[2] = h[2] ~ (u[3] << 16) ~ (u[2] >> 16)
	v[3] = h[3] ~ (u[4] << 16) ~ (u[3] >> 16)
	v[4] = h[4] ~ (u[5] << 16) ~ (u[4] >> 16)
	v[5] = h[5] ~ (u[6] << 16) ~ (u[5] >> 16)
	v[6] = h[6] ~ (u[7] << 16) ~ (u[6] >> 16)
	v[7] = h[7] ~ (u[0] & 0xffff0000) ~ (u[0] << 16) ~ (u[7] >> 16) ~ (u[1] & 0xffff0000) ~ (u[1] << 16) ~ (u[6] << 16) ~ (u[7] & 0xffff0000)

	h[0] = (v[0] & 0xffff0000) ~ (v[0] << 16) ~ (v[0] >> 16) ~ (v[1] >> 16) ~
		(v[1] & 0xffff0000) ~ (v[2] << 16) ~ (v[3] >> 16) ~ (v[4] << 16) ~
		(v[5] >> 16) ~ v[5] ~ (v[6] >> 16) ~ (v[7] << 16) ~ (v[7] >> 16) ~
		(v[7] & 0xffff)
	h[1] = (v[0] << 16) ~ (v[0] >> 16) ~ (v[0] & 0xffff0000) ~ (v[1] & 0xffff) ~
		v[2] ~ (v[2] >> 16) ~ (v[3] << 16) ~ (v[4] >> 16) ~ (v[5] << 16) ~
		(v[6] << 16) ~ v[6] ~ (v[7] & 0xffff0000) ~ (v[7] >> 16)
	h[2] = (v[0] & 0xffff) ~ (v[0] << 16) ~ (v[1] << 16) ~ (v[1] >> 16) ~
		(v[1] & 0xffff0000) ~ (v[2] << 16) ~ (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~
		(v[5] >> 16) ~ v[6] ~ (v[6] >> 16) ~ (v[7] & 0xffff) ~ (v[7] << 16) ~
		(v[7] >> 16)
	h[3] = (v[0] << 16) ~ (v[0] >> 16) ~ (v[0] & 0xffff0000) ~
		(v[1] & 0xffff0000) ~ (v[1] >> 16) ~ (v[2] << 16) ~ (v[2] >> 16) ~ v[2] ~
		(v[3] << 16) ~ (v[4] >> 16) ~ v[4] ~ (v[5] << 16) ~ (v[6] << 16) ~
		(v[7] & 0xffff) ~ (v[7] >> 16)
	h[4] = (v[0] >> 16) ~ (v[1] << 16) ~ v[1] ~ (v[2] >> 16) ~ v[2] ~
		(v[3] << 16) ~ (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ (v[5] >> 16) ~
		v[5] ~ (v[6] << 16) ~ (v[6] >> 16) ~ (v[7] << 16)
	h[5] = (v[0] << 16) ~ (v[0] & 0xffff0000) ~ (v[1] << 16) ~ (v[1] >> 16) ~
		(v[1] & 0xffff0000) ~ (v[2] << 16) ~ v[2] ~ (v[3] >> 16) ~ v[3] ~
		(v[4] << 16) ~ (v[4] >> 16) ~ v[4] ~ (v[5] << 16) ~ (v[6] << 16) ~
		(v[6] >> 16) ~ v[6] ~ (v[7] << 16) ~ (v[7] >> 16) ~ (v[7] & 0xffff0000)
	h[6] = v[0] ~ v[2] ~ (v[2] >> 16) ~ v[3] ~ (v[3] << 16) ~ v[4] ~
		(v[4] >> 16) ~ (v[5] << 16) ~ (v[5] >> 16) ~ v[5] ~ (v[6] << 16) ~
		(v[6] >> 16) ~ v[6] ~ (v[7] << 16) ~ v[7]
	h[7] = v[0] ~ (v[0] >> 16) ~ (v[1] << 16) ~ (v[1] >> 16) ~ (v[2] << 16) ~
		(v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ v[4] ~ (v[5] >> 16) ~ v[5] ~
		(v[6] << 16) ~ (v[6] >> 16) ~ (v[7] << 16) ~ v[7]
}
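Note: in `bytes` above, ctx.sum is a 256-bit additive checksum kept as eight u32 limbs; `c = a + c + ctx.sum[i]` folds in the block word plus the incoming carry, and `c < a` detects 32-bit wraparound. A small sketch mirroring that carry rule in isolation:

	package checksum_sketch

	// 256-bit additive checksum over eight u32 limbs, mirroring the carry
	// handling in `bytes` above (sum += m, mod 2^256).
	add_u256 :: proc "contextless" (sum: ^[8]u32, m: [8]u32) {
		carry: u32
		for i in 0 ..< 8 {
			a := m[i]
			c := a + carry + sum[i]
			sum[i] = c
			carry = c < a ? 1 : 0
		}
	}
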
@@ -1,653 +0,0 @@
package groestl

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the GROESTL hashing algorithm, as defined in <http://www.groestl.info/Groestl.zip>
*/

import "core:os"
import "core:io"

/*
    High level API
*/

DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 224
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
	ctx: Groestl_Context
	ctx.hashbitlen = 224
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 224
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 256
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: Groestl_Context
	ctx.hashbitlen = 256
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 256
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 384
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
	ctx: Groestl_Context
	ctx.hashbitlen = 384
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 384
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 512
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: Groestl_Context
	ctx.hashbitlen = 512
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Groestl_Context
	ctx.hashbitlen = 512
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
    Low level API
*/

init :: proc(ctx: ^Groestl_Context) {
	assert(ctx.hashbitlen == 224 || ctx.hashbitlen == 256 || ctx.hashbitlen == 384 || ctx.hashbitlen == 512, "hashbitlen must be set to 224, 256, 384 or 512")
	if ctx.hashbitlen <= 256 {
		ctx.rounds = 10
		ctx.columns = 8
		ctx.statesize = 64
	} else {
		ctx.rounds = 14
		ctx.columns = 16
		ctx.statesize = 128
	}
	for i := 8 - size_of(i32); i < 8; i += 1 {
		ctx.chaining[i][ctx.columns - 1] = byte(ctx.hashbitlen >> (8 * (7 - uint(i))))
	}
}

update :: proc(ctx: ^Groestl_Context, data: []byte) {
	databitlen := len(data) * 8
	msglen := databitlen / 8
	rem := databitlen % 8

	i: int
	assert(ctx.bits_in_last_byte == 0)

	if ctx.buf_ptr != 0 {
		for i = 0; ctx.buf_ptr < ctx.statesize && i < msglen; i, ctx.buf_ptr = i + 1, ctx.buf_ptr + 1 {
			ctx.buffer[ctx.buf_ptr] = data[i]
		}

		if ctx.buf_ptr < ctx.statesize {
			if rem != 0 {
				ctx.bits_in_last_byte = rem
				ctx.buffer[ctx.buf_ptr] = data[i]
				ctx.buf_ptr += 1
			}
			return
		}

		ctx.buf_ptr = 0
		transform(ctx, ctx.buffer[:], u32(ctx.statesize))
	}

	transform(ctx, data[i:], u32(msglen - i))
	i += ((msglen - i) / ctx.statesize) * ctx.statesize
	for i < msglen {
		ctx.buffer[ctx.buf_ptr] = data[i]
		i, ctx.buf_ptr = i + 1, ctx.buf_ptr + 1
	}

	if rem != 0 {
		ctx.bits_in_last_byte = rem
		ctx.buffer[ctx.buf_ptr] = data[i]
		ctx.buf_ptr += 1
	}
}

final :: proc(ctx: ^Groestl_Context, hash: []byte) {
	hashbytelen := ctx.hashbitlen / 8

	if ctx.bits_in_last_byte != 0 {
		ctx.buffer[ctx.buf_ptr - 1] &= ((1 << uint(ctx.bits_in_last_byte)) - 1) << (8 - uint(ctx.bits_in_last_byte))
		ctx.buffer[ctx.buf_ptr - 1] ~= 0x1 << (7 - uint(ctx.bits_in_last_byte))
	} else {
		ctx.buffer[ctx.buf_ptr] = 0x80
		ctx.buf_ptr += 1
	}

	if ctx.buf_ptr > ctx.statesize - 8 {
		for ctx.buf_ptr < ctx.statesize {
			ctx.buffer[ctx.buf_ptr] = 0
			ctx.buf_ptr += 1
		}
		transform(ctx, ctx.buffer[:], u32(ctx.statesize))
		ctx.buf_ptr = 0
	}

	for ctx.buf_ptr < ctx.statesize - 8 {
		ctx.buffer[ctx.buf_ptr] = 0
		ctx.buf_ptr += 1
	}

	ctx.block_counter += 1
	ctx.buf_ptr = ctx.statesize

	for ctx.buf_ptr > ctx.statesize - 8 {
		ctx.buf_ptr -= 1
		ctx.buffer[ctx.buf_ptr] = byte(ctx.block_counter)
		ctx.block_counter >>= 8
	}

	transform(ctx, ctx.buffer[:], u32(ctx.statesize))
	output_transformation(ctx)

	for i, j := ctx.statesize - hashbytelen, 0; i < ctx.statesize; i, j = i + 1, j + 1 {
		hash[j] = ctx.chaining[i % 8][i / 8]
	}
}

/*
    GROESTL implementation
*/

SBOX := [256]byte {
	0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5,
	0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
	0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0,
	0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
	0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc,
	0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
	0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a,
	0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
	0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0,
	0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
	0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b,
	0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
	0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85,
	0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
	0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5,
	0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
	0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17,
	0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
	0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88,
	0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
	0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c,
	0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
	0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9,
	0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
	0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6,
	0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
	0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e,
	0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
	0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94,
	0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
	0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68,
	0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16,
}

SHIFT := [2][2][8]int {
	{{0, 1, 2, 3, 4, 5, 6, 7}, {1, 3, 5, 7, 0, 2, 4, 6}},
	{{0, 1, 2, 3, 4, 5, 6, 11}, {1, 3, 5, 11, 0, 2, 4, 6}},
}

Groestl_Context :: struct {
	chaining: [8][16]byte,
	block_counter: u64,
	hashbitlen: int,
	buffer: [128]byte,
	buf_ptr: int,
	bits_in_last_byte: int,
	columns: int,
	rounds: int,
	statesize: int,
}

Groestl_Variant :: enum {
	P512 = 0,
	Q512 = 1,
	P1024 = 2,
	Q1024 = 3,
}

MUL2 :: #force_inline proc "contextless"(b: byte) -> byte {
	return (b >> 7) != 0 ? (b << 1) ~ 0x1b : (b << 1)
}

MUL3 :: #force_inline proc "contextless"(b: byte) -> byte {
	return MUL2(b) ~ b
}

MUL4 :: #force_inline proc "contextless"(b: byte) -> byte {
	return MUL2(MUL2(b))
}

MUL5 :: #force_inline proc "contextless"(b: byte) -> byte {
	return MUL4(b) ~ b
}

MUL6 :: #force_inline proc "contextless"(b: byte) -> byte {
	return MUL4(b) ~ MUL2(b)
}

MUL7 :: #force_inline proc "contextless"(b: byte) -> byte {
	return MUL4(b) ~ MUL2(b) ~ b
}

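Note: MUL2 is doubling in GF(2^8) modulo the AES polynomial x^8 + x^4 + x^3 + x + 1 (0x11b): shift left, and reduce by XORing 0x1b when the high bit falls off; the higher MULn are built from it by XOR. A worked check:

	package main

	import "core:fmt"

	mul2 :: proc "contextless" (b: byte) -> byte {
		return (b >> 7) != 0 ? (b << 1) ~ 0x1b : (b << 1)
	}

	main :: proc() {
		fmt.printf("%02x\n", mul2(0x57)) // 0xae: high bit clear, no reduction
		fmt.printf("%02x\n", mul2(0xae)) // 0x47: (0xae << 1) & 0xff = 0x5c, then ~ 0x1b
	}
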
sub_bytes :: #force_inline proc (x: [][16]byte, columns: int) {
	for i := 0; i < 8; i += 1 {
		for j := 0; j < columns; j += 1 {
			x[i][j] = SBOX[x[i][j]]
		}
	}
}

shift_bytes :: #force_inline proc (x: [][16]byte, columns: int, v: Groestl_Variant) {
	temp: [16]byte
	R := &SHIFT[int(v) / 2][int(v) & 1]

	for i := 0; i < 8; i += 1 {
		for j := 0; j < columns; j += 1 {
			temp[j] = x[i][(j + R[i]) % columns]
		}
		for j := 0; j < columns; j += 1 {
			x[i][j] = temp[j]
		}
	}
}

mix_bytes :: #force_inline proc (x: [][16]byte, columns: int) {
	temp: [8]byte

	for i := 0; i < columns; i += 1 {
		for j := 0; j < 8; j += 1 {
			temp[j] = MUL2(x[(j + 0) % 8][i]) ~
				MUL2(x[(j + 1) % 8][i]) ~
				MUL3(x[(j + 2) % 8][i]) ~
				MUL4(x[(j + 3) % 8][i]) ~
				MUL5(x[(j + 4) % 8][i]) ~
				MUL3(x[(j + 5) % 8][i]) ~
				MUL5(x[(j + 6) % 8][i]) ~
				MUL7(x[(j + 7) % 8][i])
		}
		for j := 0; j < 8; j += 1 {
			x[j][i] = temp[j]
		}
	}
}

p :: #force_inline proc (ctx: ^Groestl_Context, x: [][16]byte) {
	v := ctx.columns == 8 ? Groestl_Variant.P512 : Groestl_Variant.P1024
	for i := 0; i < ctx.rounds; i += 1 {
		add_roundconstant(x, ctx.columns, byte(i), v)
		sub_bytes(x, ctx.columns)
		shift_bytes(x, ctx.columns, v)
		mix_bytes(x, ctx.columns)
	}
}

q :: #force_inline proc (ctx: ^Groestl_Context, x: [][16]byte) {
	v := ctx.columns == 8 ? Groestl_Variant.Q512 : Groestl_Variant.Q1024
	for i := 0; i < ctx.rounds; i += 1 {
		add_roundconstant(x, ctx.columns, byte(i), v)
		sub_bytes(x, ctx.columns)
		shift_bytes(x, ctx.columns, v)
		mix_bytes(x, ctx.columns)
	}
}

transform :: proc(ctx: ^Groestl_Context, input: []byte, msglen: u32) {
	tmp1, tmp2: [8][16]byte
	input, msglen := input, msglen

	for msglen >= u32(ctx.statesize) {
		for i := 0; i < 8; i += 1 {
			for j := 0; j < ctx.columns; j += 1 {
				tmp1[i][j] = ctx.chaining[i][j] ~ input[j * 8 + i]
				tmp2[i][j] = input[j * 8 + i]
			}
		}

		p(ctx, tmp1[:])
		q(ctx, tmp2[:])

		for i := 0; i < 8; i += 1 {
			for j := 0; j < ctx.columns; j += 1 {
				ctx.chaining[i][j] ~= tmp1[i][j] ~ tmp2[i][j]
			}
		}

		ctx.block_counter += 1
		msglen -= u32(ctx.statesize)
		input = input[ctx.statesize:]
	}
}

output_transformation :: proc(ctx: ^Groestl_Context) {
	temp: [8][16]byte

	for i := 0; i < 8; i += 1 {
		for j := 0; j < ctx.columns; j += 1 {
			temp[i][j] = ctx.chaining[i][j]
		}
	}

	p(ctx, temp[:])

	for i := 0; i < 8; i += 1 {
		for j := 0; j < ctx.columns; j += 1 {
			ctx.chaining[i][j] ~= temp[i][j]
		}
	}
}

add_roundconstant :: proc(x: [][16]byte, columns: int, round: byte, v: Groestl_Variant) {
	switch (i32(v) & 1) {
	case 0:
		for i := 0; i < columns; i += 1 {
			x[0][i] ~= byte(i << 4) ~ round
		}
	case 1:
		for i := 0; i < columns; i += 1 {
			for j := 0; j < 7; j += 1 {
				x[j][i] ~= 0xff
			}
		}
		for i := 0; i < columns; i += 1 {
			x[7][i] ~= byte(i << 4) ~ 0xff ~ round
		}
	}
}
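Note: transform and output_transformation above are the standard Grøstl construction; per message block the loop computes

	h_i = P(h_{i-1} ⊕ m_i) ⊕ Q(m_i) ⊕ h_{i-1}

(tmp1 holds h ⊕ m for P, tmp2 holds m for Q), and the output transformation is trunc_n(P(h) ⊕ h), with final taking the trailing hashbytelen bytes of the state.
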
File diff suppressed because it is too large
@@ -1,584 +0,0 @@
|
||||
package jh
|
||||
|
||||
/*
|
||||
Copyright 2021 zhibog
|
||||
Made available under the BSD-3 license.
|
||||
|
||||
List of contributors:
|
||||
zhibog, dotbmp: Initial implementation.
|
||||
|
||||
Implementation of the JH hashing algorithm, as defined in <https://www3.ntu.edu.sg/home/wuhj/research/jh/index.html>
|
||||
*/
|
||||
|
||||
import "core:os"
|
||||
import "core:io"
|
||||
|
||||
/*
|
||||
High level API
|
||||
*/
|
||||
|
||||
DIGEST_SIZE_224 :: 28
|
||||
DIGEST_SIZE_256 :: 32
|
||||
DIGEST_SIZE_384 :: 48
|
||||
DIGEST_SIZE_512 :: 64
|
||||
|
||||
// hash_string_224 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
|
||||
return hash_bytes_224(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_224 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
|
||||
hash: [DIGEST_SIZE_224]byte
|
||||
ctx: Jh_Context
|
||||
ctx.hashbitlen = 224
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_224 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_224 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
|
||||
assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
|
||||
ctx: Jh_Context
|
||||
ctx.hashbitlen = 224
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_224 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
|
||||
hash: [DIGEST_SIZE_224]byte
|
||||
ctx: Jh_Context
|
||||
ctx.hashbitlen = 224
|
||||
init(&ctx)
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_224 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_224(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_224(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_224]byte{}, false
|
||||
}
|
||||
|
||||
hash_224 :: proc {
|
||||
hash_stream_224,
|
||||
hash_file_224,
|
||||
hash_bytes_224,
|
||||
hash_string_224,
|
||||
hash_bytes_to_buffer_224,
|
||||
hash_string_to_buffer_224,
|
||||
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
    return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
    hash: [DIGEST_SIZE_256]byte
    ctx: Jh_Context
    ctx.hashbitlen = 256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
    ctx: Jh_Context
    ctx.hashbitlen = 256
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
    hash: [DIGEST_SIZE_256]byte
    ctx: Jh_Context
    ctx.hashbitlen = 256
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
    if !load_at_once {
        return hash_stream_256(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_256(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
    hash_stream_256,
    hash_file_256,
    hash_bytes_256,
    hash_string_256,
    hash_bytes_to_buffer_256,
    hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
    return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
    hash: [DIGEST_SIZE_384]byte
    ctx: Jh_Context
    ctx.hashbitlen = 384
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
    ctx: Jh_Context
    ctx.hashbitlen = 384
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
    hash: [DIGEST_SIZE_384]byte
    ctx: Jh_Context
    ctx.hashbitlen = 384
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
    if !load_at_once {
        return hash_stream_384(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_384(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
    hash_stream_384,
    hash_file_384,
    hash_bytes_384,
    hash_string_384,
    hash_bytes_to_buffer_384,
    hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
    return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
    hash: [DIGEST_SIZE_512]byte
    ctx: Jh_Context
    ctx.hashbitlen = 512
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
    ctx: Jh_Context
    ctx.hashbitlen = 512
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
    hash: [DIGEST_SIZE_512]byte
    ctx: Jh_Context
    ctx.hashbitlen = 512
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
    if !load_at_once {
        return hash_stream_512(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
    hash_stream_512,
    hash_file_512,
    hash_bytes_512,
    hash_string_512,
    hash_bytes_to_buffer_512,
    hash_string_to_buffer_512,
}

/*
    Low level API
*/

init :: proc(ctx: ^Jh_Context) {
    assert(ctx.hashbitlen == 224 || ctx.hashbitlen == 256 || ctx.hashbitlen == 384 || ctx.hashbitlen == 512, "hashbitlen must be set to 224, 256, 384 or 512")
    ctx.H[1] = byte(ctx.hashbitlen) & 0xff
    ctx.H[0] = byte(ctx.hashbitlen >> 8) & 0xff
    F8(ctx)
}

update :: proc(ctx: ^Jh_Context, data: []byte) {
    databitlen := u64(len(data)) * 8
    ctx.databitlen += databitlen
    i := u64(0)

    if (ctx.buffer_size > 0) && ((ctx.buffer_size + databitlen) < 512) {
        if (databitlen & 7) == 0 {
            copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3)])
        } else {
            copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3) + 1])
        }
        ctx.buffer_size += databitlen
        databitlen = 0
    }

    if (ctx.buffer_size > 0) && ((ctx.buffer_size + databitlen) >= 512) {
        copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3)])
        i = 64 - (ctx.buffer_size >> 3)
        databitlen = databitlen - (512 - ctx.buffer_size)
        F8(ctx)
        ctx.buffer_size = 0
    }

    for databitlen >= 512 {
        copy(ctx.buffer[:], data[i:i + 64])
        F8(ctx)
        i += 64
        databitlen -= 512
    }

    if databitlen > 0 {
        if (databitlen & 7) == 0 {
            copy(ctx.buffer[:], data[i:i + ((databitlen & 0x1ff) >> 3)])
        } else {
            copy(ctx.buffer[:], data[i:i + ((databitlen & 0x1ff) >> 3) + 1])
        }
        ctx.buffer_size = databitlen
    }
}

final :: proc(ctx: ^Jh_Context, hash: []byte) {
    if ctx.databitlen & 0x1ff == 0 {
        for i := 0; i < 64; i += 1 {
            ctx.buffer[i] = 0
        }
        ctx.buffer[0] = 0x80
        ctx.buffer[63] = byte(ctx.databitlen) & 0xff
        ctx.buffer[62] = byte(ctx.databitlen >> 8) & 0xff
        ctx.buffer[61] = byte(ctx.databitlen >> 16) & 0xff
        ctx.buffer[60] = byte(ctx.databitlen >> 24) & 0xff
        ctx.buffer[59] = byte(ctx.databitlen >> 32) & 0xff
        ctx.buffer[58] = byte(ctx.databitlen >> 40) & 0xff
        ctx.buffer[57] = byte(ctx.databitlen >> 48) & 0xff
        ctx.buffer[56] = byte(ctx.databitlen >> 56) & 0xff
        F8(ctx)
    } else {
        if ctx.buffer_size & 7 == 0 {
            for i := (ctx.databitlen & 0x1ff) >> 3; i < 64; i += 1 {
                ctx.buffer[i] = 0
            }
        } else {
            for i := ((ctx.databitlen & 0x1ff) >> 3) + 1; i < 64; i += 1 {
                ctx.buffer[i] = 0
            }
        }
        ctx.buffer[(ctx.databitlen & 0x1ff) >> 3] |= 1 << (7 - (ctx.databitlen & 7))
        F8(ctx)
        for i := 0; i < 64; i += 1 {
            ctx.buffer[i] = 0
        }
        ctx.buffer[63] = byte(ctx.databitlen) & 0xff
        ctx.buffer[62] = byte(ctx.databitlen >> 8) & 0xff
        ctx.buffer[61] = byte(ctx.databitlen >> 16) & 0xff
        ctx.buffer[60] = byte(ctx.databitlen >> 24) & 0xff
        ctx.buffer[59] = byte(ctx.databitlen >> 32) & 0xff
        ctx.buffer[58] = byte(ctx.databitlen >> 40) & 0xff
        ctx.buffer[57] = byte(ctx.databitlen >> 48) & 0xff
        ctx.buffer[56] = byte(ctx.databitlen >> 56) & 0xff
        F8(ctx)
    }
    switch ctx.hashbitlen {
    case 224: copy(hash[:], ctx.H[100:128])
    case 256: copy(hash[:], ctx.H[96:128])
    case 384: copy(hash[:], ctx.H[80:128])
    case 512: copy(hash[:], ctx.H[64:128])
    }
}
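
// Low-level usage sketch (illustrative): hashbitlen must be set before
// calling init, because init derives the initial hash value H from it and
// final uses it to select which slice of H becomes the digest.
/*
example_jh_incremental :: proc() {
    ctx: Jh_Context
    ctx.hashbitlen = 512
    init(&ctx)
    msg := "Hellope"
    update(&ctx, transmute([]byte)(msg))
    digest: [DIGEST_SIZE_512]byte
    final(&ctx, digest[:])
}
*/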

/*
    JH implementation
*/

ROUNDCONSTANT_ZERO := [64]byte {
    0x6, 0xa, 0x0, 0x9, 0xe, 0x6, 0x6, 0x7,
    0xf, 0x3, 0xb, 0xc, 0xc, 0x9, 0x0, 0x8,
    0xb, 0x2, 0xf, 0xb, 0x1, 0x3, 0x6, 0x6,
    0xe, 0xa, 0x9, 0x5, 0x7, 0xd, 0x3, 0xe,
    0x3, 0xa, 0xd, 0xe, 0xc, 0x1, 0x7, 0x5,
    0x1, 0x2, 0x7, 0x7, 0x5, 0x0, 0x9, 0x9,
    0xd, 0xa, 0x2, 0xf, 0x5, 0x9, 0x0, 0xb,
    0x0, 0x6, 0x6, 0x7, 0x3, 0x2, 0x2, 0xa,
}

SBOX := [2][16]byte {
    {9, 0, 4, 11, 13, 12, 3, 15, 1, 10, 2, 6, 7, 5, 8, 14},
    {3, 12, 6, 13, 5, 7, 1, 9, 15, 2, 0, 4, 11, 10, 14, 8},
}

Jh_Context :: struct {
    hashbitlen:    int,
    databitlen:    u64,
    buffer_size:   u64,
    H:             [128]byte,
    A:             [256]byte,
    roundconstant: [64]byte,
    buffer:        [64]byte,
}

E8_finaldegroup :: proc(ctx: ^Jh_Context) {
    t0, t1, t2, t3: byte
    tem: [256]byte
    for i := 0; i < 128; i += 1 {
        tem[i] = ctx.A[i << 1]
        tem[i + 128] = ctx.A[(i << 1) + 1]
    }
    for i := 0; i < 128; i += 1 {
        ctx.H[i] = 0
    }
    for i := 0; i < 256; i += 1 {
        t0 = (tem[i] >> 3) & 1
        t1 = (tem[i] >> 2) & 1
        t2 = (tem[i] >> 1) & 1
        t3 = (tem[i] >> 0) & 1

        ctx.H[uint(i) >> 3] |= t0 << (7 - (uint(i) & 7))
        ctx.H[(uint(i) + 256) >> 3] |= t1 << (7 - (uint(i) & 7))
        ctx.H[(uint(i) + 512) >> 3] |= t2 << (7 - (uint(i) & 7))
        ctx.H[(uint(i) + 768) >> 3] |= t3 << (7 - (uint(i) & 7))
    }
}

update_roundconstant :: proc(ctx: ^Jh_Context) {
    tem: [64]byte
    t: byte
    for i := 0; i < 64; i += 1 {
        tem[i] = SBOX[0][ctx.roundconstant[i]]
    }
    for i := 0; i < 64; i += 2 {
        tem[i + 1] ~= ((tem[i] << 1) ~ (tem[i] >> 3) ~ ((tem[i] >> 2) & 2)) & 0xf
        tem[i] ~= ((tem[i + 1] << 1) ~ (tem[i + 1] >> 3) ~ ((tem[i + 1] >> 2) & 2)) & 0xf
    }
    for i := 0; i < 64; i += 4 {
        t = tem[i + 2]
        tem[i + 2] = tem[i + 3]
        tem[i + 3] = t
    }
    for i := 0; i < 32; i += 1 {
        ctx.roundconstant[i] = tem[i << 1]
        ctx.roundconstant[i + 32] = tem[(i << 1) + 1]
    }
    for i := 32; i < 64; i += 2 {
        t = ctx.roundconstant[i]
        ctx.roundconstant[i] = ctx.roundconstant[i + 1]
        ctx.roundconstant[i + 1] = t
    }
}

R8 :: proc(ctx: ^Jh_Context) {
    t: byte
    tem, roundconstant_expanded: [256]byte
    for i := u32(0); i < 256; i += 1 {
        roundconstant_expanded[i] = (ctx.roundconstant[i >> 2] >> (3 - (i & 3))) & 1
    }
    for i := 0; i < 256; i += 1 {
        tem[i] = SBOX[roundconstant_expanded[i]][ctx.A[i]]
    }
    for i := 0; i < 256; i += 2 {
        tem[i + 1] ~= ((tem[i] << 1) ~ (tem[i] >> 3) ~ ((tem[i] >> 2) & 2)) & 0xf
        tem[i] ~= ((tem[i + 1] << 1) ~ (tem[i + 1] >> 3) ~ ((tem[i + 1] >> 2) & 2)) & 0xf
    }
    for i := 0; i < 256; i += 4 {
        t = tem[i + 2]
        tem[i + 2] = tem[i + 3]
        tem[i + 3] = t
    }
    for i := 0; i < 128; i += 1 {
        ctx.A[i] = tem[i << 1]
        ctx.A[i + 128] = tem[(i << 1) + 1]
    }
    for i := 128; i < 256; i += 2 {
        t = ctx.A[i]
        ctx.A[i] = ctx.A[i + 1]
        ctx.A[i + 1] = t
    }
}

E8_initialgroup :: proc(ctx: ^Jh_Context) {
    t0, t1, t2, t3: byte
    tem: [256]byte
    for i := u32(0); i < 256; i += 1 {
        t0 = (ctx.H[i >> 3] >> (7 - (i & 7))) & 1
        t1 = (ctx.H[(i + 256) >> 3] >> (7 - (i & 7))) & 1
        t2 = (ctx.H[(i + 512) >> 3] >> (7 - (i & 7))) & 1
        t3 = (ctx.H[(i + 768) >> 3] >> (7 - (i & 7))) & 1
        tem[i] = (t0 << 3) | (t1 << 2) | (t2 << 1) | (t3 << 0)
    }
    for i := 0; i < 128; i += 1 {
        ctx.A[i << 1] = tem[i]
        ctx.A[(i << 1) + 1] = tem[i + 128]
    }
}

E8 :: proc(ctx: ^Jh_Context) {
    for i := 0; i < 64; i += 1 {
        ctx.roundconstant[i] = ROUNDCONSTANT_ZERO[i]
    }
    E8_initialgroup(ctx)
    for i := 0; i < 42; i += 1 {
        R8(ctx)
        update_roundconstant(ctx)
    }
    E8_finaldegroup(ctx)
}

F8 :: proc(ctx: ^Jh_Context) {
    for i := 0; i < 64; i += 1 {
        ctx.H[i] ~= ctx.buffer[i]
    }
    E8(ctx)
    for i := 0; i < 64; i += 1 {
        ctx.H[i + 64] ~= ctx.buffer[i]
    }
}
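
// F8 above is the JH compression function: it xors the 64-byte message block
// into the first half of the 1024-bit state H, applies the 42-round E8
// permutation, and then xors the same block into the second half; roughly
// H' = E8(H ~ (M || 0)) ~ (0 || M), with ~ denoting xor as in Odin.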
@@ -1,374 +0,0 @@
package keccak

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the Keccak hashing algorithm.
    This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
*/

import "core:os"
import "core:io"

import "../_sha3"

/*
    High level API
*/

DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
    return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
    hash: [DIGEST_SIZE_224]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_224
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_224
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
    hash: [DIGEST_SIZE_224]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_224
    ctx.is_keccak = true
    _sha3.init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            _sha3.update(&ctx, buf[:read])
        }
    }
    _sha3.final(&ctx, hash[:])
    return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
    if !load_at_once {
        return hash_stream_224(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_224(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
    hash_stream_224,
    hash_file_224,
    hash_bytes_224,
    hash_string_224,
    hash_bytes_to_buffer_224,
    hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
    return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
    hash: [DIGEST_SIZE_256]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_256
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_256
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
    hash: [DIGEST_SIZE_256]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_256
    ctx.is_keccak = true
    _sha3.init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            _sha3.update(&ctx, buf[:read])
        }
    }
    _sha3.final(&ctx, hash[:])
    return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
    if !load_at_once {
        return hash_stream_256(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_256(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
    hash_stream_256,
    hash_file_256,
    hash_bytes_256,
    hash_string_256,
    hash_bytes_to_buffer_256,
    hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
    return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
    hash: [DIGEST_SIZE_384]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
    hash: [DIGEST_SIZE_384]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    _sha3.init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            _sha3.update(&ctx, buf[:read])
        }
    }
    _sha3.final(&ctx, hash[:])
    return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
    if !load_at_once {
        return hash_stream_384(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_384(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
    hash_stream_384,
    hash_file_384,
    hash_bytes_384,
    hash_string_384,
    hash_bytes_to_buffer_384,
    hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
    return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
    hash: [DIGEST_SIZE_512]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    _sha3.init(&ctx)
    _sha3.update(&ctx, data)
    _sha3.final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
    hash: [DIGEST_SIZE_512]byte
    ctx: _sha3.Sha3_Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    _sha3.init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            _sha3.update(&ctx, buf[:read])
        }
    }
    _sha3.final(&ctx, hash[:])
    return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
    if !load_at_once {
        return hash_stream_512(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
    hash_stream_512,
    hash_file_512,
    hash_bytes_512,
    hash_string_512,
    hash_bytes_to_buffer_512,
    hash_string_to_buffer_512,
}

/*
    Low level API
*/

Keccak_Context :: _sha3.Sha3_Context

init :: proc(ctx: ^_sha3.Sha3_Context) {
    ctx.is_keccak = true
    _sha3.init(ctx)
}

update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
    _sha3.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
    _sha3.final(ctx, hash)
}
10
core/crypto/legacy/README.md
Normal file
10
core/crypto/legacy/README.md
Normal file
@@ -0,0 +1,10 @@
# crypto/legacy

These are algorithms that are shipped solely for the purpose of
interoperability with legacy systems. The use of these packages in
any other capacity is discouraged, especially those that are known
to be broken.

- keccak - The draft version of the algorithm that became SHA-3
- MD5 - Broken (https://eprint.iacr.org/2005/075)
- SHA-1 - Broken (https://eprint.iacr.org/2017/190)
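
A minimal usage sketch (assuming the packages are imported from this
directory, as laid out in this commit):

    import "core:crypto/legacy/md5"
    import "core:fmt"

    main :: proc() {
        digest := md5.hash("Hellope") // one-shot, via the hash proc group
        fmt.printf("MD5: %x\n", digest[:])
    }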
377
core/crypto/legacy/keccak/keccak.odin
Normal file
377
core/crypto/legacy/keccak/keccak.odin
Normal file
@@ -0,0 +1,377 @@
package keccak

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the Keccak hashing algorithm.
    This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
*/

import "core:io"
import "core:os"

import "../../_sha3"

/*
    High level API
*/

DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64

// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
    return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
    hash: [DIGEST_SIZE_224]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_224
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_224
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
    hash: [DIGEST_SIZE_224]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_224
    ctx.is_keccak = true
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
    if !load_at_once {
        return hash_stream_224(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_224(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
    hash_stream_224,
    hash_file_224,
    hash_bytes_224,
    hash_string_224,
    hash_bytes_to_buffer_224,
    hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
    return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
    hash: [DIGEST_SIZE_256]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_256
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_256
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
    hash: [DIGEST_SIZE_256]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_256
    ctx.is_keccak = true
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
    if !load_at_once {
        return hash_stream_256(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_256(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
    hash_stream_256,
    hash_file_256,
    hash_bytes_256,
    hash_string_256,
    hash_bytes_to_buffer_256,
    hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
    return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
    hash: [DIGEST_SIZE_384]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_384
    ctx.is_keccak = true
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
    if !load_at_once {
        return hash_stream_384(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_384(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
    hash_stream_384,
    hash_file_384,
    hash_bytes_384,
    hash_string_384,
    hash_bytes_to_buffer_384,
    hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
    return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
    hash: [DIGEST_SIZE_512]byte
    ctx: Context
    ctx.mdlen = DIGEST_SIZE_512
    ctx.is_keccak = true
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
    if !load_at_once {
        return hash_stream_512(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes_512(buf[:]), ok
        }
    }
    return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
    hash_stream_512,
    hash_file_512,
    hash_bytes_512,
    hash_string_512,
    hash_bytes_to_buffer_512,
    hash_string_to_buffer_512,
}

/*
    Low level API
*/

Context :: _sha3.Sha3_Context

init :: proc(ctx: ^Context) {
    ctx.is_keccak = true
    _sha3.init(ctx)
}

update :: proc(ctx: ^Context, data: []byte) {
    _sha3.update(ctx, data)
}

final :: proc(ctx: ^Context, hash: []byte) {
    _sha3.final(ctx, hash)
}
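
// Usage sketch (illustrative): Keccak-256 through the low level API, assuming
// this package is imported as core:crypto/legacy/keccak.
/*
example_keccak_256 :: proc() {
    ctx: keccak.Context
    ctx.mdlen = keccak.DIGEST_SIZE_256
    keccak.init(&ctx) // sets is_keccak, selecting the pre-NIST (draft) padding
    msg := "Hellope"
    keccak.update(&ctx, transmute([]byte)(msg))
    digest: [keccak.DIGEST_SIZE_256]byte
    keccak.final(&ctx, digest[:])
}
*/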
295
core/crypto/legacy/md5/md5.odin
Normal file
295
core/crypto/legacy/md5/md5.odin
Normal file
@@ -0,0 +1,295 @@
package md5

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
*/

import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:mem"
import "core:os"

/*
    High level API
*/

DIGEST_SIZE :: 16

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}
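
// Usage sketch (illustrative; the file name is hypothetical): hashing a file
// through the hash proc group, which streams by default.
/*
example_md5_file :: proc() {
    if fd, err := os.open("input.txt"); err == os.ERROR_NONE {
        defer os.close(fd)
        if digest, ok := hash(fd); ok {
            // use digest ([DIGEST_SIZE]byte)
        }
    }
}
*/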

/*
    Low level API
*/

init :: proc(ctx: ^Context) {
    ctx.state[0] = 0x67452301
    ctx.state[1] = 0xefcdab89
    ctx.state[2] = 0x98badcfe
    ctx.state[3] = 0x10325476

    ctx.bitlen = 0
    ctx.datalen = 0

    ctx.is_initialized = true
}

update :: proc(ctx: ^Context, data: []byte) {
    assert(ctx.is_initialized)

    for i := 0; i < len(data); i += 1 {
        ctx.data[ctx.datalen] = data[i]
        ctx.datalen += 1
        if (ctx.datalen == BLOCK_SIZE) {
            transform(ctx, ctx.data[:])
            ctx.bitlen += 512
            ctx.datalen = 0
        }
    }
}

final :: proc(ctx: ^Context, hash: []byte) {
    assert(ctx.is_initialized)

    if len(hash) < DIGEST_SIZE {
        panic("crypto/md5: invalid destination digest size")
    }

    i := ctx.datalen

    if ctx.datalen < 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < 56 {
            ctx.data[i] = 0x00
            i += 1
        }
    } else if ctx.datalen >= 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < BLOCK_SIZE {
            ctx.data[i] = 0x00
            i += 1
        }
        transform(ctx, ctx.data[:])
        mem.set(&ctx.data, 0, 56)
    }

    ctx.bitlen += u64(ctx.datalen * 8)
    endian.unchecked_put_u64le(ctx.data[56:], ctx.bitlen)
    transform(ctx, ctx.data[:])

    for i = 0; i < DIGEST_SIZE / 4; i += 1 {
        endian.unchecked_put_u32le(hash[i * 4:], ctx.state[i])
    }

    ctx.is_initialized = false
}
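
// Note on final above: it implements the standard MD5 padding, i.e. a single
// 0x80 byte, zero bytes up to offset 56 of the block (flushing a full block
// first if fewer than 8 bytes remain), and the message length in bits stored
// as a 64-bit little-endian value in the last 8 bytes.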

/*
    MD5 implementation
*/

BLOCK_SIZE :: 64

Context :: struct {
    data:    [BLOCK_SIZE]byte,
    state:   [4]u32,
    bitlen:  u64,
    datalen: u32,

    is_initialized: bool,
}

/*
    @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
    and II respectively, instead of declaring them separately.
*/

@(private)
FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + bits.rotate_left32(a + ((b & c) | (~b & d)) + m + t, s)
}

@(private)
GG :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + bits.rotate_left32(a + ((b & d) | (c & ~d)) + m + t, s)
}

@(private)
HH :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + bits.rotate_left32(a + (b ~ c ~ d) + m + t, s)
}

@(private)
II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + bits.rotate_left32(a + (c ~ (b | ~d)) + m + t, s)
}

@(private)
transform :: proc "contextless" (ctx: ^Context, data: []byte) {
    m: [DIGEST_SIZE]u32

    for i := 0; i < DIGEST_SIZE; i += 1 {
        m[i] = endian.unchecked_get_u32le(data[i * 4:])
    }

    a := ctx.state[0]
    b := ctx.state[1]
    c := ctx.state[2]
    d := ctx.state[3]

    a = FF(a, b, c, d, m[0], 7, 0xd76aa478)
    d = FF(d, a, b, c, m[1], 12, 0xe8c7b756)
    c = FF(c, d, a, b, m[2], 17, 0x242070db)
    b = FF(b, c, d, a, m[3], 22, 0xc1bdceee)
    a = FF(a, b, c, d, m[4], 7, 0xf57c0faf)
    d = FF(d, a, b, c, m[5], 12, 0x4787c62a)
    c = FF(c, d, a, b, m[6], 17, 0xa8304613)
    b = FF(b, c, d, a, m[7], 22, 0xfd469501)
    a = FF(a, b, c, d, m[8], 7, 0x698098d8)
    d = FF(d, a, b, c, m[9], 12, 0x8b44f7af)
    c = FF(c, d, a, b, m[10], 17, 0xffff5bb1)
    b = FF(b, c, d, a, m[11], 22, 0x895cd7be)
    a = FF(a, b, c, d, m[12], 7, 0x6b901122)
    d = FF(d, a, b, c, m[13], 12, 0xfd987193)
    c = FF(c, d, a, b, m[14], 17, 0xa679438e)
    b = FF(b, c, d, a, m[15], 22, 0x49b40821)

    a = GG(a, b, c, d, m[1], 5, 0xf61e2562)
    d = GG(d, a, b, c, m[6], 9, 0xc040b340)
    c = GG(c, d, a, b, m[11], 14, 0x265e5a51)
    b = GG(b, c, d, a, m[0], 20, 0xe9b6c7aa)
    a = GG(a, b, c, d, m[5], 5, 0xd62f105d)
    d = GG(d, a, b, c, m[10], 9, 0x02441453)
    c = GG(c, d, a, b, m[15], 14, 0xd8a1e681)
    b = GG(b, c, d, a, m[4], 20, 0xe7d3fbc8)
    a = GG(a, b, c, d, m[9], 5, 0x21e1cde6)
    d = GG(d, a, b, c, m[14], 9, 0xc33707d6)
    c = GG(c, d, a, b, m[3], 14, 0xf4d50d87)
    b = GG(b, c, d, a, m[8], 20, 0x455a14ed)
    a = GG(a, b, c, d, m[13], 5, 0xa9e3e905)
    d = GG(d, a, b, c, m[2], 9, 0xfcefa3f8)
    c = GG(c, d, a, b, m[7], 14, 0x676f02d9)
    b = GG(b, c, d, a, m[12], 20, 0x8d2a4c8a)

    a = HH(a, b, c, d, m[5], 4, 0xfffa3942)
    d = HH(d, a, b, c, m[8], 11, 0x8771f681)
    c = HH(c, d, a, b, m[11], 16, 0x6d9d6122)
    b = HH(b, c, d, a, m[14], 23, 0xfde5380c)
    a = HH(a, b, c, d, m[1], 4, 0xa4beea44)
    d = HH(d, a, b, c, m[4], 11, 0x4bdecfa9)
    c = HH(c, d, a, b, m[7], 16, 0xf6bb4b60)
    b = HH(b, c, d, a, m[10], 23, 0xbebfbc70)
    a = HH(a, b, c, d, m[13], 4, 0x289b7ec6)
    d = HH(d, a, b, c, m[0], 11, 0xeaa127fa)
    c = HH(c, d, a, b, m[3], 16, 0xd4ef3085)
    b = HH(b, c, d, a, m[6], 23, 0x04881d05)
    a = HH(a, b, c, d, m[9], 4, 0xd9d4d039)
    d = HH(d, a, b, c, m[12], 11, 0xe6db99e5)
    c = HH(c, d, a, b, m[15], 16, 0x1fa27cf8)
    b = HH(b, c, d, a, m[2], 23, 0xc4ac5665)

    a = II(a, b, c, d, m[0], 6, 0xf4292244)
    d = II(d, a, b, c, m[7], 10, 0x432aff97)
    c = II(c, d, a, b, m[14], 15, 0xab9423a7)
    b = II(b, c, d, a, m[5], 21, 0xfc93a039)
    a = II(a, b, c, d, m[12], 6, 0x655b59c3)
    d = II(d, a, b, c, m[3], 10, 0x8f0ccc92)
    c = II(c, d, a, b, m[10], 15, 0xffeff47d)
    b = II(b, c, d, a, m[1], 21, 0x85845dd1)
    a = II(a, b, c, d, m[8], 6, 0x6fa87e4f)
    d = II(d, a, b, c, m[15], 10, 0xfe2ce6e0)
    c = II(c, d, a, b, m[6], 15, 0xa3014314)
    b = II(b, c, d, a, m[13], 21, 0x4e0811a1)
    a = II(a, b, c, d, m[4], 6, 0xf7537e82)
    d = II(d, a, b, c, m[11], 10, 0xbd3af235)
    c = II(c, d, a, b, m[2], 15, 0x2ad7d2bb)
    b = II(b, c, d, a, m[9], 21, 0xeb86d391)

    ctx.state[0] += a
    ctx.state[1] += b
    ctx.state[2] += c
    ctx.state[3] += d
}
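
// The round helpers above fold the RFC 1321 round functions into one step;
// FF, for instance, computes b + ((a + F(b, c, d) + m + t) <<< s) with
// F(b, c, d) = (b & c) | (~b & d), and GG/HH/II do the same with G, H and I.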
252
core/crypto/legacy/sha1/sha1.odin
Normal file
252
core/crypto/legacy/sha1/sha1.odin
Normal file
@@ -0,0 +1,252 @@
package sha1

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
*/

import "core:encoding/endian"
import "core:io"
import "core:math/bits"
import "core:mem"
import "core:os"

/*
    High level API
*/

DIGEST_SIZE :: 20

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    ctx: Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    init(&ctx)

    buf := make([]byte, 512)
    defer delete(buf)

    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}
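
// Usage sketch (illustrative): writing the digest into a caller-owned buffer
// instead of returning it by value.
/*
example_sha1_buffer :: proc() {
    digest: [DIGEST_SIZE]byte
    hash_string_to_buffer("Hellope", digest[:])
}
*/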

/*
    Low level API
*/

init :: proc(ctx: ^Context) {
    ctx.state[0] = 0x67452301
    ctx.state[1] = 0xefcdab89
    ctx.state[2] = 0x98badcfe
    ctx.state[3] = 0x10325476
    ctx.state[4] = 0xc3d2e1f0
    ctx.k[0] = 0x5a827999
    ctx.k[1] = 0x6ed9eba1
    ctx.k[2] = 0x8f1bbcdc
    ctx.k[3] = 0xca62c1d6

    ctx.datalen = 0
    ctx.bitlen = 0

    ctx.is_initialized = true
}

update :: proc(ctx: ^Context, data: []byte) {
    assert(ctx.is_initialized)

    for i := 0; i < len(data); i += 1 {
        ctx.data[ctx.datalen] = data[i]
        ctx.datalen += 1
        if (ctx.datalen == BLOCK_SIZE) {
            transform(ctx, ctx.data[:])
            ctx.bitlen += 512
            ctx.datalen = 0
        }
    }
}

final :: proc(ctx: ^Context, hash: []byte) {
    assert(ctx.is_initialized)

    if len(hash) < DIGEST_SIZE {
        panic("crypto/sha1: invalid destination digest size")
    }

    i := ctx.datalen

    if ctx.datalen < 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < 56 {
            ctx.data[i] = 0x00
            i += 1
        }
    } else {
        ctx.data[i] = 0x80
        i += 1
        for i < BLOCK_SIZE {
            ctx.data[i] = 0x00
            i += 1
        }
        transform(ctx, ctx.data[:])
        mem.set(&ctx.data, 0, 56)
    }

    ctx.bitlen += u64(ctx.datalen * 8)
    endian.unchecked_put_u64be(ctx.data[56:], ctx.bitlen)
    transform(ctx, ctx.data[:])

    for i = 0; i < DIGEST_SIZE / 4; i += 1 {
        endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
    }

    ctx.is_initialized = false
}
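
// Note on final above: the padding scheme matches MD5's, but the bit length
// is stored big-endian (endian.unchecked_put_u64be) and the state words are
// likewise emitted big-endian, as RFC 3174 specifies.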

/*
    SHA1 implementation
*/

BLOCK_SIZE :: 64

Context :: struct {
    data:    [BLOCK_SIZE]byte,
    datalen: u32,
    bitlen:  u64,
    state:   [5]u32,
    k:       [4]u32,

    is_initialized: bool,
}

@(private)
transform :: proc "contextless" (ctx: ^Context, data: []byte) {
    a, b, c, d, e, i, t: u32
    m: [80]u32

    for i = 0; i < 16; i += 1 {
        m[i] = endian.unchecked_get_u32be(data[i * 4:])
    }
    for i < 80 {
        m[i] = (m[i - 3] ~ m[i - 8] ~ m[i - 14] ~ m[i - 16])
        m[i] = (m[i] << 1) | (m[i] >> 31)
        i += 1
    }

    a = ctx.state[0]
    b = ctx.state[1]
    c = ctx.state[2]
    d = ctx.state[3]
    e = ctx.state[4]

    for i = 0; i < 20; i += 1 {
        t = bits.rotate_left32(a, 5) + ((b & c) ~ (~b & d)) + e + ctx.k[0] + m[i]
        e = d
        d = c
        c = bits.rotate_left32(b, 30)
        b = a
        a = t
    }
    for i < 40 {
        t = bits.rotate_left32(a, 5) + (b ~ c ~ d) + e + ctx.k[1] + m[i]
        e = d
        d = c
        c = bits.rotate_left32(b, 30)
        b = a
        a = t
        i += 1
    }
    for i < 60 {
        t = bits.rotate_left32(a, 5) + ((b & c) ~ (b & d) ~ (c & d)) + e + ctx.k[2] + m[i]
        e = d
        d = c
        c = bits.rotate_left32(b, 30)
        b = a
        a = t
        i += 1
    }
    for i < 80 {
        t = bits.rotate_left32(a, 5) + (b ~ c ~ d) + e + ctx.k[3] + m[i]
        e = d
        d = c
        c = bits.rotate_left32(b, 30)
        b = a
        a = t
        i += 1
    }

    ctx.state[0] += a
    ctx.state[1] += b
    ctx.state[2] += c
    ctx.state[3] += d
    ctx.state[4] += e
}
@@ -1,182 +0,0 @@
|
||||
package md2
|
||||
|
||||
/*
|
||||
Copyright 2021 zhibog
|
||||
Made available under the BSD-3 license.
|
||||
|
||||
List of contributors:
|
||||
zhibog, dotbmp: Initial implementation.
|
||||
|
||||
Implementation of the MD2 hashing algorithm, as defined in RFC 1319 <https://datatracker.ietf.org/doc/html/rfc1319>
|
||||
*/
|
||||
|
||||
import "core:os"
|
||||
import "core:io"
|
||||
|
||||
/*
|
||||
High level API
|
||||
*/
|
||||
|
||||
DIGEST_SIZE :: 16
|
||||
|
||||
// hash_string will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
|
||||
return hash_bytes(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
|
||||
hash: [DIGEST_SIZE]byte
|
||||
ctx: Md2_Context
|
||||
// init(&ctx) No-op
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer :: proc(data, hash: []byte) {
|
||||
assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
|
||||
ctx: Md2_Context
|
||||
// init(&ctx) No-op
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
|
||||
hash: [DIGEST_SIZE]byte
|
||||
ctx: Md2_Context
|
||||
// init(&ctx) No-op
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE]byte{}, false
|
||||
}
|
||||
|
||||
hash :: proc {
|
||||
hash_stream,
|
||||
hash_file,
|
||||
hash_bytes,
|
||||
hash_string,
|
||||
hash_bytes_to_buffer,
|
||||
hash_string_to_buffer,
|
||||
}
|
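
/*
    Usage sketch (editor's illustration, not part of this commit): `hash` is a
    proc group, so the overload is picked from the argument types:

        digest := hash("data")   // resolves to hash_string
        buf: [DIGEST_SIZE]byte
        hash("data", buf[:])     // resolves to hash_string_to_buffer
*/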
/*
    Low level API
*/

@(warning="Init is a no-op for MD2")
init :: proc(ctx: ^Md2_Context) {
    // No action needed here
}

update :: proc(ctx: ^Md2_Context, data: []byte) {
    for i := 0; i < len(data); i += 1 {
        ctx.data[ctx.datalen] = data[i]
        ctx.datalen += 1
        if (ctx.datalen == DIGEST_SIZE) {
            transform(ctx, ctx.data[:])
            ctx.datalen = 0
        }
    }
}

final :: proc(ctx: ^Md2_Context, hash: []byte) {
    to_pad := byte(DIGEST_SIZE - ctx.datalen)
    for ctx.datalen < DIGEST_SIZE {
        ctx.data[ctx.datalen] = to_pad
        ctx.datalen += 1
    }
    transform(ctx, ctx.data[:])
    transform(ctx, ctx.checksum[:])
    for i := 0; i < DIGEST_SIZE; i += 1 {
        hash[i] = ctx.state[i]
    }
}
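
/*
    Worked example (editor's illustration, not part of this commit): MD2 pads
    the last block with the pad length itself, PKCS#7 style. With
    ctx.datalen == 13, to_pad is 3 and the block ends 0x03 0x03 0x03; an
    empty buffer (datalen == 0) gets a whole block of 16 bytes of 0x10.
*/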
/*
    MD2 implementation
*/

Md2_Context :: struct {
    data: [DIGEST_SIZE]byte,
    state: [DIGEST_SIZE * 3]byte,
    checksum: [DIGEST_SIZE]byte,
    datalen: int,
}

PI_TABLE := [?]byte {
    41, 46, 67, 201, 162, 216, 124, 1, 61, 54, 84, 161, 236, 240, 6,
    19, 98, 167, 5, 243, 192, 199, 115, 140, 152, 147, 43, 217, 188, 76,
    130, 202, 30, 155, 87, 60, 253, 212, 224, 22, 103, 66, 111, 24, 138,
    23, 229, 18, 190, 78, 196, 214, 218, 158, 222, 73, 160, 251, 245, 142,
    187, 47, 238, 122, 169, 104, 121, 145, 21, 178, 7, 63, 148, 194, 16,
    137, 11, 34, 95, 33, 128, 127, 93, 154, 90, 144, 50, 39, 53, 62,
    204, 231, 191, 247, 151, 3, 255, 25, 48, 179, 72, 165, 181, 209, 215,
    94, 146, 42, 172, 86, 170, 198, 79, 184, 56, 210, 150, 164, 125, 182,
    118, 252, 107, 226, 156, 116, 4, 241, 69, 157, 112, 89, 100, 113, 135,
    32, 134, 91, 207, 101, 230, 45, 168, 2, 27, 96, 37, 173, 174, 176,
    185, 246, 28, 70, 97, 105, 52, 64, 126, 15, 85, 71, 163, 35, 221,
    81, 175, 58, 195, 92, 249, 206, 186, 197, 234, 38, 44, 83, 13, 110,
    133, 40, 132, 9, 211, 223, 205, 244, 65, 129, 77, 82, 106, 220, 55,
    200, 108, 193, 171, 250, 36, 225, 123, 8, 12, 189, 177, 74, 120, 136,
    149, 139, 227, 99, 232, 109, 233, 203, 213, 254, 59, 0, 29, 57, 242,
    239, 183, 14, 102, 88, 208, 228, 166, 119, 114, 248, 235, 117, 75, 10,
    49, 68, 80, 180, 143, 237, 31, 26, 219, 153, 141, 51, 159, 17, 131,
    20,
}

transform :: proc(ctx: ^Md2_Context, data: []byte) {
    j, k, t: byte
    for j = 0; j < DIGEST_SIZE; j += 1 {
        ctx.state[j + DIGEST_SIZE] = data[j]
        ctx.state[j + DIGEST_SIZE * 2] = (ctx.state[j + DIGEST_SIZE] ~ ctx.state[j])
    }
    t = 0
    for j = 0; j < DIGEST_SIZE + 2; j += 1 {
        for k = 0; k < DIGEST_SIZE * 3; k += 1 {
            ctx.state[k] ~= PI_TABLE[t]
            t = ctx.state[k]
        }
        t = (t + j) & 0xff
    }
    t = ctx.checksum[DIGEST_SIZE - 1]
    for j = 0; j < DIGEST_SIZE; j += 1 {
        ctx.checksum[j] ~= PI_TABLE[data[j] ~ t]
        t = ctx.checksum[j]
    }
}
@@ -1,263 +0,0 @@
package md4

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.
        Jeroen van Rijn: Context design to be able to change from Odin implementation to bindings.

    Implementation of the MD4 hashing algorithm, as defined in RFC 1320 <https://datatracker.ietf.org/doc/html/rfc1320>
*/

import "core:mem"
import "core:os"
import "core:io"

import "../util"

/*
    High level API
*/

DIGEST_SIZE :: 16

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Md4_Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
    ctx: Md4_Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Md4_Context
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}

/*
    Low level API
*/

init :: proc(ctx: ^Md4_Context) {
    ctx.state[0] = 0x67452301
    ctx.state[1] = 0xefcdab89
    ctx.state[2] = 0x98badcfe
    ctx.state[3] = 0x10325476
}

update :: proc(ctx: ^Md4_Context, data: []byte) {
    for i := 0; i < len(data); i += 1 {
        ctx.data[ctx.datalen] = data[i]
        ctx.datalen += 1
        if (ctx.datalen == BLOCK_SIZE) {
            transform(ctx, ctx.data[:])
            ctx.bitlen += 512
            ctx.datalen = 0
        }
    }
}

final :: proc(ctx: ^Md4_Context, hash: []byte) {
    i := ctx.datalen
    if ctx.datalen < 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < 56 {
            ctx.data[i] = 0x00
            i += 1
        }
    } else if ctx.datalen >= 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < BLOCK_SIZE {
            ctx.data[i] = 0x00
            i += 1
        }
        transform(ctx, ctx.data[:])
        mem.set(&ctx.data, 0, 56)
    }

    ctx.bitlen += u64(ctx.datalen * 8)
    ctx.data[56] = byte(ctx.bitlen)
    ctx.data[57] = byte(ctx.bitlen >> 8)
    ctx.data[58] = byte(ctx.bitlen >> 16)
    ctx.data[59] = byte(ctx.bitlen >> 24)
    ctx.data[60] = byte(ctx.bitlen >> 32)
    ctx.data[61] = byte(ctx.bitlen >> 40)
    ctx.data[62] = byte(ctx.bitlen >> 48)
    ctx.data[63] = byte(ctx.bitlen >> 56)
    transform(ctx, ctx.data[:])

    for i = 0; i < 4; i += 1 {
        hash[i] = byte(ctx.state[0] >> (i * 8)) & 0x000000ff
        hash[i + 4] = byte(ctx.state[1] >> (i * 8)) & 0x000000ff
        hash[i + 8] = byte(ctx.state[2] >> (i * 8)) & 0x000000ff
        hash[i + 12] = byte(ctx.state[3] >> (i * 8)) & 0x000000ff
    }
}

/*
    MD4 implementation
*/

BLOCK_SIZE :: 64

Md4_Context :: struct {
    data: [64]byte,
    state: [4]u32,
    bitlen: u64,
    datalen: u32,
}

/*
    @note(zh): F, G and H, as mentioned in the RFC, have been inlined into FF, GG
    and HH respectively, instead of declaring them separately.
*/

FF :: #force_inline proc "contextless" (a, b, c, d, x: u32, s: int) -> u32 {
    return util.ROTL32(a + ((b & c) | (~b & d)) + x, s)
}

GG :: #force_inline proc "contextless" (a, b, c, d, x: u32, s: int) -> u32 {
    return util.ROTL32(a + ((b & c) | (b & d) | (c & d)) + x + 0x5a827999, s)
}

HH :: #force_inline proc "contextless" (a, b, c, d, x: u32, s: int) -> u32 {
    return util.ROTL32(a + (b ~ c ~ d) + x + 0x6ed9eba1, s)
}
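
/*
    Editor's note (illustration, not part of this commit): written with the
    RFC 1320 auxiliary functions spelled out, FF computes
    ROTL32(a + F(b, c, d) + x, s) with F(b, c, d) = (b & c) | (~b & d);
    GG uses G(b, c, d) = (b & c) | (b & d) | (c & d) plus the round constant
    0x5a827999; HH uses H(b, c, d) = b ~ c ~ d plus 0x6ed9eba1.
*/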
transform :: proc(ctx: ^Md4_Context, data: []byte) {
    a, b, c, d, i, j: u32
    m: [DIGEST_SIZE]u32

    for i, j = 0, 0; i < DIGEST_SIZE; i += 1 {
        m[i] = u32(data[j]) | (u32(data[j + 1]) << 8) | (u32(data[j + 2]) << 16) | (u32(data[j + 3]) << 24)
        j += 4
    }

    a = ctx.state[0]
    b = ctx.state[1]
    c = ctx.state[2]
    d = ctx.state[3]

    a = FF(a, b, c, d, m[0], 3)
    d = FF(d, a, b, c, m[1], 7)
    c = FF(c, d, a, b, m[2], 11)
    b = FF(b, c, d, a, m[3], 19)
    a = FF(a, b, c, d, m[4], 3)
    d = FF(d, a, b, c, m[5], 7)
    c = FF(c, d, a, b, m[6], 11)
    b = FF(b, c, d, a, m[7], 19)
    a = FF(a, b, c, d, m[8], 3)
    d = FF(d, a, b, c, m[9], 7)
    c = FF(c, d, a, b, m[10], 11)
    b = FF(b, c, d, a, m[11], 19)
    a = FF(a, b, c, d, m[12], 3)
    d = FF(d, a, b, c, m[13], 7)
    c = FF(c, d, a, b, m[14], 11)
    b = FF(b, c, d, a, m[15], 19)

    a = GG(a, b, c, d, m[0], 3)
    d = GG(d, a, b, c, m[4], 5)
    c = GG(c, d, a, b, m[8], 9)
    b = GG(b, c, d, a, m[12], 13)
    a = GG(a, b, c, d, m[1], 3)
    d = GG(d, a, b, c, m[5], 5)
    c = GG(c, d, a, b, m[9], 9)
    b = GG(b, c, d, a, m[13], 13)
    a = GG(a, b, c, d, m[2], 3)
    d = GG(d, a, b, c, m[6], 5)
    c = GG(c, d, a, b, m[10], 9)
    b = GG(b, c, d, a, m[14], 13)
    a = GG(a, b, c, d, m[3], 3)
    d = GG(d, a, b, c, m[7], 5)
    c = GG(c, d, a, b, m[11], 9)
    b = GG(b, c, d, a, m[15], 13)

    a = HH(a, b, c, d, m[0], 3)
    d = HH(d, a, b, c, m[8], 9)
    c = HH(c, d, a, b, m[4], 11)
    b = HH(b, c, d, a, m[12], 15)
    a = HH(a, b, c, d, m[2], 3)
    d = HH(d, a, b, c, m[10], 9)
    c = HH(c, d, a, b, m[6], 11)
    b = HH(b, c, d, a, m[14], 15)
    a = HH(a, b, c, d, m[1], 3)
    d = HH(d, a, b, c, m[9], 9)
    c = HH(c, d, a, b, m[5], 11)
    b = HH(b, c, d, a, m[13], 15)
    a = HH(a, b, c, d, m[3], 3)
    d = HH(d, a, b, c, m[11], 9)
    c = HH(c, d, a, b, m[7], 11)
    b = HH(b, c, d, a, m[15], 15)

    ctx.state[0] += a
    ctx.state[1] += b
    ctx.state[2] += c
    ctx.state[3] += d
}
@@ -1,285 +0,0 @@
package md5

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
*/

import "core:mem"
import "core:os"
import "core:io"

import "../util"

/*
    High level API
*/

DIGEST_SIZE :: 16

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Md5_Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
    ctx: Md5_Context
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Md5_Context
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}

/*
    Low level API
*/

init :: proc(ctx: ^Md5_Context) {
    ctx.state[0] = 0x67452301
    ctx.state[1] = 0xefcdab89
    ctx.state[2] = 0x98badcfe
    ctx.state[3] = 0x10325476
}

update :: proc(ctx: ^Md5_Context, data: []byte) {
    for i := 0; i < len(data); i += 1 {
        ctx.data[ctx.datalen] = data[i]
        ctx.datalen += 1
        if (ctx.datalen == BLOCK_SIZE) {
            transform(ctx, ctx.data[:])
            ctx.bitlen += 512
            ctx.datalen = 0
        }
    }
}

final :: proc(ctx: ^Md5_Context, hash: []byte) {
    i: u32
    i = ctx.datalen

    if ctx.datalen < 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < 56 {
            ctx.data[i] = 0x00
            i += 1
        }
    } else if ctx.datalen >= 56 {
        ctx.data[i] = 0x80
        i += 1
        for i < BLOCK_SIZE {
            ctx.data[i] = 0x00
            i += 1
        }
        transform(ctx, ctx.data[:])
        mem.set(&ctx.data, 0, 56)
    }

    ctx.bitlen += u64(ctx.datalen * 8)
    ctx.data[56] = byte(ctx.bitlen)
    ctx.data[57] = byte(ctx.bitlen >> 8)
    ctx.data[58] = byte(ctx.bitlen >> 16)
    ctx.data[59] = byte(ctx.bitlen >> 24)
    ctx.data[60] = byte(ctx.bitlen >> 32)
    ctx.data[61] = byte(ctx.bitlen >> 40)
    ctx.data[62] = byte(ctx.bitlen >> 48)
    ctx.data[63] = byte(ctx.bitlen >> 56)
    transform(ctx, ctx.data[:])

    for i = 0; i < 4; i += 1 {
        hash[i] = byte(ctx.state[0] >> (i * 8)) & 0x000000ff
        hash[i + 4] = byte(ctx.state[1] >> (i * 8)) & 0x000000ff
        hash[i + 8] = byte(ctx.state[2] >> (i * 8)) & 0x000000ff
        hash[i + 12] = byte(ctx.state[3] >> (i * 8)) & 0x000000ff
    }
}
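
/*
    Editor's note (illustration, not part of this commit): the eight manual
    byte stores above serialize ctx.bitlen little-endian; they are the
    little-endian counterpart of the single endian.unchecked_put_u64be call
    the rewritten SHA1 now uses for its big-endian length field.
*/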
/*
    MD5 implementation
*/

BLOCK_SIZE :: 64

Md5_Context :: struct {
    data: [BLOCK_SIZE]byte,
    state: [4]u32,
    bitlen: u64,
    datalen: u32,
}

/*
    @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
    and II respectively, instead of declaring them separately.
*/

FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + util.ROTL32(a + ((b & c) | (~b & d)) + m + t, s)
}

GG :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + util.ROTL32(a + ((b & d) | (c & ~d)) + m + t, s)
}

HH :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + util.ROTL32(a + (b ~ c ~ d) + m + t, s)
}

II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
    return b + util.ROTL32(a + (c ~ (b | ~d)) + m + t, s)
}

transform :: proc(ctx: ^Md5_Context, data: []byte) {
    i, j: u32
    m: [DIGEST_SIZE]u32

    for i, j = 0, 0; i < DIGEST_SIZE; i += 1 {
        m[i] = u32(data[j]) + u32(data[j + 1]) << 8 + u32(data[j + 2]) << 16 + u32(data[j + 3]) << 24
        j += 4
    }

    a := ctx.state[0]
    b := ctx.state[1]
    c := ctx.state[2]
    d := ctx.state[3]

    a = FF(a, b, c, d, m[0], 7, 0xd76aa478)
    d = FF(d, a, b, c, m[1], 12, 0xe8c7b756)
    c = FF(c, d, a, b, m[2], 17, 0x242070db)
    b = FF(b, c, d, a, m[3], 22, 0xc1bdceee)
    a = FF(a, b, c, d, m[4], 7, 0xf57c0faf)
    d = FF(d, a, b, c, m[5], 12, 0x4787c62a)
    c = FF(c, d, a, b, m[6], 17, 0xa8304613)
    b = FF(b, c, d, a, m[7], 22, 0xfd469501)
    a = FF(a, b, c, d, m[8], 7, 0x698098d8)
    d = FF(d, a, b, c, m[9], 12, 0x8b44f7af)
    c = FF(c, d, a, b, m[10], 17, 0xffff5bb1)
    b = FF(b, c, d, a, m[11], 22, 0x895cd7be)
    a = FF(a, b, c, d, m[12], 7, 0x6b901122)
    d = FF(d, a, b, c, m[13], 12, 0xfd987193)
    c = FF(c, d, a, b, m[14], 17, 0xa679438e)
    b = FF(b, c, d, a, m[15], 22, 0x49b40821)

    a = GG(a, b, c, d, m[1], 5, 0xf61e2562)
    d = GG(d, a, b, c, m[6], 9, 0xc040b340)
    c = GG(c, d, a, b, m[11], 14, 0x265e5a51)
    b = GG(b, c, d, a, m[0], 20, 0xe9b6c7aa)
    a = GG(a, b, c, d, m[5], 5, 0xd62f105d)
    d = GG(d, a, b, c, m[10], 9, 0x02441453)
    c = GG(c, d, a, b, m[15], 14, 0xd8a1e681)
    b = GG(b, c, d, a, m[4], 20, 0xe7d3fbc8)
    a = GG(a, b, c, d, m[9], 5, 0x21e1cde6)
    d = GG(d, a, b, c, m[14], 9, 0xc33707d6)
    c = GG(c, d, a, b, m[3], 14, 0xf4d50d87)
    b = GG(b, c, d, a, m[8], 20, 0x455a14ed)
    a = GG(a, b, c, d, m[13], 5, 0xa9e3e905)
    d = GG(d, a, b, c, m[2], 9, 0xfcefa3f8)
    c = GG(c, d, a, b, m[7], 14, 0x676f02d9)
    b = GG(b, c, d, a, m[12], 20, 0x8d2a4c8a)

    a = HH(a, b, c, d, m[5], 4, 0xfffa3942)
    d = HH(d, a, b, c, m[8], 11, 0x8771f681)
    c = HH(c, d, a, b, m[11], 16, 0x6d9d6122)
    b = HH(b, c, d, a, m[14], 23, 0xfde5380c)
    a = HH(a, b, c, d, m[1], 4, 0xa4beea44)
    d = HH(d, a, b, c, m[4], 11, 0x4bdecfa9)
    c = HH(c, d, a, b, m[7], 16, 0xf6bb4b60)
    b = HH(b, c, d, a, m[10], 23, 0xbebfbc70)
    a = HH(a, b, c, d, m[13], 4, 0x289b7ec6)
    d = HH(d, a, b, c, m[0], 11, 0xeaa127fa)
    c = HH(c, d, a, b, m[3], 16, 0xd4ef3085)
    b = HH(b, c, d, a, m[6], 23, 0x04881d05)
    a = HH(a, b, c, d, m[9], 4, 0xd9d4d039)
    d = HH(d, a, b, c, m[12], 11, 0xe6db99e5)
    c = HH(c, d, a, b, m[15], 16, 0x1fa27cf8)
    b = HH(b, c, d, a, m[2], 23, 0xc4ac5665)

    a = II(a, b, c, d, m[0], 6, 0xf4292244)
    d = II(d, a, b, c, m[7], 10, 0x432aff97)
    c = II(c, d, a, b, m[14], 15, 0xab9423a7)
    b = II(b, c, d, a, m[5], 21, 0xfc93a039)
    a = II(a, b, c, d, m[12], 6, 0x655b59c3)
    d = II(d, a, b, c, m[3], 10, 0x8f0ccc92)
    c = II(c, d, a, b, m[10], 15, 0xffeff47d)
    b = II(b, c, d, a, m[1], 21, 0x85845dd1)
    a = II(a, b, c, d, m[8], 6, 0x6fa87e4f)
    d = II(d, a, b, c, m[15], 10, 0xfe2ce6e0)
    c = II(c, d, a, b, m[6], 15, 0xa3014314)
    b = II(b, c, d, a, m[13], 21, 0x4e0811a1)
    a = II(a, b, c, d, m[4], 6, 0xf7537e82)
    d = II(d, a, b, c, m[11], 10, 0xbd3af235)
    c = II(c, d, a, b, m[2], 15, 0x2ad7d2bb)
    b = II(b, c, d, a, m[9], 21, 0xeb86d391)

    ctx.state[0] += a
    ctx.state[1] += b
    ctx.state[2] += c
    ctx.state[3] += d
}
@@ -1,8 +1,8 @@
package poly1305

import "core:crypto"
import "core:crypto/util"
import field "core:crypto/_fiat/field_poly1305"
import "core:encoding/endian"
import "core:mem"

KEY_SIZE :: 32
@@ -52,8 +52,8 @@ init :: proc (ctx: ^Context, key: []byte) {

    // r = le_bytes_to_num(key[0..15])
    // r = clamp(r) (r &= 0xffffffc0ffffffc0ffffffc0fffffff)
    tmp_lo := util.U64_LE(key[0:8]) & 0x0ffffffc0fffffff
    tmp_hi := util.U64_LE(key[8:16]) & 0xffffffc0ffffffc
    tmp_lo := endian.unchecked_get_u64le(key[0:]) & 0x0ffffffc0fffffff
    tmp_hi := endian.unchecked_get_u64le(key[8:]) & 0xffffffc0ffffffc
    field.fe_from_u64s(&ctx._r, tmp_lo, tmp_hi)

    // s = le_bytes_to_num(key[16..31])
@@ -151,7 +151,7 @@ _blocks :: proc (ctx: ^Context, msg: []byte, final := false) {
    data_len := len(data)
    for data_len >= _BLOCK_SIZE {
        // n = le_bytes_to_num(msg[((i-1)*16)..*i*16] | [0x01])
        field.fe_from_bytes(&n, data[:_BLOCK_SIZE], final_byte, false)
        field.fe_from_bytes(&n, data[:_BLOCK_SIZE], final_byte)

        // a += n
        field.fe_add(field.fe_relax_cast(&ctx._a), &ctx._a, &n) // _a unreduced
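
/*
    Editor's note (illustration, not part of this commit): the hunks above
    swap the bespoke util.U64_LE reader for core:encoding/endian. Both forms
    load the two 64-bit halves of r little-endian and clamp them; note that
    0xffffffc0ffffffc is numerically identical to 0x0ffffffc0ffffffc, the
    leading zero digit is simply not printed.
*/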
@@ -1,919 +0,0 @@
|
||||
package ripemd
|
||||
|
||||
/*
|
||||
Copyright 2021 zhibog
|
||||
Made available under the BSD-3 license.
|
||||
|
||||
List of contributors:
|
||||
zhibog, dotbmp: Initial implementation.
|
||||
|
||||
Implementation for the RIPEMD hashing algorithm as defined in <https://homes.esat.kuleuven.be/~bosselae/ripemd160.html>
|
||||
*/
|
||||
|
||||
import "core:os"
|
||||
import "core:io"
|
||||
|
||||
import "../util"
|
||||
|
||||
/*
|
||||
High level API
|
||||
*/
|
||||
|
||||
DIGEST_SIZE_128 :: 16
|
||||
DIGEST_SIZE_160 :: 20
|
||||
DIGEST_SIZE_256 :: 32
|
||||
DIGEST_SIZE_320 :: 40
|
||||
|
||||
// hash_string_128 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
|
||||
return hash_bytes_128(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_128 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
|
||||
hash: [DIGEST_SIZE_128]byte
|
||||
ctx: Ripemd128_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_128 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_128 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
|
||||
assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
|
||||
ctx: Ripemd128_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_128 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
|
||||
hash: [DIGEST_SIZE_128]byte
|
||||
ctx: Ripemd128_Context
|
||||
init(&ctx)
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_128 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_128(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_128(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_128]byte{}, false
|
||||
}
|
||||
|
||||
hash_128 :: proc {
|
||||
hash_stream_128,
|
||||
hash_file_128,
|
||||
hash_bytes_128,
|
||||
hash_string_128,
|
||||
hash_bytes_to_buffer_128,
|
||||
hash_string_to_buffer_128,
|
||||
}
|
||||
|
||||
// hash_string_160 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
|
||||
return hash_bytes_160(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_160 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
|
||||
hash: [DIGEST_SIZE_160]byte
|
||||
ctx: Ripemd160_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_160 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_160 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
|
||||
assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
|
||||
ctx: Ripemd160_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_160 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
|
||||
hash: [DIGEST_SIZE_160]byte
|
||||
ctx: Ripemd160_Context
|
||||
init(&ctx)
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_160 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_160(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_160(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_160]byte{}, false
|
||||
}
|
||||
|
||||
hash_160 :: proc {
|
||||
hash_stream_160,
|
||||
hash_file_160,
|
||||
hash_bytes_160,
|
||||
hash_string_160,
|
||||
hash_bytes_to_buffer_160,
|
||||
hash_string_to_buffer_160,
|
||||
}
|
||||
|
||||
// hash_string_256 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
|
||||
return hash_bytes_256(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_256 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
|
||||
hash: [DIGEST_SIZE_256]byte
|
||||
ctx: Ripemd256_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_256 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_256 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
|
||||
assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
|
||||
ctx: Ripemd256_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_256 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
|
||||
hash: [DIGEST_SIZE_256]byte
|
||||
ctx: Ripemd256_Context
|
||||
init(&ctx)
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_256 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_256(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_256(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_256]byte{}, false
|
||||
}
|
||||
|
||||
hash_256 :: proc {
|
||||
hash_stream_256,
|
||||
hash_file_256,
|
||||
hash_bytes_256,
|
||||
hash_string_256,
|
||||
hash_bytes_to_buffer_256,
|
||||
hash_string_to_buffer_256,
|
||||
}
|
||||
|
||||
// hash_string_320 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_string_320 :: proc(data: string) -> [DIGEST_SIZE_320]byte {
|
||||
return hash_bytes_320(transmute([]byte)(data))
|
||||
}
|
||||
|
||||
// hash_bytes_320 will hash the given input and return the
|
||||
// computed hash
|
||||
hash_bytes_320 :: proc(data: []byte) -> [DIGEST_SIZE_320]byte {
|
||||
hash: [DIGEST_SIZE_320]byte
|
||||
ctx: Ripemd320_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash[:])
|
||||
return hash
|
||||
}
|
||||
|
||||
// hash_string_to_buffer_320 will hash the given input and assign the
|
||||
// computed hash to the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_string_to_buffer_320 :: proc(data: string, hash: []byte) {
|
||||
hash_bytes_to_buffer_320(transmute([]byte)(data), hash)
|
||||
}
|
||||
|
||||
// hash_bytes_to_buffer_320 will hash the given input and write the
|
||||
// computed hash into the second parameter.
|
||||
// It requires that the destination buffer is at least as big as the digest size
|
||||
hash_bytes_to_buffer_320 :: proc(data, hash: []byte) {
|
||||
assert(len(hash) >= DIGEST_SIZE_320, "Size of destination buffer is smaller than the digest size")
|
||||
ctx: Ripemd320_Context
|
||||
init(&ctx)
|
||||
update(&ctx, data)
|
||||
final(&ctx, hash)
|
||||
}
|
||||
|
||||
// hash_stream_320 will read the stream in chunks and compute a
|
||||
// hash from its contents
|
||||
hash_stream_320 :: proc(s: io.Stream) -> ([DIGEST_SIZE_320]byte, bool) {
|
||||
hash: [DIGEST_SIZE_320]byte
|
||||
ctx: Ripemd320_Context
|
||||
init(&ctx)
|
||||
buf := make([]byte, 512)
|
||||
defer delete(buf)
|
||||
read := 1
|
||||
for read > 0 {
|
||||
read, _ = io.read(s, buf)
|
||||
if read > 0 {
|
||||
update(&ctx, buf[:read])
|
||||
}
|
||||
}
|
||||
final(&ctx, hash[:])
|
||||
return hash, true
|
||||
}
|
||||
|
||||
// hash_file_320 will read the file provided by the given handle
|
||||
// and compute a hash
|
||||
hash_file_320 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_320]byte, bool) {
|
||||
if !load_at_once {
|
||||
return hash_stream_320(os.stream_from_handle(hd))
|
||||
} else {
|
||||
if buf, ok := os.read_entire_file(hd); ok {
|
||||
return hash_bytes_320(buf[:]), ok
|
||||
}
|
||||
}
|
||||
return [DIGEST_SIZE_320]byte{}, false
|
||||
}
|
||||
|
||||
hash_320 :: proc {
|
||||
hash_stream_320,
|
||||
hash_file_320,
|
||||
hash_bytes_320,
|
||||
hash_string_320,
|
||||
hash_bytes_to_buffer_320,
|
||||
hash_string_to_buffer_320,
|
||||
}
|
||||
|
||||
/*
|
||||
Low level API
|
||||
*/
|
||||
|
||||
init :: proc(ctx: ^$T) {
|
||||
when T == Ripemd128_Context {
|
||||
ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] = S0, S1, S2, S3
|
||||
} else when T == Ripemd160_Context {
|
||||
ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] = S0, S1, S2, S3, S4
|
||||
} else when T == Ripemd256_Context {
|
||||
ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] = S0, S1, S2, S3
|
||||
ctx.s[4], ctx.s[5], ctx.s[6], ctx.s[7] = S5, S6, S7, S8
|
||||
} else when T == Ripemd320_Context {
|
||||
ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] = S0, S1, S2, S3, S4
|
||||
ctx.s[5], ctx.s[6], ctx.s[7], ctx.s[8], ctx.s[9] = S5, S6, S7, S8, S9
|
||||
}
|
||||
}
|
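
/*
    Usage sketch (editor's illustration, not part of this commit): init,
    update and final are parametrically polymorphic and dispatch on the
    context type, so selecting a RIPEMD variant is just a declaration:

        ctx: Ripemd160_Context
        digest: [DIGEST_SIZE_160]byte
        init(&ctx)
        update(&ctx, transmute([]byte)string("data"))
        final(&ctx, digest[:])
*/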
update :: proc(ctx: ^$T, data: []byte) {
    ctx.tc += u64(len(data))
    data := data
    if ctx.nx > 0 {
        n := len(data)

        when T == Ripemd128_Context {
            if n > RIPEMD_128_BLOCK_SIZE - ctx.nx {
                n = RIPEMD_128_BLOCK_SIZE - ctx.nx
            }
        } else when T == Ripemd160_Context {
            if n > RIPEMD_160_BLOCK_SIZE - ctx.nx {
                n = RIPEMD_160_BLOCK_SIZE - ctx.nx
            }
        } else when T == Ripemd256_Context {
            if n > RIPEMD_256_BLOCK_SIZE - ctx.nx {
                n = RIPEMD_256_BLOCK_SIZE - ctx.nx
            }
        } else when T == Ripemd320_Context {
            if n > RIPEMD_320_BLOCK_SIZE - ctx.nx {
                n = RIPEMD_320_BLOCK_SIZE - ctx.nx
            }
        }

        for i := 0; i < n; i += 1 {
            ctx.x[ctx.nx + i] = data[i]
        }

        ctx.nx += n
        when T == Ripemd128_Context {
            if ctx.nx == RIPEMD_128_BLOCK_SIZE {
                block(ctx, ctx.x[0:])
                ctx.nx = 0
            }
        } else when T == Ripemd160_Context {
            if ctx.nx == RIPEMD_160_BLOCK_SIZE {
                block(ctx, ctx.x[0:])
                ctx.nx = 0
            }
        } else when T == Ripemd256_Context {
            if ctx.nx == RIPEMD_256_BLOCK_SIZE {
                block(ctx, ctx.x[0:])
                ctx.nx = 0
            }
        } else when T == Ripemd320_Context {
            if ctx.nx == RIPEMD_320_BLOCK_SIZE {
                block(ctx, ctx.x[0:])
                ctx.nx = 0
            }
        }
        data = data[n:]
    }
    n := block(ctx, data)
    data = data[n:]
    if len(data) > 0 {
        ctx.nx = copy(ctx.x[:], data)
    }
}

final :: proc(ctx: ^$T, hash: []byte) {
    d := ctx
    tc := d.tc
    tmp: [64]byte
    tmp[0] = 0x80

    if tc % 64 < 56 {
        update(d, tmp[0:56 - tc % 64])
    } else {
        update(d, tmp[0:64 + 56 - tc % 64])
    }

    tc <<= 3
    for i: u32 = 0; i < 8; i += 1 {
        tmp[i] = byte(tc >> (8 * i))
    }

    update(d, tmp[0:8])

    when T == Ripemd128_Context {
        size :: RIPEMD_128_SIZE
    } else when T == Ripemd160_Context {
        size :: RIPEMD_160_SIZE
    } else when T == Ripemd256_Context {
        size :: RIPEMD_256_SIZE
    } else when T == Ripemd320_Context {
        size :: RIPEMD_320_SIZE
    }

    digest: [size]byte
    for s, i in d.s {
        digest[i * 4] = byte(s)
        digest[i * 4 + 1] = byte(s >> 8)
        digest[i * 4 + 2] = byte(s >> 16)
        digest[i * 4 + 3] = byte(s >> 24)
    }
    copy(hash[:], digest[:])
}
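
/*
    Worked example (editor's illustration, not part of this commit): `final`
    pads Merkle-Damgard style. With tc = 3 bytes hashed, tc % 64 = 3 < 56, so
    53 bytes (0x80 then zeroes) are appended, followed by the 8-byte
    little-endian bit count tc << 3 = 24, filling the block to exactly 64.
*/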
/*
    RIPEMD implementation
*/

Ripemd128_Context :: struct {
    s: [4]u32,
    x: [RIPEMD_128_BLOCK_SIZE]byte,
    nx: int,
    tc: u64,
}

Ripemd160_Context :: struct {
    s: [5]u32,
    x: [RIPEMD_160_BLOCK_SIZE]byte,
    nx: int,
    tc: u64,
}

Ripemd256_Context :: struct {
    s: [8]u32,
    x: [RIPEMD_256_BLOCK_SIZE]byte,
    nx: int,
    tc: u64,
}

Ripemd320_Context :: struct {
    s: [10]u32,
    x: [RIPEMD_320_BLOCK_SIZE]byte,
    nx: int,
    tc: u64,
}

RIPEMD_128_SIZE :: 16
RIPEMD_128_BLOCK_SIZE :: 64
RIPEMD_160_SIZE :: 20
RIPEMD_160_BLOCK_SIZE :: 64
RIPEMD_256_SIZE :: 32
RIPEMD_256_BLOCK_SIZE :: 64
RIPEMD_320_SIZE :: 40
RIPEMD_320_BLOCK_SIZE :: 64

S0 :: 0x67452301
S1 :: 0xefcdab89
S2 :: 0x98badcfe
S3 :: 0x10325476
S4 :: 0xc3d2e1f0
S5 :: 0x76543210
S6 :: 0xfedcba98
S7 :: 0x89abcdef
S8 :: 0x01234567
S9 :: 0x3c2d1e0f

RIPEMD_128_N0 := [64]uint {
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
    7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8,
    3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12,
    1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2,
}

RIPEMD_128_R0 := [64]uint {
    11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8,
    7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12,
    11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5,
    11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12,
}

RIPEMD_128_N1 := [64]uint {
    5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12,
    6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2,
    15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13,
    8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14,
}

RIPEMD_128_R1 := [64]uint {
    8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6,
    9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11,
    9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5,
    15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8,
}

RIPEMD_160_N0 := [80]uint {
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
    7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8,
    3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12,
    1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2,
    4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13,
}

RIPEMD_160_R0 := [80]uint {
    11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8,
    7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12,
    11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5,
    11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12,
    9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6,
}

RIPEMD_160_N1 := [80]uint {
    5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12,
    6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2,
    15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13,
    8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14,
    12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11,
}

RIPEMD_160_R1 := [80]uint {
    8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6,
    9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11,
    9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5,
    15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8,
    8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11,
}

block :: #force_inline proc (ctx: ^$T, p: []byte) -> int {
    when T == Ripemd128_Context {
        return ripemd_128_block(ctx, p)
    } else when T == Ripemd160_Context {
        return ripemd_160_block(ctx, p)
    } else when T == Ripemd256_Context {
        return ripemd_256_block(ctx, p)
    } else when T == Ripemd320_Context {
        return ripemd_320_block(ctx, p)
    }
}

ripemd_128_block :: proc(ctx: ^$T, p: []byte) -> int {
    n := 0
    x: [16]u32 = ---
    alpha: u32 = ---
    p := p
    for len(p) >= RIPEMD_128_BLOCK_SIZE {
        a, b, c, d := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3]
        aa, bb, cc, dd := a, b, c, d
        for i, j := 0, 0; i < 16; i, j = i+1, j+4 {
            x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
        }
        i := 0
        for i < 16 {
            alpha = a + (b ~ c ~ d) + x[RIPEMD_128_N0[i]]
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (bb & dd | cc &~ dd) + x[RIPEMD_128_N1[i]] + 0x50a28be6
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        for i < 32 {
            alpha = a + (d ~ (b & (c~d))) + x[RIPEMD_128_N0[i]] + 0x5a827999
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (dd ~ (bb | ~cc)) + x[RIPEMD_128_N1[i]] + 0x5c4dd124
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        for i < 48 {
            alpha = a + (d ~ (b | ~c)) + x[RIPEMD_128_N0[i]] + 0x6ed9eba1
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (dd ~ (bb & (cc~dd))) + x[RIPEMD_128_N1[i]] + 0x6d703ef3
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        for i < 64 {
            alpha = a + (c ~ (d & (b~c))) + x[RIPEMD_128_N0[i]] + 0x8f1bbcdc
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_128_N1[i]]
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        c = ctx.s[1] + c + dd
        ctx.s[1] = ctx.s[2] + d + aa
        ctx.s[2] = ctx.s[3] + a + bb
        ctx.s[3] = ctx.s[0] + b + cc
        ctx.s[0] = c
        p = p[RIPEMD_128_BLOCK_SIZE:]
        n += RIPEMD_128_BLOCK_SIZE
    }
    return n
}

ripemd_160_block :: proc(ctx: ^$T, p: []byte) -> int {
    n := 0
    x: [16]u32 = ---
    alpha, beta: u32 = ---, ---
    p := p
    for len(p) >= RIPEMD_160_BLOCK_SIZE {
        a, b, c, d, e := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4]
        aa, bb, cc, dd, ee := a, b, c, d, e
        for i, j := 0, 0; i < 16; i, j = i+1, j+4 {
            x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
        }
        i := 0
        for i < 16 {
            alpha = a + (b ~ c ~ d) + x[RIPEMD_160_N0[i]]
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb ~ (cc | ~dd)) + x[RIPEMD_160_N1[i]] + 0x50a28be6
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        for i < 32 {
            alpha = a + (b&c | ~b&d) + x[RIPEMD_160_N0[i]] + 0x5a827999
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb&dd | cc&~dd) + x[RIPEMD_160_N1[i]] + 0x5c4dd124
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        for i < 48 {
            alpha = a + (b | ~c ~ d) + x[RIPEMD_160_N0[i]] + 0x6ed9eba1
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb | ~cc ~ dd) + x[RIPEMD_160_N1[i]] + 0x6d703ef3
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        for i < 64 {
            alpha = a + (b&d | c&~d) + x[RIPEMD_160_N0[i]] + 0x8f1bbcdc
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb&cc | ~bb&dd) + x[RIPEMD_160_N1[i]] + 0x7a6d76e9
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        for i < 80 {
            alpha = a + (b ~ (c | ~d)) + x[RIPEMD_160_N0[i]] + 0xa953fd4e
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_160_N1[i]]
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        dd += c + ctx.s[1]
        ctx.s[1] = ctx.s[2] + d + ee
        ctx.s[2] = ctx.s[3] + e + aa
        ctx.s[3] = ctx.s[4] + a + bb
        ctx.s[4] = ctx.s[0] + b + cc
        ctx.s[0] = dd
        p = p[RIPEMD_160_BLOCK_SIZE:]
        n += RIPEMD_160_BLOCK_SIZE
    }
    return n
}

ripemd_256_block :: proc(ctx: ^$T, p: []byte) -> int {
    n := 0
    x: [16]u32 = ---
    alpha: u32 = ---
    p := p
    for len(p) >= RIPEMD_256_BLOCK_SIZE {
        a, b, c, d := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3]
        aa, bb, cc, dd := ctx.s[4], ctx.s[5], ctx.s[6], ctx.s[7]
        for i, j := 0, 0; i < 16; i, j = i+1, j+4 {
            x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
        }
        i := 0
        for i < 16 {
            alpha = a + (b ~ c ~ d) + x[RIPEMD_128_N0[i]]
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (bb & dd | cc &~ dd) + x[RIPEMD_128_N1[i]] + 0x50a28be6
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        t := a
        a = aa
        aa = t
        for i < 32 {
            alpha = a + (d ~ (b & (c~d))) + x[RIPEMD_128_N0[i]] + 0x5a827999
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (dd ~ (bb | ~cc)) + x[RIPEMD_128_N1[i]] + 0x5c4dd124
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        t = b
        b = bb
        bb = t
        for i < 48 {
            alpha = a + (d ~ (b | ~c)) + x[RIPEMD_128_N0[i]] + 0x6ed9eba1
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (dd ~ (bb & (cc~dd))) + x[RIPEMD_128_N1[i]] + 0x6d703ef3
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        t = c
        c = cc
        cc = t
        for i < 64 {
            alpha = a + (c ~ (d & (b~c))) + x[RIPEMD_128_N0[i]] + 0x8f1bbcdc
            s := int(RIPEMD_128_R0[i])
            alpha = util.ROTL32(alpha, s)
            a, b, c, d = d, alpha, b, c
            alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_128_N1[i]]
            s = int(RIPEMD_128_R1[i])
            alpha = util.ROTL32(alpha, s)
            aa, bb, cc, dd = dd, alpha, bb, cc
            i += 1
        }
        t = d
        d = dd
        dd = t
        ctx.s[0] += a
        ctx.s[1] += b
        ctx.s[2] += c
        ctx.s[3] += d
        ctx.s[4] += aa
        ctx.s[5] += bb
        ctx.s[6] += cc
        ctx.s[7] += dd
        p = p[RIPEMD_256_BLOCK_SIZE:]
        n += RIPEMD_256_BLOCK_SIZE
    }
    return n
}

ripemd_320_block :: proc(ctx: ^$T, p: []byte) -> int {
    n := 0
    x: [16]u32 = ---
    alpha, beta: u32 = ---, ---
    p := p
    for len(p) >= RIPEMD_320_BLOCK_SIZE {
        a, b, c, d, e := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4]
        aa, bb, cc, dd, ee := ctx.s[5], ctx.s[6], ctx.s[7], ctx.s[8], ctx.s[9]
        for i, j := 0, 0; i < 16; i, j = i+1, j+4 {
            x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
        }
        i := 0
        for i < 16 {
            alpha = a + (b ~ c ~ d) + x[RIPEMD_160_N0[i]]
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb ~ (cc | ~dd)) + x[RIPEMD_160_N1[i]] + 0x50a28be6
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        t := b
        b = bb
        bb = t
        for i < 32 {
            alpha = a + (b&c | ~b&d) + x[RIPEMD_160_N0[i]] + 0x5a827999
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb&dd | cc&~dd) + x[RIPEMD_160_N1[i]] + 0x5c4dd124
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        t = d
        d = dd
        dd = t
        for i < 48 {
            alpha = a + (b | ~c ~ d) + x[RIPEMD_160_N0[i]] + 0x6ed9eba1
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb | ~cc ~ dd) + x[RIPEMD_160_N1[i]] + 0x6d703ef3
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        t = a
        a = aa
        aa = t
        for i < 64 {
            alpha = a + (b&d | c&~d) + x[RIPEMD_160_N0[i]] + 0x8f1bbcdc
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb&cc | ~bb&dd) + x[RIPEMD_160_N1[i]] + 0x7a6d76e9
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        t = c
        c = cc
        cc = t
        for i < 80 {
            alpha = a + (b ~ (c | ~d)) + x[RIPEMD_160_N0[i]] + 0xa953fd4e
            s := int(RIPEMD_160_R0[i])
            alpha = util.ROTL32(alpha, s) + e
            beta = util.ROTL32(c, 10)
            a, b, c, d, e = e, alpha, b, beta, d
            alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_160_N1[i]]
            s = int(RIPEMD_160_R1[i])
            alpha = util.ROTL32(alpha, s) + ee
            beta = util.ROTL32(cc, 10)
            aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
            i += 1
        }
        t = e
        e = ee
        ee = t
        ctx.s[0] += a
        ctx.s[1] += b
        ctx.s[2] += c
        ctx.s[3] += d
        ctx.s[4] += e
        ctx.s[5] += aa
        ctx.s[6] += bb
        ctx.s[7] += cc
        ctx.s[8] += dd
        ctx.s[9] += ee
        p = p[RIPEMD_320_BLOCK_SIZE:]
        n += RIPEMD_320_BLOCK_SIZE
    }
    return n
}
||||
@@ -1,246 +0,0 @@
package sha1

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
*/

import "core:mem"
import "core:os"
import "core:io"

import "../util"

/*
    High level API
*/

DIGEST_SIZE :: 20

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Sha1_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Sha1_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Sha1_Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}
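For reference, this removed high-level API was driven through the overloaded `hash` group above. A minimal sketch of how it was called before this commit deleted the package (the core:crypto/sha1 import path is the historical one and is assumed here):

package main

import "core:fmt"
import "core:crypto/sha1"

main :: proc() {
	// One-shot hashing; overload resolution picks hash_string.
	digest := sha1.hash("Hellope")
	fmt.printf("%x\n", digest)

	// Into a caller-provided buffer, which must be >= DIGEST_SIZE.
	buf: [sha1.DIGEST_SIZE]byte
	sha1.hash("Hellope", buf[:])
}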
/*
    Low level API
*/

init :: proc(ctx: ^Sha1_Context) {
	ctx.state[0] = 0x67452301
	ctx.state[1] = 0xefcdab89
	ctx.state[2] = 0x98badcfe
	ctx.state[3] = 0x10325476
	ctx.state[4] = 0xc3d2e1f0
	ctx.k[0] = 0x5a827999
	ctx.k[1] = 0x6ed9eba1
	ctx.k[2] = 0x8f1bbcdc
	ctx.k[3] = 0xca62c1d6
}

update :: proc(ctx: ^Sha1_Context, data: []byte) {
	for i := 0; i < len(data); i += 1 {
		ctx.data[ctx.datalen] = data[i]
		ctx.datalen += 1
		if (ctx.datalen == BLOCK_SIZE) {
			transform(ctx, ctx.data[:])
			ctx.bitlen += 512
			ctx.datalen = 0
		}
	}
}

final :: proc(ctx: ^Sha1_Context, hash: []byte) {
	i := ctx.datalen

	if ctx.datalen < 56 {
		ctx.data[i] = 0x80
		i += 1
		for i < 56 {
			ctx.data[i] = 0x00
			i += 1
		}
	} else {
		ctx.data[i] = 0x80
		i += 1
		for i < BLOCK_SIZE {
			ctx.data[i] = 0x00
			i += 1
		}
		transform(ctx, ctx.data[:])
		mem.set(&ctx.data, 0, 56)
	}

	ctx.bitlen += u64(ctx.datalen * 8)
	ctx.data[63] = u8(ctx.bitlen)
	ctx.data[62] = u8(ctx.bitlen >> 8)
	ctx.data[61] = u8(ctx.bitlen >> 16)
	ctx.data[60] = u8(ctx.bitlen >> 24)
	ctx.data[59] = u8(ctx.bitlen >> 32)
	ctx.data[58] = u8(ctx.bitlen >> 40)
	ctx.data[57] = u8(ctx.bitlen >> 48)
	ctx.data[56] = u8(ctx.bitlen >> 56)
	transform(ctx, ctx.data[:])

	for j: u32 = 0; j < 4; j += 1 {
		hash[j] = u8(ctx.state[0] >> (24 - j * 8)) & 0x000000ff
		hash[j + 4] = u8(ctx.state[1] >> (24 - j * 8)) & 0x000000ff
		hash[j + 8] = u8(ctx.state[2] >> (24 - j * 8)) & 0x000000ff
		hash[j + 12] = u8(ctx.state[3] >> (24 - j * 8)) & 0x000000ff
		hash[j + 16] = u8(ctx.state[4] >> (24 - j * 8)) & 0x000000ff
	}
}
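The padding in `final` is the usual Merkle-Damgard scheme: append 0x80, zero-fill up to byte 56 of the block, then store the message length in bits as a 64-bit big-endian value in bytes 56..63. A small standalone sketch of the pad-length arithmetic (names are illustrative):

package main

import "core:fmt"

// Sketch: number of pad bytes (the 0x80 marker plus zeros) SHA-1 appends
// before the 8-byte length field, for a message of n bytes. Mirrors the
// two branches in `final` above.
sha1_pad_len :: proc(n: int) -> int {
	rem := n % 64
	if rem < 56 {
		return 56 - rem
	}
	return 64 + 56 - rem
}

main :: proc() {
	fmt.println(sha1_pad_len(3))  // 53: 3 + 53 + 8 = one 64-byte block
	fmt.println(sha1_pad_len(56)) // 64: padding spills into a second block
}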
/*
    SHA1 implementation
*/

BLOCK_SIZE :: 64

Sha1_Context :: struct {
	data:    [BLOCK_SIZE]byte,
	datalen: u32,
	bitlen:  u64,
	state:   [5]u32,
	k:       [4]u32,
}

transform :: proc(ctx: ^Sha1_Context, data: []byte) {
	a, b, c, d, e, i, j, t: u32
	m: [80]u32

	for i, j = 0, 0; i < 16; i += 1 {
		m[i] = u32(data[j]) << 24 + u32(data[j + 1]) << 16 + u32(data[j + 2]) << 8 + u32(data[j + 3])
		j += 4
	}
	for i < 80 {
		m[i] = (m[i - 3] ~ m[i - 8] ~ m[i - 14] ~ m[i - 16])
		m[i] = (m[i] << 1) | (m[i] >> 31)
		i += 1
	}

	a = ctx.state[0]
	b = ctx.state[1]
	c = ctx.state[2]
	d = ctx.state[3]
	e = ctx.state[4]

	for i = 0; i < 20; i += 1 {
		t = util.ROTL32(a, 5) + ((b & c) ~ (~b & d)) + e + ctx.k[0] + m[i]
		e = d
		d = c
		c = util.ROTL32(b, 30)
		b = a
		a = t
	}
	for i < 40 {
		t = util.ROTL32(a, 5) + (b ~ c ~ d) + e + ctx.k[1] + m[i]
		e = d
		d = c
		c = util.ROTL32(b, 30)
		b = a
		a = t
		i += 1
	}
	for i < 60 {
		t = util.ROTL32(a, 5) + ((b & c) ~ (b & d) ~ (c & d)) + e + ctx.k[2] + m[i]
		e = d
		d = c
		c = util.ROTL32(b, 30)
		b = a
		a = t
		i += 1
	}
	for i < 80 {
		t = util.ROTL32(a, 5) + (b ~ c ~ d) + e + ctx.k[3] + m[i]
		e = d
		d = c
		c = util.ROTL32(b, 30)
		b = a
		a = t
		i += 1
	}

	ctx.state[0] += a
	ctx.state[1] += b
	ctx.state[2] += c
	ctx.state[3] += d
	ctx.state[4] += e
}

File diff suppressed because it is too large
@@ -11,8 +11,8 @@ package sha3

    If you wish to compute a Keccak hash, you can use the keccak package; it will use the original padding.
*/

import "core:os"
import "core:io"
import "core:os"

import "../_sha3"

@@ -28,333 +28,337 @@ DIGEST_SIZE_512 :: 64
// hash_string_224 will hash the given input and return the
// computed hash
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	return hash_bytes_224(transmute([]byte)(data))
}

// hash_bytes_224 will hash the given input and return the
// computed hash
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_224
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_224 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_224 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_224
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_224 will read the stream in chunks and compute a
// hash from its contents
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_224
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_224
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_224 will read the file provided by the given handle
// and compute a hash
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_224(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_224]byte{}, false
}

hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_384 will hash the given input and return the
// computed hash
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	return hash_bytes_384(transmute([]byte)(data))
}

// hash_bytes_384 will hash the given input and return the
// computed hash
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_384
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_384 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_384 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_384
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_384 will read the stream in chunks and compute a
// hash from its contents
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_384
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_384
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_384 will read the file provided by the given handle
// and compute a hash
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_384(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_384]byte{}, false
}

hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_512
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.final(&ctx, hash)
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_512
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_512
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_512
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
    Low level API
*/

Sha3_Context :: _sha3.Sha3_Context
Context :: _sha3.Sha3_Context

init :: proc(ctx: ^_sha3.Sha3_Context) {
	_sha3.init(ctx)
init :: proc(ctx: ^Context) {
	_sha3.init(ctx)
}

update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
	_sha3.update(ctx, data)
update :: proc(ctx: ^Context, data: []byte) {
	_sha3.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
	_sha3.final(ctx, hash)
final :: proc(ctx: ^Context, hash: []byte) {
	_sha3.final(ctx, hash)
}
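With the renamed Context alias and the un-prefixed init/update/final above, incremental hashing looks like this; a minimal sketch assuming the core:crypto/sha3 import path and that mdlen stays directly assignable, as the high-level procs above do:

package main

import "core:fmt"
import "core:crypto/sha3"

main :: proc() {
	// Streaming SHA3-256: mdlen selects the variant before init.
	ctx: sha3.Context
	ctx.mdlen = sha3.DIGEST_SIZE_256
	sha3.init(&ctx)

	part1 := "Hellope, "
	part2 := "World!"
	sha3.update(&ctx, transmute([]byte)(part1))
	sha3.update(&ctx, transmute([]byte)(part2))

	digest: [sha3.DIGEST_SIZE_256]byte
	sha3.final(&ctx, digest[:])
	fmt.printf("%x\n", digest)
}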
@@ -9,10 +9,13 @@ package shake

    Interface for the SHAKE hashing algorithm.
    The SHA3 functionality can be found in package sha3.

    TODO: This should provide an incremental squeeze interface, in addition
    to the one-shot final call.
*/

import "core:os"
import "core:io"
import "core:os"

import "../_sha3"

@@ -26,182 +29,178 @@ DIGEST_SIZE_256 :: 32
// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
	return hash_bytes_128(transmute([]byte)(data))
}

// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
	hash: [DIGEST_SIZE_128]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_128
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE_128]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_128
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_128
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash)
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_128
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
	hash: [DIGEST_SIZE_128]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_128
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE_128]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_128
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
	if !load_at_once {
		return hash_stream_128(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_128(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_128]byte{}, false
}

hash_128 :: proc {
	hash_stream_128,
	hash_file_128,
	hash_bytes_128,
	hash_string_128,
	hash_bytes_to_buffer_128,
	hash_string_to_buffer_128,
}

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	_sha3.update(&ctx, data)
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash)
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: _sha3.Sha3_Context
	ctx.mdlen = DIGEST_SIZE_256
	_sha3.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_sha3.update(&ctx, buf[:read])
		}
	}
	_sha3.shake_xof(&ctx)
	_sha3.shake_out(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

/*
    Low level API
*/

Shake_Context :: _sha3.Sha3_Context
Context :: _sha3.Sha3_Context

init :: proc(ctx: ^_sha3.Sha3_Context) {
	_sha3.init(ctx)
init :: proc(ctx: ^Context) {
	_sha3.init(ctx)
}

update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
	_sha3.update(ctx, data)
update :: proc(ctx: ^Context, data: []byte) {
	_sha3.update(ctx, data)
}

final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
	_sha3.shake_xof(ctx)
	_sha3.shake_out(ctx, hash[:])
final :: proc(ctx: ^Context, hash: []byte) {
	_sha3.shake_xof(ctx)
	_sha3.shake_out(ctx, hash[:])
}
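On the TODO above: since SHAKE is an extendable-output function, a follow-up could expose repeated squeezing instead of a single final call. A purely hypothetical sketch of such an interface, built on the same _sha3.shake_xof/_sha3.shake_out primitives used in `final`; the proc name `read` and the `is_finalized` bookkeeping field are assumptions, not part of this commit, and repeated shake_out calls would need support in _sha3:

// Hypothetical incremental squeeze for package shake (sketch only).
// Assumes Context gains an is_finalized flag and that _sha3.shake_out
// can be called repeatedly to emit successive output bytes.
read :: proc(ctx: ^Context, dst: []byte) {
	if !ctx.is_finalized {
		_sha3.shake_xof(ctx) // pad and switch the sponge to squeezing
		ctx.is_finalized = true
	}
	_sha3.shake_out(ctx, dst) // squeeze the next len(dst) bytes
}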
@@ -13,202 +13,200 @@ package siphash
*/

import "core:crypto"
import "core:crypto/util"
import "core:encoding/endian"
import "core:math/bits"

/*
    High level API
*/

KEY_SIZE :: 16
DIGEST_SIZE :: 8

// sum_string_1_3 will hash the given message with the key and return
// the computed hash as a u64
sum_string_1_3 :: proc(msg, key: string) -> u64 {
	return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key))
}

// sum_bytes_1_3 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 {
	ctx: Context
	hash: u64
	init(&ctx, key, 1, 3)
	update(&ctx, msg)
	final(&ctx, &hash)
	return hash
sum_bytes_1_3 :: proc(msg, key: []byte) -> u64 {
	ctx: Context
	hash: u64
	init(&ctx, key, 1, 3)
	update(&ctx, msg)
	final(&ctx, &hash)
	return hash
}

// sum_string_to_buffer_1_3 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_1_3 :: proc(msg, key: string, dst: []byte) {
	sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst)
}

// sum_bytes_to_buffer_1_3 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_1_3 :: proc(msg, key, dst: []byte) {
	assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8")
	hash := sum_bytes_1_3(msg, key)
	_collect_output(dst[:], hash)
}

sum_1_3 :: proc {
	sum_string_1_3,
	sum_bytes_1_3,
	sum_string_to_buffer_1_3,
	sum_bytes_to_buffer_1_3,
}

// verify_u64_1_3 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_u64_1_3 :: proc (tag: u64 msg, key: []byte) -> bool {
	return sum_bytes_1_3(msg, key) == tag
verify_u64_1_3 :: proc(tag: u64, msg, key: []byte) -> bool {
	return sum_bytes_1_3(msg, key) == tag
}

// verify_bytes will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_bytes_1_3 :: proc (tag, msg, key: []byte) -> bool {
	derived_tag: [8]byte
	sum_bytes_to_buffer_1_3(msg, key, derived_tag[:])
	return crypto.compare_constant_time(derived_tag[:], tag) == 1
verify_bytes_1_3 :: proc(tag, msg, key: []byte) -> bool {
	derived_tag: [8]byte
	sum_bytes_to_buffer_1_3(msg, key, derived_tag[:])
	return crypto.compare_constant_time(derived_tag[:], tag) == 1
}

verify_1_3 :: proc {
	verify_bytes_1_3,
	verify_u64_1_3,
}

// sum_string_2_4 will hash the given message with the key and return
// the computed hash as a u64
sum_string_2_4 :: proc(msg, key: string) -> u64 {
	return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key))
}

// sum_bytes_2_4 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_2_4 :: proc (msg, key: []byte) -> u64 {
	ctx: Context
	hash: u64
	init(&ctx, key, 2, 4)
	update(&ctx, msg)
	final(&ctx, &hash)
	return hash
sum_bytes_2_4 :: proc(msg, key: []byte) -> u64 {
	ctx: Context
	hash: u64
	init(&ctx, key, 2, 4)
	update(&ctx, msg)
	final(&ctx, &hash)
	return hash
}

// sum_string_to_buffer_2_4 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_2_4 :: proc(msg, key: string, dst: []byte) {
	sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst)
}

// sum_bytes_to_buffer_2_4 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_2_4 :: proc(msg, key, dst: []byte) {
	assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8")
	hash := sum_bytes_2_4(msg, key)
	_collect_output(dst[:], hash)
}

sum_2_4 :: proc {
	sum_string_2_4,
	sum_bytes_2_4,
	sum_string_to_buffer_2_4,
	sum_bytes_to_buffer_2_4,
}

sum_string :: sum_string_2_4
sum_bytes :: sum_bytes_2_4
sum_string_to_buffer :: sum_string_to_buffer_2_4
sum_bytes_to_buffer :: sum_bytes_to_buffer_2_4

sum :: proc {
	sum_string,
	sum_bytes,
	sum_string_to_buffer,
	sum_bytes_to_buffer,
}

// verify_u64_2_4 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_u64_2_4 :: proc (tag: u64 msg, key: []byte) -> bool {
	return sum_bytes_2_4(msg, key) == tag
verify_u64_2_4 :: proc(tag: u64, msg, key: []byte) -> bool {
	return sum_bytes_2_4(msg, key) == tag
}

// verify_bytes will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_bytes_2_4 :: proc (tag, msg, key: []byte) -> bool {
	derived_tag: [8]byte
	sum_bytes_to_buffer_2_4(msg, key, derived_tag[:])
	return crypto.compare_constant_time(derived_tag[:], tag) == 1
verify_bytes_2_4 :: proc(tag, msg, key: []byte) -> bool {
	derived_tag: [8]byte
	sum_bytes_to_buffer_2_4(msg, key, derived_tag[:])
	return crypto.compare_constant_time(derived_tag[:], tag) == 1
}

verify_2_4 :: proc {
	verify_bytes_2_4,
	verify_u64_2_4,
}

verify_bytes :: verify_bytes_2_4
verify_u64 :: verify_u64_2_4

verify :: proc {
	verify_bytes,
	verify_u64,
}

// sum_string_4_8 will hash the given message with the key and return
// the computed hash as a u64
sum_string_4_8 :: proc(msg, key: string) -> u64 {
	return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key))
}

// sum_bytes_4_8 will hash the given message with the key and return
// the computed hash as a u64
sum_bytes_4_8 :: proc (msg, key: []byte) -> u64 {
	ctx: Context
	hash: u64
	init(&ctx, key, 4, 8)
	update(&ctx, msg)
	final(&ctx, &hash)
	return hash
sum_bytes_4_8 :: proc(msg, key: []byte) -> u64 {
	ctx: Context
	hash: u64
	init(&ctx, key, 4, 8)
	update(&ctx, msg)
	final(&ctx, &hash)
	return hash
}

// sum_string_to_buffer_4_8 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_string_to_buffer_4_8 :: proc(msg, key: string, dst: []byte) {
	sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst)
}

// sum_bytes_to_buffer_4_8 will hash the given message with the key and write
// the computed hash into the provided destination buffer
sum_bytes_to_buffer_4_8 :: proc(msg, key, dst: []byte) {
	assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8")
	hash := sum_bytes_4_8(msg, key)
	_collect_output(dst[:], hash)
}

sum_4_8 :: proc {
	sum_string_4_8,
	sum_bytes_4_8,
	sum_string_to_buffer_4_8,
	sum_bytes_to_buffer_4_8,
}

// verify_u64_4_8 will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_u64_4_8 :: proc (tag: u64 msg, key: []byte) -> bool {
	return sum_bytes_4_8(msg, key) == tag
verify_u64_4_8 :: proc(tag: u64, msg, key: []byte) -> bool {
	return sum_bytes_4_8(msg, key) == tag
}

// verify_bytes will check if the supplied tag matches with the output you
// will get from the provided message and key
verify_bytes_4_8 :: proc (tag, msg, key: []byte) -> bool {
	derived_tag: [8]byte
	sum_bytes_to_buffer_4_8(msg, key, derived_tag[:])
	return crypto.compare_constant_time(derived_tag[:], tag) == 1
verify_bytes_4_8 :: proc(tag, msg, key: []byte) -> bool {
	derived_tag: [8]byte
	sum_bytes_to_buffer_4_8(msg, key, derived_tag[:])
	return crypto.compare_constant_time(derived_tag[:], tag) == 1
}

verify_4_8 :: proc {
	verify_bytes_4_8,
	verify_u64_4_8,
}
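For context, the keyed sum/verify pairs above are used like this; a minimal sketch assuming the core:crypto/siphash import path, with an illustrative (non-random) key:

package main

import "core:fmt"
import "core:crypto/siphash"

main :: proc() {
	// 16-byte key; fill from a CSPRNG in real use, this pattern is only
	// for illustration.
	key: [siphash.KEY_SIZE]byte
	for _, i in key {
		key[i] = byte(i)
	}

	msg := "Hellope"
	// SipHash-2-4, the default variant behind the `sum` alias.
	tag := siphash.sum_2_4(msg, string(key[:]))
	fmt.println(siphash.verify_u64_2_4(tag, transmute([]byte)(msg), key[:])) // true
}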
@@ -216,120 +214,150 @@ verify_4_8 :: proc {
/*
*/

init :: proc(ctx: ^Context, key: []byte, c_rounds, d_rounds: int) {
	assert(len(key) == KEY_SIZE, "crypto/siphash: Invalid key size, want 16")
	ctx.c_rounds = c_rounds
	ctx.d_rounds = d_rounds
	is_valid_setting := (ctx.c_rounds == 1 && ctx.d_rounds == 3) ||
		(ctx.c_rounds == 2 && ctx.d_rounds == 4) ||
		(ctx.c_rounds == 4 && ctx.d_rounds == 8)
	assert(is_valid_setting, "crypto/siphash: Incorrect rounds set up. Valid pairs are (1,3), (2,4) and (4,8)")
	ctx.k0 = util.U64_LE(key[:8])
	ctx.k1 = util.U64_LE(key[8:])
	ctx.v0 = 0x736f6d6570736575 ~ ctx.k0
	ctx.v1 = 0x646f72616e646f6d ~ ctx.k1
	ctx.v2 = 0x6c7967656e657261 ~ ctx.k0
	ctx.v3 = 0x7465646279746573 ~ ctx.k1
	ctx.is_initialized = true
	if len(key) != KEY_SIZE {
		panic("crypto/siphash: invalid key size")
	}
	ctx.c_rounds = c_rounds
	ctx.d_rounds = d_rounds
	is_valid_setting :=
		(ctx.c_rounds == 1 && ctx.d_rounds == 3) ||
		(ctx.c_rounds == 2 && ctx.d_rounds == 4) ||
		(ctx.c_rounds == 4 && ctx.d_rounds == 8)
	if !is_valid_setting {
		panic("crypto/siphash: incorrect rounds set up")
	}
	ctx.k0 = endian.unchecked_get_u64le(key[:8])
	ctx.k1 = endian.unchecked_get_u64le(key[8:])
	ctx.v0 = 0x736f6d6570736575 ~ ctx.k0
	ctx.v1 = 0x646f72616e646f6d ~ ctx.k1
	ctx.v2 = 0x6c7967656e657261 ~ ctx.k0
	ctx.v3 = 0x7465646279746573 ~ ctx.k1

	ctx.last_block = 0
	ctx.total_length = 0

	ctx.is_initialized = true
}

update :: proc(ctx: ^Context, data: []byte) {
	assert(ctx.is_initialized, "crypto/siphash: Context is not initialized")
	ctx.last_block = len(data) / 8 * 8
	ctx.buf = data
	i := 0
	m: u64
	for i < ctx.last_block {
		m = u64(ctx.buf[i] & 0xff)
		i += 1
	assert(ctx.is_initialized, "crypto/siphash: context is not initialized")

		for r in u64(1)..<8 {
			m |= u64(ctx.buf[i] & 0xff) << (r * 8)
			i += 1
		}

		ctx.v3 ~= m
		for _ in 0..<ctx.c_rounds {
			_compress(ctx)
		}

		ctx.v0 ~= m
	}
	data := data
	ctx.total_length += len(data)
	if ctx.last_block > 0 {
		n := copy(ctx.buf[ctx.last_block:], data)
		ctx.last_block += n
		if ctx.last_block == BLOCK_SIZE {
			block(ctx, ctx.buf[:])
			ctx.last_block = 0
		}
		data = data[n:]
	}
	if len(data) >= BLOCK_SIZE {
		n := len(data) &~ (BLOCK_SIZE - 1)
		block(ctx, data[:n])
		data = data[n:]
	}
	if len(data) > 0 {
		ctx.last_block = copy(ctx.buf[:], data)
	}
}

final :: proc(ctx: ^Context, dst: ^u64) {
	m: u64
	for i := len(ctx.buf) - 1; i >= ctx.last_block; i -= 1 {
		m <<= 8
		m |= u64(ctx.buf[i] & 0xff)
	}
	m |= u64(len(ctx.buf) << 56)
	assert(ctx.is_initialized, "crypto/siphash: context is not initialized")

	ctx.v3 ~= m
	tmp: [BLOCK_SIZE]byte
	copy(tmp[:], ctx.buf[:ctx.last_block])
	tmp[7] = byte(ctx.total_length & 0xff)
	block(ctx, tmp[:])

	for _ in 0..<ctx.c_rounds {
		_compress(ctx)
	}
	ctx.v2 ~= 0xff

	ctx.v0 ~= m
	ctx.v2 ~= 0xff
	for _ in 0 ..< ctx.d_rounds {
		_compress(ctx)
	}

	for _ in 0..<ctx.d_rounds {
		_compress(ctx)
	}

	dst^ = ctx.v0 ~ ctx.v1 ~ ctx.v2 ~ ctx.v3

	reset(ctx)
}

reset :: proc(ctx: ^Context) {
	ctx.k0, ctx.k1 = 0, 0
	ctx.v0, ctx.v1 = 0, 0
	ctx.v2, ctx.v3 = 0, 0
	ctx.last_block = 0
	ctx.c_rounds = 0
	ctx.d_rounds = 0
	ctx.is_initialized = false
	ctx.k0, ctx.k1 = 0, 0
	ctx.v0, ctx.v1 = 0, 0
	ctx.v2, ctx.v3 = 0, 0
	ctx.last_block = 0
	ctx.total_length = 0
	ctx.c_rounds = 0
	ctx.d_rounds = 0
	ctx.is_initialized = false
}

BLOCK_SIZE :: 8

Context :: struct {
	v0, v1, v2, v3: u64, // State values
	k0, k1: u64, // Split key
	c_rounds: int, // Number of message rounds
	d_rounds: int, // Number of finalization rounds
	buf: []byte, // Provided data
	last_block: int, // Offset from the last block
	is_initialized: bool,
	v0, v1, v2, v3: u64, // State values
	k0, k1: u64, // Split key
	c_rounds: int, // Number of message rounds
	d_rounds: int, // Number of finalization rounds
	buf: [BLOCK_SIZE]byte, // Provided data
	last_block: int, // Offset from the last block
	total_length: int,
	is_initialized: bool,
}

@(private)
block :: proc "contextless" (ctx: ^Context, buf: []byte) {
	buf := buf

	for len(buf) >= BLOCK_SIZE {
		m := endian.unchecked_get_u64le(buf)

		ctx.v3 ~= m
		for _ in 0 ..< ctx.c_rounds {
			_compress(ctx)
		}

		ctx.v0 ~= m

		buf = buf[BLOCK_SIZE:]
	}
}

@(private)
_get_byte :: #force_inline proc "contextless" (byte_num: byte, into: u64) -> byte {
	return byte(into >> (((~byte_num) & (size_of(u64) - 1)) << 3))
}

_collect_output :: #force_inline proc "contextless" (dst: []byte, hash: u64) {
	dst[0] = _get_byte(7, hash)
	dst[1] = _get_byte(6, hash)
	dst[2] = _get_byte(5, hash)
	dst[3] = _get_byte(4, hash)
	dst[4] = _get_byte(3, hash)
	dst[5] = _get_byte(2, hash)
	dst[6] = _get_byte(1, hash)
	dst[7] = _get_byte(0, hash)
@(private)
_collect_output :: #force_inline proc(dst: []byte, hash: u64) {
	if len(dst) < DIGEST_SIZE {
		panic("crypto/siphash: invalid tag size")
	}
	dst[0] = _get_byte(7, hash)
	dst[1] = _get_byte(6, hash)
	dst[2] = _get_byte(5, hash)
	dst[3] = _get_byte(4, hash)
	dst[4] = _get_byte(3, hash)
	dst[5] = _get_byte(2, hash)
	dst[6] = _get_byte(1, hash)
	dst[7] = _get_byte(0, hash)
}

@(private)
_compress :: #force_inline proc "contextless" (ctx: ^Context) {
	ctx.v0 += ctx.v1
	ctx.v1 = util.ROTL64(ctx.v1, 13)
	ctx.v1 ~= ctx.v0
	ctx.v0 = util.ROTL64(ctx.v0, 32)
	ctx.v2 += ctx.v3
	ctx.v3 = util.ROTL64(ctx.v3, 16)
	ctx.v3 ~= ctx.v2
	ctx.v0 += ctx.v3
	ctx.v3 = util.ROTL64(ctx.v3, 21)
	ctx.v3 ~= ctx.v0
	ctx.v2 += ctx.v1
	ctx.v1 = util.ROTL64(ctx.v1, 17)
	ctx.v1 ~= ctx.v2
	ctx.v2 = util.ROTL64(ctx.v2, 32)
	ctx.v0 += ctx.v1
	ctx.v1 = bits.rotate_left64(ctx.v1, 13)
	ctx.v1 ~= ctx.v0
	ctx.v0 = bits.rotate_left64(ctx.v0, 32)
	ctx.v2 += ctx.v3
	ctx.v3 = bits.rotate_left64(ctx.v3, 16)
	ctx.v3 ~= ctx.v2
	ctx.v0 += ctx.v3
	ctx.v3 = bits.rotate_left64(ctx.v3, 21)
	ctx.v3 ~= ctx.v0
	ctx.v2 += ctx.v1
	ctx.v1 = bits.rotate_left64(ctx.v1, 17)
	ctx.v1 ~= ctx.v2
	ctx.v2 = bits.rotate_left64(ctx.v2, 32)
}
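The _get_byte helper above extracts byte `byte_num` of a u64 in big-endian order: byte 0 is the most significant, 7 the least, so `(~byte_num) & 7` turns the index into a shift of (7 - byte_num) bytes. A standalone sketch of the same arithmetic (names and test value are illustrative):

package main

import "core:fmt"

// Sketch of the indexing used by _get_byte: (~byte_num) & (size_of(u64)-1)
// is (7 - byte_num) for byte_num in 0..7, and << 3 turns bytes into bits.
get_byte :: proc(byte_num: byte, into: u64) -> byte {
	return byte(into >> (((~byte_num) & (size_of(u64) - 1)) << 3))
}

main :: proc() {
	x: u64 = 0x0102030405060708
	fmt.println(get_byte(0, x)) // 1, the most significant byte
	fmt.println(get_byte(7, x)) // 8, the least significant byte
}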
@@ -10,10 +10,10 @@ package sm3

    Implementation of the SM3 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02>
*/

import "core:os"
import "core:encoding/endian"
import "core:io"

import "../util"
import "core:math/bits"
import "core:os"

/*
    High level API
@@ -24,227 +24,256 @@ DIGEST_SIZE :: 32
// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Sm3_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Sm3_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
	ctx: Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Sm3_Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
	hash: [DIGEST_SIZE]byte
	ctx: Context
	init(&ctx)

	buf := make([]byte, 512)
	defer delete(buf)

	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}
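A minimal usage sketch for the SM3 API above, assuming the core:crypto/sm3 import path; it exercises both the overloaded `hash` group and the low-level Context that the next hunk introduces:

package main

import "core:fmt"
import "core:crypto/sm3"

main :: proc() {
	msg := "Hellope"

	// One-shot: overload resolution picks hash_string.
	digest := sm3.hash(msg)
	fmt.printf("%x\n", digest)

	// Incremental, via the low-level API below.
	ctx: sm3.Context
	sm3.init(&ctx)
	sm3.update(&ctx, transmute([]byte)(msg))
	out: [sm3.DIGEST_SIZE]byte
	sm3.final(&ctx, out[:])
}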
/*
    Low level API
*/

init :: proc(ctx: ^Sm3_Context) {
	ctx.state[0] = IV[0]
	ctx.state[1] = IV[1]
	ctx.state[2] = IV[2]
	ctx.state[3] = IV[3]
	ctx.state[4] = IV[4]
	ctx.state[5] = IV[5]
	ctx.state[6] = IV[6]
	ctx.state[7] = IV[7]
init :: proc(ctx: ^Context) {
	ctx.state[0] = IV[0]
	ctx.state[1] = IV[1]
	ctx.state[2] = IV[2]
	ctx.state[3] = IV[3]
	ctx.state[4] = IV[4]
	ctx.state[5] = IV[5]
	ctx.state[6] = IV[6]
	ctx.state[7] = IV[7]

	ctx.length = 0
	ctx.bitlength = 0

	ctx.is_initialized = true
}

update :: proc(ctx: ^Sm3_Context, data: []byte) {
	data := data
	ctx.length += u64(len(data))
update :: proc(ctx: ^Context, data: []byte) {
	assert(ctx.is_initialized)

	if ctx.bitlength > 0 {
		n := copy(ctx.x[ctx.bitlength:], data[:])
		ctx.bitlength += u64(n)
		if ctx.bitlength == 64 {
			block(ctx, ctx.x[:])
			ctx.bitlength = 0
		}
		data = data[n:]
	}
	if len(data) >= 64 {
		n := len(data) &~ (64 - 1)
		block(ctx, data[:n])
		data = data[n:]
	}
	if len(data) > 0 {
		ctx.bitlength = u64(copy(ctx.x[:], data[:]))
	}
	data := data
	ctx.length += u64(len(data))

	if ctx.bitlength > 0 {
		n := copy(ctx.x[ctx.bitlength:], data[:])
		ctx.bitlength += u64(n)
		if ctx.bitlength == BLOCK_SIZE {
			block(ctx, ctx.x[:])
			ctx.bitlength = 0
		}
		data = data[n:]
	}
	if len(data) >= BLOCK_SIZE {
		n := len(data) &~ (BLOCK_SIZE - 1)
		block(ctx, data[:n])
		data = data[n:]
	}
	if len(data) > 0 {
		ctx.bitlength = u64(copy(ctx.x[:], data[:]))
	}
}

final :: proc(ctx: ^Sm3_Context, hash: []byte) {
	length := ctx.length
final :: proc(ctx: ^Context, hash: []byte) {
	assert(ctx.is_initialized)

	pad: [64]byte
	pad[0] = 0x80
	if length % 64 < 56 {
		update(ctx, pad[0: 56 - length % 64])
	} else {
		update(ctx, pad[0: 64 + 56 - length % 64])
	}
	if len(hash) < DIGEST_SIZE {
		panic("crypto/sm3: invalid destination digest size")
	}

	length <<= 3
	util.PUT_U64_BE(pad[:], length)
	update(ctx, pad[0: 8])
	assert(ctx.bitlength == 0)
	length := ctx.length

	util.PUT_U32_BE(hash[0:], ctx.state[0])
	util.PUT_U32_BE(hash[4:], ctx.state[1])
	util.PUT_U32_BE(hash[8:], ctx.state[2])
	util.PUT_U32_BE(hash[12:], ctx.state[3])
	util.PUT_U32_BE(hash[16:], ctx.state[4])
	util.PUT_U32_BE(hash[20:], ctx.state[5])
	util.PUT_U32_BE(hash[24:], ctx.state[6])
	util.PUT_U32_BE(hash[28:], ctx.state[7])
	pad: [BLOCK_SIZE]byte
	pad[0] = 0x80
	if length % BLOCK_SIZE < 56 {
		update(ctx, pad[0:56 - length % BLOCK_SIZE])
	} else {
		update(ctx, pad[0:BLOCK_SIZE + 56 - length % BLOCK_SIZE])
	}

	length <<= 3
	endian.unchecked_put_u64be(pad[:], length)
	update(ctx, pad[0:8])
	assert(ctx.bitlength == 0)

	for i := 0; i < DIGEST_SIZE / 4; i += 1 {
		endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
	}

	ctx.is_initialized = false
}

/*
    SM3 implementation
*/

Sm3_Context :: struct {
	state: [8]u32,
	x: [64]byte,
	bitlength: u64,
	length: u64,
BLOCK_SIZE :: 64

Context :: struct {
	state: [8]u32,
	x: [BLOCK_SIZE]byte,
	bitlength: u64,
	length: u64,

	is_initialized: bool,
}

@(private)
IV := [8]u32 {
	0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600,
	0xa96f30bc, 0x163138aa, 0xe38dee4d, 0xb0fb0e4e,
}

block :: proc "contextless" (ctx: ^Sm3_Context, buf: []byte) {
	buf := buf
@(private)
block :: proc "contextless" (ctx: ^Context, buf: []byte) {
	buf := buf

	w: [68]u32
	wp: [64]u32
	w: [68]u32
	wp: [64]u32

	state0, state1, state2, state3 := ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3]
	state4, state5, state6, state7 := ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7]
	state0, state1, state2, state3 := ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3]
	state4, state5, state6, state7 := ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7]

	for len(buf) >= 64 {
		for i := 0; i < 16; i += 1 {
			j := i * 4
			w[i] = u32(buf[j]) << 24 | u32(buf[j + 1]) << 16 | u32(buf[j + 2]) << 8 | u32(buf[j + 3])
		}
		for i := 16; i < 68; i += 1 {
			p1v := w[i - 16] ~ w[i - 9] ~ util.ROTL32(w[i - 3], 15)
			// @note(zh): inlined P1
			w[i] = p1v ~ util.ROTL32(p1v, 15) ~ util.ROTL32(p1v, 23) ~ util.ROTL32(w[i - 13], 7) ~ w[i - 6]
		}
		for i := 0; i < 64; i += 1 {
			wp[i] = w[i] ~ w[i + 4]
		}
	for len(buf) >= BLOCK_SIZE {
		for i := 0; i < 16; i += 1 {
			w[i] = endian.unchecked_get_u32be(buf[i * 4:])
		}
		for i := 16; i < 68; i += 1 {
			p1v := w[i - 16] ~ w[i - 9] ~ bits.rotate_left32(w[i - 3], 15)
			// @note(zh): inlined P1
			w[i] =
				p1v ~
				bits.rotate_left32(p1v, 15) ~
				bits.rotate_left32(p1v, 23) ~
				bits.rotate_left32(w[i - 13], 7) ~
				w[i - 6]
		}
		for i := 0; i < 64; i += 1 {
			wp[i] = w[i] ~ w[i + 4]
		}

		a, b, c, d := state0, state1, state2, state3
		e, f, g, h := state4, state5, state6, state7
		a, b, c, d := state0, state1, state2, state3
		e, f, g, h := state4, state5, state6, state7

		for i := 0; i < 16; i += 1 {
			v1 := util.ROTL32(u32(a), 12)
			ss1 := util.ROTL32(v1 + u32(e) + util.ROTL32(0x79cc4519, i), 7)
			ss2 := ss1 ~ v1
		for i := 0; i < 16; i += 1 {
			v1 := bits.rotate_left32(u32(a), 12)
			ss1 := bits.rotate_left32(v1 + u32(e) + bits.rotate_left32(0x79cc4519, i), 7)
			ss2 := ss1 ~ v1

			// @note(zh): inlined FF1
			tt1 := u32(a ~ b ~ c) + u32(d) + ss2 + wp[i]
			// @note(zh): inlined GG1
			tt2 := u32(e ~ f ~ g) + u32(h) + ss1 + w[i]
|
||||
// @note(zh): inlined FF1
|
||||
tt1 := u32(a ~ b ~ c) + u32(d) + ss2 + wp[i]
|
||||
// @note(zh): inlined GG1
|
||||
tt2 := u32(e ~ f ~ g) + u32(h) + ss1 + w[i]
|
||||
|
||||
a, b, c, d = tt1, a, util.ROTL32(u32(b), 9), c
|
||||
// @note(zh): inlined P0
|
||||
e, f, g, h = (tt2 ~ util.ROTL32(tt2, 9) ~ util.ROTL32(tt2, 17)), e, util.ROTL32(u32(f), 19), g
|
||||
}
|
||||
a, b, c, d = tt1, a, bits.rotate_left32(u32(b), 9), c
|
||||
// @note(zh): inlined P0
|
||||
e, f, g, h =
|
||||
(tt2 ~ bits.rotate_left32(tt2, 9) ~ bits.rotate_left32(tt2, 17)),
|
||||
e,
|
||||
bits.rotate_left32(u32(f), 19),
|
||||
g
|
||||
}
|
||||
|
||||
for i := 16; i < 64; i += 1 {
|
||||
v := util.ROTL32(u32(a), 12)
|
||||
ss1 := util.ROTL32(v + u32(e) + util.ROTL32(0x7a879d8a, i % 32), 7)
|
||||
ss2 := ss1 ~ v
|
||||
for i := 16; i < 64; i += 1 {
|
||||
v := bits.rotate_left32(u32(a), 12)
|
||||
ss1 := bits.rotate_left32(v + u32(e) + bits.rotate_left32(0x7a879d8a, i % 32), 7)
|
||||
ss2 := ss1 ~ v
|
||||
|
||||
// @note(zh): inlined FF2
|
||||
tt1 := u32(((a & b) | (a & c) | (b & c)) + d) + ss2 + wp[i]
|
||||
// @note(zh): inlined GG2
|
||||
tt2 := u32(((e & f) | ((~e) & g)) + h) + ss1 + w[i]
|
||||
// @note(zh): inlined FF2
|
||||
tt1 := u32(((a & b) | (a & c) | (b & c)) + d) + ss2 + wp[i]
|
||||
// @note(zh): inlined GG2
|
||||
tt2 := u32(((e & f) | ((~e) & g)) + h) + ss1 + w[i]
|
||||
|
||||
a, b, c, d = tt1, a, util.ROTL32(u32(b), 9), c
|
||||
// @note(zh): inlined P0
|
||||
e, f, g, h = (tt2 ~ util.ROTL32(tt2, 9) ~ util.ROTL32(tt2, 17)), e, util.ROTL32(u32(f), 19), g
|
||||
}
|
||||
a, b, c, d = tt1, a, bits.rotate_left32(u32(b), 9), c
|
||||
// @note(zh): inlined P0
|
||||
e, f, g, h =
|
||||
(tt2 ~ bits.rotate_left32(tt2, 9) ~ bits.rotate_left32(tt2, 17)),
|
||||
e,
|
||||
bits.rotate_left32(u32(f), 19),
|
||||
g
|
||||
}
|
||||
|
||||
state0 ~= a
|
||||
state1 ~= b
|
||||
state2 ~= c
|
||||
state3 ~= d
|
||||
state4 ~= e
|
||||
state5 ~= f
|
||||
state6 ~= g
|
||||
state7 ~= h
|
||||
state0 ~= a
|
||||
state1 ~= b
|
||||
state2 ~= c
|
||||
state3 ~= d
|
||||
state4 ~= e
|
||||
state5 ~= f
|
||||
state6 ~= g
|
||||
state7 ~= h
|
||||
|
||||
buf = buf[64:]
|
||||
}
|
||||
buf = buf[BLOCK_SIZE:]
|
||||
}
|
||||
|
||||
ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] = state0, state1, state2, state3
|
||||
ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] = state4, state5, state6, state7
|
||||
ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] = state0, state1, state2, state3
|
||||
ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] = state4, state5, state6, state7
|
||||
}
|
||||
|
||||
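The streaming contract above (init, any number of update calls, then a final that checks the destination size and invalidates the context) is easiest to see end to end. A minimal sketch, not part of the diff, assuming the import path core:crypto/sm3 and the DIGEST_SIZE constant declared earlier in this file:

package main

import "core:crypto/sm3"
import "core:fmt"

main :: proc() {
	// One-shot convenience proc from the high level API.
	digest := sm3.hash_string("Hellope")

	// The same digest via the low level streaming API.
	part1, part2 := "Hel", "lope"
	ctx: sm3.Context
	sm3.init(&ctx)
	sm3.update(&ctx, transmute([]byte)part1)
	sm3.update(&ctx, transmute([]byte)part2)
	manual: [sm3.DIGEST_SIZE]byte
	sm3.final(&ctx, manual[:]) // panics if manual is too small, then clears is_initialized
	assert(digest == manual)
	fmt.printf("%x\n", digest)
}
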
@@ -1,517 +0,0 @@
package streebog

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the Streebog hashing algorithm, standardized as GOST R 34.11-2012 in RFC 6986 <https://datatracker.ietf.org/doc/html/rfc6986>
*/

import "core:os"
import "core:io"

import "../util"

/*
    High level API
*/

DIGEST_SIZE_256 :: 32
DIGEST_SIZE_512 :: 64

// hash_string_256 will hash the given input and return the
// computed hash
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	return hash_bytes_256(transmute([]byte)(data))
}

// hash_bytes_256 will hash the given input and return the
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Streebog_Context
	ctx.is256 = true
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_256 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_256 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
	ctx: Streebog_Context
	ctx.is256 = true
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Streebog_Context
	ctx.is256 = true
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_256 will read the file provided by the given handle
// and compute a hash
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_256(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_256]byte{}, false
}

hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}

// hash_string_512 will hash the given input and return the
// computed hash
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	return hash_bytes_512(transmute([]byte)(data))
}

// hash_bytes_512 will hash the given input and return the
// computed hash
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Streebog_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_512 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_512 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
	ctx: Streebog_Context
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
}

// hash_stream_512 will read the stream in chunks and compute a
// hash from its contents
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Streebog_Context
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file_512 will read the file provided by the given handle
// and compute a hash
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_512(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_512]byte{}, false
}

hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}

/*
    Low level API
*/

init :: proc(ctx: ^Streebog_Context) {
	if ctx.is256 {
		ctx.hash_size = 256
		for _, i in ctx.h {
			ctx.h[i] = 0x01
		}
	} else {
		ctx.hash_size = 512
	}
	ctx.v_512[1] = 0x02
}

update :: proc(ctx: ^Streebog_Context, data: []byte) {
	length := u64(len(data))
	chk_size: u64
	data := data
	for (length > 63) && (ctx.buf_size == 0) {
		stage2(ctx, data)
		data = data[64:]
		length -= 64
	}

	for length != 0 {
		chk_size = 64 - ctx.buf_size
		if chk_size > length {
			chk_size = length
		}
		copy(ctx.buffer[ctx.buf_size:], data[:chk_size])
		ctx.buf_size += chk_size
		length -= chk_size
		data = data[chk_size:]
		if ctx.buf_size == 64 {
			stage2(ctx, ctx.buffer[:])
			ctx.buf_size = 0
		}
	}
}

final :: proc(ctx: ^Streebog_Context, hash: []byte) {
	t: [64]byte
	t[1] = byte((ctx.buf_size * 8) >> 8) & 0xff
	t[0] = byte((ctx.buf_size) * 8) & 0xff

	padding(ctx)

	G(ctx.h[:], ctx.n[:], ctx.buffer[:])

	add_mod_512(ctx.n[:], t[:], ctx.n[:])
	add_mod_512(ctx.sigma[:], ctx.buffer[:], ctx.sigma[:])

	G(ctx.h[:], ctx.v_0[:], ctx.n[:])
	G(ctx.h[:], ctx.v_0[:], ctx.sigma[:])

	if ctx.is256 {
		copy(hash[:], ctx.h[32:])
	} else {
		copy(hash[:], ctx.h[:])
	}
}

/*
    Streebog implementation
*/

PI := [256]byte {
	252, 238, 221, 17, 207, 110, 49, 22, 251, 196, 250, 218, 35, 197, 4, 77,
	233, 119, 240, 219, 147, 46, 153, 186, 23, 54, 241, 187, 20, 205, 95, 193,
	249, 24, 101, 90, 226, 92, 239, 33, 129, 28, 60, 66, 139, 1, 142, 79,
	5, 132, 2, 174, 227, 106, 143, 160, 6, 11, 237, 152, 127, 212, 211, 31,
	235, 52, 44, 81, 234, 200, 72, 171, 242, 42, 104, 162, 253, 58, 206, 204,
	181, 112, 14, 86, 8, 12, 118, 18, 191, 114, 19, 71, 156, 183, 93, 135,
	21, 161, 150, 41, 16, 123, 154, 199, 243, 145, 120, 111, 157, 158, 178, 177,
	50, 117, 25, 61, 255, 53, 138, 126, 109, 84, 198, 128, 195, 189, 13, 87,
	223, 245, 36, 169, 62, 168, 67, 201, 215, 121, 214, 246, 124, 34, 185, 3,
	224, 15, 236, 222, 122, 148, 176, 188, 220, 232, 40, 80, 78, 51, 10, 74,
	167, 151, 96, 115, 30, 0, 98, 68, 26, 184, 56, 130, 100, 159, 38, 65,
	173, 69, 70, 146, 39, 94, 85, 47, 140, 163, 165, 125, 105, 213, 149, 59,
	7, 88, 179, 64, 134, 172, 29, 247, 48, 55, 107, 228, 136, 217, 231, 137,
	225, 27, 131, 73, 76, 63, 248, 254, 141, 83, 170, 144, 202, 216, 133, 97,
	32, 113, 103, 164, 45, 43, 9, 91, 203, 155, 37, 208, 190, 229, 108, 82,
	89, 166, 116, 210, 230, 244, 180, 192, 209, 102, 175, 194, 57, 75, 99, 182,
}

TAU := [64]byte {
	0, 8, 16, 24, 32, 40, 48, 56,
	1, 9, 17, 25, 33, 41, 49, 57,
	2, 10, 18, 26, 34, 42, 50, 58,
	3, 11, 19, 27, 35, 43, 51, 59,
	4, 12, 20, 28, 36, 44, 52, 60,
	5, 13, 21, 29, 37, 45, 53, 61,
	6, 14, 22, 30, 38, 46, 54, 62,
	7, 15, 23, 31, 39, 47, 55, 63,
}

STREEBOG_A := [64]u64 {
	0x8e20faa72ba0b470, 0x47107ddd9b505a38, 0xad08b0e0c3282d1c, 0xd8045870ef14980e,
	0x6c022c38f90a4c07, 0x3601161cf205268d, 0x1b8e0b0e798c13c8, 0x83478b07b2468764,
	0xa011d380818e8f40, 0x5086e740ce47c920, 0x2843fd2067adea10, 0x14aff010bdd87508,
	0x0ad97808d06cb404, 0x05e23c0468365a02, 0x8c711e02341b2d01, 0x46b60f011a83988e,
	0x90dab52a387ae76f, 0x486dd4151c3dfdb9, 0x24b86a840e90f0d2, 0x125c354207487869,
	0x092e94218d243cba, 0x8a174a9ec8121e5d, 0x4585254f64090fa0, 0xaccc9ca9328a8950,
	0x9d4df05d5f661451, 0xc0a878a0a1330aa6, 0x60543c50de970553, 0x302a1e286fc58ca7,
	0x18150f14b9ec46dd, 0x0c84890ad27623e0, 0x0642ca05693b9f70, 0x0321658cba93c138,
	0x86275df09ce8aaa8, 0x439da0784e745554, 0xafc0503c273aa42a, 0xd960281e9d1d5215,
	0xe230140fc0802984, 0x71180a8960409a42, 0xb60c05ca30204d21, 0x5b068c651810a89e,
	0x456c34887a3805b9, 0xac361a443d1c8cd2, 0x561b0d22900e4669, 0x2b838811480723ba,
	0x9bcf4486248d9f5d, 0xc3e9224312c8c1a0, 0xeffa11af0964ee50, 0xf97d86d98a327728,
	0xe4fa2054a80b329c, 0x727d102a548b194e, 0x39b008152acb8227, 0x9258048415eb419d,
	0x492c024284fbaec0, 0xaa16012142f35760, 0x550b8e9e21f7a530, 0xa48b474f9ef5dc18,
	0x70a6a56e2440598e, 0x3853dc371220a247, 0x1ca76e95091051ad, 0x0edd37c48a08a6d8,
	0x07e095624504536c, 0x8d70c431ac02a736, 0xc83862965601dd1b, 0x641c314b2b8ee083,
}

STREEBOG_C := [12][64]byte {
	{
		0x07, 0x45, 0xa6, 0xf2, 0x59, 0x65, 0x80, 0xdd,
		0x23, 0x4d, 0x74, 0xcc, 0x36, 0x74, 0x76, 0x05,
		0x15, 0xd3, 0x60, 0xa4, 0x08, 0x2a, 0x42, 0xa2,
		0x01, 0x69, 0x67, 0x92, 0x91, 0xe0, 0x7c, 0x4b,
		0xfc, 0xc4, 0x85, 0x75, 0x8d, 0xb8, 0x4e, 0x71,
		0x16, 0xd0, 0x45, 0x2e, 0x43, 0x76, 0x6a, 0x2f,
		0x1f, 0x7c, 0x65, 0xc0, 0x81, 0x2f, 0xcb, 0xeb,
		0xe9, 0xda, 0xca, 0x1e, 0xda, 0x5b, 0x08, 0xb1,
	},
	{
		0xb7, 0x9b, 0xb1, 0x21, 0x70, 0x04, 0x79, 0xe6,
		0x56, 0xcd, 0xcb, 0xd7, 0x1b, 0xa2, 0xdd, 0x55,
		0xca, 0xa7, 0x0a, 0xdb, 0xc2, 0x61, 0xb5, 0x5c,
		0x58, 0x99, 0xd6, 0x12, 0x6b, 0x17, 0xb5, 0x9a,
		0x31, 0x01, 0xb5, 0x16, 0x0f, 0x5e, 0xd5, 0x61,
		0x98, 0x2b, 0x23, 0x0a, 0x72, 0xea, 0xfe, 0xf3,
		0xd7, 0xb5, 0x70, 0x0f, 0x46, 0x9d, 0xe3, 0x4f,
		0x1a, 0x2f, 0x9d, 0xa9, 0x8a, 0xb5, 0xa3, 0x6f,
	},
	{
		0xb2, 0x0a, 0xba, 0x0a, 0xf5, 0x96, 0x1e, 0x99,
		0x31, 0xdb, 0x7a, 0x86, 0x43, 0xf4, 0xb6, 0xc2,
		0x09, 0xdb, 0x62, 0x60, 0x37, 0x3a, 0xc9, 0xc1,
		0xb1, 0x9e, 0x35, 0x90, 0xe4, 0x0f, 0xe2, 0xd3,
		0x7b, 0x7b, 0x29, 0xb1, 0x14, 0x75, 0xea, 0xf2,
		0x8b, 0x1f, 0x9c, 0x52, 0x5f, 0x5e, 0xf1, 0x06,
		0x35, 0x84, 0x3d, 0x6a, 0x28, 0xfc, 0x39, 0x0a,
		0xc7, 0x2f, 0xce, 0x2b, 0xac, 0xdc, 0x74, 0xf5,
	},
	{
		0x2e, 0xd1, 0xe3, 0x84, 0xbc, 0xbe, 0x0c, 0x22,
		0xf1, 0x37, 0xe8, 0x93, 0xa1, 0xea, 0x53, 0x34,
		0xbe, 0x03, 0x52, 0x93, 0x33, 0x13, 0xb7, 0xd8,
		0x75, 0xd6, 0x03, 0xed, 0x82, 0x2c, 0xd7, 0xa9,
		0x3f, 0x35, 0x5e, 0x68, 0xad, 0x1c, 0x72, 0x9d,
		0x7d, 0x3c, 0x5c, 0x33, 0x7e, 0x85, 0x8e, 0x48,
		0xdd, 0xe4, 0x71, 0x5d, 0xa0, 0xe1, 0x48, 0xf9,
		0xd2, 0x66, 0x15, 0xe8, 0xb3, 0xdf, 0x1f, 0xef,
	},
	{
		0x57, 0xfe, 0x6c, 0x7c, 0xfd, 0x58, 0x17, 0x60,
		0xf5, 0x63, 0xea, 0xa9, 0x7e, 0xa2, 0x56, 0x7a,
		0x16, 0x1a, 0x27, 0x23, 0xb7, 0x00, 0xff, 0xdf,
		0xa3, 0xf5, 0x3a, 0x25, 0x47, 0x17, 0xcd, 0xbf,
		0xbd, 0xff, 0x0f, 0x80, 0xd7, 0x35, 0x9e, 0x35,
		0x4a, 0x10, 0x86, 0x16, 0x1f, 0x1c, 0x15, 0x7f,
		0x63, 0x23, 0xa9, 0x6c, 0x0c, 0x41, 0x3f, 0x9a,
		0x99, 0x47, 0x47, 0xad, 0xac, 0x6b, 0xea, 0x4b,
	},
	{
		0x6e, 0x7d, 0x64, 0x46, 0x7a, 0x40, 0x68, 0xfa,
		0x35, 0x4f, 0x90, 0x36, 0x72, 0xc5, 0x71, 0xbf,
		0xb6, 0xc6, 0xbe, 0xc2, 0x66, 0x1f, 0xf2, 0x0a,
		0xb4, 0xb7, 0x9a, 0x1c, 0xb7, 0xa6, 0xfa, 0xcf,
		0xc6, 0x8e, 0xf0, 0x9a, 0xb4, 0x9a, 0x7f, 0x18,
		0x6c, 0xa4, 0x42, 0x51, 0xf9, 0xc4, 0x66, 0x2d,
		0xc0, 0x39, 0x30, 0x7a, 0x3b, 0xc3, 0xa4, 0x6f,
		0xd9, 0xd3, 0x3a, 0x1d, 0xae, 0xae, 0x4f, 0xae,
	},
	{
		0x93, 0xd4, 0x14, 0x3a, 0x4d, 0x56, 0x86, 0x88,
		0xf3, 0x4a, 0x3c, 0xa2, 0x4c, 0x45, 0x17, 0x35,
		0x04, 0x05, 0x4a, 0x28, 0x83, 0x69, 0x47, 0x06,
		0x37, 0x2c, 0x82, 0x2d, 0xc5, 0xab, 0x92, 0x09,
		0xc9, 0x93, 0x7a, 0x19, 0x33, 0x3e, 0x47, 0xd3,
		0xc9, 0x87, 0xbf, 0xe6, 0xc7, 0xc6, 0x9e, 0x39,
		0x54, 0x09, 0x24, 0xbf, 0xfe, 0x86, 0xac, 0x51,
		0xec, 0xc5, 0xaa, 0xee, 0x16, 0x0e, 0xc7, 0xf4,
	},
	{
		0x1e, 0xe7, 0x02, 0xbf, 0xd4, 0x0d, 0x7f, 0xa4,
		0xd9, 0xa8, 0x51, 0x59, 0x35, 0xc2, 0xac, 0x36,
		0x2f, 0xc4, 0xa5, 0xd1, 0x2b, 0x8d, 0xd1, 0x69,
		0x90, 0x06, 0x9b, 0x92, 0xcb, 0x2b, 0x89, 0xf4,
		0x9a, 0xc4, 0xdb, 0x4d, 0x3b, 0x44, 0xb4, 0x89,
		0x1e, 0xde, 0x36, 0x9c, 0x71, 0xf8, 0xb7, 0x4e,
		0x41, 0x41, 0x6e, 0x0c, 0x02, 0xaa, 0xe7, 0x03,
		0xa7, 0xc9, 0x93, 0x4d, 0x42, 0x5b, 0x1f, 0x9b,
	},
	{
		0xdb, 0x5a, 0x23, 0x83, 0x51, 0x44, 0x61, 0x72,
		0x60, 0x2a, 0x1f, 0xcb, 0x92, 0xdc, 0x38, 0x0e,
		0x54, 0x9c, 0x07, 0xa6, 0x9a, 0x8a, 0x2b, 0x7b,
		0xb1, 0xce, 0xb2, 0xdb, 0x0b, 0x44, 0x0a, 0x80,
		0x84, 0x09, 0x0d, 0xe0, 0xb7, 0x55, 0xd9, 0x3c,
		0x24, 0x42, 0x89, 0x25, 0x1b, 0x3a, 0x7d, 0x3a,
		0xde, 0x5f, 0x16, 0xec, 0xd8, 0x9a, 0x4c, 0x94,
		0x9b, 0x22, 0x31, 0x16, 0x54, 0x5a, 0x8f, 0x37,
	},
	{
		0xed, 0x9c, 0x45, 0x98, 0xfb, 0xc7, 0xb4, 0x74,
		0xc3, 0xb6, 0x3b, 0x15, 0xd1, 0xfa, 0x98, 0x36,
		0xf4, 0x52, 0x76, 0x3b, 0x30, 0x6c, 0x1e, 0x7a,
		0x4b, 0x33, 0x69, 0xaf, 0x02, 0x67, 0xe7, 0x9f,
		0x03, 0x61, 0x33, 0x1b, 0x8a, 0xe1, 0xff, 0x1f,
		0xdb, 0x78, 0x8a, 0xff, 0x1c, 0xe7, 0x41, 0x89,
		0xf3, 0xf3, 0xe4, 0xb2, 0x48, 0xe5, 0x2a, 0x38,
		0x52, 0x6f, 0x05, 0x80, 0xa6, 0xde, 0xbe, 0xab,
	},
	{
		0x1b, 0x2d, 0xf3, 0x81, 0xcd, 0xa4, 0xca, 0x6b,
		0x5d, 0xd8, 0x6f, 0xc0, 0x4a, 0x59, 0xa2, 0xde,
		0x98, 0x6e, 0x47, 0x7d, 0x1d, 0xcd, 0xba, 0xef,
		0xca, 0xb9, 0x48, 0xea, 0xef, 0x71, 0x1d, 0x8a,
		0x79, 0x66, 0x84, 0x14, 0x21, 0x80, 0x01, 0x20,
		0x61, 0x07, 0xab, 0xeb, 0xbb, 0x6b, 0xfa, 0xd8,
		0x94, 0xfe, 0x5a, 0x63, 0xcd, 0xc6, 0x02, 0x30,
		0xfb, 0x89, 0xc8, 0xef, 0xd0, 0x9e, 0xcd, 0x7b,
	},
	{
		0x20, 0xd7, 0x1b, 0xf1, 0x4a, 0x92, 0xbc, 0x48,
		0x99, 0x1b, 0xb2, 0xd9, 0xd5, 0x17, 0xf4, 0xfa,
		0x52, 0x28, 0xe1, 0x88, 0xaa, 0xa4, 0x1d, 0xe7,
		0x86, 0xcc, 0x91, 0x18, 0x9d, 0xef, 0x80, 0x5d,
		0x9b, 0x9f, 0x21, 0x30, 0xd4, 0x12, 0x20, 0xf8,
		0x77, 0x1d, 0xdf, 0xbc, 0x32, 0x3c, 0xa4, 0xcd,
		0x7a, 0xb1, 0x49, 0x04, 0xb0, 0x80, 0x13, 0xd2,
		0xba, 0x31, 0x16, 0xf1, 0x67, 0xe7, 0x8e, 0x37,
	},
}

Streebog_Context :: struct {
	buffer: [64]byte,
	h: [64]byte,
	n: [64]byte,
	sigma: [64]byte,
	v_0: [64]byte,
	v_512: [64]byte,
	buf_size: u64,
	hash_size: int,
	is256: bool,
}

add_mod_512 :: proc(first_vector, second_vector, result_vector: []byte) {
	t: i32 = 0
	for i: i32 = 0; i < 64; i += 1 {
		t = i32(first_vector[i]) + i32(second_vector[i]) + (t >> 8)
		result_vector[i] = byte(t & 0xff)
	}
}

X :: #force_inline proc(a, k, out: []byte) {
	for i := 0; i < 64; i += 1 {
		out[i] = a[i] ~ k[i]
	}
}

S :: #force_inline proc(state: []byte) {
	t: [64]byte
	for i: i32 = 63; i >= 0; i -= 1 {
		t[i] = PI[state[i]]
	}
	copy(state, t[:])
}

P :: #force_inline proc(state: []byte) {
	t: [64]byte
	for i: i32 = 63; i >= 0; i -= 1 {
		t[i] = state[TAU[i]]
	}
	copy(state, t[:])
}

L :: #force_inline proc(state: []byte) {
	ins := util.cast_slice([]u64, state)
	out: [8]u64
	for i: i32 = 7; i >= 0; i -= 1 {
		for j: i32 = 63; j >= 0; j -= 1 {
			if (ins[i] >> u32(j)) & 1 != 0 {
				out[i] ~= STREEBOG_A[63 - j]
			}
		}
	}
	copy(state, util.cast_slice([]byte, out[:]))
}

E :: #force_inline proc(K, m, state: []byte) {
	X(m, K, state)
	for i: i32 = 0; i < 12; i += 1 {
		S(state)
		P(state)
		L(state)
		get_key(K, i)
		X(state, K, state)
	}
}

get_key :: #force_inline proc(K: []byte, i: i32) {
	X(K, STREEBOG_C[i][:], K)
	S(K)
	P(K)
	L(K)
}

G :: #force_inline proc(h, N, m: []byte) {
	t, K: [64]byte
	X(N, h, K[:])
	S(K[:])
	P(K[:])
	L(K[:])
	E(K[:], m, t[:])
	X(t[:], h, t[:])
	X(t[:], m, h)
}

stage2 :: proc(ctx: ^Streebog_Context, m: []byte) {
	G(ctx.h[:], ctx.n[:], m)
	add_mod_512(ctx.n[:], ctx.v_512[:], ctx.n[:])
	add_mod_512(ctx.sigma[:], m, ctx.sigma[:])
}

padding :: proc(ctx: ^Streebog_Context) {
	if ctx.buf_size < 64 {
		t: [64]byte
		copy(t[:], ctx.buffer[:int(ctx.buf_size)])
		t[ctx.buf_size] = 0x01
		copy(ctx.buffer[:], t[:])
	}
}
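As final shows, the 256-bit variant is the 512-bit pipeline with a different IV (h filled with 0x01) and the top half of h as output. A minimal usage sketch, not part of the diff, assuming the pre-removal import path core:crypto/streebog:

package main

import "core:crypto/streebog"
import "core:fmt"

main :: proc() {
	// Both widths run the same compression; only IV and output length differ.
	d256 := streebog.hash_string_256("Hellope")
	d512 := streebog.hash_string_512("Hellope")
	fmt.printf("256: %x\n512: %x\n", d256, d512)
}
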
@@ -1,280 +0,0 @@
package tiger

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the Tiger1 variant of the Tiger hashing algorithm as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/>
*/

import "core:os"
import "core:io"

import "../_tiger"

/*
    High level API
*/

DIGEST_SIZE_128 :: 16
DIGEST_SIZE_160 :: 20
DIGEST_SIZE_192 :: 24

// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
	return hash_bytes_128(transmute([]byte)(data))
}

// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
	hash: [DIGEST_SIZE_128]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}

// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
	hash: [DIGEST_SIZE_128]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_tiger.update(&ctx, buf[:read])
		}
	}
	_tiger.final(&ctx, hash[:])
	return hash, true
}

// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
	if !load_at_once {
		return hash_stream_128(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_128(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_128]byte{}, false
}

hash_128 :: proc {
	hash_stream_128,
	hash_file_128,
	hash_bytes_128,
	hash_string_128,
	hash_bytes_to_buffer_128,
	hash_string_to_buffer_128,
}

// hash_string_160 will hash the given input and return the
// computed hash
hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
	return hash_bytes_160(transmute([]byte)(data))
}

// hash_bytes_160 will hash the given input and return the
// computed hash
hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
	hash: [DIGEST_SIZE_160]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_160 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_160 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}

// hash_stream_160 will read the stream in chunks and compute a
// hash from its contents
hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
	hash: [DIGEST_SIZE_160]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_tiger.update(&ctx, buf[:read])
		}
	}
	_tiger.final(&ctx, hash[:])
	return hash, true
}

// hash_file_160 will read the file provided by the given handle
// and compute a hash
hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
	if !load_at_once {
		return hash_stream_160(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_160(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_160]byte{}, false
}

hash_160 :: proc {
	hash_stream_160,
	hash_file_160,
	hash_bytes_160,
	hash_string_160,
	hash_bytes_to_buffer_160,
	hash_string_to_buffer_160,
}

// hash_string_192 will hash the given input and return the
// computed hash
hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
	return hash_bytes_192(transmute([]byte)(data))
}

// hash_bytes_192 will hash the given input and return the
// computed hash
hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
	hash: [DIGEST_SIZE_192]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_192 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_192 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_192(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_192 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_192 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}

// hash_stream_192 will read the stream in chunks and compute a
// hash from its contents
hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
	hash: [DIGEST_SIZE_192]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 1
	_tiger.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_tiger.update(&ctx, buf[:read])
		}
	}
	_tiger.final(&ctx, hash[:])
	return hash, true
}

// hash_file_192 will read the file provided by the given handle
// and compute a hash
hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
	if !load_at_once {
		return hash_stream_192(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_192(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_192]byte{}, false
}

hash_192 :: proc {
	hash_stream_192,
	hash_file_192,
	hash_bytes_192,
	hash_string_192,
	hash_bytes_to_buffer_192,
	hash_string_to_buffer_192,
}

/*
    Low level API
*/

Tiger_Context :: _tiger.Tiger_Context

init :: proc(ctx: ^_tiger.Tiger_Context) {
	ctx.ver = 1
	_tiger.init(ctx)
}

update :: proc(ctx: ^_tiger.Tiger_Context, data: []byte) {
	_tiger.update(ctx, data)
}

final :: proc(ctx: ^_tiger.Tiger_Context, hash: []byte) {
	_tiger.final(ctx, hash)
}
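All three digest sizes above drive the same _tiger core with ver = 1 and differ only in how much of the 192-bit state they keep. A sketch of the high level API, not part of the diff, assuming the pre-removal import path core:crypto/tiger:

package main

import "core:crypto/tiger"
import "core:fmt"

main :: proc() {
	// 128- and 160-bit digests are truncations of the 192-bit Tiger1 state.
	fmt.printf("%x\n", tiger.hash_string_128("Hellope"))
	fmt.printf("%x\n", tiger.hash_string_160("Hellope"))
	fmt.printf("%x\n", tiger.hash_string_192("Hellope"))
}
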
@@ -1,280 +0,0 @@
package tiger2

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the Tiger2 variant of the Tiger hashing algorithm as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/>
*/

import "core:os"
import "core:io"

import "../_tiger"

/*
    High level API
*/

DIGEST_SIZE_128 :: 16
DIGEST_SIZE_160 :: 20
DIGEST_SIZE_192 :: 24

// hash_string_128 will hash the given input and return the
// computed hash
hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
	return hash_bytes_128(transmute([]byte)(data))
}

// hash_bytes_128 will hash the given input and return the
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
	hash: [DIGEST_SIZE_128]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_128 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_128 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}

// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
	hash: [DIGEST_SIZE_128]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_tiger.update(&ctx, buf[:read])
		}
	}
	_tiger.final(&ctx, hash[:])
	return hash, true
}

// hash_file_128 will read the file provided by the given handle
// and compute a hash
hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
	if !load_at_once {
		return hash_stream_128(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_128(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_128]byte{}, false
}

hash_128 :: proc {
	hash_stream_128,
	hash_file_128,
	hash_bytes_128,
	hash_string_128,
	hash_bytes_to_buffer_128,
	hash_string_to_buffer_128,
}

// hash_string_160 will hash the given input and return the
// computed hash
hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
	return hash_bytes_160(transmute([]byte)(data))
}

// hash_bytes_160 will hash the given input and return the
// computed hash
hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
	hash: [DIGEST_SIZE_160]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_160 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_160 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}

// hash_stream_160 will read the stream in chunks and compute a
// hash from its contents
hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
	hash: [DIGEST_SIZE_160]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_tiger.update(&ctx, buf[:read])
		}
	}
	_tiger.final(&ctx, hash[:])
	return hash, true
}

// hash_file_160 will read the file provided by the given handle
// and compute a hash
hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
	if !load_at_once {
		return hash_stream_160(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_160(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_160]byte{}, false
}

hash_160 :: proc {
	hash_stream_160,
	hash_file_160,
	hash_bytes_160,
	hash_string_160,
	hash_bytes_to_buffer_160,
	hash_string_to_buffer_160,
}

// hash_string_192 will hash the given input and return the
// computed hash
hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
	return hash_bytes_192(transmute([]byte)(data))
}

// hash_bytes_192 will hash the given input and return the
// computed hash
hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
	hash: [DIGEST_SIZE_192]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer_192 will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer_192 :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer_192(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer_192 will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_192 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}

// hash_stream_192 will read the stream in chunks and compute a
// hash from its contents
hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
	hash: [DIGEST_SIZE_192]byte
	ctx: _tiger.Tiger_Context
	ctx.ver = 2
	_tiger.init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			_tiger.update(&ctx, buf[:read])
		}
	}
	_tiger.final(&ctx, hash[:])
	return hash, true
}

// hash_file_192 will read the file provided by the given handle
// and compute a hash
hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
	if !load_at_once {
		return hash_stream_192(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes_192(buf[:]), ok
		}
	}
	return [DIGEST_SIZE_192]byte{}, false
}

hash_192 :: proc {
	hash_stream_192,
	hash_file_192,
	hash_bytes_192,
	hash_string_192,
	hash_bytes_to_buffer_192,
	hash_string_to_buffer_192,
}

/*
    Low level API
*/

Tiger_Context :: _tiger.Tiger_Context

init :: proc(ctx: ^_tiger.Tiger_Context) {
	ctx.ver = 2
	_tiger.init(ctx)
}

update :: proc(ctx: ^_tiger.Tiger_Context, data: []byte) {
	_tiger.update(ctx, data)
}

final :: proc(ctx: ^_tiger.Tiger_Context, hash: []byte) {
	_tiger.final(ctx, hash)
}
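This package is the tiger wrapper byte for byte, with ctx.ver = 2; the shared _tiger core presumably keys its padding rule off ver (the published Tiger2 variant pads with 0x80 where Tiger1 uses 0x01), so the two packages produce different digests for every input. A sketch under the same import-path assumption as above:

package main

import "core:crypto/tiger"
import "core:crypto/tiger2"
import "core:fmt"

main :: proc() {
	// Same input, different padding rule in the core, different digests.
	fmt.printf("tiger1: %x\n", tiger.hash_string_192("Hellope"))
	fmt.printf("tiger2: %x\n", tiger2.hash_string_192("Hellope"))
}
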
@@ -1,146 +0,0 @@
package util

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Various utility procedures
*/

import "core:mem"
// Keep vet happy
_ :: mem

// @note(bp): this can replace the other two
cast_slice :: #force_inline proc "contextless" ($D: typeid/[]$DE, src: $S/[]$SE) -> D {
	src := src
	dst := (^mem.Raw_Slice)(&src)

	when size_of(DE) < size_of(SE) {
		when size_of(DE) % size_of(SE) == 0 {
			dst.len /= size_of(SE) / size_of(DE)
		} else {
			dst.len *= size_of(SE)
			dst.len /= size_of(DE)
		}
	} else when size_of(DE) > size_of(SE) {
		when size_of(DE) % size_of(SE) == 0 {
			dst.len *= size_of(DE) / size_of(SE)
		} else {
			dst.len *= size_of(SE)
			dst.len /= size_of(DE)
		}
	} else when size_of(DE) != size_of(SE) {
		#assert(size_of(DE) % size_of(SE) == 0, "Different size detected")
		dst.len *= size_of(SE)
		dst.len /= size_of(DE)
	}

	return (^D)(dst)^
}

bytes_to_slice :: #force_inline proc "contextless" ($T: typeid/[]$E, bytes: []byte) -> T {
	s := transmute(mem.Raw_Slice)bytes
	s.len /= size_of(E)
	return transmute(T)s
}

slice_to_bytes :: #force_inline proc "contextless" (slice: $E/[]$T) -> []byte {
	s := transmute(mem.Raw_Slice)slice
	s.len *= size_of(T)
	return transmute([]byte)s
}

ROTL16 :: #force_inline proc "contextless" (a, b: u16) -> u16 {
	return ((a << b) | (a >> (16 - b)))
}

ROTR16 :: #force_inline proc "contextless" (a, b: u16) -> u16 {
	return ((a >> b) | (a << (16 - b)))
}

ROTL32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 {
	s := uint(b) & 31
	return (a << s) | (a >> (32 - s))
}

ROTR32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 {
	s := uint(b) & 31
	return (a >> s) | (a << (32 - s))
}

ROTL64 :: #force_inline proc "contextless" (a, b: u64) -> u64 {
	return ((a << b) | (a >> (64 - b)))
}

ROTR64 :: #force_inline proc "contextless" (a, b: u64) -> u64 {
	return ((a >> b) | (a << (64 - b)))
}

ROTL128 :: #force_inline proc "contextless" (a, b, c, d: ^u32, n: uint) {
	a, b, c, d := a, b, c, d
	t := a^ >> (32 - n)
	a^ = ((a^ << n) | (b^ >> (32 - n)))
	b^ = ((b^ << n) | (c^ >> (32 - n)))
	c^ = ((c^ << n) | (d^ >> (32 - n)))
	d^ = ((d^ << n) | t)
}

U32_LE :: #force_inline proc "contextless" (b: []byte) -> u32 {
	return u32(b[0]) | u32(b[1]) << 8 | u32(b[2]) << 16 | u32(b[3]) << 24
}

U64_LE :: #force_inline proc "contextless" (b: []byte) -> u64 {
	return u64(b[0]) | u64(b[1]) << 8 | u64(b[2]) << 16 | u64(b[3]) << 24 |
		u64(b[4]) << 32 | u64(b[5]) << 40 | u64(b[6]) << 48 | u64(b[7]) << 56
}

U64_BE :: #force_inline proc "contextless" (b: []byte) -> u64 {
	return u64(b[7]) | u64(b[6]) << 8 | u64(b[5]) << 16 | u64(b[4]) << 24 |
		u64(b[3]) << 32 | u64(b[2]) << 40 | u64(b[1]) << 48 | u64(b[0]) << 56
}

PUT_U64_LE :: #force_inline proc "contextless" (b: []byte, v: u64) {
	b[0] = byte(v)
	b[1] = byte(v >> 8)
	b[2] = byte(v >> 16)
	b[3] = byte(v >> 24)
	b[4] = byte(v >> 32)
	b[5] = byte(v >> 40)
	b[6] = byte(v >> 48)
	b[7] = byte(v >> 56)
}

PUT_U32_LE :: #force_inline proc "contextless" (b: []byte, v: u32) {
	b[0] = byte(v)
	b[1] = byte(v >> 8)
	b[2] = byte(v >> 16)
	b[3] = byte(v >> 24)
}

PUT_U32_BE :: #force_inline proc "contextless" (b: []byte, v: u32) {
	b[0] = byte(v >> 24)
	b[1] = byte(v >> 16)
	b[2] = byte(v >> 8)
	b[3] = byte(v)
}

PUT_U64_BE :: #force_inline proc "contextless" (b: []byte, v: u64) {
	b[0] = byte(v >> 56)
	b[1] = byte(v >> 48)
	b[2] = byte(v >> 40)
	b[3] = byte(v >> 32)
	b[4] = byte(v >> 24)
	b[5] = byte(v >> 16)
	b[6] = byte(v >> 8)
	b[7] = byte(v)
}

XOR_BUF :: #force_inline proc "contextless" (input, output: []byte) {
	for i := 0; i < len(input); i += 1 {
		output[i] ~= input[i]
	}
}
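The 32-bit rotate helpers mask the shift count, so any b is legal and rotating left then right by the same count is the identity. A self-contained sketch with local stand-ins (the real procs lived in this internal util package, imported by the other files as "../util"):

package main

import "core:fmt"

// Local stand-ins mirroring the removed helpers, for illustration only.
ROTL32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 {
	s := uint(b) & 31
	return (a << s) | (a >> (32 - s))
}

ROTR32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 {
	s := uint(b) & 31
	return (a >> s) | (a << (32 - s))
}

main :: proc() {
	x: u32 = 0xdeadbeef
	assert(ROTR32(ROTL32(x, 7), 7) == x)  // round-trip is the identity
	assert(ROTL32(x, 39) == ROTL32(x, 7)) // shift counts are taken mod 32
	fmt.println("ok")
}
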
@@ -1,806 +0,0 @@
package whirlpool

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Implementation of the Whirlpool hashing algorithm, as defined in <https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html>
*/

import "core:os"
import "core:io"

import "../util"

/*
    High level API
*/

DIGEST_SIZE :: 64

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
	return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
	hash: [DIGEST_SIZE]byte
	ctx: Whirlpool_Context
	// init(&ctx) No-op
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}

// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
	hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
	ctx: Whirlpool_Context
	// init(&ctx) No-op
	update(&ctx, data)
	final(&ctx, hash)
}

// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
	hash: [DIGEST_SIZE]byte
	ctx: Whirlpool_Context
	// init(&ctx) No-op
	buf := make([]byte, 512)
	defer delete(buf)
	read := 1
	for read > 0 {
		read, _ = io.read(s, buf)
		if read > 0 {
			update(&ctx, buf[:read])
		}
	}
	final(&ctx, hash[:])
	return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
	if !load_at_once {
		return hash_stream(os.stream_from_handle(hd))
	} else {
		if buf, ok := os.read_entire_file(hd); ok {
			return hash_bytes(buf[:]), ok
		}
	}
	return [DIGEST_SIZE]byte{}, false
}

hash :: proc {
	hash_stream,
	hash_file,
	hash_bytes,
	hash_string,
	hash_bytes_to_buffer,
	hash_string_to_buffer,
}
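Because init is a no-op here (note the @(warning) below), a zero-valued Whirlpool_Context is already a valid starting state, which is why the procs above skip it. A streaming sketch, not part of the diff; the import path and file name are assumptions:

package main

import "core:crypto/whirlpool"
import "core:fmt"
import "core:os"

main :: proc() {
	// hash_file with load_at_once=false streams the handle in 512-byte chunks.
	hd, err := os.open("input.bin") // hypothetical input file
	if err != os.ERROR_NONE {
		return
	}
	defer os.close(hd)
	if digest, ok := whirlpool.hash_file(hd); ok {
		fmt.printf("%x\n", digest)
	}
}
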
/*
    Low level API
*/

@(warning="Init is a no-op for Whirlpool")
init :: proc(ctx: ^Whirlpool_Context) {
	// No action needed here
}

update :: proc(ctx: ^Whirlpool_Context, source: []byte) {
	source_pos: int
	nn := len(source)
	source_bits := u64(nn * 8)
	source_gap := u32((8 - (int(source_bits & 7))) & 7)
	buffer_rem := uint(ctx.buffer_bits & 7)
	b: u32

	for i, carry, value := 31, u32(0), u32(source_bits); i >= 0 && (carry != 0 || value != 0); i -= 1 {
		carry += u32(ctx.bitlength[i]) + (u32(value & 0xff))
		ctx.bitlength[i] = byte(carry)
		carry >>= 8
		value >>= 8
	}

	for source_bits > 8 {
		b = u32(u32((source[source_pos] << source_gap) & 0xff) | u32((source[source_pos+1] & 0xff) >> (8 - source_gap)))

		ctx.buffer[ctx.buffer_pos] |= u8(b >> buffer_rem)
		ctx.buffer_pos += 1
		ctx.buffer_bits += int(8 - buffer_rem)

		if ctx.buffer_bits == 512 {
			transform(ctx)
			ctx.buffer_bits = 0
			ctx.buffer_pos = 0
		}
		ctx.buffer[ctx.buffer_pos] = byte(b << (8 - buffer_rem))
		ctx.buffer_bits += int(buffer_rem)
		source_bits -= 8
		source_pos += 1
	}

	if source_bits > 0 {
		b = u32((source[source_pos] << source_gap) & 0xff)
		ctx.buffer[ctx.buffer_pos] |= byte(b) >> buffer_rem
	} else {
		b = 0
	}

	if u64(buffer_rem) + source_bits < 8 {
		ctx.buffer_bits += int(source_bits)
	} else {
		ctx.buffer_pos += 1
		ctx.buffer_bits += 8 - int(buffer_rem)
		source_bits -= u64(8 - buffer_rem)

		if ctx.buffer_bits == 512 {
			transform(ctx)
			ctx.buffer_bits = 0
			ctx.buffer_pos = 0
		}
		ctx.buffer[ctx.buffer_pos] = byte(b << (8 - buffer_rem))
		ctx.buffer_bits += int(source_bits)
	}
}

final :: proc(ctx: ^Whirlpool_Context, hash: []byte) {
	n := ctx
	n.buffer[n.buffer_pos] |= 0x80 >> (uint(n.buffer_bits) & 7)
	n.buffer_pos += 1

	if n.buffer_pos > 64 - 32 {
		if n.buffer_pos < 64 {
			for i := 0; i < 64 - n.buffer_pos; i += 1 {
				n.buffer[n.buffer_pos + i] = 0
			}
		}
		transform(ctx)
		n.buffer_pos = 0
	}

	if n.buffer_pos < 64 - 32 {
		for i := 0; i < (64 - 32) - n.buffer_pos; i += 1 {
			n.buffer[n.buffer_pos + i] = 0
		}
	}
	n.buffer_pos = 64 - 32

	for i := 0; i < 32; i += 1 {
		n.buffer[n.buffer_pos + i] = n.bitlength[i]
	}
	transform(ctx)

	for i := 0; i < 8; i += 1 {
		hash[i * 8] = byte(n.hash[i] >> 56)
		hash[i * 8 + 1] = byte(n.hash[i] >> 48)
		hash[i * 8 + 2] = byte(n.hash[i] >> 40)
		hash[i * 8 + 3] = byte(n.hash[i] >> 32)
		hash[i * 8 + 4] = byte(n.hash[i] >> 24)
		hash[i * 8 + 5] = byte(n.hash[i] >> 16)
		hash[i * 8 + 6] = byte(n.hash[i] >> 8)
		hash[i * 8 + 7] = byte(n.hash[i])
	}
}

/*
    Whirlpool implementation
*/

ROUNDS :: 10

Whirlpool_Context :: struct {
	bitlength: [32]byte,
	buffer: [64]byte,
	buffer_bits: int,
	buffer_pos: int,
	hash: [8]u64,
}

C0 := [256]u64 {
	0x18186018c07830d8, 0x23238c2305af4626, 0xc6c63fc67ef991b8, 0xe8e887e8136fcdfb,
	0x878726874ca113cb, 0xb8b8dab8a9626d11, 0x0101040108050209, 0x4f4f214f426e9e0d,
	0x3636d836adee6c9b, 0xa6a6a2a6590451ff, 0xd2d26fd2debdb90c, 0xf5f5f3f5fb06f70e,
	0x7979f979ef80f296, 0x6f6fa16f5fcede30, 0x91917e91fcef3f6d, 0x52525552aa07a4f8,
	0x60609d6027fdc047, 0xbcbccabc89766535, 0x9b9b569baccd2b37, 0x8e8e028e048c018a,
	0xa3a3b6a371155bd2, 0x0c0c300c603c186c, 0x7b7bf17bff8af684, 0x3535d435b5e16a80,
	0x1d1d741de8693af5, 0xe0e0a7e05347ddb3, 0xd7d77bd7f6acb321, 0xc2c22fc25eed999c,
	0x2e2eb82e6d965c43, 0x4b4b314b627a9629, 0xfefedffea321e15d, 0x575741578216aed5,
	0x15155415a8412abd, 0x7777c1779fb6eee8, 0x3737dc37a5eb6e92, 0xe5e5b3e57b56d79e,
	0x9f9f469f8cd92313, 0xf0f0e7f0d317fd23, 0x4a4a354a6a7f9420, 0xdada4fda9e95a944,
	0x58587d58fa25b0a2, 0xc9c903c906ca8fcf, 0x2929a429558d527c, 0x0a0a280a5022145a,
	0xb1b1feb1e14f7f50, 0xa0a0baa0691a5dc9, 0x6b6bb16b7fdad614, 0x85852e855cab17d9,
	0xbdbdcebd8173673c, 0x5d5d695dd234ba8f, 0x1010401080502090, 0xf4f4f7f4f303f507,
	0xcbcb0bcb16c08bdd, 0x3e3ef83eedc67cd3, 0x0505140528110a2d, 0x676781671fe6ce78,
	0xe4e4b7e47353d597, 0x27279c2725bb4e02, 0x4141194132588273, 0x8b8b168b2c9d0ba7,
	0xa7a7a6a7510153f6, 0x7d7de97dcf94fab2, 0x95956e95dcfb3749, 0xd8d847d88e9fad56,
	0xfbfbcbfb8b30eb70, 0xeeee9fee2371c1cd, 0x7c7ced7cc791f8bb, 0x6666856617e3cc71,
	0xdddd53dda68ea77b, 0x17175c17b84b2eaf, 0x4747014702468e45, 0x9e9e429e84dc211a,
	0xcaca0fca1ec589d4, 0x2d2db42d75995a58, 0xbfbfc6bf9179632e, 0x07071c07381b0e3f,
	0xadad8ead012347ac, 0x5a5a755aea2fb4b0, 0x838336836cb51bef, 0x3333cc3385ff66b6,
	0x636391633ff2c65c, 0x02020802100a0412, 0xaaaa92aa39384993, 0x7171d971afa8e2de,
	0xc8c807c80ecf8dc6, 0x19196419c87d32d1, 0x494939497270923b, 0xd9d943d9869aaf5f,
	0xf2f2eff2c31df931, 0xe3e3abe34b48dba8, 0x5b5b715be22ab6b9, 0x88881a8834920dbc,
	0x9a9a529aa4c8293e, 0x262698262dbe4c0b, 0x3232c8328dfa64bf, 0xb0b0fab0e94a7d59,
	0xe9e983e91b6acff2, 0x0f0f3c0f78331e77, 0xd5d573d5e6a6b733, 0x80803a8074ba1df4,
	0xbebec2be997c6127, 0xcdcd13cd26de87eb, 0x3434d034bde46889, 0x48483d487a759032,
	0xffffdbffab24e354, 0x7a7af57af78ff48d, 0x90907a90f4ea3d64, 0x5f5f615fc23ebe9d,
	0x202080201da0403d, 0x6868bd6867d5d00f, 0x1a1a681ad07234ca, 0xaeae82ae192c41b7,
	0xb4b4eab4c95e757d, 0x54544d549a19a8ce, 0x93937693ece53b7f, 0x222288220daa442f,
	0x64648d6407e9c863, 0xf1f1e3f1db12ff2a, 0x7373d173bfa2e6cc, 0x12124812905a2482,
	0x40401d403a5d807a, 0x0808200840281048, 0xc3c32bc356e89b95, 0xecec97ec337bc5df,
	0xdbdb4bdb9690ab4d, 0xa1a1bea1611f5fc0, 0x8d8d0e8d1c830791, 0x3d3df43df5c97ac8,
	0x97976697ccf1335b, 0x0000000000000000, 0xcfcf1bcf36d483f9, 0x2b2bac2b4587566e,
	0x7676c57697b3ece1, 0x8282328264b019e6, 0xd6d67fd6fea9b128, 0x1b1b6c1bd87736c3,
	0xb5b5eeb5c15b7774, 0xafaf86af112943be, 0x6a6ab56a77dfd41d, 0x50505d50ba0da0ea,
	0x45450945124c8a57, 0xf3f3ebf3cb18fb38, 0x3030c0309df060ad, 0xefef9bef2b74c3c4,
	0x3f3ffc3fe5c37eda, 0x55554955921caac7, 0xa2a2b2a2791059db, 0xeaea8fea0365c9e9,
	0x656589650fecca6a, 0xbabad2bab9686903, 0x2f2fbc2f65935e4a, 0xc0c027c04ee79d8e,
	0xdede5fdebe81a160, 0x1c1c701ce06c38fc, 0xfdfdd3fdbb2ee746, 0x4d4d294d52649a1f,
	0x92927292e4e03976, 0x7575c9758fbceafa, 0x06061806301e0c36, 0x8a8a128a249809ae,
	0xb2b2f2b2f940794b, 0xe6e6bfe66359d185, 0x0e0e380e70361c7e, 0x1f1f7c1ff8633ee7,
	0x6262956237f7c455, 0xd4d477d4eea3b53a, 0xa8a89aa829324d81, 0x96966296c4f43152,
	0xf9f9c3f99b3aef62, 0xc5c533c566f697a3, 0x2525942535b14a10, 0x59597959f220b2ab,
	0x84842a8454ae15d0, 0x7272d572b7a7e4c5, 0x3939e439d5dd72ec, 0x4c4c2d4c5a619816,
	0x5e5e655eca3bbc94, 0x7878fd78e785f09f, 0x3838e038ddd870e5, 0x8c8c0a8c14860598,
	0xd1d163d1c6b2bf17, 0xa5a5aea5410b57e4, 0xe2e2afe2434dd9a1, 0x616199612ff8c24e,
	0xb3b3f6b3f1457b42, 0x2121842115a54234, 0x9c9c4a9c94d62508, 0x1e1e781ef0663cee,
	0x4343114322528661, 0xc7c73bc776fc93b1, 0xfcfcd7fcb32be54f, 0x0404100420140824,
	0x51515951b208a2e3, 0x99995e99bcc72f25, 0x6d6da96d4fc4da22, 0x0d0d340d68391a65,
	0xfafacffa8335e979, 0xdfdf5bdfb684a369, 0x7e7ee57ed79bfca9, 0x242490243db44819,
	0x3b3bec3bc5d776fe, 0xabab96ab313d4b9a, 0xcece1fce3ed181f0, 0x1111441188552299,
	0x8f8f068f0c890383, 0x4e4e254e4a6b9c04, 0xb7b7e6b7d1517366, 0xebeb8beb0b60cbe0,
	0x3c3cf03cfdcc78c1, 0x81813e817cbf1ffd, 0x94946a94d4fe3540, 0xf7f7fbf7eb0cf31c,
	0xb9b9deb9a1676f18, 0x13134c13985f268b, 0x2c2cb02c7d9c5851, 0xd3d36bd3d6b8bb05,
	0xe7e7bbe76b5cd38c, 0x6e6ea56e57cbdc39, 0xc4c437c46ef395aa, 0x03030c03180f061b,
	0x565645568a13acdc, 0x44440d441a49885e, 0x7f7fe17fdf9efea0, 0xa9a99ea921374f88,
	0x2a2aa82a4d825467, 0xbbbbd6bbb16d6b0a, 0xc1c123c146e29f87, 0x53535153a202a6f1,
	0xdcdc57dcae8ba572, 0x0b0b2c0b58271653, 0x9d9d4e9d9cd32701, 0x6c6cad6c47c1d82b,
	0x3131c43195f562a4, 0x7474cd7487b9e8f3, 0xf6f6fff6e309f115, 0x464605460a438c4c,
	0xacac8aac092645a5, 0x89891e893c970fb5, 0x14145014a04428b4, 0xe1e1a3e15b42dfba,
	0x16165816b04e2ca6, 0x3a3ae83acdd274f7, 0x6969b9696fd0d206, 0x09092409482d1241,
	0x7070dd70a7ade0d7, 0xb6b6e2b6d954716f, 0xd0d067d0ceb7bd1e, 0xeded93ed3b7ec7d6,
	0xcccc17cc2edb85e2, 0x424215422a578468, 0x98985a98b4c22d2c, 0xa4a4aaa4490e55ed,
	0x2828a0285d885075, 0x5c5c6d5cda31b886, 0xf8f8c7f8933fed6b, 0x8686228644a411c2,
}

C1 := [256]u64 {
	0xd818186018c07830, 0x2623238c2305af46, 0xb8c6c63fc67ef991, 0xfbe8e887e8136fcd,
	0xcb878726874ca113, 0x11b8b8dab8a9626d, 0x0901010401080502, 0x0d4f4f214f426e9e,
	0x9b3636d836adee6c, 0xffa6a6a2a6590451, 0x0cd2d26fd2debdb9, 0x0ef5f5f3f5fb06f7,
|
||||
0x60609d6027fdc047, 0xbcbccabc89766535, 0x9b9b569baccd2b37, 0x8e8e028e048c018a,
|
||||
0xa3a3b6a371155bd2, 0x0c0c300c603c186c, 0x7b7bf17bff8af684, 0x3535d435b5e16a80,
|
||||
0x1d1d741de8693af5, 0xe0e0a7e05347ddb3, 0xd7d77bd7f6acb321, 0xc2c22fc25eed999c,
|
||||
0x2e2eb82e6d965c43, 0x4b4b314b627a9629, 0xfefedffea321e15d, 0x575741578216aed5,
|
||||
0x15155415a8412abd, 0x7777c1779fb6eee8, 0x3737dc37a5eb6e92, 0xe5e5b3e57b56d79e,
|
||||
0x9f9f469f8cd92313, 0xf0f0e7f0d317fd23, 0x4a4a354a6a7f9420, 0xdada4fda9e95a944,
|
||||
0x58587d58fa25b0a2, 0xc9c903c906ca8fcf, 0x2929a429558d527c, 0x0a0a280a5022145a,
|
||||
0xb1b1feb1e14f7f50, 0xa0a0baa0691a5dc9, 0x6b6bb16b7fdad614, 0x85852e855cab17d9,
|
||||
0xbdbdcebd8173673c, 0x5d5d695dd234ba8f, 0x1010401080502090, 0xf4f4f7f4f303f507,
|
||||
0xcbcb0bcb16c08bdd, 0x3e3ef83eedc67cd3, 0x0505140528110a2d, 0x676781671fe6ce78,
|
||||
0xe4e4b7e47353d597, 0x27279c2725bb4e02, 0x4141194132588273, 0x8b8b168b2c9d0ba7,
|
||||
0xa7a7a6a7510153f6, 0x7d7de97dcf94fab2, 0x95956e95dcfb3749, 0xd8d847d88e9fad56,
|
||||
0xfbfbcbfb8b30eb70, 0xeeee9fee2371c1cd, 0x7c7ced7cc791f8bb, 0x6666856617e3cc71,
|
||||
0xdddd53dda68ea77b, 0x17175c17b84b2eaf, 0x4747014702468e45, 0x9e9e429e84dc211a,
|
||||
0xcaca0fca1ec589d4, 0x2d2db42d75995a58, 0xbfbfc6bf9179632e, 0x07071c07381b0e3f,
|
||||
0xadad8ead012347ac, 0x5a5a755aea2fb4b0, 0x838336836cb51bef, 0x3333cc3385ff66b6,
|
||||
0x636391633ff2c65c, 0x02020802100a0412, 0xaaaa92aa39384993, 0x7171d971afa8e2de,
|
||||
0xc8c807c80ecf8dc6, 0x19196419c87d32d1, 0x494939497270923b, 0xd9d943d9869aaf5f,
|
||||
0xf2f2eff2c31df931, 0xe3e3abe34b48dba8, 0x5b5b715be22ab6b9, 0x88881a8834920dbc,
|
||||
0x9a9a529aa4c8293e, 0x262698262dbe4c0b, 0x3232c8328dfa64bf, 0xb0b0fab0e94a7d59,
|
||||
0xe9e983e91b6acff2, 0x0f0f3c0f78331e77, 0xd5d573d5e6a6b733, 0x80803a8074ba1df4,
|
||||
0xbebec2be997c6127, 0xcdcd13cd26de87eb, 0x3434d034bde46889, 0x48483d487a759032,
|
||||
0xffffdbffab24e354, 0x7a7af57af78ff48d, 0x90907a90f4ea3d64, 0x5f5f615fc23ebe9d,
|
||||
0x202080201da0403d, 0x6868bd6867d5d00f, 0x1a1a681ad07234ca, 0xaeae82ae192c41b7,
|
||||
0xb4b4eab4c95e757d, 0x54544d549a19a8ce, 0x93937693ece53b7f, 0x222288220daa442f,
|
||||
0x64648d6407e9c863, 0xf1f1e3f1db12ff2a, 0x7373d173bfa2e6cc, 0x12124812905a2482,
|
||||
0x40401d403a5d807a, 0x0808200840281048, 0xc3c32bc356e89b95, 0xecec97ec337bc5df,
|
||||
0xdbdb4bdb9690ab4d, 0xa1a1bea1611f5fc0, 0x8d8d0e8d1c830791, 0x3d3df43df5c97ac8,
|
||||
0x97976697ccf1335b, 0x0000000000000000, 0xcfcf1bcf36d483f9, 0x2b2bac2b4587566e,
|
||||
0x7676c57697b3ece1, 0x8282328264b019e6, 0xd6d67fd6fea9b128, 0x1b1b6c1bd87736c3,
|
||||
0xb5b5eeb5c15b7774, 0xafaf86af112943be, 0x6a6ab56a77dfd41d, 0x50505d50ba0da0ea,
|
||||
0x45450945124c8a57, 0xf3f3ebf3cb18fb38, 0x3030c0309df060ad, 0xefef9bef2b74c3c4,
|
||||
0x3f3ffc3fe5c37eda, 0x55554955921caac7, 0xa2a2b2a2791059db, 0xeaea8fea0365c9e9,
|
||||
0x656589650fecca6a, 0xbabad2bab9686903, 0x2f2fbc2f65935e4a, 0xc0c027c04ee79d8e,
|
||||
0xdede5fdebe81a160, 0x1c1c701ce06c38fc, 0xfdfdd3fdbb2ee746, 0x4d4d294d52649a1f,
|
||||
0x92927292e4e03976, 0x7575c9758fbceafa, 0x06061806301e0c36, 0x8a8a128a249809ae,
|
||||
0xb2b2f2b2f940794b, 0xe6e6bfe66359d185, 0x0e0e380e70361c7e, 0x1f1f7c1ff8633ee7,
|
||||
0x6262956237f7c455, 0xd4d477d4eea3b53a, 0xa8a89aa829324d81, 0x96966296c4f43152,
|
||||
0xf9f9c3f99b3aef62, 0xc5c533c566f697a3, 0x2525942535b14a10, 0x59597959f220b2ab,
|
||||
0x84842a8454ae15d0, 0x7272d572b7a7e4c5, 0x3939e439d5dd72ec, 0x4c4c2d4c5a619816,
|
||||
0x5e5e655eca3bbc94, 0x7878fd78e785f09f, 0x3838e038ddd870e5, 0x8c8c0a8c14860598,
|
||||
0xd1d163d1c6b2bf17, 0xa5a5aea5410b57e4, 0xe2e2afe2434dd9a1, 0x616199612ff8c24e,
|
||||
0xb3b3f6b3f1457b42, 0x2121842115a54234, 0x9c9c4a9c94d62508, 0x1e1e781ef0663cee,
|
||||
0x4343114322528661, 0xc7c73bc776fc93b1, 0xfcfcd7fcb32be54f, 0x0404100420140824,
|
||||
0x51515951b208a2e3, 0x99995e99bcc72f25, 0x6d6da96d4fc4da22, 0x0d0d340d68391a65,
|
||||
0xfafacffa8335e979, 0xdfdf5bdfb684a369, 0x7e7ee57ed79bfca9, 0x242490243db44819,
|
||||
0x3b3bec3bc5d776fe, 0xabab96ab313d4b9a, 0xcece1fce3ed181f0, 0x1111441188552299,
|
||||
0x8f8f068f0c890383, 0x4e4e254e4a6b9c04, 0xb7b7e6b7d1517366, 0xebeb8beb0b60cbe0,
|
||||
0x3c3cf03cfdcc78c1, 0x81813e817cbf1ffd, 0x94946a94d4fe3540, 0xf7f7fbf7eb0cf31c,
|
||||
0xb9b9deb9a1676f18, 0x13134c13985f268b, 0x2c2cb02c7d9c5851, 0xd3d36bd3d6b8bb05,
|
||||
0xe7e7bbe76b5cd38c, 0x6e6ea56e57cbdc39, 0xc4c437c46ef395aa, 0x03030c03180f061b,
|
||||
0x565645568a13acdc, 0x44440d441a49885e, 0x7f7fe17fdf9efea0, 0xa9a99ea921374f88,
|
||||
0x2a2aa82a4d825467, 0xbbbbd6bbb16d6b0a, 0xc1c123c146e29f87, 0x53535153a202a6f1,
|
||||
0xdcdc57dcae8ba572, 0x0b0b2c0b58271653, 0x9d9d4e9d9cd32701, 0x6c6cad6c47c1d82b,
|
||||
0x3131c43195f562a4, 0x7474cd7487b9e8f3, 0xf6f6fff6e309f115, 0x464605460a438c4c,
|
||||
0xacac8aac092645a5, 0x89891e893c970fb5, 0x14145014a04428b4, 0xe1e1a3e15b42dfba,
|
||||
0x16165816b04e2ca6, 0x3a3ae83acdd274f7, 0x6969b9696fd0d206, 0x09092409482d1241,
|
||||
0x7070dd70a7ade0d7, 0xb6b6e2b6d954716f, 0xd0d067d0ceb7bd1e, 0xeded93ed3b7ec7d6,
|
||||
0xcccc17cc2edb85e2, 0x424215422a578468, 0x98985a98b4c22d2c, 0xa4a4aaa4490e55ed,
|
||||
0x2828a0285d885075, 0x5c5c6d5cda31b886, 0xf8f8c7f8933fed6b, 0x8686228644a411c2,
|
||||
}
|
||||
|
||||
C1 := [256]u64 {
	0xd818186018c07830, 0x2623238c2305af46, 0xb8c6c63fc67ef991, 0xfbe8e887e8136fcd,
	0xcb878726874ca113, 0x11b8b8dab8a9626d, 0x0901010401080502, 0x0d4f4f214f426e9e,
	0x9b3636d836adee6c, 0xffa6a6a2a6590451, 0x0cd2d26fd2debdb9, 0x0ef5f5f3f5fb06f7,
	0x967979f979ef80f2, 0x306f6fa16f5fcede, 0x6d91917e91fcef3f, 0xf852525552aa07a4,
	0x4760609d6027fdc0, 0x35bcbccabc897665, 0x379b9b569baccd2b, 0x8a8e8e028e048c01,
	0xd2a3a3b6a371155b, 0x6c0c0c300c603c18, 0x847b7bf17bff8af6, 0x803535d435b5e16a,
	0xf51d1d741de8693a, 0xb3e0e0a7e05347dd, 0x21d7d77bd7f6acb3, 0x9cc2c22fc25eed99,
	0x432e2eb82e6d965c, 0x294b4b314b627a96, 0x5dfefedffea321e1, 0xd5575741578216ae,
	0xbd15155415a8412a, 0xe87777c1779fb6ee, 0x923737dc37a5eb6e, 0x9ee5e5b3e57b56d7,
	0x139f9f469f8cd923, 0x23f0f0e7f0d317fd, 0x204a4a354a6a7f94, 0x44dada4fda9e95a9,
	0xa258587d58fa25b0, 0xcfc9c903c906ca8f, 0x7c2929a429558d52, 0x5a0a0a280a502214,
	0x50b1b1feb1e14f7f, 0xc9a0a0baa0691a5d, 0x146b6bb16b7fdad6, 0xd985852e855cab17,
	0x3cbdbdcebd817367, 0x8f5d5d695dd234ba, 0x9010104010805020, 0x07f4f4f7f4f303f5,
	0xddcbcb0bcb16c08b, 0xd33e3ef83eedc67c, 0x2d0505140528110a, 0x78676781671fe6ce,
	0x97e4e4b7e47353d5, 0x0227279c2725bb4e, 0x7341411941325882, 0xa78b8b168b2c9d0b,
	0xf6a7a7a6a7510153, 0xb27d7de97dcf94fa, 0x4995956e95dcfb37, 0x56d8d847d88e9fad,
	0x70fbfbcbfb8b30eb, 0xcdeeee9fee2371c1, 0xbb7c7ced7cc791f8, 0x716666856617e3cc,
	0x7bdddd53dda68ea7, 0xaf17175c17b84b2e, 0x454747014702468e, 0x1a9e9e429e84dc21,
	0xd4caca0fca1ec589, 0x582d2db42d75995a, 0x2ebfbfc6bf917963, 0x3f07071c07381b0e,
	0xacadad8ead012347, 0xb05a5a755aea2fb4, 0xef838336836cb51b, 0xb63333cc3385ff66,
	0x5c636391633ff2c6, 0x1202020802100a04, 0x93aaaa92aa393849, 0xde7171d971afa8e2,
	0xc6c8c807c80ecf8d, 0xd119196419c87d32, 0x3b49493949727092, 0x5fd9d943d9869aaf,
	0x31f2f2eff2c31df9, 0xa8e3e3abe34b48db, 0xb95b5b715be22ab6, 0xbc88881a8834920d,
	0x3e9a9a529aa4c829, 0x0b262698262dbe4c, 0xbf3232c8328dfa64, 0x59b0b0fab0e94a7d,
	0xf2e9e983e91b6acf, 0x770f0f3c0f78331e, 0x33d5d573d5e6a6b7, 0xf480803a8074ba1d,
	0x27bebec2be997c61, 0xebcdcd13cd26de87, 0x893434d034bde468, 0x3248483d487a7590,
	0x54ffffdbffab24e3, 0x8d7a7af57af78ff4, 0x6490907a90f4ea3d, 0x9d5f5f615fc23ebe,
	0x3d202080201da040, 0x0f6868bd6867d5d0, 0xca1a1a681ad07234, 0xb7aeae82ae192c41,
	0x7db4b4eab4c95e75, 0xce54544d549a19a8, 0x7f93937693ece53b, 0x2f222288220daa44,
	0x6364648d6407e9c8, 0x2af1f1e3f1db12ff, 0xcc7373d173bfa2e6, 0x8212124812905a24,
	0x7a40401d403a5d80, 0x4808082008402810, 0x95c3c32bc356e89b, 0xdfecec97ec337bc5,
	0x4ddbdb4bdb9690ab, 0xc0a1a1bea1611f5f, 0x918d8d0e8d1c8307, 0xc83d3df43df5c97a,
	0x5b97976697ccf133, 0x0000000000000000, 0xf9cfcf1bcf36d483, 0x6e2b2bac2b458756,
	0xe17676c57697b3ec, 0xe68282328264b019, 0x28d6d67fd6fea9b1, 0xc31b1b6c1bd87736,
	0x74b5b5eeb5c15b77, 0xbeafaf86af112943, 0x1d6a6ab56a77dfd4, 0xea50505d50ba0da0,
	0x5745450945124c8a, 0x38f3f3ebf3cb18fb, 0xad3030c0309df060, 0xc4efef9bef2b74c3,
	0xda3f3ffc3fe5c37e, 0xc755554955921caa, 0xdba2a2b2a2791059, 0xe9eaea8fea0365c9,
	0x6a656589650fecca, 0x03babad2bab96869, 0x4a2f2fbc2f65935e, 0x8ec0c027c04ee79d,
	0x60dede5fdebe81a1, 0xfc1c1c701ce06c38, 0x46fdfdd3fdbb2ee7, 0x1f4d4d294d52649a,
	0x7692927292e4e039, 0xfa7575c9758fbcea, 0x3606061806301e0c, 0xae8a8a128a249809,
	0x4bb2b2f2b2f94079, 0x85e6e6bfe66359d1, 0x7e0e0e380e70361c, 0xe71f1f7c1ff8633e,
	0x556262956237f7c4, 0x3ad4d477d4eea3b5, 0x81a8a89aa829324d, 0x5296966296c4f431,
	0x62f9f9c3f99b3aef, 0xa3c5c533c566f697, 0x102525942535b14a, 0xab59597959f220b2,
	0xd084842a8454ae15, 0xc57272d572b7a7e4, 0xec3939e439d5dd72, 0x164c4c2d4c5a6198,
	0x945e5e655eca3bbc, 0x9f7878fd78e785f0, 0xe53838e038ddd870, 0x988c8c0a8c148605,
	0x17d1d163d1c6b2bf, 0xe4a5a5aea5410b57, 0xa1e2e2afe2434dd9, 0x4e616199612ff8c2,
	0x42b3b3f6b3f1457b, 0x342121842115a542, 0x089c9c4a9c94d625, 0xee1e1e781ef0663c,
	0x6143431143225286, 0xb1c7c73bc776fc93, 0x4ffcfcd7fcb32be5, 0x2404041004201408,
	0xe351515951b208a2, 0x2599995e99bcc72f, 0x226d6da96d4fc4da, 0x650d0d340d68391a,
	0x79fafacffa8335e9, 0x69dfdf5bdfb684a3, 0xa97e7ee57ed79bfc, 0x19242490243db448,
	0xfe3b3bec3bc5d776, 0x9aabab96ab313d4b, 0xf0cece1fce3ed181, 0x9911114411885522,
	0x838f8f068f0c8903, 0x044e4e254e4a6b9c, 0x66b7b7e6b7d15173, 0xe0ebeb8beb0b60cb,
	0xc13c3cf03cfdcc78, 0xfd81813e817cbf1f, 0x4094946a94d4fe35, 0x1cf7f7fbf7eb0cf3,
	0x18b9b9deb9a1676f, 0x8b13134c13985f26, 0x512c2cb02c7d9c58, 0x05d3d36bd3d6b8bb,
	0x8ce7e7bbe76b5cd3, 0x396e6ea56e57cbdc, 0xaac4c437c46ef395, 0x1b03030c03180f06,
	0xdc565645568a13ac, 0x5e44440d441a4988, 0xa07f7fe17fdf9efe, 0x88a9a99ea921374f,
	0x672a2aa82a4d8254, 0x0abbbbd6bbb16d6b, 0x87c1c123c146e29f, 0xf153535153a202a6,
	0x72dcdc57dcae8ba5, 0x530b0b2c0b582716, 0x019d9d4e9d9cd327, 0x2b6c6cad6c47c1d8,
	0xa43131c43195f562, 0xf37474cd7487b9e8, 0x15f6f6fff6e309f1, 0x4c464605460a438c,
	0xa5acac8aac092645, 0xb589891e893c970f, 0xb414145014a04428, 0xbae1e1a3e15b42df,
	0xa616165816b04e2c, 0xf73a3ae83acdd274, 0x066969b9696fd0d2, 0x4109092409482d12,
	0xd77070dd70a7ade0, 0x6fb6b6e2b6d95471, 0x1ed0d067d0ceb7bd, 0xd6eded93ed3b7ec7,
	0xe2cccc17cc2edb85, 0x68424215422a5784, 0x2c98985a98b4c22d, 0xeda4a4aaa4490e55,
	0x752828a0285d8850, 0x865c5c6d5cda31b8, 0x6bf8f8c7f8933fed, 0xc28686228644a411,
}

C2 := [256]u64 {
	0x30d818186018c078, 0x462623238c2305af, 0x91b8c6c63fc67ef9, 0xcdfbe8e887e8136f,
	0x13cb878726874ca1, 0x6d11b8b8dab8a962, 0x0209010104010805, 0x9e0d4f4f214f426e,
	0x6c9b3636d836adee, 0x51ffa6a6a2a65904, 0xb90cd2d26fd2debd, 0xf70ef5f5f3f5fb06,
	0xf2967979f979ef80, 0xde306f6fa16f5fce, 0x3f6d91917e91fcef, 0xa4f852525552aa07,
	0xc04760609d6027fd, 0x6535bcbccabc8976, 0x2b379b9b569baccd, 0x018a8e8e028e048c,
	0x5bd2a3a3b6a37115, 0x186c0c0c300c603c, 0xf6847b7bf17bff8a, 0x6a803535d435b5e1,
	0x3af51d1d741de869, 0xddb3e0e0a7e05347, 0xb321d7d77bd7f6ac, 0x999cc2c22fc25eed,
	0x5c432e2eb82e6d96, 0x96294b4b314b627a, 0xe15dfefedffea321, 0xaed5575741578216,
	0x2abd15155415a841, 0xeee87777c1779fb6, 0x6e923737dc37a5eb, 0xd79ee5e5b3e57b56,
	0x23139f9f469f8cd9, 0xfd23f0f0e7f0d317, 0x94204a4a354a6a7f, 0xa944dada4fda9e95,
	0xb0a258587d58fa25, 0x8fcfc9c903c906ca, 0x527c2929a429558d, 0x145a0a0a280a5022,
	0x7f50b1b1feb1e14f, 0x5dc9a0a0baa0691a, 0xd6146b6bb16b7fda, 0x17d985852e855cab,
	0x673cbdbdcebd8173, 0xba8f5d5d695dd234, 0x2090101040108050, 0xf507f4f4f7f4f303,
	0x8bddcbcb0bcb16c0, 0x7cd33e3ef83eedc6, 0x0a2d050514052811, 0xce78676781671fe6,
	0xd597e4e4b7e47353, 0x4e0227279c2725bb, 0x8273414119413258, 0x0ba78b8b168b2c9d,
	0x53f6a7a7a6a75101, 0xfab27d7de97dcf94, 0x374995956e95dcfb, 0xad56d8d847d88e9f,
	0xeb70fbfbcbfb8b30, 0xc1cdeeee9fee2371, 0xf8bb7c7ced7cc791, 0xcc716666856617e3,
	0xa77bdddd53dda68e, 0x2eaf17175c17b84b, 0x8e45474701470246, 0x211a9e9e429e84dc,
	0x89d4caca0fca1ec5, 0x5a582d2db42d7599, 0x632ebfbfc6bf9179, 0x0e3f07071c07381b,
	0x47acadad8ead0123, 0xb4b05a5a755aea2f, 0x1bef838336836cb5, 0x66b63333cc3385ff,
	0xc65c636391633ff2, 0x041202020802100a, 0x4993aaaa92aa3938, 0xe2de7171d971afa8,
	0x8dc6c8c807c80ecf, 0x32d119196419c87d, 0x923b494939497270, 0xaf5fd9d943d9869a,
	0xf931f2f2eff2c31d, 0xdba8e3e3abe34b48, 0xb6b95b5b715be22a, 0x0dbc88881a883492,
	0x293e9a9a529aa4c8, 0x4c0b262698262dbe, 0x64bf3232c8328dfa, 0x7d59b0b0fab0e94a,
	0xcff2e9e983e91b6a, 0x1e770f0f3c0f7833, 0xb733d5d573d5e6a6, 0x1df480803a8074ba,
	0x6127bebec2be997c, 0x87ebcdcd13cd26de, 0x68893434d034bde4, 0x903248483d487a75,
	0xe354ffffdbffab24, 0xf48d7a7af57af78f, 0x3d6490907a90f4ea, 0xbe9d5f5f615fc23e,
	0x403d202080201da0, 0xd00f6868bd6867d5, 0x34ca1a1a681ad072, 0x41b7aeae82ae192c,
	0x757db4b4eab4c95e, 0xa8ce54544d549a19, 0x3b7f93937693ece5, 0x442f222288220daa,
	0xc86364648d6407e9, 0xff2af1f1e3f1db12, 0xe6cc7373d173bfa2, 0x248212124812905a,
	0x807a40401d403a5d, 0x1048080820084028, 0x9b95c3c32bc356e8, 0xc5dfecec97ec337b,
	0xab4ddbdb4bdb9690, 0x5fc0a1a1bea1611f, 0x07918d8d0e8d1c83, 0x7ac83d3df43df5c9,
	0x335b97976697ccf1, 0x0000000000000000, 0x83f9cfcf1bcf36d4, 0x566e2b2bac2b4587,
	0xece17676c57697b3, 0x19e68282328264b0, 0xb128d6d67fd6fea9, 0x36c31b1b6c1bd877,
	0x7774b5b5eeb5c15b, 0x43beafaf86af1129, 0xd41d6a6ab56a77df, 0xa0ea50505d50ba0d,
	0x8a5745450945124c, 0xfb38f3f3ebf3cb18, 0x60ad3030c0309df0, 0xc3c4efef9bef2b74,
	0x7eda3f3ffc3fe5c3, 0xaac755554955921c, 0x59dba2a2b2a27910, 0xc9e9eaea8fea0365,
	0xca6a656589650fec, 0x6903babad2bab968, 0x5e4a2f2fbc2f6593, 0x9d8ec0c027c04ee7,
	0xa160dede5fdebe81, 0x38fc1c1c701ce06c, 0xe746fdfdd3fdbb2e, 0x9a1f4d4d294d5264,
	0x397692927292e4e0, 0xeafa7575c9758fbc, 0x0c3606061806301e, 0x09ae8a8a128a2498,
	0x794bb2b2f2b2f940, 0xd185e6e6bfe66359, 0x1c7e0e0e380e7036, 0x3ee71f1f7c1ff863,
	0xc4556262956237f7, 0xb53ad4d477d4eea3, 0x4d81a8a89aa82932, 0x315296966296c4f4,
	0xef62f9f9c3f99b3a, 0x97a3c5c533c566f6, 0x4a102525942535b1, 0xb2ab59597959f220,
	0x15d084842a8454ae, 0xe4c57272d572b7a7, 0x72ec3939e439d5dd, 0x98164c4c2d4c5a61,
	0xbc945e5e655eca3b, 0xf09f7878fd78e785, 0x70e53838e038ddd8, 0x05988c8c0a8c1486,
	0xbf17d1d163d1c6b2, 0x57e4a5a5aea5410b, 0xd9a1e2e2afe2434d, 0xc24e616199612ff8,
	0x7b42b3b3f6b3f145, 0x42342121842115a5, 0x25089c9c4a9c94d6, 0x3cee1e1e781ef066,
	0x8661434311432252, 0x93b1c7c73bc776fc, 0xe54ffcfcd7fcb32b, 0x0824040410042014,
	0xa2e351515951b208, 0x2f2599995e99bcc7, 0xda226d6da96d4fc4, 0x1a650d0d340d6839,
	0xe979fafacffa8335, 0xa369dfdf5bdfb684, 0xfca97e7ee57ed79b, 0x4819242490243db4,
	0x76fe3b3bec3bc5d7, 0x4b9aabab96ab313d, 0x81f0cece1fce3ed1, 0x2299111144118855,
	0x03838f8f068f0c89, 0x9c044e4e254e4a6b, 0x7366b7b7e6b7d151, 0xcbe0ebeb8beb0b60,
	0x78c13c3cf03cfdcc, 0x1ffd81813e817cbf, 0x354094946a94d4fe, 0xf31cf7f7fbf7eb0c,
	0x6f18b9b9deb9a167, 0x268b13134c13985f, 0x58512c2cb02c7d9c, 0xbb05d3d36bd3d6b8,
	0xd38ce7e7bbe76b5c, 0xdc396e6ea56e57cb, 0x95aac4c437c46ef3, 0x061b03030c03180f,
	0xacdc565645568a13, 0x885e44440d441a49, 0xfea07f7fe17fdf9e, 0x4f88a9a99ea92137,
	0x54672a2aa82a4d82, 0x6b0abbbbd6bbb16d, 0x9f87c1c123c146e2, 0xa6f153535153a202,
	0xa572dcdc57dcae8b, 0x16530b0b2c0b5827, 0x27019d9d4e9d9cd3, 0xd82b6c6cad6c47c1,
	0x62a43131c43195f5, 0xe8f37474cd7487b9, 0xf115f6f6fff6e309, 0x8c4c464605460a43,
	0x45a5acac8aac0926, 0x0fb589891e893c97, 0x28b414145014a044, 0xdfbae1e1a3e15b42,
	0x2ca616165816b04e, 0x74f73a3ae83acdd2, 0xd2066969b9696fd0, 0x124109092409482d,
	0xe0d77070dd70a7ad, 0x716fb6b6e2b6d954, 0xbd1ed0d067d0ceb7, 0xc7d6eded93ed3b7e,
	0x85e2cccc17cc2edb, 0x8468424215422a57, 0x2d2c98985a98b4c2, 0x55eda4a4aaa4490e,
	0x50752828a0285d88, 0xb8865c5c6d5cda31, 0xed6bf8f8c7f8933f, 0x11c28686228644a4,
}

C3 := [256]u64 {
	0x7830d818186018c0, 0xaf462623238c2305, 0xf991b8c6c63fc67e, 0x6fcdfbe8e887e813,
	0xa113cb878726874c, 0x626d11b8b8dab8a9, 0x0502090101040108, 0x6e9e0d4f4f214f42,
	0xee6c9b3636d836ad, 0x0451ffa6a6a2a659, 0xbdb90cd2d26fd2de, 0x06f70ef5f5f3f5fb,
	0x80f2967979f979ef, 0xcede306f6fa16f5f, 0xef3f6d91917e91fc, 0x07a4f852525552aa,
	0xfdc04760609d6027, 0x766535bcbccabc89, 0xcd2b379b9b569bac, 0x8c018a8e8e028e04,
	0x155bd2a3a3b6a371, 0x3c186c0c0c300c60, 0x8af6847b7bf17bff, 0xe16a803535d435b5,
	0x693af51d1d741de8, 0x47ddb3e0e0a7e053, 0xacb321d7d77bd7f6, 0xed999cc2c22fc25e,
	0x965c432e2eb82e6d, 0x7a96294b4b314b62, 0x21e15dfefedffea3, 0x16aed55757415782,
	0x412abd15155415a8, 0xb6eee87777c1779f, 0xeb6e923737dc37a5, 0x56d79ee5e5b3e57b,
	0xd923139f9f469f8c, 0x17fd23f0f0e7f0d3, 0x7f94204a4a354a6a, 0x95a944dada4fda9e,
	0x25b0a258587d58fa, 0xca8fcfc9c903c906, 0x8d527c2929a42955, 0x22145a0a0a280a50,
	0x4f7f50b1b1feb1e1, 0x1a5dc9a0a0baa069, 0xdad6146b6bb16b7f, 0xab17d985852e855c,
	0x73673cbdbdcebd81, 0x34ba8f5d5d695dd2, 0x5020901010401080, 0x03f507f4f4f7f4f3,
	0xc08bddcbcb0bcb16, 0xc67cd33e3ef83eed, 0x110a2d0505140528, 0xe6ce78676781671f,
	0x53d597e4e4b7e473, 0xbb4e0227279c2725, 0x5882734141194132, 0x9d0ba78b8b168b2c,
	0x0153f6a7a7a6a751, 0x94fab27d7de97dcf, 0xfb374995956e95dc, 0x9fad56d8d847d88e,
	0x30eb70fbfbcbfb8b, 0x71c1cdeeee9fee23, 0x91f8bb7c7ced7cc7, 0xe3cc716666856617,
	0x8ea77bdddd53dda6, 0x4b2eaf17175c17b8, 0x468e454747014702, 0xdc211a9e9e429e84,
	0xc589d4caca0fca1e, 0x995a582d2db42d75, 0x79632ebfbfc6bf91, 0x1b0e3f07071c0738,
	0x2347acadad8ead01, 0x2fb4b05a5a755aea, 0xb51bef838336836c, 0xff66b63333cc3385,
	0xf2c65c636391633f, 0x0a04120202080210, 0x384993aaaa92aa39, 0xa8e2de7171d971af,
	0xcf8dc6c8c807c80e, 0x7d32d119196419c8, 0x70923b4949394972, 0x9aaf5fd9d943d986,
	0x1df931f2f2eff2c3, 0x48dba8e3e3abe34b, 0x2ab6b95b5b715be2, 0x920dbc88881a8834,
	0xc8293e9a9a529aa4, 0xbe4c0b262698262d, 0xfa64bf3232c8328d, 0x4a7d59b0b0fab0e9,
	0x6acff2e9e983e91b, 0x331e770f0f3c0f78, 0xa6b733d5d573d5e6, 0xba1df480803a8074,
	0x7c6127bebec2be99, 0xde87ebcdcd13cd26, 0xe468893434d034bd, 0x75903248483d487a,
	0x24e354ffffdbffab, 0x8ff48d7a7af57af7, 0xea3d6490907a90f4, 0x3ebe9d5f5f615fc2,
	0xa0403d202080201d, 0xd5d00f6868bd6867, 0x7234ca1a1a681ad0, 0x2c41b7aeae82ae19,
	0x5e757db4b4eab4c9, 0x19a8ce54544d549a, 0xe53b7f93937693ec, 0xaa442f222288220d,
	0xe9c86364648d6407, 0x12ff2af1f1e3f1db, 0xa2e6cc7373d173bf, 0x5a24821212481290,
	0x5d807a40401d403a, 0x2810480808200840, 0xe89b95c3c32bc356, 0x7bc5dfecec97ec33,
	0x90ab4ddbdb4bdb96, 0x1f5fc0a1a1bea161, 0x8307918d8d0e8d1c, 0xc97ac83d3df43df5,
	0xf1335b97976697cc, 0x0000000000000000, 0xd483f9cfcf1bcf36, 0x87566e2b2bac2b45,
	0xb3ece17676c57697, 0xb019e68282328264, 0xa9b128d6d67fd6fe, 0x7736c31b1b6c1bd8,
	0x5b7774b5b5eeb5c1, 0x2943beafaf86af11, 0xdfd41d6a6ab56a77, 0x0da0ea50505d50ba,
	0x4c8a574545094512, 0x18fb38f3f3ebf3cb, 0xf060ad3030c0309d, 0x74c3c4efef9bef2b,
	0xc37eda3f3ffc3fe5, 0x1caac75555495592, 0x1059dba2a2b2a279, 0x65c9e9eaea8fea03,
	0xecca6a656589650f, 0x686903babad2bab9, 0x935e4a2f2fbc2f65, 0xe79d8ec0c027c04e,
	0x81a160dede5fdebe, 0x6c38fc1c1c701ce0, 0x2ee746fdfdd3fdbb, 0x649a1f4d4d294d52,
	0xe0397692927292e4, 0xbceafa7575c9758f, 0x1e0c360606180630, 0x9809ae8a8a128a24,
	0x40794bb2b2f2b2f9, 0x59d185e6e6bfe663, 0x361c7e0e0e380e70, 0x633ee71f1f7c1ff8,
	0xf7c4556262956237, 0xa3b53ad4d477d4ee, 0x324d81a8a89aa829, 0xf4315296966296c4,
	0x3aef62f9f9c3f99b, 0xf697a3c5c533c566, 0xb14a102525942535, 0x20b2ab59597959f2,
	0xae15d084842a8454, 0xa7e4c57272d572b7, 0xdd72ec3939e439d5, 0x6198164c4c2d4c5a,
	0x3bbc945e5e655eca, 0x85f09f7878fd78e7, 0xd870e53838e038dd, 0x8605988c8c0a8c14,
	0xb2bf17d1d163d1c6, 0x0b57e4a5a5aea541, 0x4dd9a1e2e2afe243, 0xf8c24e616199612f,
	0x457b42b3b3f6b3f1, 0xa542342121842115, 0xd625089c9c4a9c94, 0x663cee1e1e781ef0,
	0x5286614343114322, 0xfc93b1c7c73bc776, 0x2be54ffcfcd7fcb3, 0x1408240404100420,
	0x08a2e351515951b2, 0xc72f2599995e99bc, 0xc4da226d6da96d4f, 0x391a650d0d340d68,
	0x35e979fafacffa83, 0x84a369dfdf5bdfb6, 0x9bfca97e7ee57ed7, 0xb44819242490243d,
	0xd776fe3b3bec3bc5, 0x3d4b9aabab96ab31, 0xd181f0cece1fce3e, 0x5522991111441188,
	0x8903838f8f068f0c, 0x6b9c044e4e254e4a, 0x517366b7b7e6b7d1, 0x60cbe0ebeb8beb0b,
	0xcc78c13c3cf03cfd, 0xbf1ffd81813e817c, 0xfe354094946a94d4, 0x0cf31cf7f7fbf7eb,
	0x676f18b9b9deb9a1, 0x5f268b13134c1398, 0x9c58512c2cb02c7d, 0xb8bb05d3d36bd3d6,
	0x5cd38ce7e7bbe76b, 0xcbdc396e6ea56e57, 0xf395aac4c437c46e, 0x0f061b03030c0318,
	0x13acdc565645568a, 0x49885e44440d441a, 0x9efea07f7fe17fdf, 0x374f88a9a99ea921,
	0x8254672a2aa82a4d, 0x6d6b0abbbbd6bbb1, 0xe29f87c1c123c146, 0x02a6f153535153a2,
	0x8ba572dcdc57dcae, 0x2716530b0b2c0b58, 0xd327019d9d4e9d9c, 0xc1d82b6c6cad6c47,
	0xf562a43131c43195, 0xb9e8f37474cd7487, 0x09f115f6f6fff6e3, 0x438c4c464605460a,
	0x2645a5acac8aac09, 0x970fb589891e893c, 0x4428b414145014a0, 0x42dfbae1e1a3e15b,
	0x4e2ca616165816b0, 0xd274f73a3ae83acd, 0xd0d2066969b9696f, 0x2d12410909240948,
	0xade0d77070dd70a7, 0x54716fb6b6e2b6d9, 0xb7bd1ed0d067d0ce, 0x7ec7d6eded93ed3b,
	0xdb85e2cccc17cc2e, 0x578468424215422a, 0xc22d2c98985a98b4, 0x0e55eda4a4aaa449,
	0x8850752828a0285d, 0x31b8865c5c6d5cda, 0x3fed6bf8f8c7f893, 0xa411c28686228644,
}

C4 := [256]u64 {
	0xc07830d818186018, 0x05af462623238c23, 0x7ef991b8c6c63fc6, 0x136fcdfbe8e887e8,
	0x4ca113cb87872687, 0xa9626d11b8b8dab8, 0x0805020901010401, 0x426e9e0d4f4f214f,
	0xadee6c9b3636d836, 0x590451ffa6a6a2a6, 0xdebdb90cd2d26fd2, 0xfb06f70ef5f5f3f5,
	0xef80f2967979f979, 0x5fcede306f6fa16f, 0xfcef3f6d91917e91, 0xaa07a4f852525552,
	0x27fdc04760609d60, 0x89766535bcbccabc, 0xaccd2b379b9b569b, 0x048c018a8e8e028e,
	0x71155bd2a3a3b6a3, 0x603c186c0c0c300c, 0xff8af6847b7bf17b, 0xb5e16a803535d435,
	0xe8693af51d1d741d, 0x5347ddb3e0e0a7e0, 0xf6acb321d7d77bd7, 0x5eed999cc2c22fc2,
	0x6d965c432e2eb82e, 0x627a96294b4b314b, 0xa321e15dfefedffe, 0x8216aed557574157,
	0xa8412abd15155415, 0x9fb6eee87777c177, 0xa5eb6e923737dc37, 0x7b56d79ee5e5b3e5,
	0x8cd923139f9f469f, 0xd317fd23f0f0e7f0, 0x6a7f94204a4a354a, 0x9e95a944dada4fda,
	0xfa25b0a258587d58, 0x06ca8fcfc9c903c9, 0x558d527c2929a429, 0x5022145a0a0a280a,
	0xe14f7f50b1b1feb1, 0x691a5dc9a0a0baa0, 0x7fdad6146b6bb16b, 0x5cab17d985852e85,
	0x8173673cbdbdcebd, 0xd234ba8f5d5d695d, 0x8050209010104010, 0xf303f507f4f4f7f4,
	0x16c08bddcbcb0bcb, 0xedc67cd33e3ef83e, 0x28110a2d05051405, 0x1fe6ce7867678167,
	0x7353d597e4e4b7e4, 0x25bb4e0227279c27, 0x3258827341411941, 0x2c9d0ba78b8b168b,
	0x510153f6a7a7a6a7, 0xcf94fab27d7de97d, 0xdcfb374995956e95, 0x8e9fad56d8d847d8,
	0x8b30eb70fbfbcbfb, 0x2371c1cdeeee9fee, 0xc791f8bb7c7ced7c, 0x17e3cc7166668566,
	0xa68ea77bdddd53dd, 0xb84b2eaf17175c17, 0x02468e4547470147, 0x84dc211a9e9e429e,
	0x1ec589d4caca0fca, 0x75995a582d2db42d, 0x9179632ebfbfc6bf, 0x381b0e3f07071c07,
	0x012347acadad8ead, 0xea2fb4b05a5a755a, 0x6cb51bef83833683, 0x85ff66b63333cc33,
	0x3ff2c65c63639163, 0x100a041202020802, 0x39384993aaaa92aa, 0xafa8e2de7171d971,
	0x0ecf8dc6c8c807c8, 0xc87d32d119196419, 0x7270923b49493949, 0x869aaf5fd9d943d9,
	0xc31df931f2f2eff2, 0x4b48dba8e3e3abe3, 0xe22ab6b95b5b715b, 0x34920dbc88881a88,
	0xa4c8293e9a9a529a, 0x2dbe4c0b26269826, 0x8dfa64bf3232c832, 0xe94a7d59b0b0fab0,
	0x1b6acff2e9e983e9, 0x78331e770f0f3c0f, 0xe6a6b733d5d573d5, 0x74ba1df480803a80,
	0x997c6127bebec2be, 0x26de87ebcdcd13cd, 0xbde468893434d034, 0x7a75903248483d48,
	0xab24e354ffffdbff, 0xf78ff48d7a7af57a, 0xf4ea3d6490907a90, 0xc23ebe9d5f5f615f,
	0x1da0403d20208020, 0x67d5d00f6868bd68, 0xd07234ca1a1a681a, 0x192c41b7aeae82ae,
	0xc95e757db4b4eab4, 0x9a19a8ce54544d54, 0xece53b7f93937693, 0x0daa442f22228822,
	0x07e9c86364648d64, 0xdb12ff2af1f1e3f1, 0xbfa2e6cc7373d173, 0x905a248212124812,
	0x3a5d807a40401d40, 0x4028104808082008, 0x56e89b95c3c32bc3, 0x337bc5dfecec97ec,
	0x9690ab4ddbdb4bdb, 0x611f5fc0a1a1bea1, 0x1c8307918d8d0e8d, 0xf5c97ac83d3df43d,
	0xccf1335b97976697, 0x0000000000000000, 0x36d483f9cfcf1bcf, 0x4587566e2b2bac2b,
	0x97b3ece17676c576, 0x64b019e682823282, 0xfea9b128d6d67fd6, 0xd87736c31b1b6c1b,
	0xc15b7774b5b5eeb5, 0x112943beafaf86af, 0x77dfd41d6a6ab56a, 0xba0da0ea50505d50,
	0x124c8a5745450945, 0xcb18fb38f3f3ebf3, 0x9df060ad3030c030, 0x2b74c3c4efef9bef,
	0xe5c37eda3f3ffc3f, 0x921caac755554955, 0x791059dba2a2b2a2, 0x0365c9e9eaea8fea,
	0x0fecca6a65658965, 0xb9686903babad2ba, 0x65935e4a2f2fbc2f, 0x4ee79d8ec0c027c0,
	0xbe81a160dede5fde, 0xe06c38fc1c1c701c, 0xbb2ee746fdfdd3fd, 0x52649a1f4d4d294d,
	0xe4e0397692927292, 0x8fbceafa7575c975, 0x301e0c3606061806, 0x249809ae8a8a128a,
	0xf940794bb2b2f2b2, 0x6359d185e6e6bfe6, 0x70361c7e0e0e380e, 0xf8633ee71f1f7c1f,
	0x37f7c45562629562, 0xeea3b53ad4d477d4, 0x29324d81a8a89aa8, 0xc4f4315296966296,
	0x9b3aef62f9f9c3f9, 0x66f697a3c5c533c5, 0x35b14a1025259425, 0xf220b2ab59597959,
	0x54ae15d084842a84, 0xb7a7e4c57272d572, 0xd5dd72ec3939e439, 0x5a6198164c4c2d4c,
	0xca3bbc945e5e655e, 0xe785f09f7878fd78, 0xddd870e53838e038, 0x148605988c8c0a8c,
	0xc6b2bf17d1d163d1, 0x410b57e4a5a5aea5, 0x434dd9a1e2e2afe2, 0x2ff8c24e61619961,
	0xf1457b42b3b3f6b3, 0x15a5423421218421, 0x94d625089c9c4a9c, 0xf0663cee1e1e781e,
	0x2252866143431143, 0x76fc93b1c7c73bc7, 0xb32be54ffcfcd7fc, 0x2014082404041004,
	0xb208a2e351515951, 0xbcc72f2599995e99, 0x4fc4da226d6da96d, 0x68391a650d0d340d,
	0x8335e979fafacffa, 0xb684a369dfdf5bdf, 0xd79bfca97e7ee57e, 0x3db4481924249024,
	0xc5d776fe3b3bec3b, 0x313d4b9aabab96ab, 0x3ed181f0cece1fce, 0x8855229911114411,
	0x0c8903838f8f068f, 0x4a6b9c044e4e254e, 0xd1517366b7b7e6b7, 0x0b60cbe0ebeb8beb,
	0xfdcc78c13c3cf03c, 0x7cbf1ffd81813e81, 0xd4fe354094946a94, 0xeb0cf31cf7f7fbf7,
	0xa1676f18b9b9deb9, 0x985f268b13134c13, 0x7d9c58512c2cb02c, 0xd6b8bb05d3d36bd3,
	0x6b5cd38ce7e7bbe7, 0x57cbdc396e6ea56e, 0x6ef395aac4c437c4, 0x180f061b03030c03,
	0x8a13acdc56564556, 0x1a49885e44440d44, 0xdf9efea07f7fe17f, 0x21374f88a9a99ea9,
	0x4d8254672a2aa82a, 0xb16d6b0abbbbd6bb, 0x46e29f87c1c123c1, 0xa202a6f153535153,
	0xae8ba572dcdc57dc, 0x582716530b0b2c0b, 0x9cd327019d9d4e9d, 0x47c1d82b6c6cad6c,
	0x95f562a43131c431, 0x87b9e8f37474cd74, 0xe309f115f6f6fff6, 0x0a438c4c46460546,
	0x092645a5acac8aac, 0x3c970fb589891e89, 0xa04428b414145014, 0x5b42dfbae1e1a3e1,
	0xb04e2ca616165816, 0xcdd274f73a3ae83a, 0x6fd0d2066969b969, 0x482d124109092409,
	0xa7ade0d77070dd70, 0xd954716fb6b6e2b6, 0xceb7bd1ed0d067d0, 0x3b7ec7d6eded93ed,
	0x2edb85e2cccc17cc, 0x2a57846842421542, 0xb4c22d2c98985a98, 0x490e55eda4a4aaa4,
	0x5d8850752828a028, 0xda31b8865c5c6d5c, 0x933fed6bf8f8c7f8, 0x44a411c286862286,
}

C5 := [256]u64 {
	0x18c07830d8181860, 0x2305af462623238c, 0xc67ef991b8c6c63f, 0xe8136fcdfbe8e887,
	0x874ca113cb878726, 0xb8a9626d11b8b8da, 0x0108050209010104, 0x4f426e9e0d4f4f21,
	0x36adee6c9b3636d8, 0xa6590451ffa6a6a2, 0xd2debdb90cd2d26f, 0xf5fb06f70ef5f5f3,
	0x79ef80f2967979f9, 0x6f5fcede306f6fa1, 0x91fcef3f6d91917e, 0x52aa07a4f8525255,
	0x6027fdc04760609d, 0xbc89766535bcbcca, 0x9baccd2b379b9b56, 0x8e048c018a8e8e02,
	0xa371155bd2a3a3b6, 0x0c603c186c0c0c30, 0x7bff8af6847b7bf1, 0x35b5e16a803535d4,
	0x1de8693af51d1d74, 0xe05347ddb3e0e0a7, 0xd7f6acb321d7d77b, 0xc25eed999cc2c22f,
	0x2e6d965c432e2eb8, 0x4b627a96294b4b31, 0xfea321e15dfefedf, 0x578216aed5575741,
	0x15a8412abd151554, 0x779fb6eee87777c1, 0x37a5eb6e923737dc, 0xe57b56d79ee5e5b3,
	0x9f8cd923139f9f46, 0xf0d317fd23f0f0e7, 0x4a6a7f94204a4a35, 0xda9e95a944dada4f,
	0x58fa25b0a258587d, 0xc906ca8fcfc9c903, 0x29558d527c2929a4, 0x0a5022145a0a0a28,
	0xb1e14f7f50b1b1fe, 0xa0691a5dc9a0a0ba, 0x6b7fdad6146b6bb1, 0x855cab17d985852e,
	0xbd8173673cbdbdce, 0x5dd234ba8f5d5d69, 0x1080502090101040, 0xf4f303f507f4f4f7,
	0xcb16c08bddcbcb0b, 0x3eedc67cd33e3ef8, 0x0528110a2d050514, 0x671fe6ce78676781,
	0xe47353d597e4e4b7, 0x2725bb4e0227279c, 0x4132588273414119, 0x8b2c9d0ba78b8b16,
	0xa7510153f6a7a7a6, 0x7dcf94fab27d7de9, 0x95dcfb374995956e, 0xd88e9fad56d8d847,
	0xfb8b30eb70fbfbcb, 0xee2371c1cdeeee9f, 0x7cc791f8bb7c7ced, 0x6617e3cc71666685,
	0xdda68ea77bdddd53, 0x17b84b2eaf17175c, 0x4702468e45474701, 0x9e84dc211a9e9e42,
	0xca1ec589d4caca0f, 0x2d75995a582d2db4, 0xbf9179632ebfbfc6, 0x07381b0e3f07071c,
	0xad012347acadad8e, 0x5aea2fb4b05a5a75, 0x836cb51bef838336, 0x3385ff66b63333cc,
	0x633ff2c65c636391, 0x02100a0412020208, 0xaa39384993aaaa92, 0x71afa8e2de7171d9,
	0xc80ecf8dc6c8c807, 0x19c87d32d1191964, 0x497270923b494939, 0xd9869aaf5fd9d943,
	0xf2c31df931f2f2ef, 0xe34b48dba8e3e3ab, 0x5be22ab6b95b5b71, 0x8834920dbc88881a,
	0x9aa4c8293e9a9a52, 0x262dbe4c0b262698, 0x328dfa64bf3232c8, 0xb0e94a7d59b0b0fa,
	0xe91b6acff2e9e983, 0x0f78331e770f0f3c, 0xd5e6a6b733d5d573, 0x8074ba1df480803a,
	0xbe997c6127bebec2, 0xcd26de87ebcdcd13, 0x34bde468893434d0, 0x487a75903248483d,
	0xffab24e354ffffdb, 0x7af78ff48d7a7af5, 0x90f4ea3d6490907a, 0x5fc23ebe9d5f5f61,
	0x201da0403d202080, 0x6867d5d00f6868bd, 0x1ad07234ca1a1a68, 0xae192c41b7aeae82,
	0xb4c95e757db4b4ea, 0x549a19a8ce54544d, 0x93ece53b7f939376, 0x220daa442f222288,
	0x6407e9c86364648d, 0xf1db12ff2af1f1e3, 0x73bfa2e6cc7373d1, 0x12905a2482121248,
	0x403a5d807a40401d, 0x0840281048080820, 0xc356e89b95c3c32b, 0xec337bc5dfecec97,
	0xdb9690ab4ddbdb4b, 0xa1611f5fc0a1a1be, 0x8d1c8307918d8d0e, 0x3df5c97ac83d3df4,
	0x97ccf1335b979766, 0x0000000000000000, 0xcf36d483f9cfcf1b, 0x2b4587566e2b2bac,
	0x7697b3ece17676c5, 0x8264b019e6828232, 0xd6fea9b128d6d67f, 0x1bd87736c31b1b6c,
	0xb5c15b7774b5b5ee, 0xaf112943beafaf86, 0x6a77dfd41d6a6ab5, 0x50ba0da0ea50505d,
	0x45124c8a57454509, 0xf3cb18fb38f3f3eb, 0x309df060ad3030c0, 0xef2b74c3c4efef9b,
	0x3fe5c37eda3f3ffc, 0x55921caac7555549, 0xa2791059dba2a2b2, 0xea0365c9e9eaea8f,
	0x650fecca6a656589, 0xbab9686903babad2, 0x2f65935e4a2f2fbc, 0xc04ee79d8ec0c027,
	0xdebe81a160dede5f, 0x1ce06c38fc1c1c70, 0xfdbb2ee746fdfdd3, 0x4d52649a1f4d4d29,
	0x92e4e03976929272, 0x758fbceafa7575c9, 0x06301e0c36060618, 0x8a249809ae8a8a12,
	0xb2f940794bb2b2f2, 0xe66359d185e6e6bf, 0x0e70361c7e0e0e38, 0x1ff8633ee71f1f7c,
	0x6237f7c455626295, 0xd4eea3b53ad4d477, 0xa829324d81a8a89a, 0x96c4f43152969662,
	0xf99b3aef62f9f9c3, 0xc566f697a3c5c533, 0x2535b14a10252594, 0x59f220b2ab595979,
	0x8454ae15d084842a, 0x72b7a7e4c57272d5, 0x39d5dd72ec3939e4, 0x4c5a6198164c4c2d,
	0x5eca3bbc945e5e65, 0x78e785f09f7878fd, 0x38ddd870e53838e0, 0x8c148605988c8c0a,
	0xd1c6b2bf17d1d163, 0xa5410b57e4a5a5ae, 0xe2434dd9a1e2e2af, 0x612ff8c24e616199,
	0xb3f1457b42b3b3f6, 0x2115a54234212184, 0x9c94d625089c9c4a, 0x1ef0663cee1e1e78,
	0x4322528661434311, 0xc776fc93b1c7c73b, 0xfcb32be54ffcfcd7, 0x0420140824040410,
	0x51b208a2e3515159, 0x99bcc72f2599995e, 0x6d4fc4da226d6da9, 0x0d68391a650d0d34,
	0xfa8335e979fafacf, 0xdfb684a369dfdf5b, 0x7ed79bfca97e7ee5, 0x243db44819242490,
	0x3bc5d776fe3b3bec, 0xab313d4b9aabab96, 0xce3ed181f0cece1f, 0x1188552299111144,
	0x8f0c8903838f8f06, 0x4e4a6b9c044e4e25, 0xb7d1517366b7b7e6, 0xeb0b60cbe0ebeb8b,
	0x3cfdcc78c13c3cf0, 0x817cbf1ffd81813e, 0x94d4fe354094946a, 0xf7eb0cf31cf7f7fb,
	0xb9a1676f18b9b9de, 0x13985f268b13134c, 0x2c7d9c58512c2cb0, 0xd3d6b8bb05d3d36b,
	0xe76b5cd38ce7e7bb, 0x6e57cbdc396e6ea5, 0xc46ef395aac4c437, 0x03180f061b03030c,
	0x568a13acdc565645, 0x441a49885e44440d, 0x7fdf9efea07f7fe1, 0xa921374f88a9a99e,
	0x2a4d8254672a2aa8, 0xbbb16d6b0abbbbd6, 0xc146e29f87c1c123, 0x53a202a6f1535351,
	0xdcae8ba572dcdc57, 0x0b582716530b0b2c, 0x9d9cd327019d9d4e, 0x6c47c1d82b6c6cad,
	0x3195f562a43131c4, 0x7487b9e8f37474cd, 0xf6e309f115f6f6ff, 0x460a438c4c464605,
	0xac092645a5acac8a, 0x893c970fb589891e, 0x14a04428b4141450, 0xe15b42dfbae1e1a3,
	0x16b04e2ca6161658, 0x3acdd274f73a3ae8, 0x696fd0d2066969b9, 0x09482d1241090924,
	0x70a7ade0d77070dd, 0xb6d954716fb6b6e2, 0xd0ceb7bd1ed0d067, 0xed3b7ec7d6eded93,
	0xcc2edb85e2cccc17, 0x422a578468424215, 0x98b4c22d2c98985a, 0xa4490e55eda4a4aa,
	0x285d8850752828a0, 0x5cda31b8865c5c6d, 0xf8933fed6bf8f8c7, 0x8644a411c2868622,
}

C6 := [256]u64 {
	0x6018c07830d81818, 0x8c2305af46262323, 0x3fc67ef991b8c6c6, 0x87e8136fcdfbe8e8,
	0x26874ca113cb8787, 0xdab8a9626d11b8b8, 0x0401080502090101, 0x214f426e9e0d4f4f,
	0xd836adee6c9b3636, 0xa2a6590451ffa6a6, 0x6fd2debdb90cd2d2, 0xf3f5fb06f70ef5f5,
	0xf979ef80f2967979, 0xa16f5fcede306f6f, 0x7e91fcef3f6d9191, 0x5552aa07a4f85252,
	0x9d6027fdc0476060, 0xcabc89766535bcbc, 0x569baccd2b379b9b, 0x028e048c018a8e8e,
	0xb6a371155bd2a3a3, 0x300c603c186c0c0c, 0xf17bff8af6847b7b, 0xd435b5e16a803535,
	0x741de8693af51d1d, 0xa7e05347ddb3e0e0, 0x7bd7f6acb321d7d7, 0x2fc25eed999cc2c2,
	0xb82e6d965c432e2e, 0x314b627a96294b4b, 0xdffea321e15dfefe, 0x41578216aed55757,
	0x5415a8412abd1515, 0xc1779fb6eee87777, 0xdc37a5eb6e923737, 0xb3e57b56d79ee5e5,
	0x469f8cd923139f9f, 0xe7f0d317fd23f0f0, 0x354a6a7f94204a4a, 0x4fda9e95a944dada,
	0x7d58fa25b0a25858, 0x03c906ca8fcfc9c9, 0xa429558d527c2929, 0x280a5022145a0a0a,
	0xfeb1e14f7f50b1b1, 0xbaa0691a5dc9a0a0, 0xb16b7fdad6146b6b, 0x2e855cab17d98585,
	0xcebd8173673cbdbd, 0x695dd234ba8f5d5d, 0x4010805020901010, 0xf7f4f303f507f4f4,
	0x0bcb16c08bddcbcb, 0xf83eedc67cd33e3e, 0x140528110a2d0505, 0x81671fe6ce786767,
	0xb7e47353d597e4e4, 0x9c2725bb4e022727, 0x1941325882734141, 0x168b2c9d0ba78b8b,
	0xa6a7510153f6a7a7, 0xe97dcf94fab27d7d, 0x6e95dcfb37499595, 0x47d88e9fad56d8d8,
	0xcbfb8b30eb70fbfb, 0x9fee2371c1cdeeee, 0xed7cc791f8bb7c7c, 0x856617e3cc716666,
	0x53dda68ea77bdddd, 0x5c17b84b2eaf1717, 0x014702468e454747, 0x429e84dc211a9e9e,
	0x0fca1ec589d4caca, 0xb42d75995a582d2d, 0xc6bf9179632ebfbf, 0x1c07381b0e3f0707,
	0x8ead012347acadad, 0x755aea2fb4b05a5a, 0x36836cb51bef8383, 0xcc3385ff66b63333,
	0x91633ff2c65c6363, 0x0802100a04120202, 0x92aa39384993aaaa, 0xd971afa8e2de7171,
	0x07c80ecf8dc6c8c8, 0x6419c87d32d11919, 0x39497270923b4949, 0x43d9869aaf5fd9d9,
	0xeff2c31df931f2f2, 0xabe34b48dba8e3e3, 0x715be22ab6b95b5b, 0x1a8834920dbc8888,
	0x529aa4c8293e9a9a, 0x98262dbe4c0b2626, 0xc8328dfa64bf3232, 0xfab0e94a7d59b0b0,
	0x83e91b6acff2e9e9, 0x3c0f78331e770f0f, 0x73d5e6a6b733d5d5, 0x3a8074ba1df48080,
	0xc2be997c6127bebe, 0x13cd26de87ebcdcd, 0xd034bde468893434, 0x3d487a7590324848,
	0xdbffab24e354ffff, 0xf57af78ff48d7a7a, 0x7a90f4ea3d649090, 0x615fc23ebe9d5f5f,
	0x80201da0403d2020, 0xbd6867d5d00f6868, 0x681ad07234ca1a1a, 0x82ae192c41b7aeae,
	0xeab4c95e757db4b4, 0x4d549a19a8ce5454, 0x7693ece53b7f9393, 0x88220daa442f2222,
	0x8d6407e9c8636464, 0xe3f1db12ff2af1f1, 0xd173bfa2e6cc7373, 0x4812905a24821212,
	0x1d403a5d807a4040, 0x2008402810480808, 0x2bc356e89b95c3c3, 0x97ec337bc5dfecec,
	0x4bdb9690ab4ddbdb, 0xbea1611f5fc0a1a1, 0x0e8d1c8307918d8d, 0xf43df5c97ac83d3d,
	0x6697ccf1335b9797, 0x0000000000000000, 0x1bcf36d483f9cfcf, 0xac2b4587566e2b2b,
	0xc57697b3ece17676, 0x328264b019e68282, 0x7fd6fea9b128d6d6, 0x6c1bd87736c31b1b,
	0xeeb5c15b7774b5b5, 0x86af112943beafaf, 0xb56a77dfd41d6a6a, 0x5d50ba0da0ea5050,
	0x0945124c8a574545, 0xebf3cb18fb38f3f3, 0xc0309df060ad3030, 0x9bef2b74c3c4efef,
	0xfc3fe5c37eda3f3f, 0x4955921caac75555, 0xb2a2791059dba2a2, 0x8fea0365c9e9eaea,
	0x89650fecca6a6565, 0xd2bab9686903baba, 0xbc2f65935e4a2f2f, 0x27c04ee79d8ec0c0,
	0x5fdebe81a160dede, 0x701ce06c38fc1c1c, 0xd3fdbb2ee746fdfd, 0x294d52649a1f4d4d,
	0x7292e4e039769292, 0xc9758fbceafa7575, 0x1806301e0c360606, 0x128a249809ae8a8a,
	0xf2b2f940794bb2b2, 0xbfe66359d185e6e6, 0x380e70361c7e0e0e, 0x7c1ff8633ee71f1f,
	0x956237f7c4556262, 0x77d4eea3b53ad4d4, 0x9aa829324d81a8a8, 0x6296c4f431529696,
	0xc3f99b3aef62f9f9, 0x33c566f697a3c5c5, 0x942535b14a102525, 0x7959f220b2ab5959,
	0x2a8454ae15d08484, 0xd572b7a7e4c57272, 0xe439d5dd72ec3939, 0x2d4c5a6198164c4c,
	0x655eca3bbc945e5e, 0xfd78e785f09f7878, 0xe038ddd870e53838, 0x0a8c148605988c8c,
	0x63d1c6b2bf17d1d1, 0xaea5410b57e4a5a5, 0xafe2434dd9a1e2e2, 0x99612ff8c24e6161,
	0xf6b3f1457b42b3b3, 0x842115a542342121, 0x4a9c94d625089c9c, 0x781ef0663cee1e1e,
	0x1143225286614343, 0x3bc776fc93b1c7c7, 0xd7fcb32be54ffcfc, 0x1004201408240404,
	0x5951b208a2e35151, 0x5e99bcc72f259999, 0xa96d4fc4da226d6d, 0x340d68391a650d0d,
	0xcffa8335e979fafa, 0x5bdfb684a369dfdf, 0xe57ed79bfca97e7e, 0x90243db448192424,
	0xec3bc5d776fe3b3b, 0x96ab313d4b9aabab, 0x1fce3ed181f0cece, 0x4411885522991111,
	0x068f0c8903838f8f, 0x254e4a6b9c044e4e, 0xe6b7d1517366b7b7, 0x8beb0b60cbe0ebeb,
	0xf03cfdcc78c13c3c, 0x3e817cbf1ffd8181, 0x6a94d4fe35409494, 0xfbf7eb0cf31cf7f7,
	0xdeb9a1676f18b9b9, 0x4c13985f268b1313, 0xb02c7d9c58512c2c, 0x6bd3d6b8bb05d3d3,
	0xbbe76b5cd38ce7e7, 0xa56e57cbdc396e6e, 0x37c46ef395aac4c4, 0x0c03180f061b0303,
	0x45568a13acdc5656, 0x0d441a49885e4444, 0xe17fdf9efea07f7f, 0x9ea921374f88a9a9,
	0xa82a4d8254672a2a, 0xd6bbb16d6b0abbbb, 0x23c146e29f87c1c1, 0x5153a202a6f15353,
	0x57dcae8ba572dcdc, 0x2c0b582716530b0b, 0x4e9d9cd327019d9d, 0xad6c47c1d82b6c6c,
	0xc43195f562a43131, 0xcd7487b9e8f37474, 0xfff6e309f115f6f6, 0x05460a438c4c4646,
	0x8aac092645a5acac, 0x1e893c970fb58989, 0x5014a04428b41414, 0xa3e15b42dfbae1e1,
	0x5816b04e2ca61616, 0xe83acdd274f73a3a, 0xb9696fd0d2066969, 0x2409482d12410909,
	0xdd70a7ade0d77070, 0xe2b6d954716fb6b6, 0x67d0ceb7bd1ed0d0, 0x93ed3b7ec7d6eded,
	0x17cc2edb85e2cccc, 0x15422a5784684242, 0x5a98b4c22d2c9898, 0xaaa4490e55eda4a4,
	0xa0285d8850752828, 0x6d5cda31b8865c5c, 0xc7f8933fed6bf8f8, 0x228644a411c28686,
}

C7 := [256]u64 {
	0x186018c07830d818, 0x238c2305af462623, 0xc63fc67ef991b8c6, 0xe887e8136fcdfbe8,
	0x8726874ca113cb87, 0xb8dab8a9626d11b8, 0x0104010805020901, 0x4f214f426e9e0d4f,
	0x36d836adee6c9b36, 0xa6a2a6590451ffa6, 0xd26fd2debdb90cd2, 0xf5f3f5fb06f70ef5,
	0x79f979ef80f29679, 0x6fa16f5fcede306f, 0x917e91fcef3f6d91, 0x525552aa07a4f852,
	0x609d6027fdc04760, 0xbccabc89766535bc, 0x9b569baccd2b379b, 0x8e028e048c018a8e,
	0xa3b6a371155bd2a3, 0x0c300c603c186c0c, 0x7bf17bff8af6847b, 0x35d435b5e16a8035,
	0x1d741de8693af51d, 0xe0a7e05347ddb3e0, 0xd77bd7f6acb321d7, 0xc22fc25eed999cc2,
	0x2eb82e6d965c432e, 0x4b314b627a96294b, 0xfedffea321e15dfe, 0x5741578216aed557,
	0x155415a8412abd15, 0x77c1779fb6eee877, 0x37dc37a5eb6e9237, 0xe5b3e57b56d79ee5,
	0x9f469f8cd923139f, 0xf0e7f0d317fd23f0, 0x4a354a6a7f94204a, 0xda4fda9e95a944da,
	0x587d58fa25b0a258, 0xc903c906ca8fcfc9, 0x29a429558d527c29, 0x0a280a5022145a0a,
	0xb1feb1e14f7f50b1, 0xa0baa0691a5dc9a0, 0x6bb16b7fdad6146b, 0x852e855cab17d985,
	0xbdcebd8173673cbd, 0x5d695dd234ba8f5d, 0x1040108050209010, 0xf4f7f4f303f507f4,
	0xcb0bcb16c08bddcb, 0x3ef83eedc67cd33e, 0x05140528110a2d05, 0x6781671fe6ce7867,
	0xe4b7e47353d597e4, 0x279c2725bb4e0227, 0x4119413258827341, 0x8b168b2c9d0ba78b,
	0xa7a6a7510153f6a7, 0x7de97dcf94fab27d, 0x956e95dcfb374995, 0xd847d88e9fad56d8,
	0xfbcbfb8b30eb70fb, 0xee9fee2371c1cdee, 0x7ced7cc791f8bb7c, 0x66856617e3cc7166,
	0xdd53dda68ea77bdd, 0x175c17b84b2eaf17, 0x47014702468e4547, 0x9e429e84dc211a9e,
	0xca0fca1ec589d4ca, 0x2db42d75995a582d, 0xbfc6bf9179632ebf, 0x071c07381b0e3f07,
	0xad8ead012347acad, 0x5a755aea2fb4b05a, 0x8336836cb51bef83, 0x33cc3385ff66b633,
	0x6391633ff2c65c63, 0x020802100a041202, 0xaa92aa39384993aa, 0x71d971afa8e2de71,
	0xc807c80ecf8dc6c8, 0x196419c87d32d119, 0x4939497270923b49, 0xd943d9869aaf5fd9,
	0xf2eff2c31df931f2, 0xe3abe34b48dba8e3, 0x5b715be22ab6b95b, 0x881a8834920dbc88,
	0x9a529aa4c8293e9a, 0x2698262dbe4c0b26, 0x32c8328dfa64bf32, 0xb0fab0e94a7d59b0,
	0xe983e91b6acff2e9, 0x0f3c0f78331e770f, 0xd573d5e6a6b733d5, 0x803a8074ba1df480,
	0xbec2be997c6127be, 0xcd13cd26de87ebcd, 0x34d034bde4688934, 0x483d487a75903248,
	0xffdbffab24e354ff, 0x7af57af78ff48d7a, 0x907a90f4ea3d6490, 0x5f615fc23ebe9d5f,
	0x2080201da0403d20, 0x68bd6867d5d00f68, 0x1a681ad07234ca1a, 0xae82ae192c41b7ae,
	0xb4eab4c95e757db4, 0x544d549a19a8ce54, 0x937693ece53b7f93, 0x2288220daa442f22,
	0x648d6407e9c86364, 0xf1e3f1db12ff2af1, 0x73d173bfa2e6cc73, 0x124812905a248212,
	0x401d403a5d807a40, 0x0820084028104808, 0xc32bc356e89b95c3, 0xec97ec337bc5dfec,
	0xdb4bdb9690ab4ddb, 0xa1bea1611f5fc0a1, 0x8d0e8d1c8307918d, 0x3df43df5c97ac83d,
	0x976697ccf1335b97, 0x0000000000000000, 0xcf1bcf36d483f9cf, 0x2bac2b4587566e2b,
	0x76c57697b3ece176, 0x82328264b019e682, 0xd67fd6fea9b128d6, 0x1b6c1bd87736c31b,
	0xb5eeb5c15b7774b5, 0xaf86af112943beaf, 0x6ab56a77dfd41d6a, 0x505d50ba0da0ea50,
	0x450945124c8a5745, 0xf3ebf3cb18fb38f3, 0x30c0309df060ad30, 0xef9bef2b74c3c4ef,
	0x3ffc3fe5c37eda3f, 0x554955921caac755, 0xa2b2a2791059dba2, 0xea8fea0365c9e9ea,
	0x6589650fecca6a65, 0xbad2bab9686903ba, 0x2fbc2f65935e4a2f, 0xc027c04ee79d8ec0,
	0xde5fdebe81a160de, 0x1c701ce06c38fc1c, 0xfdd3fdbb2ee746fd, 0x4d294d52649a1f4d,
	0x927292e4e0397692, 0x75c9758fbceafa75, 0x061806301e0c3606, 0x8a128a249809ae8a,
	0xb2f2b2f940794bb2, 0xe6bfe66359d185e6, 0x0e380e70361c7e0e, 0x1f7c1ff8633ee71f,
	0x62956237f7c45562, 0xd477d4eea3b53ad4, 0xa89aa829324d81a8, 0x966296c4f4315296,
	0xf9c3f99b3aef62f9, 0xc533c566f697a3c5, 0x25942535b14a1025, 0x597959f220b2ab59,
	0x842a8454ae15d084, 0x72d572b7a7e4c572, 0x39e439d5dd72ec39, 0x4c2d4c5a6198164c,
	0x5e655eca3bbc945e, 0x78fd78e785f09f78, 0x38e038ddd870e538, 0x8c0a8c148605988c,
	0xd163d1c6b2bf17d1, 0xa5aea5410b57e4a5, 0xe2afe2434dd9a1e2, 0x6199612ff8c24e61,
	0xb3f6b3f1457b42b3, 0x21842115a5423421, 0x9c4a9c94d625089c, 0x1e781ef0663cee1e,
	0x4311432252866143, 0xc73bc776fc93b1c7, 0xfcd7fcb32be54ffc, 0x0410042014082404,
	0x515951b208a2e351, 0x995e99bcc72f2599, 0x6da96d4fc4da226d, 0x0d340d68391a650d,
	0xfacffa8335e979fa, 0xdf5bdfb684a369df, 0x7ee57ed79bfca97e, 0x2490243db4481924,
	0x3bec3bc5d776fe3b, 0xab96ab313d4b9aab, 0xce1fce3ed181f0ce, 0x1144118855229911,
	0x8f068f0c8903838f, 0x4e254e4a6b9c044e, 0xb7e6b7d1517366b7, 0xeb8beb0b60cbe0eb,
	0x3cf03cfdcc78c13c, 0x813e817cbf1ffd81, 0x946a94d4fe354094, 0xf7fbf7eb0cf31cf7,
	0xb9deb9a1676f18b9, 0x134c13985f268b13, 0x2cb02c7d9c58512c, 0xd36bd3d6b8bb05d3,
	0xe7bbe76b5cd38ce7, 0x6ea56e57cbdc396e, 0xc437c46ef395aac4, 0x030c03180f061b03,
	0x5645568a13acdc56, 0x440d441a49885e44, 0x7fe17fdf9efea07f, 0xa99ea921374f88a9,
	0x2aa82a4d8254672a, 0xbbd6bbb16d6b0abb, 0xc123c146e29f87c1, 0x535153a202a6f153,
	0xdc57dcae8ba572dc, 0x0b2c0b582716530b, 0x9d4e9d9cd327019d, 0x6cad6c47c1d82b6c,
	0x31c43195f562a431, 0x74cd7487b9e8f374, 0xf6fff6e309f115f6, 0x4605460a438c4c46,
	0xac8aac092645a5ac, 0x891e893c970fb589, 0x145014a04428b414, 0xe1a3e15b42dfbae1,
	0x165816b04e2ca616, 0x3ae83acdd274f73a, 0x69b9696fd0d20669, 0x092409482d124109,
	0x70dd70a7ade0d770, 0xb6e2b6d954716fb6, 0xd067d0ceb7bd1ed0, 0xed93ed3b7ec7d6ed,
	0xcc17cc2edb85e2cc, 0x4215422a57846842, 0x985a98b4c22d2c98, 0xa4aaa4490e55eda4,
	0x28a0285d88507528, 0x5c6d5cda31b8865c, 0xf8c7f8933fed6bf8, 0x86228644a411c286,
}

RC := [ROUNDS + 1]u64 {
	0x0000000000000000,
	0x1823c6e887b8014f,
	0x36a6d2f5796f9152,
	0x60bc9b8ea30c7b35,
	0x1de0d7c22e4bfe57,
	0x157737e59ff04ada,
	0x58c9290ab1a06b85,
	0xbd5d10f4cb3e0567,
	0xe427418ba77d95d8,
	0xfbee7c66dd17479e,
	0xca2dbf07ad5a8333,
}

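A hedged aside, not part of the original source: each round constant packs eight consecutive S-box entries, which appear as the top byte of successive `C0` entries, so `RC` could equally be derived from `C0` at startup. A minimal sketch of that derivation (the name `derive_rc` is hypothetical):

derive_rc :: proc() -> (rc: [ROUNDS + 1]u64) {
	for r := 1; r <= ROUNDS; r += 1 {
		for j: uint = 0; j < 8; j += 1 {
			// S[8*(r-1) + j] is the top byte of C0[8*(r-1) + j].
			s := C0[8 * (r - 1) + int(j)] >> 56
			rc[r] |= s << (56 - 8 * j)
		}
	}
	return
}
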
transform :: proc (ctx: ^Whirlpool_Context) {
	K, block, state, L: [8]u64

	// Load the 64-byte buffer as eight big-endian u64s.
	for i := 0; i < 8; i += 1 {block[i] = util.U64_BE(ctx.buffer[8 * i:])}

	for i := 0; i < 8; i += 1 {
		K[i] = ctx.hash[i]
		state[i] = block[i] ~ K[i]
	}

	for r := 1; r <= ROUNDS; r += 1 {
		// Compute the round key K^r from K^(r-1).
		for i := 0; i < 8; i += 1 {
			L[i] = C0[byte(K[i % 8] >> 56)] ~
				C1[byte(K[(i + 7) % 8] >> 48)] ~
				C2[byte(K[(i + 6) % 8] >> 40)] ~
				C3[byte(K[(i + 5) % 8] >> 32)] ~
				C4[byte(K[(i + 4) % 8] >> 24)] ~
				C5[byte(K[(i + 3) % 8] >> 16)] ~
				C6[byte(K[(i + 2) % 8] >> 8)] ~
				C7[byte(K[(i + 1) % 8])]
		}
		L[0] ~= RC[r]

		for i := 0; i < 8; i += 1 {K[i] = L[i]}

		// Apply the round function to the state, keyed with K^r.
		for i := 0; i < 8; i += 1 {
			L[i] = C0[byte(state[i % 8] >> 56)] ~
				C1[byte(state[(i + 7) % 8] >> 48)] ~
				C2[byte(state[(i + 6) % 8] >> 40)] ~
				C3[byte(state[(i + 5) % 8] >> 32)] ~
				C4[byte(state[(i + 4) % 8] >> 24)] ~
				C5[byte(state[(i + 3) % 8] >> 16)] ~
				C6[byte(state[(i + 2) % 8] >> 8)] ~
				C7[byte(state[(i + 1) % 8])] ~
				K[i % 8]
		}
		for i := 0; i < 8; i += 1 {state[i] = L[i]}
	}
	// Miyaguchi-Preneel compression: feed both state and block back into the hash.
	for i := 0; i < 8; i += 1 {ctx.hash[i] ~= state[i] ~ block[i]}
}

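A hedged aside, not part of the original source: the eight tables are redundant, since each of `C1`..`C7` is `C0` rotated right by one more byte. A minimal sketch that checks the first two tables (the helper `rotr64` is hypothetical):

rotr64 :: #force_inline proc "contextless" (x: u64, k: uint) -> u64 {
	return (x >> k) | (x << (64 - k)) // valid for 0 < k < 64
}

check_whirlpool_tables :: proc() {
	for x in 0..<256 {
		assert(C1[x] == rotr64(C0[x], 8))
		assert(C2[x] == rotr64(C0[x], 16))
	}
}
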
@@ -1,7 +1,11 @@
//+build ignore
/*
Package core:dynlib implements loading of shared libraries/DLLs and their symbols.

The behaviour of dynamically loaded libraries is specific to the target platform of the program.
For in-depth detail on the underlying behaviour please refer to your target platform's documentation.

See `example` directory for an example library exporting 3 symbols and a host program loading them automatically
by defining a symbol table struct.
*/
package dynlib
package dynlib
45
core/dynlib/example/example.odin
Normal file
@@ -0,0 +1,45 @@
package example

import "core:dynlib"
import "core:fmt"

Symbols :: struct {
	// `foo_` is prefixed, so we look for the symbol `foo_add`.
	add: proc "c" (int, int) -> int,
	// We use the tag here to override the symbol to look for, namely `bar_sub`.
	sub: proc "c" (int, int) -> int `dynlib:"bar_sub"`,

	// Exported global (if exporting an i32, the type must be ^i32 because the symbol is a pointer to the export.)
	// If it's not a pointer or procedure type, we'll skip the struct field.
	hellope: ^i32,

	// Handle to free library.
	// We can have more than one of these so we can match symbols for more than one DLL with one struct.
	_my_lib_handle: dynlib.Library,
}

main :: proc() {
	sym: Symbols

	// Load symbols from `lib.dll` into Symbols struct.
	// Each struct field is prefixed with `foo_` before lookup in the DLL's symbol table.
	// The library's Handle (to unload) will be stored in `sym._my_lib_handle`. This way you can load multiple DLLs in one struct.
	count, ok := dynlib.initialize_symbols(&sym, "lib.dll", "foo_", "_my_lib_handle")
	defer dynlib.unload_library(sym._my_lib_handle)
	fmt.printf("(Initial DLL Load) ok: %v. %v symbols loaded from lib.dll (%p).\n", ok, count, sym._my_lib_handle)

	if count > 0 {
		fmt.println("42 + 42 =", sym.add(42, 42))
		fmt.println("84 - 13 =", sym.sub(84, 13))
		fmt.println("hellope =", sym.hellope^)
	}

	count, ok = dynlib.initialize_symbols(&sym, "lib.dll", "foo_", "_my_lib_handle")
	fmt.printf("(DLL Reload) ok: %v. %v symbols loaded from lib.dll (%p).\n", ok, count, sym._my_lib_handle)

	if count > 0 {
		fmt.println("42 + 42 =", sym.add(42, 42))
		fmt.println("84 - 13 =", sym.sub(84, 13))
		fmt.println("hellope =", sym.hellope^)
	}
}
14
core/dynlib/example/lib.odin
Normal file
@@ -0,0 +1,14 @@
package library

@(export)
foo_add :: proc "c" (a, b: int) -> (res: int) {
	return a + b
}

@(export)
bar_sub :: proc "c" (a, b: int) -> (res: int) {
	return a - b
}

@(export)
foo_hellope: i32 = 42
@@ -1,5 +1,12 @@
package dynlib

import "core:intrinsics"
import "core:reflect"
import "core:runtime"
_ :: intrinsics
_ :: reflect
_ :: runtime

/*
A handle to a dynamically loaded library.
*/
@@ -12,11 +19,11 @@ library available to resolve references in subsequently loaded libraries.
The parameter `global_symbols` is only used for the platforms `linux`, `darwin`, `freebsd` and `openbsd`.
On `windows` this parameter is ignored.

The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlopen`.
The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlopen`.
On `windows` refer to `LoadLibraryW`.

**Implicit Allocators**
**Implicit Allocators**
`context.temp_allocator`

Example:
@@ -27,6 +34,7 @@ Example:
	LIBRARY_PATH :: "my_library.dll"
	library, ok := dynlib.load_library(LIBRARY_PATH)
	if ! ok {
		fmt.eprintln(dynlib.last_error())
		return
	}
	fmt.printf("The library %q was successfully loaded\n", LIBRARY_PATH)
@@ -39,8 +47,8 @@ load_library :: proc(path: string, global_symbols := false) -> (library: Library
/*
Unloads a dynamic library.

The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlclose`.
The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlclose`.
On `windows` refer to `FreeLibrary`.

Example:
@@ -51,10 +59,12 @@ Example:
	LIBRARY_PATH :: "my_library.dll"
	library, ok := dynlib.load_library(LIBRARY_PATH)
	if ! ok {
		fmt.eprintln(dynlib.last_error())
		return
	}
	did_unload := dynlib.unload_library(library)
	if ! did_unload {
		fmt.eprintln(dynlib.last_error())
		return
	}
	fmt.printf("The library %q was successfully unloaded\n", LIBRARY_PATH)
@@ -67,11 +77,11 @@ unload_library :: proc(library: Library) -> (did_unload: bool) {
/*
Loads the address of a procedure/variable from a dynamic library.

The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlsym`.
The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlsym`.
On `windows` refer to `GetProcAddress`.

**Implicit Allocators**
**Implicit Allocators**
`context.temp_allocator`

Example:
@@ -82,13 +92,101 @@ Example:
	LIBRARY_PATH :: "my_library.dll"
	library, ok := dynlib.load_library(LIBRARY_PATH)
	if ! ok {
		fmt.eprintln(dynlib.last_error())
		return
	}

	a, found_a := dynlib.symbol_address(library, "a")
	if found_a do fmt.printf("The symbol %q was found at the address %v", "a", a)
	if found_a {
		fmt.printf("The symbol %q was found at the address %v", "a", a)
	} else {
		fmt.eprintln(dynlib.last_error())
	}
}
*/
symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found: bool) #optional_ok {
	return _symbol_address(library, symbol)
}

/*
Scans a dynamic library for symbols matching a struct's members, assigning found procedure pointers to the corresponding entry.
Optionally takes a symbol prefix added to the struct's member name to construct the symbol looked up in the library.
Optionally also takes the struct member to assign the library handle to, `__handle` by default.

This allows using one struct to hold library handles and symbol pointers for more than 1 dynamic library.

Loading the same library twice unloads the previous incarnation, allowing for straightforward hot reload support.

Returns:
* `-1, false` if the library could not be loaded.
* The number of symbols assigned on success. `ok` = true if `count` > 0

See doc.odin for an example.
*/
initialize_symbols :: proc(symbol_table: ^$T, library_path: string, symbol_prefix := "", handle_field_name := "__handle") -> (count: int, ok: bool) where intrinsics.type_is_struct(T) {
	assert(symbol_table != nil)
	handle: Library

	if handle, ok = load_library(library_path); !ok {
		return -1, false
	}

	// `symbol_table` must be a struct because of the where clause, so this can't fail.
	ti := runtime.type_info_base(type_info_of(T))
	s, _ := ti.variant.(runtime.Type_Info_Struct)

	// Buffer to concatenate the prefix + symbol name.
	prefixed_symbol_buf: [2048]u8 = ---

	sym_ptr: rawptr
	for field_name, i in s.names {
		// Calculate address of struct member
		field_ptr := rawptr(uintptr(rawptr(symbol_table)) + uintptr(s.offsets[i]))

		// If we've come across the struct member for the handle, store it and continue scanning for other symbols.
		if field_name == handle_field_name {
			// We appear to be hot reloading. Unload previous incarnation of the library.
			if old_handle := (^Library)(field_ptr)^; old_handle != nil {
				if ok = unload_library(old_handle); !ok {
					return count, ok
				}
			}
			(^Library)(field_ptr)^ = handle
			continue
		}

		// We're not the library handle, so the field needs to be a pointer type, be it a procedure pointer or an exported global.
		if !(reflect.is_procedure(s.types[i]) || reflect.is_pointer(s.types[i])) {
			continue
		}

		// Let's look up or construct the symbol name to find in the library
		prefixed_name: string

		// Do we have a symbol override tag?
		if override, tag_ok := reflect.struct_tag_lookup(reflect.Struct_Tag(s.tags[i]), "dynlib"); tag_ok {
			prefixed_name = string(override)
		}

		// No valid symbol override tag found, fall back to `<symbol_prefix>name`.
		if len(prefixed_name) == 0 {
			offset := copy(prefixed_symbol_buf[:], symbol_prefix)
			copy(prefixed_symbol_buf[offset:], field_name)
			prefixed_name = string(prefixed_symbol_buf[:len(symbol_prefix) + len(field_name)])
		}

		// Assign procedure (or global) pointer if found.
		if sym_ptr, ok = symbol_address(handle, prefixed_name); ok {
			(^rawptr)(field_ptr)^ = sym_ptr
			count += 1
		}
	}
	return count, count > 0
}

/*
Returns an error message for the last failed procedure call.
*/
last_error :: proc() -> string {
	return _last_error()
}

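A hedged usage sketch of the return contract above, not part of this commit; it declares a reduced version of the `Symbols` struct from the example earlier:

package example_check

import "core:dynlib"
import "core:fmt"

// Reduced form of the `Symbols` struct from the example above.
Symbols :: struct {
	add:            proc "c" (int, int) -> int,
	_my_lib_handle: dynlib.Library,
}

main :: proc() {
	sym: Symbols
	count, ok := dynlib.initialize_symbols(&sym, "lib.dll", "foo_", "_my_lib_handle")
	if !ok {
		// `-1, false` means the library itself could not be loaded.
		fmt.eprintln("load failed:", dynlib.last_error())
		return
	}
	defer dynlib.unload_library(sym._my_lib_handle)
	fmt.printf("%v symbols assigned.\n", count)
}
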
@@ -13,3 +13,7 @@ _unload_library :: proc(library: Library) -> bool {
_symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found: bool) {
	return nil, false
}

_last_error :: proc() -> string {
	return ""
}

@@ -22,3 +22,8 @@ _symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found
	found = ptr != nil
	return
}

_last_error :: proc() -> string {
	err := os.dlerror()
	return "unknown" if err == "" else err
}

@@ -5,6 +5,7 @@ package dynlib
import win32 "core:sys/windows"
import "core:strings"
import "core:runtime"
import "core:reflect"

_load_library :: proc(path: string, global_symbols := false) -> (Library, bool) {
	// NOTE(bill): 'global_symbols' is here only for consistency with POSIX which has RTLD_GLOBAL
@@ -27,3 +28,9 @@ _symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found
	found = ptr != nil
	return
}

_last_error :: proc() -> string {
	err := win32.System_Error(win32.GetLastError())
	err_msg := reflect.enum_string(err)
	return "unknown" if err_msg == "" else err_msg
}

@@ -1,5 +1,5 @@
/*
Package endian implements sa simple translation between bytes and numbers with
Package endian implements a simple translation between bytes and numbers with
specific endian encodings.

	buf: [100]u8

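A hedged usage sketch for the getters below, not part of this commit; it assumes the package is importable as `core:encoding/endian`:

package endian_demo

import "core:encoding/endian"
import "core:fmt"

main :: proc() {
	// The value 0x1234 stored little-endian.
	buf := [2]u8{0x34, 0x12}
	le, le_ok := endian.get_u16(buf[:], .Little) // 4660 (0x1234), true
	be, be_ok := endian.get_u16(buf[:], .Big)    // 13330 (0x3412), true
	fmt.println(le, le_ok, be, be_ok)
}
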
@@ -1,5 +1,8 @@
package encoding_endian

import "core:intrinsics"
import "core:math/bits"

Byte_Order :: enum u8 {
	Little,
	Big,
@@ -7,147 +10,154 @@ Byte_Order :: enum u8 {

PLATFORM_BYTE_ORDER :: Byte_Order.Little when ODIN_ENDIAN == .Little else Byte_Order.Big

get_u16 :: proc(b: []byte, order: Byte_Order) -> (v: u16, ok: bool) {
unchecked_get_u16le :: #force_inline proc "contextless" (b: []byte) -> u16 {
	return bits.from_le_u16(intrinsics.unaligned_load((^u16)(raw_data(b))))
}
unchecked_get_u32le :: #force_inline proc "contextless" (b: []byte) -> u32 {
	return bits.from_le_u32(intrinsics.unaligned_load((^u32)(raw_data(b))))
}
unchecked_get_u64le :: #force_inline proc "contextless" (b: []byte) -> u64 {
	return bits.from_le_u64(intrinsics.unaligned_load((^u64)(raw_data(b))))
}
unchecked_get_u16be :: #force_inline proc "contextless" (b: []byte) -> u16 {
	return bits.from_be_u16(intrinsics.unaligned_load((^u16)(raw_data(b))))
}
unchecked_get_u32be :: #force_inline proc "contextless" (b: []byte) -> u32 {
	return bits.from_be_u32(intrinsics.unaligned_load((^u32)(raw_data(b))))
}
unchecked_get_u64be :: #force_inline proc "contextless" (b: []byte) -> u64 {
	return bits.from_be_u64(intrinsics.unaligned_load((^u64)(raw_data(b))))
}

get_u16 :: proc "contextless" (b: []byte, order: Byte_Order) -> (v: u16, ok: bool) {
	if len(b) < 2 {
		return 0, false
	}
	#no_bounds_check if order == .Little {
		v = u16(b[0]) | u16(b[1])<<8
	if order == .Little {
		v = unchecked_get_u16le(b)
	} else {
		v = u16(b[1]) | u16(b[0])<<8
		v = unchecked_get_u16be(b)
	}
	return v, true
}
get_u32 :: proc(b: []byte, order: Byte_Order) -> (v: u32, ok: bool) {
get_u32 :: proc "contextless" (b: []byte, order: Byte_Order) -> (v: u32, ok: bool) {
	if len(b) < 4 {
		return 0, false
	}
	#no_bounds_check if order == .Little {
		v = u32(b[0]) | u32(b[1])<<8 | u32(b[2])<<16 | u32(b[3])<<24
	if order == .Little {
		v = unchecked_get_u32le(b)
	} else {
		v = u32(b[3]) | u32(b[2])<<8 | u32(b[1])<<16 | u32(b[0])<<24
		v = unchecked_get_u32be(b)
	}
	return v, true
}

get_u64 :: proc(b: []byte, order: Byte_Order) -> (v: u64, ok: bool) {
get_u64 :: proc "contextless" (b: []byte, order: Byte_Order) -> (v: u64, ok: bool) {
	if len(b) < 8 {
		return 0, false
	}
	#no_bounds_check if order == .Little {
		v = u64(b[0]) | u64(b[1])<<8 | u64(b[2])<<16 | u64(b[3])<<24 |
			u64(b[4])<<32 | u64(b[5])<<40 | u64(b[6])<<48 | u64(b[7])<<56
	if order == .Little {
		v = unchecked_get_u64le(b)
	} else {
		v = u64(b[7]) | u64(b[6])<<8 | u64(b[5])<<16 | u64(b[4])<<24 |
			u64(b[3])<<32 | u64(b[2])<<40 | u64(b[1])<<48 | u64(b[0])<<56
		v = unchecked_get_u64be(b)
	}
	return v, true
}

get_i16 :: proc(b: []byte, order: Byte_Order) -> (i16, bool) {
get_i16 :: proc "contextless" (b: []byte, order: Byte_Order) -> (i16, bool) {
	v, ok := get_u16(b, order)
	return i16(v), ok
}
get_i32 :: proc(b: []byte, order: Byte_Order) -> (i32, bool) {
get_i32 :: proc "contextless" (b: []byte, order: Byte_Order) -> (i32, bool) {
	v, ok := get_u32(b, order)
	return i32(v), ok
}
get_i64 :: proc(b: []byte, order: Byte_Order) -> (i64, bool) {
get_i64 :: proc "contextless" (b: []byte, order: Byte_Order) -> (i64, bool) {
	v, ok := get_u64(b, order)
	return i64(v), ok
}

get_f16 :: proc(b: []byte, order: Byte_Order) -> (f16, bool) {
get_f16 :: proc "contextless" (b: []byte, order: Byte_Order) -> (f16, bool) {
	v, ok := get_u16(b, order)
	return transmute(f16)v, ok
}
get_f32 :: proc(b: []byte, order: Byte_Order) -> (f32, bool) {
get_f32 :: proc "contextless" (b: []byte, order: Byte_Order) -> (f32, bool) {
	v, ok := get_u32(b, order)
	return transmute(f32)v, ok
}
get_f64 :: proc(b: []byte, order: Byte_Order) -> (f64, bool) {
get_f64 :: proc "contextless" (b: []byte, order: Byte_Order) -> (f64, bool) {
	v, ok := get_u64(b, order)
	return transmute(f64)v, ok
}

unchecked_put_u16le :: #force_inline proc "contextless" (b: []byte, v: u16) {
|
||||
intrinsics.unaligned_store((^u16)(raw_data(b)), bits.to_le_u16(v))
|
||||
}
|
||||
unchecked_put_u32le :: #force_inline proc "contextless" (b: []byte, v: u32) {
|
||||
intrinsics.unaligned_store((^u32)(raw_data(b)), bits.to_le_u32(v))
|
||||
}
|
||||
unchecked_put_u64le :: #force_inline proc "contextless" (b: []byte, v: u64) {
|
||||
intrinsics.unaligned_store((^u64)(raw_data(b)), bits.to_le_u64(v))
|
||||
}
|
||||
unchecked_put_u16be :: #force_inline proc "contextless" (b: []byte, v: u16) {
|
||||
intrinsics.unaligned_store((^u16)(raw_data(b)), bits.to_be_u16(v))
|
||||
}
|
||||
unchecked_put_u32be :: #force_inline proc "contextless" (b: []byte, v: u32) {
|
||||
intrinsics.unaligned_store((^u32)(raw_data(b)), bits.to_be_u32(v))
|
||||
}
|
||||
unchecked_put_u64be :: #force_inline proc "contextless" (b: []byte, v: u64) {
|
||||
intrinsics.unaligned_store((^u64)(raw_data(b)), bits.to_be_u64(v))
|
||||
}
|
||||
|
||||
put_u16 :: proc(b: []byte, order: Byte_Order, v: u16) -> bool {
|
||||
put_u16 :: proc "contextless" (b: []byte, order: Byte_Order, v: u16) -> bool {
|
||||
if len(b) < 2 {
|
||||
return false
|
||||
}
|
||||
#no_bounds_check if order == .Little {
|
||||
b[0] = byte(v)
|
||||
b[1] = byte(v >> 8)
|
||||
if order == .Little {
|
||||
unchecked_put_u16le(b, v)
|
||||
} else {
|
||||
b[0] = byte(v >> 8)
|
||||
b[1] = byte(v)
|
||||
unchecked_put_u16be(b, v)
|
||||
}
|
||||
return true
|
||||
}
|
||||
put_u32 :: proc(b: []byte, order: Byte_Order, v: u32) -> bool {
|
||||
put_u32 :: proc "contextless" (b: []byte, order: Byte_Order, v: u32) -> bool {
|
||||
if len(b) < 4 {
|
||||
return false
|
||||
}
|
||||
#no_bounds_check if order == .Little {
|
||||
b[0] = byte(v)
|
||||
b[1] = byte(v >> 8)
|
||||
b[2] = byte(v >> 16)
|
||||
b[3] = byte(v >> 24)
|
||||
if order == .Little {
|
||||
unchecked_put_u32le(b, v)
|
||||
} else {
|
||||
b[0] = byte(v >> 24)
|
||||
b[1] = byte(v >> 16)
|
||||
b[2] = byte(v >> 8)
|
||||
b[3] = byte(v)
|
||||
unchecked_put_u32be(b, v)
|
||||
}
|
||||
return true
|
||||
}
|
||||
put_u64 :: proc(b: []byte, order: Byte_Order, v: u64) -> bool {
|
||||
put_u64 :: proc "contextless" (b: []byte, order: Byte_Order, v: u64) -> bool {
|
||||
if len(b) < 8 {
|
||||
return false
|
||||
}
|
||||
#no_bounds_check if order == .Little {
|
||||
b[0] = byte(v >> 0)
|
||||
b[1] = byte(v >> 8)
|
||||
b[2] = byte(v >> 16)
|
||||
b[3] = byte(v >> 24)
|
||||
b[4] = byte(v >> 32)
|
||||
b[5] = byte(v >> 40)
|
||||
b[6] = byte(v >> 48)
|
||||
b[7] = byte(v >> 56)
|
||||
if order == .Little {
|
||||
unchecked_put_u64le(b, v)
|
||||
} else {
|
||||
b[0] = byte(v >> 56)
|
||||
b[1] = byte(v >> 48)
|
||||
b[2] = byte(v >> 40)
|
||||
b[3] = byte(v >> 32)
|
||||
b[4] = byte(v >> 24)
|
||||
b[5] = byte(v >> 16)
|
||||
b[6] = byte(v >> 8)
|
||||
b[7] = byte(v)
|
||||
unchecked_put_u64be(b, v)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
put_i16 :: proc(b: []byte, order: Byte_Order, v: i16) -> bool {
|
||||
put_i16 :: proc "contextless" (b: []byte, order: Byte_Order, v: i16) -> bool {
|
||||
return put_u16(b, order, u16(v))
|
||||
}
|
||||
|
||||
put_i32 :: proc(b: []byte, order: Byte_Order, v: i32) -> bool {
|
||||
put_i32 :: proc "contextless" (b: []byte, order: Byte_Order, v: i32) -> bool {
|
||||
return put_u32(b, order, u32(v))
|
||||
}
|
||||
|
||||
put_i64 :: proc(b: []byte, order: Byte_Order, v: i64) -> bool {
|
||||
put_i64 :: proc "contextless" (b: []byte, order: Byte_Order, v: i64) -> bool {
|
||||
return put_u64(b, order, u64(v))
|
||||
}
|
||||
|
||||
|
||||
put_f16 :: proc(b: []byte, order: Byte_Order, v: f16) -> bool {
|
||||
put_f16 :: proc "contextless" (b: []byte, order: Byte_Order, v: f16) -> bool {
|
||||
return put_u16(b, order, transmute(u16)v)
|
||||
}
|
||||
|
||||
put_f32 :: proc(b: []byte, order: Byte_Order, v: f32) -> bool {
|
||||
put_f32 :: proc "contextless" (b: []byte, order: Byte_Order, v: f32) -> bool {
|
||||
return put_u32(b, order, transmute(u32)v)
|
||||
}
|
||||
|
||||
put_f64 :: proc(b: []byte, order: Byte_Order, v: f64) -> bool {
|
||||
put_f64 :: proc "contextless" (b: []byte, order: Byte_Order, v: f64) -> bool {
|
||||
return put_u64(b, order, transmute(u64)v)
|
||||
}
|
||||
|
||||
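[Editor's note: a brief usage sketch of the checked get/put procedures above; buffer contents are illustrative.]

import "core:encoding/endian"

endian_example :: proc() {
	buf: [8]u8
	_ = endian.put_u32(buf[:], .Big, 0xDEADBEEF)   // writes the 4 big-endian bytes
	if v, ok := endian.get_u32(buf[:], .Big); ok {
		assert(v == 0xDEADBEEF)
	}
	if _, ok := endian.get_u64(buf[:4], .Little); !ok {
		// slice too short: the checked procs return ok = false instead of reading out of bounds
	}
}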
@@ -47,8 +47,6 @@ _entities :: proc() {
}

_main :: proc() {
using fmt

options := xml.Options{ flags = { .Ignore_Unsupported, .Intern_Comments, .Unbox_CDATA, .Decode_SGML_Entities }}

doc, _ := xml.parse(#load("test.html"), options)
@@ -58,8 +56,6 @@ _main :: proc() {
}

main :: proc() {
using fmt

track: mem.Tracking_Allocator
mem.tracking_allocator_init(&track, context.allocator)
context.allocator = mem.tracking_allocator(&track)
@@ -68,9 +64,9 @@ main :: proc() {
_entities()

if len(track.allocation_map) > 0 {
println()
fmt.println()
for _, v in track.allocation_map {
printf("%v Leaked %v bytes.\n", v.location, v.size)
fmt.printf("%v Leaked %v bytes.\n", v.location, v.size)
}
}
}
File diff suppressed because it is too large
@@ -7,6 +7,7 @@ import "core:strconv"
import "core:strings"
import "core:reflect"
import "core:io"
import "core:slice"

Marshal_Data_Error :: enum {
None,
@@ -18,29 +19,40 @@ Marshal_Error :: union #shared_nil {
io.Error,
}

// careful with MJSON maps & non quotes usage as keys without whitespace will lead to bad results
// careful with MJSON maps & non quotes usage as keys with whitespace will lead to bad results
Marshal_Options :: struct {
// output based on spec
spec: Specification,

// use line breaks & tab|spaces
// Use line breaks & tabs/spaces
pretty: bool,

// spacing
// Use spaces for indentation instead of tabs
use_spaces: bool,

// Given use_spaces true, use this many spaces per indent level. 0 means 4 spaces.
spaces: int,

// state
indentation: int,

// option to output uint in JSON5 & MJSON
// Output uint as hex in JSON5 & MJSON
write_uint_as_hex: bool,

// mjson output options
// If spec is MJSON and this is true, then keys will be quoted.
//
// WARNING: If your keys contain whitespace and this is false, then the
// output will be bad.
mjson_keys_use_quotes: bool,

// If spec is MJSON and this is true, then use '=' as delimiter between
// keys and values, otherwise ':' is used.
mjson_keys_use_equal_sign: bool,

// mjson state
// When outputting a map, sort the output by key.
//
// NOTE: This will temp allocate and sort a list for each map.
sort_maps_by_key: bool,

// Internal state
indentation: int,
mjson_skipped_first_braces_start: bool,
mjson_skipped_first_braces_end: bool,
}
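[Editor's note: a sketch of how the options above combine; the values are illustrative, not defaults.]

opts := json.Marshal_Options{
	spec                      = .MJSON,
	pretty                    = true,
	use_spaces                = true,
	spaces                    = 2,
	mjson_keys_use_quotes     = true,  // quote keys so whitespace in keys stays safe
	mjson_keys_use_equal_sign = false, // ':' between keys and values
	sort_maps_by_key          = true,  // temp-allocates and sorts per map, see NOTE above
}
data, merr := json.marshal(my_value, opts)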
@@ -50,6 +62,9 @@ marshal :: proc(v: any, opt: Marshal_Options = {}, allocator := context.allocato
defer if err != nil {
strings.builder_destroy(&b)
}

// temp guard in case we are sorting map keys, which will use temp allocations
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD(ignore = allocator == context.temp_allocator)

opt := opt
marshal_to_builder(&b, v, &opt) or_return
@@ -263,36 +278,81 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
map_cap := uintptr(runtime.map_cap(m^))
ks, vs, hs, _, _ := runtime.map_kvh_data_dynamic(m^, info.map_info)

i := 0
for bucket_index in 0..<map_cap {
runtime.map_hash_is_valid(hs[bucket_index]) or_continue
if !opt.sort_maps_by_key {
i := 0
for bucket_index in 0..<map_cap {
runtime.map_hash_is_valid(hs[bucket_index]) or_continue

opt_write_iteration(w, opt, i) or_return
i += 1
opt_write_iteration(w, opt, i) or_return
i += 1

key := rawptr(runtime.map_cell_index_dynamic(ks, info.map_info.ks, bucket_index))
value := rawptr(runtime.map_cell_index_dynamic(vs, info.map_info.vs, bucket_index))
key := rawptr(runtime.map_cell_index_dynamic(ks, info.map_info.ks, bucket_index))
value := rawptr(runtime.map_cell_index_dynamic(vs, info.map_info.vs, bucket_index))

// check for string type
{
v := any{key, info.key.id}
ti := runtime.type_info_base(type_info_of(v.id))
a := any{v.data, ti.id}
name: string
// check for string type
{
v := any{key, info.key.id}
ti := runtime.type_info_base(type_info_of(v.id))
a := any{v.data, ti.id}
name: string

#partial switch info in ti.variant {
case runtime.Type_Info_String:
switch s in a {
case string: name = s
case cstring: name = string(s)
#partial switch info in ti.variant {
case runtime.Type_Info_String:
switch s in a {
case string: name = s
case cstring: name = string(s)
}
opt_write_key(w, opt, name) or_return

case: return .Unsupported_Type
}
opt_write_key(w, opt, name) or_return

case: return .Unsupported_Type
}

marshal_to_writer(w, any{value, info.value.id}, opt) or_return
}
} else {
Entry :: struct {
key: string,
value: any,
}

marshal_to_writer(w, any{value, info.value.id}, opt) or_return
// If we are sorting the map by key, then we temp alloc an array
// and sort it, then output the result.
sorted := make([dynamic]Entry, 0, map_cap, context.temp_allocator)
for bucket_index in 0..<map_cap {
runtime.map_hash_is_valid(hs[bucket_index]) or_continue

key := rawptr(runtime.map_cell_index_dynamic(ks, info.map_info.ks, bucket_index))
value := rawptr(runtime.map_cell_index_dynamic(vs, info.map_info.vs, bucket_index))
name: string

// check for string type
{
v := any{key, info.key.id}
ti := runtime.type_info_base(type_info_of(v.id))
a := any{v.data, ti.id}

#partial switch info in ti.variant {
case runtime.Type_Info_String:
switch s in a {
case string: name = s
case cstring: name = string(s)
}

case: return .Unsupported_Type
}
}

append(&sorted, Entry { key = name, value = any{value, info.value.id}})
}

slice.sort_by(sorted[:], proc(i, j: Entry) -> bool { return i.key < j.key })

for s, i in sorted {
opt_write_iteration(w, opt, i) or_return
opt_write_key(w, opt, s.key) or_return
marshal_to_writer(w, s.value, opt) or_return
}
}
}

@@ -424,8 +484,9 @@ opt_write_key :: proc(w: io.Writer, opt: ^Marshal_Options, name: string) -> (err

// insert start byte and increase indentation on pretty
opt_write_start :: proc(w: io.Writer, opt: ^Marshal_Options, c: byte) -> (err: io.Error) {
// skip mjson starting braces
if opt.spec == .MJSON && !opt.mjson_skipped_first_braces_start {
// Skip MJSON starting braces. We make sure to only do this for c == '{',
// skipping a starting '[' is not allowed.
if opt.spec == .MJSON && !opt.mjson_skipped_first_braces_start && opt.indentation == 0 && c == '{' {
opt.mjson_skipped_first_braces_start = true
return
}
@@ -473,11 +534,9 @@ opt_write_iteration :: proc(w: io.Writer, opt: ^Marshal_Options, iteration: int)

// decrease indent, write spacing and insert end byte
opt_write_end :: proc(w: io.Writer, opt: ^Marshal_Options, c: byte) -> (err: io.Error) {
if opt.spec == .MJSON && opt.mjson_skipped_first_braces_start && !opt.mjson_skipped_first_braces_end {
if opt.indentation == 0 {
opt.mjson_skipped_first_braces_end = true
return
}
if opt.spec == .MJSON && opt.mjson_skipped_first_braces_start && !opt.mjson_skipped_first_braces_end && opt.indentation == 0 && c == '}' {
opt.mjson_skipped_first_braces_end = true
return
}

opt.indentation -= 1

@@ -1,5 +1,7 @@
package json

import "core:strings"

/*
JSON
strict JSON
@@ -104,4 +106,27 @@ destroy_value :: proc(value: Value, allocator := context.allocator) {
case String:
delete(v)
}
}

clone_value :: proc(value: Value, allocator := context.allocator) -> Value {
context.allocator = allocator

#partial switch &v in value {
case Object:
new_o := make(Object, len(v))
for key, elem in v {
new_o[strings.clone(key)] = clone_value(elem)
}
return new_o
case Array:
new_a := make(Array, len(v))
for elem, idx in v {
new_a[idx] = clone_value(elem)
}
return new_a
case String:
return strings.clone(v)
}

return value
}
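[Editor's note: a usage sketch for the new `clone_value`; the input document is illustrative.]

original, perr := json.parse(transmute([]u8)string(`{"a": [1, 2, 3]}`))
if perr == nil {
	copied := json.clone_value(original) // deep-copies objects, arrays and strings
	defer json.destroy_value(copied)
	json.destroy_value(original)         // the clone stays valid on its own
}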
@@ -137,9 +137,9 @@ assign_float :: proc(val: any, f: $T) -> bool {
case complex64: dst = complex(f32(f), 0)
case complex128: dst = complex(f64(f), 0)

case quaternion64: dst = quaternion(f16(f), 0, 0, 0)
case quaternion128: dst = quaternion(f32(f), 0, 0, 0)
case quaternion256: dst = quaternion(f64(f), 0, 0, 0)
case quaternion64: dst = quaternion(w=f16(f), x=0, y=0, z=0)
case quaternion128: dst = quaternion(w=f32(f), x=0, y=0, z=0)
case quaternion256: dst = quaternion(w=f64(f), x=0, y=0, z=0)

case: return false
}
@@ -201,20 +201,37 @@ unmarshal_string_token :: proc(p: ^Parser, val: any, str: string, ti: ^reflect.T
unmarshal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
UNSUPPORTED_TYPE := Unsupported_Type_Error{v.id, p.curr_token}
token := p.curr_token


v := v
ti := reflect.type_info_base(type_info_of(v.id))
// NOTE: If it's a union with only one variant, then treat it as that variant
if u, ok := ti.variant.(reflect.Type_Info_Union); ok && len(u.variants) == 1 && token.kind != .Null {
variant := u.variants[0]
v.id = variant.id
ti = reflect.type_info_base(variant)
if !reflect.is_pointer_internally(variant) {
tag := any{rawptr(uintptr(v.data) + u.tag_offset), u.tag_type.id}
assign_int(tag, 1)
if u, ok := ti.variant.(reflect.Type_Info_Union); ok && token.kind != .Null {
// NOTE: If it's a union with only one variant, then treat it as that variant
if len(u.variants) == 1 {
variant := u.variants[0]
v.id = variant.id
ti = reflect.type_info_base(variant)
if !reflect.is_pointer_internally(variant) {
tag := any{rawptr(uintptr(v.data) + u.tag_offset), u.tag_type.id}
assign_int(tag, 1)
}
} else if v.id != Value {
for variant, i in u.variants {
variant_any := any{v.data, variant.id}
variant_p := p^
if err = unmarshal_value(&variant_p, variant_any); err == nil {
p^ = variant_p

raw_tag := i
if !u.no_nil { raw_tag += 1 }
tag := any{rawptr(uintptr(v.data) + u.tag_offset), u.tag_type.id}
assign_int(tag, raw_tag)
return
}
}
return UNSUPPORTED_TYPE
}
}


switch &dst in v {
// Handle json.Value as an unknown type
case Value:

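[Editor's note: a sketch of the new multi-variant behaviour above; with more than one variant, each is tried in order against a copy of the parser state and the union tag is set for the first variant that unmarshals cleanly. The union type is illustrative.]

Number_Or_Text :: union {f64, string}

v: Number_Or_Text
_ = json.unmarshal(transmute([]u8)string(`"hello"`), &v) // selects the string variant
_ = json.unmarshal(transmute([]u8)string(`42`), &v)      // selects the f64 variant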
@@ -1,3 +1,5 @@
package xml

/*
An XML 1.0 / 1.1 parser

@@ -9,7 +11,7 @@
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
package xml


import "core:io"
import "core:fmt"
@@ -81,4 +83,4 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID,
}

return written, .None
}
}

@@ -20,7 +20,7 @@ example :: proc() {
xml.destroy(docs[round])
}

DOC :: #load("../../../../tests/core/assets/XML/unicode.xml")
DOC :: #load("../../../../tests/core/assets/XML/utf8.xml")
input := DOC

for round in 0..<N {
@@ -109,4 +109,4 @@ main :: proc() {
}
}
println("Done and cleaned up!")
}
}

@@ -1,3 +1,5 @@
package xml

/*
An XML 1.0 / 1.1 parser

@@ -6,7 +8,7 @@

This file contains helper functions.
*/
package xml


// Find parent's nth child with a given ident.
find_child_by_ident :: proc(doc: ^Document, parent_id: Element_ID, ident: string, nth := 0) -> (res: Element_ID, found: bool) {
@@ -47,4 +49,4 @@ find_attribute_val_by_key :: proc(doc: ^Document, parent_id: Element_ID, key: st
if attr.key == key { return attr.val, true }
}
return "", false
}
}

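[Editor's note: a short sketch of the helper procedures above; element and attribute names are illustrative. Element 0 is the root element.]

if child, found := xml.find_child_by_ident(doc, 0, "greeting"); found {
	if val, has := xml.find_attribute_val_by_key(doc, child, "lang"); has {
		fmt.println(val)
	}
}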
@@ -1,3 +1,5 @@
package xml

/*
An XML 1.0 / 1.1 parser

@@ -9,7 +11,7 @@
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
package xml


import "core:fmt"
import "core:unicode"
@@ -433,4 +435,4 @@ scan :: proc(t: ^Tokenizer) -> Token {
lit = string(t.src[offset : t.offset])
}
return Token{kind, lit, pos}
}
}

@@ -1,28 +1,28 @@
/*
An XML 1.0 / 1.1 parser
XML 1.0 / 1.1 parser

Copyright 2021-2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
2021-2022 Jeroen van Rijn <nom@duclavier.com>.
available under Odin's BSD-3 license.

A from-scratch XML implementation, loosely modelled on the [spec](https://www.w3.org/TR/2006/REC-xml11-20060816).
from-scratch XML implementation, loosely modelled on the [spec](https://www.w3.org/TR/2006/REC-xml11-20060816).

Features:
- Supports enough of the XML 1.0/1.1 spec to handle the 99.9% of XML documents in common current usage.
- Simple to understand and use. Small.
Features:
- Supports enough of the XML 1.0/1.1 spec to handle the 99.9% of XML documents in common current usage.
- Simple to understand and use. Small.

Caveats:
- We do NOT support HTML in this package, as that may or may not be valid XML.
If it works, great. If it doesn't, that's not considered a bug.
Caveats:
- We do NOT support HTML in this package, as that may or may not be valid XML.
If it works, great. If it doesn't, that's not considered a bug.

- We do NOT support UTF-16. If you have a UTF-16 XML file, please convert it to UTF-8 first. Also, our condolences.
- <[!ELEMENT and <[!ATTLIST are not supported, and will be either ignored or return an error depending on the parser options.
- We do NOT support UTF-16. If you have a UTF-16 XML file, please convert it to UTF-8 first. Also, our condolences.
- <[!ELEMENT and <[!ATTLIST are not supported, and will be either ignored or return an error depending on the parser options.

MAYBE:
- XML writer?
- Serialize/deserialize Odin types?
MAYBE:
- XML writer?
- Serialize/deserialize Odin types?

List of contributors:
Jeroen van Rijn: Initial implementation.
List of contributors:
- Jeroen van Rijn: Initial implementation.
*/
package xml
// An XML 1.0 / 1.1 parser
@@ -43,48 +43,32 @@ DEFAULT_OPTIONS :: Options{
}

Option_Flag :: enum {
/*
If the caller says that input may be modified, we can perform in-situ parsing.
If this flag isn't provided, the XML parser first duplicates the input so that it can.
*/
// If the caller says that input may be modified, we can perform in-situ parsing.
// If this flag isn't provided, the XML parser first duplicates the input so that it can.
Input_May_Be_Modified,

/*
Document MUST start with `<?xml` prologue.
*/
// Document MUST start with `<?xml` prologue.
Must_Have_Prolog,

/*
Document MUST have a `<!DOCTYPE`.
*/
// Document MUST have a `<!DOCTYPE`.
Must_Have_DocType,

/*
By default we skip comments. Use this option to intern a comment on a parented Element.
*/
// By default we skip comments. Use this option to intern a comment on a parented Element.
Intern_Comments,

/*
How to handle unsupported parts of the specification, like <! other than <!DOCTYPE and <![CDATA[
*/
// How to handle unsupported parts of the specification, like <! other than <!DOCTYPE and <![CDATA[
Error_on_Unsupported,
Ignore_Unsupported,

/*
By default CDATA tags are passed-through as-is.
This option unwraps them when encountered.
*/
// By default CDATA tags are passed-through as-is.
// This option unwraps them when encountered.
Unbox_CDATA,

/*
By default SGML entities like `>`, ` ` and ` ` are passed-through as-is.
This option decodes them when encountered.
*/
// By default SGML entities like `>`, ` ` and ` ` are passed-through as-is.
// This option decodes them when encountered.
Decode_SGML_Entities,

/*
If a tag body has a comment, it will be stripped unless this option is given.
*/
// If a tag body has a comment, it will be stripped unless this option is given.
Keep_Tag_Body_Comments,
}
Option_Flags :: bit_set[Option_Flag; u16]
@@ -97,28 +81,20 @@ Document :: struct {
encoding: Encoding,

doctype: struct {
/*
We only scan the <!DOCTYPE IDENT part and skip the rest.
*/
// We only scan the <!DOCTYPE IDENT part and skip the rest.
ident: string,
rest: string,
},

/*
If we encounter comments before the root node, and the option to intern comments is given, this is where they'll live.
Otherwise they'll be in the element tree.
*/
// If we encounter comments before the root node, and the option to intern comments is given, this is where they'll live.
// Otherwise they'll be in the element tree.
comments: [dynamic]string,

/*
Internal
*/
// Internal
tokenizer: ^Tokenizer,
allocator: mem.Allocator,

/*
Input. Either the original buffer, or a copy if `.Input_May_Be_Modified` isn't specified.
*/
// Input. Either the original buffer, or a copy if `.Input_May_Be_Modified` isn't specified.
input: []u8,
strings_to_free: [dynamic]string,
}
@@ -158,34 +134,24 @@ Encoding :: enum {
UTF_8,
ISO_8859_1,

/*
Aliases
*/
// Aliases
LATIN_1 = ISO_8859_1,
}

Error :: enum {
/*
General return values.
*/
// General return values.
None = 0,
General_Error,
Unexpected_Token,
Invalid_Token,

/*
Couldn't find, open or read file.
*/
// Couldn't find, open or read file.
File_Error,

/*
File too short.
*/
// File too short.
Premature_EOF,

/*
XML-specific errors.
*/
// XML-specific errors.
No_Prolog,
Invalid_Prolog,
Too_Many_Prologs,
@@ -194,11 +160,9 @@ Error :: enum {
Too_Many_DocTypes,
DocType_Must_Preceed_Elements,

/*
If a DOCTYPE is present _or_ the caller
asked for a specific DOCTYPE and the DOCTYPE
and root tag don't match, we return `.Invalid_DocType`.
*/
// If a DOCTYPE is present _or_ the caller
// asked for a specific DOCTYPE and the DOCTYPE
// and root tag don't match, we return `.Invalid_DocType`.
Invalid_DocType,

Invalid_Tag_Value,
@@ -211,27 +175,20 @@ Error :: enum {
Unsupported_Version,
Unsupported_Encoding,

/*
<!FOO are usually skipped.
*/
// <!FOO are usually skipped.
Unhandled_Bang,

Duplicate_Attribute,
Conflicting_Options,
}

/*
Implementation starts here.
*/
parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_handler := default_error_handler, allocator := context.allocator) -> (doc: ^Document, err: Error) {
data := data
context.allocator = allocator

opts := validate_options(options) or_return

/*
If `.Input_May_Be_Modified` is not specified, we duplicate the input so that we can modify it in-place.
*/
// If `.Input_May_Be_Modified` is not specified, we duplicate the input so that we can modify it in-place.
if .Input_May_Be_Modified not_in opts.flags {
data = bytes.clone(data)
}
@@ -252,10 +209,8 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
element, parent: Element_ID
open: Token

/*
If a DOCTYPE is present, the root tag has to match.
If an expected DOCTYPE is given in options (i.e. it's non-empty), the DOCTYPE (if present) and root tag have to match.
*/
// If a DOCTYPE is present, the root tag has to match.
// If an expected DOCTYPE is given in options (i.e. it's non-empty), the DOCTYPE (if present) and root tag have to match.
expected_doctype := options.expected_doctype

loop: for {
@@ -263,17 +218,13 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
// NOTE(Jeroen): This is faster as a switch.
switch t.ch {
case '<':
/*
Consume peeked `<`
*/
// Consume peeked `<`
advance_rune(t)

open = scan(t)
// NOTE(Jeroen): We're not using a switch because this if-else chain ordered by likelihood is 2.5% faster at -o:size and -o:speed.
if likely(open.kind, Token_Kind.Ident) == .Ident {
/*
e.g. <odin - Start of new element.
*/
// e.g. <odin - Start of new element.
element = new_element(doc)
if element == 0 { // First Element
parent = element
@@ -286,11 +237,9 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha

parse_attributes(doc, &doc.elements[element].attribs) or_return

/*
If a DOCTYPE is present _or_ the caller
asked for a specific DOCTYPE and the DOCTYPE
and root tag don't match, we return .Invalid_Root_Tag.
*/
// If a DOCTYPE is present _or_ the caller
// asked for a specific DOCTYPE and the DOCTYPE
// and root tag don't match, we return .Invalid_Root_Tag.
if element == 0 { // Root tag?
if len(expected_doctype) > 0 && expected_doctype != open.text {
error(t, t.offset, "Root Tag doesn't match DOCTYPE. Expected: %v, got: %v\n", expected_doctype, open.text)
@@ -298,23 +247,17 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
}
}

/*
One of these should follow:
- `>`, which means we've just opened this tag and expect a later element to close it.
- `/>`, which means this is an 'empty' or self-closing tag.
*/
// One of these should follow:
// - `>`, which means we've just opened this tag and expect a later element to close it.
// - `/>`, which means this is an 'empty' or self-closing tag.
end_token := scan(t)
#partial switch end_token.kind {
case .Gt:
/*
We're now the new parent.
*/
// We're now the new parent.
parent = element

case .Slash:
/*
Empty tag. Close it.
*/
// Empty tag. Close it.
expect(t, .Gt) or_return
parent = doc.elements[element].parent
element = parent
@@ -325,9 +268,7 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
}

} else if open.kind == .Slash {
/*
Close tag.
*/
// Close tag.
ident := expect(t, .Ident) or_return
_ = expect(t, .Gt) or_return

@@ -339,9 +280,7 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
element = parent

} else if open.kind == .Exclaim {
/*
<!
*/
// <!
next := scan(t)
#partial switch next.kind {
case .Ident:
@@ -370,10 +309,8 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
}

case .Dash:
/*
Comment: <!-- -->.
The grammar does not allow a comment to end in --->
*/
// Comment: <!-- -->.
// The grammar does not allow a comment to end in --->
expect(t, .Dash)
comment := scan_comment(t) or_return

@@ -395,23 +332,17 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
}

} else if open.kind == .Question {
/*
<?xml
*/
// <?xml
next := scan(t)
#partial switch next.kind {
case .Ident:
if len(next.text) == 3 && strings.equal_fold(next.text, "xml") {
parse_prologue(doc) or_return
} else if len(doc.prologue) > 0 {
/*
We've already seen a prologue.
*/
// We've already seen a prologue.
return doc, .Too_Many_Prologs
} else {
/*
Could be `<?xml-stylesheet`, etc. Ignore it.
*/
// Could be `<?xml-stylesheet`, etc. Ignore it.
skip_element(t) or_return
}
case:
@@ -425,15 +356,11 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
}

case -1:
/*
End of file.
*/
// End of file.
break loop

case:
/*
This should be a tag's body text.
*/
// This should be a tag's body text.
body_text := scan_string(t, t.offset) or_return
needs_processing := .Unbox_CDATA in opts.flags
needs_processing |= .Decode_SGML_Entities in opts.flags
@@ -613,9 +540,7 @@ parse_prologue :: proc(doc: ^Document) -> (err: Error) {
doc.encoding = .LATIN_1

case:
/*
Unrecognized encoding, assume UTF-8.
*/
// Unrecognized encoding, assume UTF-8.
error(t, offset, "[parse_prologue] Warning: Unrecognized encoding: %v\n", attr.val)
}

@@ -658,11 +583,11 @@ skip_element :: proc(t: ^Tokenizer) -> (err: Error) {

parse_doctype :: proc(doc: ^Document) -> (err: Error) {
/*
<!DOCTYPE greeting SYSTEM "hello.dtd">
<!DOCTYPE greeting SYSTEM "hello.dtd">

<!DOCTYPE greeting [
<!ELEMENT greeting (#PCDATA)>
]>
<!DOCTYPE greeting [
<!ELEMENT greeting (#PCDATA)>
]>
*/
assert(doc != nil)
context.allocator = doc.allocator
@@ -675,9 +600,7 @@ parse_doctype :: proc(doc: ^Document) -> (err: Error) {
offset := t.offset
skip_element(t) or_return

/*
-1 because the current offset is that of the closing tag, so the rest of the DOCTYPE tag ends just before it.
*/
// -1 because the current offset is that of the closing tag, so the rest of the DOCTYPE tag ends just before it.
doc.doctype.rest = string(t.src[offset : t.offset - 1])
return .None
}
@@ -700,4 +623,4 @@ new_element :: proc(doc: ^Document) -> (id: Element_ID) {
cur := doc.element_count
doc.element_count += 1
return cur
}
}

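[Editor's note: a sketch tying the parser options above together; the file name and expected DOCTYPE are illustrative.]

opts := xml.Options{
	flags            = {.Intern_Comments, .Unbox_CDATA, .Decode_SGML_Entities},
	expected_doctype = "svg", // the root tag (and DOCTYPE, if present) must match
}
doc, err := xml.parse_bytes(#load("image.svg"), opts)
defer xml.destroy(doc)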
@@ -216,7 +216,7 @@ tprintf :: proc(fmt: string, args: ..any) -> string {
// Returns: A formatted string
//
bprint :: proc(buf: []byte, args: ..any, sep := " ") -> string {
sb := strings.builder_from_bytes(buf[0:len(buf)])
sb := strings.builder_from_bytes(buf)
return sbprint(&sb, ..args, sep=sep)
}
// Creates a formatted string using a supplied buffer as the backing array, appends newline. Writes into the buffer.
@@ -229,7 +229,7 @@ bprint :: proc(buf: []byte, args: ..any, sep := " ") -> string {
// Returns: A formatted string with a newline character at the end
//
bprintln :: proc(buf: []byte, args: ..any, sep := " ") -> string {
sb := strings.builder_from_bytes(buf[0:len(buf)])
sb := strings.builder_from_bytes(buf)
return sbprintln(&sb, ..args, sep=sep)
}
// Creates a formatted string using a supplied buffer as the backing array. Writes into the buffer.
@@ -242,7 +242,7 @@ bprintln :: proc(buf: []byte, args: ..any, sep := " ") -> string {
// Returns: A formatted string
//
bprintf :: proc(buf: []byte, fmt: string, args: ..any) -> string {
sb := strings.builder_from_bytes(buf[0:len(buf)])
sb := strings.builder_from_bytes(buf)
return sbprintf(&sb, fmt, ..args)
}
// Runtime assertion with a formatted message
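[Editor's note: a sketch of the buffer-backed printing shown above; the builder now takes the whole slice directly instead of the redundant `buf[0:len(buf)]`.]

buf: [64]byte
s := fmt.bprintf(buf[:], "%d + %d = %d", 1, 2, 1 + 2) // s is backed by buf; no heap allocation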
@@ -253,18 +253,24 @@ bprintf :: proc(buf: []byte, fmt: string, args: ..any) -> string {
// - args: A variadic list of arguments to be formatted
// - loc: The location of the caller
//
// Returns: True if the condition is met, otherwise triggers a runtime assertion with a formatted message
//
assertf :: proc(condition: bool, fmt: string, args: ..any, loc := #caller_location) -> bool {
@(disabled=ODIN_DISABLE_ASSERT)
assertf :: proc(condition: bool, fmt: string, args: ..any, loc := #caller_location) {
if !condition {
p := context.assertion_failure_proc
if p == nil {
p = runtime.default_assertion_failure_proc
// NOTE(dragos): We are using the same trick as in builtin.assert
// to improve performance to make the CPU not
// execute speculatively, making it about an order of
// magnitude faster
@(cold)
internal :: proc(loc: runtime.Source_Code_Location, fmt: string, args: ..any) {
p := context.assertion_failure_proc
if p == nil {
p = runtime.default_assertion_failure_proc
}
message := tprintf(fmt, ..args)
p("Runtime assertion", message, loc)
}
message := tprintf(fmt, ..args)
p("Runtime assertion", message, loc)
internal(loc, fmt, ..args)
}
return condition
}
// Runtime panic with a formatted message
//
@@ -1232,8 +1238,12 @@ _pad :: proc(fi: ^Info, s: string) {
//
// NOTE: Can return "NaN", "+Inf", "-Inf", "+<value>", or "-<value>".
//
_fmt_float_as :: proc(fi: ^Info, v: f64, bit_size: int, verb: rune, float_fmt: byte) {
prec := fi.prec if fi.prec_set else 3
_fmt_float_as :: proc(fi: ^Info, v: f64, bit_size: int, verb: rune, float_fmt: byte, prec: int) {
prec := prec
if fi.prec_set {
prec = fi.prec
}

buf: [386]byte

// Can return "NaN", "+Inf", "-Inf", "+<value>", "-<value>".
@@ -1242,7 +1252,7 @@ _fmt_float_as :: proc(fi: ^Info, v: f64, bit_size: int, verb: rune, float_fmt: b
if !fi.plus {
// Strip sign from "+<value>" but not "+Inf".
if str[0] == '+' && str[1] != 'I' {
str = str[1:]
str = str[1:]
}
}

@@ -1258,11 +1268,13 @@ _fmt_float_as :: proc(fi: ^Info, v: f64, bit_size: int, verb: rune, float_fmt: b
//
fmt_float :: proc(fi: ^Info, v: f64, bit_size: int, verb: rune) {
switch verb {
case 'f', 'F', 'g', 'G', 'v':
_fmt_float_as(fi, v, bit_size, verb, 'f')
case 'g', 'G', 'v':
_fmt_float_as(fi, v, bit_size, verb, 'g', -1)
case 'f', 'F':
_fmt_float_as(fi, v, bit_size, verb, 'f', 3)
case 'e', 'E':
// BUG(): "%.3e" returns "3.000e+00"
_fmt_float_as(fi, v, bit_size, verb, 'e')
_fmt_float_as(fi, v, bit_size, verb, 'e', 6)

case 'h', 'H':
prev_fi := fi^

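[Editor's note: a sketch of the per-verb default precisions introduced above: 'g'/'G'/'v' use -1 (shortest round-trip form), 'f'/'F' use 3, and 'e'/'E' use 6. The outputs shown are the intent of the change.]

fmt.printf("%v\n", 2.5) // 2.5          ('v' -> 'g': shortest representation)
fmt.printf("%f\n", 2.5) // 2.500        (default precision 3)
fmt.printf("%e\n", 2.5) // 2.500000e+00 (default precision 6)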
@@ -1,4 +1,5 @@
//+build !freestanding !js
//+build !freestanding
//+build !js
package fmt

import "core:runtime"

@@ -162,7 +162,14 @@ type_is_matrix :: proc($T: typeid) -> bool ---
type_has_nil :: proc($T: typeid) -> bool ---

type_is_specialization_of :: proc($T, $S: typeid) -> bool ---

type_is_variant_of :: proc($U, $V: typeid) -> bool where type_is_union(U) ---
type_union_tag_type :: proc($T: typeid) -> typeid where type_is_union(T) ---
type_union_tag_offset :: proc($T: typeid) -> uintptr where type_is_union(T) ---
type_union_base_tag_value :: proc($T: typeid) -> int where type_is_union(U) ---
type_union_variant_count :: proc($T: typeid) -> int where type_is_union(T) ---
type_variant_type_of :: proc($T: typeid, $index: int) -> typeid where type_is_union(T) ---
type_variant_index_of :: proc($U, $V: typeid) -> int where type_is_union(U) ---

type_has_field :: proc($T: typeid, $name: string) -> bool ---
type_field_type :: proc($T: typeid, $name: string) -> typeid ---

@@ -34,7 +34,7 @@ Error :: enum i32 {

// No_Progress is returned by some implementations of `io.Reader` when many calls
// to `read` have failed to return any data or error.
// This is usually a signed of a broken `io.Reader` implementation
// This is usually a sign of a broken `io.Reader` implementation
No_Progress,

Invalid_Whence,

@@ -60,9 +60,9 @@ Logger_Proc :: runtime.Logger_Proc
/*
Logger :: struct {
procedure: Logger_Proc,
data: rawptr,
data: rawptr,
lowest_level: Level,
options: Logger_Options,
options: Logger_Options,
}
*/
Logger :: runtime.Logger
@@ -116,6 +116,42 @@ panicf :: proc(fmt_str: string, args: ..any, location := #caller_location) -> !
runtime.panic("log.panicf", location)
}

@(disabled=ODIN_DISABLE_ASSERT)
assert :: proc(condition: bool, message := "", loc := #caller_location) {
if !condition {
@(cold)
internal :: proc(message: string, loc: runtime.Source_Code_Location) {
p := context.assertion_failure_proc
if p == nil {
p = runtime.default_assertion_failure_proc
}
log(.Fatal, message, location=loc)
p("runtime assertion", message, loc)
}
internal(message, loc)
}
}

@(disabled=ODIN_DISABLE_ASSERT)
assertf :: proc(condition: bool, fmt_str: string, args: ..any, loc := #caller_location) {
if !condition {
// NOTE(dragos): We are using the same trick as in builtin.assert
// to improve performance to make the CPU not
// execute speculatively, making it about an order of
// magnitude faster
@(cold)
internal :: proc(loc: runtime.Source_Code_Location, fmt_str: string, args: ..any) {
p := context.assertion_failure_proc
if p == nil {
p = runtime.default_assertion_failure_proc
}
message := fmt.tprintf(fmt_str, ..args)
log(.Fatal, message, location=loc)
p("Runtime assertion", message, loc)
}
internal(loc, fmt_str, ..args)
}
}


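[Editor's note: a usage sketch for the new log-aware assertions above; they log at .Fatal before invoking the assertion failure procedure, and compile away under ODIN_DISABLE_ASSERT. The variable names are illustrative.]

context.logger = log.create_console_logger()
log.assert(len(path) > 0, "empty path")
log.assertf(n % 2 == 0, "expected an even count, got %d", n)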
@@ -1,19 +1,28 @@
package log

import "core:runtime"
import "core:fmt"

Log_Allocator_Format :: enum {
Bytes, // Actual number of bytes.
Human, // Bytes in human units like bytes, kibibytes, etc. as appropriate.
}

Log_Allocator :: struct {
allocator: runtime.Allocator,
level: Level,
prefix: string,
locked: bool,
size_fmt: Log_Allocator_Format,
}

log_allocator_init :: proc(la: ^Log_Allocator, level: Level, allocator := context.allocator, prefix := "") {
log_allocator_init :: proc(la: ^Log_Allocator, level: Level, size_fmt := Log_Allocator_Format.Bytes,
allocator := context.allocator, prefix := "") {
la.allocator = allocator
la.level = level
la.prefix = prefix
la.locked = false
la.size_fmt = size_fmt
}


@@ -29,71 +38,80 @@ log_allocator_proc :: proc(allocator_data: rawptr, mode: runtime.Allocator_Mode,
old_memory: rawptr, old_size: int, location := #caller_location) -> ([]byte, runtime.Allocator_Error) {
la := (^Log_Allocator)(allocator_data)

if context.logger.procedure == nil || la.level < context.logger.lowest_level {
return la.allocator.procedure(la.allocator.data, mode, size, alignment, old_memory, old_size, location)
}

padding := " " if la.prefix != "" else ""

buf: [256]byte = ---

if !la.locked {
la.locked = true
defer la.locked = false

switch mode {
case .Alloc:
logf(
la.level,
"%s%s>>> ALLOCATOR(mode=.Alloc, size=%d, alignment=%d)",
la.prefix, padding, size, alignment,
location = location,
)
format: string
switch la.size_fmt {
case .Bytes: format = "%s%s>>> ALLOCATOR(mode=.Alloc, size=%d, alignment=%d)"
case .Human: format = "%s%s>>> ALLOCATOR(mode=.Alloc, size=%m, alignment=%d)"
}
str := fmt.bprintf(buf[:], format, la.prefix, padding, size, alignment)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)

case .Alloc_Non_Zeroed:
logf(
la.level,
"%s%s>>> ALLOCATOR(mode=.Alloc_Non_Zeroed, size=%d, alignment=%d)",
la.prefix, padding, size, alignment,
location = location,
)
format: string
switch la.size_fmt {
case .Bytes: format = "%s%s>>> ALLOCATOR(mode=.Alloc_Non_Zeroed, size=%d, alignment=%d)"
case .Human: format = "%s%s>>> ALLOCATOR(mode=.Alloc_Non_Zeroed, size=%m, alignment=%d)"
}
str := fmt.bprintf(buf[:], format, la.prefix, padding, size, alignment)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)

case .Free:
if old_size != 0 {
logf(
la.level,
"%s%s<<< ALLOCATOR(mode=.Free, ptr=%p, size=%d)",
la.prefix, padding, old_memory, old_size,
location = location,
)
format: string
switch la.size_fmt {
case .Bytes: format = "%s%s<<< ALLOCATOR(mode=.Free, ptr=%p, size=%d)"
case .Human: format = "%s%s<<< ALLOCATOR(mode=.Free, ptr=%p, size=%m)"
}
str := fmt.bprintf(buf[:], format, la.prefix, padding, old_memory, old_size)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)
} else {
logf(
la.level,
"%s%s<<< ALLOCATOR(mode=.Free, ptr=%p)",
la.prefix, padding, old_memory,
location = location,
)
str := fmt.bprintf(buf[:], "%s%s<<< ALLOCATOR(mode=.Free, ptr=%p)", la.prefix, padding, old_memory)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)
}

case .Free_All:
logf(
la.level,
"%s%s<<< ALLOCATOR(mode=.Free_All)",
la.prefix, padding,
location = location,
)
str := fmt.bprintf(buf[:], "%s%s<<< ALLOCATOR(mode=.Free_All)", la.prefix, padding)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)

case .Resize:
logf(
la.level,
"%s%s>>> ALLOCATOR(mode=.Resize, ptr=%p, old_size=%d, size=%d, alignment=%d)",
la.prefix, padding, old_memory, old_size, size, alignment,
location = location,
)
format: string
switch la.size_fmt {
case .Bytes: format = "%s%s>>> ALLOCATOR(mode=.Resize, ptr=%p, old_size=%d, size=%d, alignment=%d)"
case .Human: format = "%s%s>>> ALLOCATOR(mode=.Resize, ptr=%p, old_size=%m, size=%m, alignment=%d)"
}
str := fmt.bprintf(buf[:], format, la.prefix, padding, old_memory, old_size, size, alignment)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)

case .Resize_Non_Zeroed:
format: string
switch la.size_fmt {
case .Bytes: format = "%s%s>>> ALLOCATOR(mode=.Resize_Non_Zeroed, ptr=%p, old_size=%d, size=%d, alignment=%d)"
case .Human: format = "%s%s>>> ALLOCATOR(mode=.Resize_Non_Zeroed, ptr=%p, old_size=%m, size=%m, alignment=%d)"
}
str := fmt.bprintf(buf[:], format, la.prefix, padding, old_memory, old_size, size, alignment)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)

case .Query_Features:
logf(
la.level,
"%s%ALLOCATOR(mode=.Query_Features)",
la.prefix, padding,
location = location,
)
str := fmt.bprintf(buf[:], "%s%sALLOCATOR(mode=.Query_Features)", la.prefix, padding)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)

case .Query_Info:
logf(
la.level,
"%s%ALLOCATOR(mode=.Query_Info)",
la.prefix, padding,
location = location,
)
str := fmt.bprintf(buf[:], "%s%sALLOCATOR(mode=.Query_Info)", la.prefix, padding)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)
}
}

@@ -102,13 +120,9 @@ log_allocator_proc :: proc(allocator_data: rawptr, mode: runtime.Allocator_Mode,
la.locked = true
defer la.locked = false
if err != nil {
logf(
la.level,
"%s%ALLOCATOR ERROR=%v",
la.prefix, padding, error,
location = location,
)
str := fmt.bprintf(buf[:], "%s%sALLOCATOR ERROR=%v", la.prefix, padding, err)
context.logger.procedure(context.logger.data, la.level, str, context.logger.options, location)
}
}
return data, err
}
}

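[Editor's note: a sketch of the new `size_fmt` parameter, assuming the package's `log_allocator` helper that wraps the state in a `runtime.Allocator`; `.Human` logs sizes via %m in human-readable units instead of raw byte counts.]

la: log.Log_Allocator
log.log_allocator_init(&la, .Debug, .Human)
context.allocator = log.log_allocator(&la)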
@@ -2856,7 +2856,7 @@ internal_int_random :: proc(dest: ^Int, bits: int, r: ^rnd.Rand = nil, allocator
dest.digit[digits - 1] &= ((1 << uint(bits)) - 1)
}
dest.used = digits
return nil
return internal_clamp(dest)
}
internal_random :: proc { internal_int_random, }


@@ -88,17 +88,19 @@ div_sat :: proc(x, y: $T/Fixed($Backing, $Fraction_Width)) -> (z: T) {

@(require_results)
floor :: proc(x: $T/Fixed($Backing, $Fraction_Width)) -> Backing {
return x.i >> Fraction_Width
if x.i >= 0 {
return x.i >> Fraction_Width
} else {
return (x.i - (1 << (Fraction_Width - 1)) + (1 << (Fraction_Width - 2))) >> Fraction_Width
}
}
@(require_results)
ceil :: proc(x: $T/Fixed($Backing, $Fraction_Width)) -> Backing {
Integer :: 8*size_of(Backing) - Fraction_Width
return (x.i + (1 << Integer-1)) >> Fraction_Width
return (x.i + (1 << Fraction_Width - 1)) >> Fraction_Width
}
@(require_results)
round :: proc(x: $T/Fixed($Backing, $Fraction_Width)) -> Backing {
Integer :: 8*size_of(Backing) - Fraction_Width
return (x.i + (1 << (Integer - 1))) >> Fraction_Width
return (x.i + (1 << (Fraction_Width - 1))) >> Fraction_Width
}


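[Editor's note: a worked example of the corrected biases above for a Q16.16 fixed-point value (Fraction_Width = 16), where x = -1.25 is stored as x.i = -81920; `>>` is an arithmetic shift, i.e. it rounds toward negative infinity:]

floor: (-81920 - 32768 + 16384) >> 16 = -98304 >> 16 = -2
ceil:  (-81920 + 65535)         >> 16 = -16385 >> 16 = -1
round: (-81920 + 32768)         >> 16 = -49152 >> 16 = -1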
@@ -70,7 +70,7 @@ outer_product :: builtin.outer_product
|
||||
|
||||
@(require_results)
|
||||
quaternion_inverse :: proc "contextless" (q: $Q) -> Q where IS_QUATERNION(Q) {
|
||||
return conj(q) * quaternion(1.0/dot(q, q), 0, 0, 0)
|
||||
return conj(q) * quaternion(w=1.0/dot(q, q), x=0, y=0, z=0)
|
||||
}
|
||||
|
||||
|
||||
@@ -217,7 +217,7 @@ quaternion64_mul_vector3 :: proc "contextless" (q: $Q/quaternion64, v: $V/[3]$F/
|
||||
Raw_Quaternion :: struct {xyz: [3]f16, r: f16}
|
||||
|
||||
q := transmute(Raw_Quaternion)q
|
||||
v := transmute([3]f16)v
|
||||
v := v
|
||||
|
||||
t := cross(2*q.xyz, v)
|
||||
return V(v + q.r*t + cross(q.xyz, t))
|
||||
@@ -227,7 +227,7 @@ quaternion128_mul_vector3 :: proc "contextless" (q: $Q/quaternion128, v: $V/[3]$
|
||||
Raw_Quaternion :: struct {xyz: [3]f32, r: f32}
|
||||
|
||||
q := transmute(Raw_Quaternion)q
|
||||
v := transmute([3]f32)v
|
||||
v := v
|
||||
|
||||
t := cross(2*q.xyz, v)
|
||||
return V(v + q.r*t + cross(q.xyz, t))
|
||||
@@ -237,7 +237,7 @@ quaternion256_mul_vector3 :: proc "contextless" (q: $Q/quaternion256, v: $V/[3]$
|
||||
Raw_Quaternion :: struct {xyz: [3]f64, r: f64}
|
||||
|
||||
q := transmute(Raw_Quaternion)q
v := transmute([3]f64)v
v := v

t := cross(2*q.xyz, v)
return V(v + q.r*t + cross(q.xyz, t))

@@ -7,96 +7,96 @@ F16_EPSILON :: 1e-3
F32_EPSILON :: 1e-7
F64_EPSILON :: 1e-15

Vector2f16 :: distinct [2]f16
Vector3f16 :: distinct [3]f16
Vector4f16 :: distinct [4]f16
Vector2f16 :: [2]f16
Vector3f16 :: [3]f16
Vector4f16 :: [4]f16

Matrix1x1f16 :: distinct matrix[1, 1]f16
Matrix1x2f16 :: distinct matrix[1, 2]f16
Matrix1x3f16 :: distinct matrix[1, 3]f16
Matrix1x4f16 :: distinct matrix[1, 4]f16
Matrix1x1f16 :: matrix[1, 1]f16
Matrix1x2f16 :: matrix[1, 2]f16
Matrix1x3f16 :: matrix[1, 3]f16
Matrix1x4f16 :: matrix[1, 4]f16

Matrix2x1f16 :: distinct matrix[2, 1]f16
Matrix2x2f16 :: distinct matrix[2, 2]f16
Matrix2x3f16 :: distinct matrix[2, 3]f16
Matrix2x4f16 :: distinct matrix[2, 4]f16
Matrix2x1f16 :: matrix[2, 1]f16
Matrix2x2f16 :: matrix[2, 2]f16
Matrix2x3f16 :: matrix[2, 3]f16
Matrix2x4f16 :: matrix[2, 4]f16

Matrix3x1f16 :: distinct matrix[3, 1]f16
Matrix3x2f16 :: distinct matrix[3, 2]f16
Matrix3x3f16 :: distinct matrix[3, 3]f16
Matrix3x4f16 :: distinct matrix[3, 4]f16
Matrix3x1f16 :: matrix[3, 1]f16
Matrix3x2f16 :: matrix[3, 2]f16
Matrix3x3f16 :: matrix[3, 3]f16
Matrix3x4f16 :: matrix[3, 4]f16

Matrix4x1f16 :: distinct matrix[4, 1]f16
Matrix4x2f16 :: distinct matrix[4, 2]f16
Matrix4x3f16 :: distinct matrix[4, 3]f16
Matrix4x4f16 :: distinct matrix[4, 4]f16
Matrix4x1f16 :: matrix[4, 1]f16
Matrix4x2f16 :: matrix[4, 2]f16
Matrix4x3f16 :: matrix[4, 3]f16
Matrix4x4f16 :: matrix[4, 4]f16

Matrix1f16 :: Matrix1x1f16
Matrix2f16 :: Matrix2x2f16
Matrix3f16 :: Matrix3x3f16
Matrix4f16 :: Matrix4x4f16

Vector2f32 :: distinct [2]f32
Vector3f32 :: distinct [3]f32
Vector4f32 :: distinct [4]f32
Vector2f32 :: [2]f32
Vector3f32 :: [3]f32
Vector4f32 :: [4]f32

Matrix1x1f32 :: distinct matrix[1, 1]f32
Matrix1x2f32 :: distinct matrix[1, 2]f32
Matrix1x3f32 :: distinct matrix[1, 3]f32
Matrix1x4f32 :: distinct matrix[1, 4]f32
Matrix1x1f32 :: matrix[1, 1]f32
Matrix1x2f32 :: matrix[1, 2]f32
Matrix1x3f32 :: matrix[1, 3]f32
Matrix1x4f32 :: matrix[1, 4]f32

Matrix2x1f32 :: distinct matrix[2, 1]f32
Matrix2x2f32 :: distinct matrix[2, 2]f32
Matrix2x3f32 :: distinct matrix[2, 3]f32
Matrix2x4f32 :: distinct matrix[2, 4]f32
Matrix2x1f32 :: matrix[2, 1]f32
Matrix2x2f32 :: matrix[2, 2]f32
Matrix2x3f32 :: matrix[2, 3]f32
Matrix2x4f32 :: matrix[2, 4]f32

Matrix3x1f32 :: distinct matrix[3, 1]f32
Matrix3x2f32 :: distinct matrix[3, 2]f32
Matrix3x3f32 :: distinct matrix[3, 3]f32
Matrix3x4f32 :: distinct matrix[3, 4]f32
Matrix3x1f32 :: matrix[3, 1]f32
Matrix3x2f32 :: matrix[3, 2]f32
Matrix3x3f32 :: matrix[3, 3]f32
Matrix3x4f32 :: matrix[3, 4]f32

Matrix4x1f32 :: distinct matrix[4, 1]f32
Matrix4x2f32 :: distinct matrix[4, 2]f32
Matrix4x3f32 :: distinct matrix[4, 3]f32
Matrix4x4f32 :: distinct matrix[4, 4]f32
Matrix4x1f32 :: matrix[4, 1]f32
Matrix4x2f32 :: matrix[4, 2]f32
Matrix4x3f32 :: matrix[4, 3]f32
Matrix4x4f32 :: matrix[4, 4]f32

Matrix1f32 :: Matrix1x1f32
Matrix2f32 :: Matrix2x2f32
Matrix3f32 :: Matrix3x3f32
Matrix4f32 :: Matrix4x4f32

Vector2f64 :: distinct [2]f64
Vector3f64 :: distinct [3]f64
Vector4f64 :: distinct [4]f64
Vector2f64 :: [2]f64
Vector3f64 :: [3]f64
Vector4f64 :: [4]f64

Matrix1x1f64 :: distinct matrix[1, 1]f64
Matrix1x2f64 :: distinct matrix[1, 2]f64
Matrix1x3f64 :: distinct matrix[1, 3]f64
Matrix1x4f64 :: distinct matrix[1, 4]f64
Matrix1x1f64 :: matrix[1, 1]f64
Matrix1x2f64 :: matrix[1, 2]f64
Matrix1x3f64 :: matrix[1, 3]f64
Matrix1x4f64 :: matrix[1, 4]f64

Matrix2x1f64 :: distinct matrix[2, 1]f64
Matrix2x2f64 :: distinct matrix[2, 2]f64
Matrix2x3f64 :: distinct matrix[2, 3]f64
Matrix2x4f64 :: distinct matrix[2, 4]f64
Matrix2x1f64 :: matrix[2, 1]f64
Matrix2x2f64 :: matrix[2, 2]f64
Matrix2x3f64 :: matrix[2, 3]f64
Matrix2x4f64 :: matrix[2, 4]f64

Matrix3x1f64 :: distinct matrix[3, 1]f64
Matrix3x2f64 :: distinct matrix[3, 2]f64
Matrix3x3f64 :: distinct matrix[3, 3]f64
Matrix3x4f64 :: distinct matrix[3, 4]f64
Matrix3x1f64 :: matrix[3, 1]f64
Matrix3x2f64 :: matrix[3, 2]f64
Matrix3x3f64 :: matrix[3, 3]f64
Matrix3x4f64 :: matrix[3, 4]f64

Matrix4x1f64 :: distinct matrix[4, 1]f64
Matrix4x2f64 :: distinct matrix[4, 2]f64
Matrix4x3f64 :: distinct matrix[4, 3]f64
Matrix4x4f64 :: distinct matrix[4, 4]f64
Matrix4x1f64 :: matrix[4, 1]f64
Matrix4x2f64 :: matrix[4, 2]f64
Matrix4x3f64 :: matrix[4, 3]f64
Matrix4x4f64 :: matrix[4, 4]f64

Matrix1f64 :: Matrix1x1f64
Matrix2f64 :: Matrix2x2f64
Matrix3f64 :: Matrix3x3f64
Matrix4f64 :: Matrix4x4f64

Quaternionf16 :: distinct quaternion64
Quaternionf32 :: distinct quaternion128
Quaternionf64 :: distinct quaternion256
Quaternionf16 :: quaternion64
Quaternionf32 :: quaternion128
Quaternionf64 :: quaternion256

MATRIX1F16_IDENTITY :: Matrix1f16(1)
MATRIX2F16_IDENTITY :: Matrix2f16(1)

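With `distinct` removed, these names are plain aliases for Odin's fixed-array and matrix types, so values flow between them without explicit conversions and array programming applies directly. A minimal sketch of what this enables (hypothetical usage, not part of the change):

```odin
package example

import "core:math/linalg"

main :: proc() {
	// Vector3f32 is now just [3]f32, so fixed-array literals, implicit
	// assignment, swizzling, and component-wise arithmetic all apply.
	v: linalg.Vector3f32 = {1, 2, 3}
	w: [3]f32 = v // no cast needed once the types are no longer distinct
	_ = v.zyx + w * 2
}
```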
@@ -2312,17 +2312,17 @@ F32_NORMALIZE :: 0
F32_RADIX :: 2
F32_ROUNDS :: 1

F64_DIG :: 15 // # of decimal digits of precision
F64_EPSILON :: 2.2204460492503131e-016 // smallest such that 1.0+F64_EPSILON != 1.0
F64_MANT_DIG :: 53 // # of bits in mantissa
F64_MAX :: 1.7976931348623158e+308 // max value
F64_MAX_10_EXP :: 308 // max decimal exponent
F64_MAX_EXP :: 1024 // max binary exponent
F64_MIN :: 2.2250738585072014e-308 // min positive value
F64_MIN_10_EXP :: -307 // min decimal exponent
F64_MIN_EXP :: -1021 // min binary exponent
F64_RADIX :: 2 // exponent radix
F64_ROUNDS :: 1 // addition rounding: near
F64_DIG :: 15 // Number of representable decimal digits.
F64_EPSILON :: 2.2204460492503131e-016 // Smallest number such that `1.0 + F64_EPSILON != 1.0`.
F64_MANT_DIG :: 53 // Number of bits in the mantissa.
F64_MAX :: 1.7976931348623158e+308 // Maximum representable value.
F64_MAX_10_EXP :: 308 // Maximum base-10 exponent yielding normalized value.
F64_MAX_EXP :: 1024 // One greater than the maximum possible base-2 exponent yielding normalized value.
F64_MIN :: 2.2250738585072014e-308 // Minimum positive normalized value.
F64_MIN_10_EXP :: -307 // Minimum base-10 exponent yielding normalized value.
F64_MIN_EXP :: -1021 // One greater than the minimum possible base-2 exponent yielding normalized value.
F64_RADIX :: 2 // Exponent radix.
F64_ROUNDS :: 1 // Addition rounding: near.


F16_MASK :: 0x1f

@@ -11,6 +11,8 @@ Allocator_Mode :: enum byte {
Free_All,
Resize,
Query_Features,
Alloc_Non_Zeroed,
Resize_Non_Zeroed,
}
*/

@@ -243,12 +245,26 @@ default_resize_align :: proc(old_memory: rawptr, old_size, new_size, alignment:
res = raw_data(data)
return
}

@(require_results)
default_resize_bytes_align_non_zeroed :: proc(old_data: []byte, new_size, alignment: int, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
return _default_resize_bytes_align(old_data, new_size, alignment, false, allocator, loc)
}
@(require_results)
default_resize_bytes_align :: proc(old_data: []byte, new_size, alignment: int, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
return _default_resize_bytes_align(old_data, new_size, alignment, true, allocator, loc)
}

@(require_results)
_default_resize_bytes_align :: #force_inline proc(old_data: []byte, new_size, alignment: int, should_zero: bool, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
old_memory := raw_data(old_data)
old_size := len(old_data)
if old_memory == nil {
return alloc_bytes(new_size, alignment, allocator, loc)
if should_zero {
return alloc_bytes(new_size, alignment, allocator, loc)
} else {
return alloc_bytes_non_zeroed(new_size, alignment, allocator, loc)
}
}

if new_size == 0 {
@@ -260,7 +276,13 @@ default_resize_bytes_align :: proc(old_data: []byte, new_size, alignment: int, a
return old_data, .None
}

new_memory, err := alloc_bytes(new_size, alignment, allocator, loc)
new_memory : []byte
err : Allocator_Error
if should_zero {
new_memory, err = alloc_bytes(new_size, alignment, allocator, loc)
} else {
new_memory, err = alloc_bytes_non_zeroed(new_size, alignment, allocator, loc)
}
if new_memory == nil || err != nil {
return nil, err
}

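The non-zeroed variant reuses the same resize logic but routes allocation through `alloc_bytes_non_zeroed`. A sketch of calling it directly (hypothetical sizes; assumes `core:mem` is imported):

```odin
package example

import "core:mem"

main :: proc() {
	// Grow a buffer without zeroing the newly added tail; the old
	// contents are still copied into the new allocation.
	buf, _ := mem.alloc_bytes(64)
	buf, _ = mem.default_resize_bytes_align_non_zeroed(buf, 256, 2*align_of(rawptr))
	defer delete(buf)
}
```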
@@ -85,13 +85,16 @@ arena_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
case .Free_All:
arena.offset = 0

case .Resize:
return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena))
case .Resize:
return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena))

case .Resize_Non_Zeroed:
return default_resize_bytes_align_non_zeroed(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena))

case .Query_Features:
set := (^Allocator_Mode_Set)(old_memory)
if set != nil {
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Query_Features}
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
}
return nil, nil

@@ -259,7 +262,7 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
}
clear(&s.leaked_allocations)

case .Resize:
case .Resize, .Resize_Non_Zeroed:
begin := uintptr(raw_data(s.data))
end := begin + uintptr(len(s.data))
old_ptr := uintptr(old_memory)
@@ -278,7 +281,7 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
case .Query_Features:
set := (^Allocator_Mode_Set)(old_memory)
if set != nil {
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Query_Features}
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
}
return nil, nil

@@ -406,9 +409,9 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
s.prev_offset = 0
s.curr_offset = 0

case .Resize:
case .Resize, .Resize_Non_Zeroed:
if old_memory == nil {
return raw_alloc(s, size, alignment, true)
return raw_alloc(s, size, alignment, mode == .Resize)
}
if size == 0 {
return nil, nil
@@ -434,7 +437,7 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))

if old_offset != header.prev_offset {
data, err := raw_alloc(s, size, alignment, true)
data, err := raw_alloc(s, size, alignment, mode == .Resize)
if err == nil {
runtime.copy(data, byte_slice(old_memory, old_size))
}
@@ -455,7 +458,7 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
case .Query_Features:
set := (^Allocator_Mode_Set)(old_memory)
if set != nil {
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Query_Features}
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
}
return nil, nil
case .Query_Info:
@@ -565,9 +568,9 @@ small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
case .Free_All:
s.offset = 0

case .Resize:
case .Resize, .Resize_Non_Zeroed:
if old_memory == nil {
return raw_alloc(s, size, align, true)
return raw_alloc(s, size, align, mode == .Resize)
}
if size == 0 {
return nil, nil
@@ -590,7 +593,7 @@ small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
return byte_slice(old_memory, size), nil
}

data, err := raw_alloc(s, size, align, true)
data, err := raw_alloc(s, size, align, mode == .Resize)
if err == nil {
runtime.copy(data, byte_slice(old_memory, old_size))
}
@@ -599,7 +602,7 @@ small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
case .Query_Features:
set := (^Allocator_Mode_Set)(old_memory)
if set != nil {
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Query_Features}
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
}
return nil, nil

@@ -649,7 +652,7 @@ dynamic_pool_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
case .Free_All:
dynamic_pool_free_all(pool)
return nil, nil
case .Resize:
case .Resize, .Resize_Non_Zeroed:
if old_size >= size {
return byte_slice(old_memory, size), nil
}
@@ -662,7 +665,7 @@ dynamic_pool_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
case .Query_Features:
set := (^Allocator_Mode_Set)(old_memory)
if set != nil {
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Query_Features, .Query_Info}
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features, .Query_Info}
}
return nil, nil

@@ -826,6 +829,10 @@ panic_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
if size > 0 {
panic("mem: panic allocator, .Resize called", loc=loc)
}
case .Resize_Non_Zeroed:
if size > 0 {
panic("mem: panic allocator, .Resize_Non_Zeroed called", loc=loc)
}
case .Free:
if old_memory != nil {
panic("mem: panic allocator, .Free called", loc=loc)
@@ -958,7 +965,7 @@ tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
if data.clear_on_free_all {
clear_map(&data.allocation_map)
}
case .Resize:
case .Resize, .Resize_Non_Zeroed:
if old_memory != result_ptr {
delete_key(&data.allocation_map, old_memory)
}

@@ -51,7 +51,7 @@ arena_init_growing :: proc(arena: ^Arena, reserved: uint = DEFAULT_ARENA_GROWING
// Initialization of an `Arena` to be a `.Static` variant.
// A static arena contains a single `Memory_Block` allocated with virtual memory.
@(require_results)
arena_init_static :: proc(arena: ^Arena, reserved: uint, commit_size: uint = DEFAULT_ARENA_STATIC_COMMIT_SIZE) -> (err: Allocator_Error) {
arena_init_static :: proc(arena: ^Arena, reserved: uint = DEFAULT_ARENA_STATIC_RESERVE_SIZE, commit_size: uint = DEFAULT_ARENA_STATIC_COMMIT_SIZE) -> (err: Allocator_Error) {
arena.kind = .Static
arena.curr_block = memory_block_alloc(commit_size, reserved, {}) or_return
arena.total_used = 0
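With `reserved` now defaulted, a static arena can be initialized with no size arguments at all. A sketch (assumes `arena_destroy` in `core:mem/virtual` for cleanup):

```odin
package example

import vmem "core:mem/virtual"

main :: proc() {
	arena: vmem.Arena
	// reserved now defaults to DEFAULT_ARENA_STATIC_RESERVE_SIZE.
	if vmem.arena_init_static(&arena) != nil {
		return
	}
	defer vmem.arena_destroy(&arena)

	data := make([]byte, 1024, vmem.arena_allocator(&arena))
	_ = data
}
```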
@@ -98,15 +98,15 @@ arena_alloc :: proc(arena: ^Arena, size: uint, alignment: uint, loc := #caller_l

switch arena.kind {
case .Growing:
if arena.curr_block == nil || (safe_add(arena.curr_block.used, size) or_else 0) > arena.curr_block.reserved {
size = mem.align_forward_uint(size, alignment)
needed := mem.align_forward_uint(size, alignment)
if arena.curr_block == nil || (safe_add(arena.curr_block.used, needed) or_else 0) > arena.curr_block.reserved {
if arena.minimum_block_size == 0 {
arena.minimum_block_size = DEFAULT_ARENA_GROWING_MINIMUM_BLOCK_SIZE
}

block_size := max(size, arena.minimum_block_size)
block_size := max(needed, arena.minimum_block_size)

new_block := memory_block_alloc(size, block_size, {}) or_return
new_block := memory_block_alloc(needed, block_size, alignment, {}) or_return
new_block.prev = arena.curr_block
arena.curr_block = new_block
arena.total_reserved += new_block.reserved
@@ -288,7 +288,7 @@ arena_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
err = .Mode_Not_Implemented
case .Free_All:
arena_free_all(arena, location)
case .Resize:
case .Resize, .Resize_Non_Zeroed:
old_data := ([^]byte)(old_memory)

switch {

47
core/mem/virtual/file.odin
Normal file
@@ -0,0 +1,47 @@
package mem_virtual

import "core:os"

Map_File_Error :: enum {
None,
Open_Failure,
Stat_Failure,
Negative_Size,
Too_Large_Size,
Map_Failure,
}

Map_File_Flag :: enum u32 {
Read,
Write,
}
Map_File_Flags :: distinct bit_set[Map_File_Flag; u32]

map_file :: proc{
map_file_from_path,
map_file_from_file_descriptor,
}

map_file_from_path :: proc(filename: string, flags: Map_File_Flags) -> (data: []byte, error: Map_File_Error) {
fd, err := os.open(filename, os.O_RDWR)
if err != 0 {
return nil, .Open_Failure
}
defer os.close(fd)

return map_file_from_file_descriptor(uintptr(fd), flags)
}

map_file_from_file_descriptor :: proc(fd: uintptr, flags: Map_File_Flags) -> (data: []byte, error: Map_File_Error) {
size, os_err := os.file_size(os.Handle(fd))
if os_err != 0 {
return nil, .Stat_Failure
}
if size < 0 {
return nil, .Negative_Size
}
if size != i64(int(size)) {
return nil, .Too_Large_Size
}
return _map_file(fd, size, flags)
}
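A sketch of using the new `map_file` API (hypothetical file path; note that `map_file_from_path` opens with `os.O_RDWR`, so the file must be writable even when only `{.Read}` is requested):

```odin
package example

import "core:fmt"
import vmem "core:mem/virtual"

main :: proc() {
	// Map a file into memory instead of reading it into a buffer.
	data, err := vmem.map_file("input.bin", {.Read, .Write})
	if err != .None {
		fmt.eprintln("map_file failed:", err)
		return
	}
	fmt.println("mapped", len(data), "bytes")
}
```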
@@ -68,7 +68,7 @@ align_formula :: #force_inline proc "contextless" (size, align: uint) -> uint {
}

@(require_results)
memory_block_alloc :: proc(committed, reserved: uint, flags: Memory_Block_Flags) -> (block: ^Memory_Block, err: Allocator_Error) {
memory_block_alloc :: proc(committed, reserved: uint, alignment: uint = 0, flags: Memory_Block_Flags = {}) -> (block: ^Memory_Block, err: Allocator_Error) {
page_size := DEFAULT_PAGE_SIZE
assert(mem.is_power_of_two(uintptr(page_size)))

@@ -79,8 +79,8 @@ memory_block_alloc :: proc(committed, reserved: uint, flags: Memory_Block_Flags)
reserved = align_formula(reserved, page_size)
committed = clamp(committed, 0, reserved)

total_size := uint(reserved + size_of(Platform_Memory_Block))
base_offset := uintptr(size_of(Platform_Memory_Block))
total_size := uint(reserved + max(alignment, size_of(Platform_Memory_Block)))
base_offset := uintptr(max(alignment, size_of(Platform_Memory_Block)))
protect_offset := uintptr(0)

do_protection := false
@@ -183,4 +183,4 @@ memory_block_dealloc :: proc(block_to_free: ^Memory_Block) {
safe_add :: #force_inline proc "contextless" (x, y: uint) -> (uint, bool) {
z, did_overflow := intrinsics.overflow_add(x, y)
return z, !did_overflow
}
}

@@ -22,3 +22,7 @@ _protect :: proc "contextless" (data: rawptr, size: uint, flags: Protect_Flags)
_platform_memory_init :: proc() {

}

_map_file :: proc "contextless" (fd: uintptr, size: i64, flags: Map_File_Flags) -> (data: []byte, error: Map_File_Error) {
return nil, .Map_Failure
}

@@ -136,7 +136,7 @@ _protect :: proc "contextless" (data: rawptr, size: uint, flags: Protect_Flags)
if .Write in flags { pflags |= PROT_WRITE }
if .Execute in flags { pflags |= PROT_EXEC }
err := _mprotect(data, size, pflags)
return err != 0
return err == 0
}


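The `_protect` change fixes an inverted return value: `mprotect` returns 0 on success, so success is `err == 0`, not `err != 0`. A hedged sketch of the public wrapper this affects (assuming `reserve_and_commit` and `protect` in `core:mem/virtual`):

```odin
package example

import vmem "core:mem/virtual"

main :: proc() {
	data, err := vmem.reserve_and_commit(4096)
	if err != nil {
		return
	}
	// protect now correctly reports true on success on the
	// mprotect-based platforms.
	ok := vmem.protect(raw_data(data), 4096, {.Read})
	assert(ok)
}
```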
@@ -146,3 +146,20 @@ _platform_memory_init :: proc() {
// is power of two
assert(DEFAULT_PAGE_SIZE != 0 && (DEFAULT_PAGE_SIZE & (DEFAULT_PAGE_SIZE-1)) == 0)
}


_map_file :: proc "contextless" (fd: uintptr, size: i64, flags: Map_File_Flags) -> (data: []byte, error: Map_File_Error) {
prot, mflags: c.int
if .Read in flags {
prot |= PROT_READ
}
if .Write in flags {
prot |= PROT_WRITE
}
mflags |= MAP_SHARED
addr := _mmap(nil, c.size_t(size), prot, mflags, i32(fd), 0)
if addr == nil {
return nil, .Map_Failure
}
return ([^]byte)(addr)[:size], nil
}

@@ -40,7 +40,7 @@ _protect :: proc "contextless" (data: rawptr, size: uint, flags: Protect_Flags)
if .Write in flags { pflags |= {.WRITE} }
if .Execute in flags { pflags |= {.EXEC} }
errno := linux.mprotect(data, size, pflags)
return errno != .NONE
return errno == .NONE
}

_platform_memory_init :: proc() {
@@ -48,3 +48,21 @@ _platform_memory_init :: proc() {
// is power of two
assert(DEFAULT_PAGE_SIZE != 0 && (DEFAULT_PAGE_SIZE & (DEFAULT_PAGE_SIZE-1)) == 0)
}


_map_file :: proc "contextless" (fd: uintptr, size: i64, flags: Map_File_Flags) -> (data: []byte, error: Map_File_Error) {
prot: linux.Mem_Protection
if .Read in flags {
prot += {.READ}
}
if .Write in flags {
prot += {.WRITE}
}

flags := linux.Map_Flags{.SHARED}
addr, errno := linux.mmap(0, uint(size), prot, flags, linux.Fd(fd), offset=0)
if addr == nil || errno != nil {
return nil, .Map_Failure
}
return ([^]byte)(addr)[:size], nil
}

@@ -50,19 +50,39 @@ PAGE_WRITECOPY :: 0x08
PAGE_TARGETS_INVALID :: 0x40000000
PAGE_TARGETS_NO_UPDATE :: 0x40000000

SECTION_MAP_WRITE :: 0x0002
SECTION_MAP_READ :: 0x0004
FILE_MAP_WRITE :: SECTION_MAP_WRITE
FILE_MAP_READ :: SECTION_MAP_READ

ERROR_INVALID_ADDRESS :: 487
ERROR_COMMITMENT_LIMIT :: 1455

@(default_calling_convention="stdcall")
@(default_calling_convention="system")
foreign Kernel32 {
GetSystemInfo :: proc(lpSystemInfo: LPSYSTEM_INFO) ---
VirtualAlloc :: proc(lpAddress: rawptr, dwSize: uint, flAllocationType: u32, flProtect: u32) -> rawptr ---
VirtualFree :: proc(lpAddress: rawptr, dwSize: uint, dwFreeType: u32) -> b32 ---
VirtualProtect :: proc(lpAddress: rawptr, dwSize: uint, flNewProtect: u32, lpflOldProtect: ^u32) -> b32 ---
GetLastError :: proc() -> u32 ---

CreateFileMappingW :: proc(
hFile: rawptr,
lpFileMappingAttributes: rawptr,
flProtect: u32,
dwMaximumSizeHigh: u32,
dwMaximumSizeLow: u32,
lpName: [^]u16,
) -> rawptr ---

MapViewOfFile :: proc(
hFileMappingObject: rawptr,
dwDesiredAccess: u32,
dwFileOffsetHigh: u32,
dwFileOffsetLow: u32,
dwNumberOfBytesToMap: uint,
) -> rawptr ---
}


_reserve :: proc "contextless" (size: uint) -> (data: []byte, err: Allocator_Error) {
result := VirtualAlloc(nil, size, MEM_RESERVE, PAGE_READWRITE)
if result == nil {
@@ -125,3 +145,33 @@ _platform_memory_init :: proc() {
// is power of two
assert(DEFAULT_PAGE_SIZE != 0 && (DEFAULT_PAGE_SIZE & (DEFAULT_PAGE_SIZE-1)) == 0)
}


_map_file :: proc "contextless" (fd: uintptr, size: i64, flags: Map_File_Flags) -> (data: []byte, error: Map_File_Error) {
page_flags: u32
if flags == {.Read} {
page_flags = PAGE_READONLY
} else if flags == {.Write} {
page_flags = PAGE_READWRITE
} else if flags == {.Read, .Write} {
page_flags = PAGE_READWRITE
} else {
page_flags = PAGE_NOACCESS
}
maximum_size := transmute([2]u32)size
handle := CreateFileMappingW(rawptr(fd), nil, page_flags, maximum_size[1], maximum_size[0], nil)
if handle == nil {
return nil, .Map_Failure
}

desired_access: u32
if .Read in flags {
desired_access |= FILE_MAP_READ
}
if .Write in flags {
desired_access |= FILE_MAP_WRITE
}

file_data := MapViewOfFile(handle, desired_access, 0, 0, uint(size))
return ([^]byte)(file_data)[:size], nil
}

@@ -148,7 +148,29 @@ recv_udp :: proc(socket: UDP_Socket, buf: []byte) -> (bytes_read: int, remote_en
return _recv_udp(socket, buf)
}

recv :: proc{recv_tcp, recv_udp}
/*
Receive data into a buffer from any socket.

Note: the `remote_endpoint` result is non-nil only if the socket is a UDP socket. For TCP sockets it
will always be `nil`.
*/
recv_any :: proc(socket: Any_Socket, buf: []byte) -> (
bytes_read: int,
remote_endpoint: Maybe(Endpoint),
err: Network_Error,
) {
switch socktype in socket {
case TCP_Socket:
bytes_read, err := recv_tcp(socktype, buf)
return bytes_read, nil, err
case UDP_Socket:
bytes_read, endpoint, err := recv_udp(socktype, buf)
return bytes_read, endpoint, err
case: panic("Not supported")
}
}

recv :: proc{recv_tcp, recv_udp, recv_any}

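Since `recv` now includes `recv_any` in its procedure group, code holding an `Any_Socket` no longer has to switch on the variant itself. A hypothetical caller:

```odin
package example

import "core:fmt"
import "core:net"

read_one :: proc(sock: net.Any_Socket) {
	buf: [1024]byte
	// For a TCP socket, remote is always nil; for UDP it carries the
	// sender's endpoint.
	n, remote, err := net.recv(sock, buf[:])
	if err != nil {
		return
	}
	fmt.println("received", n, "bytes from", remote)
}
```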
/*
Repeatedly sends data until the entire buffer is sent.
@@ -168,7 +190,20 @@ send_udp :: proc(socket: UDP_Socket, buf: []byte, to: Endpoint) -> (bytes_writte
return _send_udp(socket, buf, to)
}

send :: proc{send_tcp, send_udp}
send_any :: proc(socket: Any_Socket, buf: []byte, to: Maybe(Endpoint) = nil) -> (
bytes_written: int,
err: Network_Error,
) {
switch socktype in socket {
case TCP_Socket:
return send_tcp(socktype, buf)
case UDP_Socket:
return send_udp(socktype, buf, to.(Endpoint))
case: panic("Not supported")
}
}

send :: proc{send_tcp, send_udp, send_any}

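`send_any` mirrors this on the sending side: the `to` endpoint is required for UDP sockets and ignored for TCP. A sketch:

```odin
package example

import "core:net"

send_bytes :: proc(sock: net.Any_Socket, payload: []byte, to: net.Endpoint) {
	// send dispatches through send_any for Any_Socket values; note that
	// a UDP socket with a nil `to` would panic on the type assertion.
	_, err := net.send(sock, payload, to)
	_ = err
}
```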
shutdown :: proc(socket: Any_Socket, manner: Shutdown_Manner) -> (err: Network_Error) {
return _shutdown(socket, manner)
@@ -180,4 +215,4 @@ set_option :: proc(socket: Any_Socket, option: Socket_Option, value: any, loc :=

set_blocking :: proc(socket: Any_Socket, should_block: bool) -> (err: Network_Error) {
return _set_blocking(socket, should_block)
}
}

@@ -125,7 +125,7 @@ _create_socket :: proc(family: Address_Family, protocol: Socket_Protocol) -> (An
}

@(private)
_dial_tcp_from_endpoint :: proc(endpoint: Endpoint, options := default_tcp_options) -> (tcp_sock: TCP_Socket, err: Network_Error) {
_dial_tcp_from_endpoint :: proc(endpoint: Endpoint, options := default_tcp_options) -> (TCP_Socket, Network_Error) {
errno: linux.Errno
if endpoint.port == 0 {
return 0, .Port_Required

@@ -7,7 +7,7 @@ import "core:reflect"
import "core:odin/tokenizer"
_ :: intrinsics

new :: proc($T: typeid, pos, end: tokenizer.Pos) -> ^T {
new_from_positions :: proc($T: typeid, pos, end: tokenizer.Pos) -> ^T {
n, _ := mem.new(T)
n.pos = pos
n.end = end
@@ -23,6 +23,15 @@ new :: proc($T: typeid, pos, end: tokenizer.Pos) -> ^T {
return n
}

new_from_pos_and_end_node :: proc($T: typeid, pos: tokenizer.Pos, end: ^Node) -> ^T {
return new(T, pos, end != nil ? end.end : pos)
}

new :: proc {
new_from_positions,
new_from_pos_and_end_node,
}

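`ast.new` is now a proc group: the original positional form, plus a form that takes an end node and falls back to `pos` when that node is nil. This is what lets the parser sites below pass `body`, `stmt`, and similar nodes directly. A sketch:

```odin
package example

import "core:odin/ast"
import "core:odin/tokenizer"

bad_stmt :: proc(pos: tokenizer.Pos, after: ^ast.Node) -> ^ast.Bad_Stmt {
	// If `after` is nil, the node's end collapses to `pos` instead of
	// dereferencing a nil pointer, which avoids crashes on partial input.
	return ast.new(ast.Bad_Stmt, pos, after)
}
```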
clone :: proc{
clone_node,
clone_expr,

@@ -436,6 +436,24 @@ expect_closing_brace_of_field_list :: proc(p: ^Parser) -> tokenizer.Token {
return expect_brace
}

expect_closing_parentheses_of_field_list :: proc(p: ^Parser) -> tokenizer.Token {
token := p.curr_tok
if allow_token(p, .Close_Paren) {
return token
}

if allow_token(p, .Semicolon) && !tokenizer.is_newline(token) {
str := tokenizer.token_to_string(token)
error(p, end_of_line_pos(p, p.prev_tok), "expected a comma, got %s", str)
}

for p.curr_tok.kind != .Close_Paren && p.curr_tok.kind != .EOF && !is_non_inserted_semicolon(p.curr_tok) {
advance_token(p)
}

return expect_token(p, .Close_Paren)
}

is_non_inserted_semicolon :: proc(tok: tokenizer.Token) -> bool {
return tok.kind == .Semicolon && tok.text != "\n"
}
@@ -786,8 +804,11 @@ parse_if_stmt :: proc(p: ^Parser) -> ^ast.If_Stmt {
else_stmt = ast.new(ast.Bad_Stmt, p.curr_tok.pos, end_pos(p.curr_tok))
}
}

end := body.end

end: tokenizer.Pos
if body != nil {
end = body.end
}
if else_stmt != nil {
end = else_stmt.end
}
@@ -850,7 +871,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
body = parse_body(p)
}

range_stmt := ast.new(ast.Range_Stmt, tok.pos, body.end)
range_stmt := ast.new(ast.Range_Stmt, tok.pos, body)
range_stmt.for_pos = tok.pos
range_stmt.in_pos = in_tok.pos
range_stmt.expr = rhs
@@ -910,7 +931,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
rhs = assign_stmt.rhs[0]
}

range_stmt := ast.new(ast.Range_Stmt, tok.pos, body.end)
range_stmt := ast.new(ast.Range_Stmt, tok.pos, body)
range_stmt.for_pos = tok.pos
range_stmt.vals = vals
range_stmt.in_pos = assign_stmt.op.pos
@@ -920,7 +941,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
}

cond_expr := convert_stmt_to_expr(p, cond, "boolean expression")
for_stmt := ast.new(ast.For_Stmt, tok.pos, body.end)
for_stmt := ast.new(ast.For_Stmt, tok.pos, body)
for_stmt.for_pos = tok.pos
for_stmt.init = init
for_stmt.cond = cond_expr
@@ -976,7 +997,7 @@ parse_switch_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
lhs[0] = new_blank_ident(p, tok.pos)
rhs[0] = parse_expr(p, true)

as := ast.new(ast.Assign_Stmt, tok.pos, rhs[0].end)
as := ast.new(ast.Assign_Stmt, tok.pos, rhs[0])
as.lhs = lhs
as.op = in_tok
as.rhs = rhs
@@ -1010,14 +1031,14 @@ parse_switch_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
body.stmts = clauses[:]

if is_type_switch {
ts := ast.new(ast.Type_Switch_Stmt, tok.pos, body.end)
ts := ast.new(ast.Type_Switch_Stmt, tok.pos, body)
ts.tag = tag
ts.body = body
ts.switch_pos = tok.pos
return ts
} else {
cond := convert_stmt_to_expr(p, tag, "switch expression")
ts := ast.new(ast.Switch_Stmt, tok.pos, body.end)
ts := ast.new(ast.Switch_Stmt, tok.pos, body)
ts.init = init
ts.cond = cond
ts.body = body
@@ -1044,7 +1065,7 @@ parse_attribute :: proc(p: ^Parser, tok: tokenizer.Token, open_kind, close_kind:
if p.curr_tok.kind == .Eq {
eq := expect_token(p, .Eq)
value := parse_value(p)
fv := ast.new(ast.Field_Value, elem.pos, value.end)
fv := ast.new(ast.Field_Value, elem.pos, value)
fv.field = elem
fv.sep = eq.pos
fv.value = value
@@ -1137,7 +1158,7 @@ parse_foreign_block :: proc(p: ^Parser, tok: tokenizer.Token) -> ^ast.Foreign_Bl
body.stmts = decls[:]
body.close = close.pos

decl := ast.new(ast.Foreign_Block_Decl, tok.pos, body.end)
decl := ast.new(ast.Foreign_Block_Decl, tok.pos, body)
decl.docs = docs
decl.tok = tok
decl.foreign_library = foreign_library
@@ -1248,7 +1269,7 @@ parse_unrolled_for_loop :: proc(p: ^Parser, inline_tok: tokenizer.Token) -> ^ast
return ast.new(ast.Bad_Stmt, inline_tok.pos, end_pos(p.prev_tok))
}

range_stmt := ast.new(ast.Inline_Range_Stmt, inline_tok.pos, body.end)
range_stmt := ast.new(ast.Inline_Range_Stmt, inline_tok.pos, body)
range_stmt.inline_pos = inline_tok.pos
range_stmt.for_pos = for_tok.pos
range_stmt.val0 = val0
@@ -1304,7 +1325,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
case ^ast.Return_Stmt:
error(p, s.pos, "you cannot defer a return statement")
}
ds := ast.new(ast.Defer_Stmt, tok.pos, stmt.end)
ds := ast.new(ast.Defer_Stmt, tok.pos, stmt)
ds.stmt = stmt
return ds

@@ -1341,8 +1362,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
if tok.kind != .Fallthrough && p.curr_tok.kind == .Ident {
label = parse_ident(p)
}
end := label.end if label != nil else end_pos(tok)
s := ast.new(ast.Branch_Stmt, tok.pos, end)
s := ast.new(ast.Branch_Stmt, tok.pos, label)
s.tok = tok
s.label = label
expect_semicolon(p, s)
@@ -1366,7 +1386,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
if p.curr_tok.kind != .Colon {
end := list[len(list)-1]
expect_semicolon(p, end)
us := ast.new(ast.Using_Stmt, tok.pos, end.end)
us := ast.new(ast.Using_Stmt, tok.pos, end)
us.list = list
return us
}
@@ -1416,13 +1436,13 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
bd.tok = tok
bd.name = name
ce := parse_call_expr(p, bd)
es := ast.new(ast.Expr_Stmt, ce.pos, ce.end)
es := ast.new(ast.Expr_Stmt, ce.pos, ce)
es.expr = ce
return es

case "force_inline", "force_no_inline":
expr := parse_inlining_operand(p, true, tag)
es := ast.new(ast.Expr_Stmt, expr.pos, expr.end)
es := ast.new(ast.Expr_Stmt, expr.pos, expr)
es.expr = expr
return es
case "unroll":
@@ -1436,7 +1456,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
}
range.reverse = true
} else {
error(p, range.pos, "#reverse can only be applied to a 'for in' statement")
error(p, stmt.pos, "#reverse can only be applied to a 'for in' statement")
}
return stmt
case "include":
@@ -1444,7 +1464,8 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
return ast.new(ast.Bad_Stmt, tok.pos, end_pos(tag))
case:
stmt := parse_stmt(p)
te := ast.new(ast.Tag_Stmt, tok.pos, stmt.pos)
end := stmt.pos if stmt != nil else end_pos(tok)
te := ast.new(ast.Tag_Stmt, tok.pos, end)
te.op = tok
te.name = name
te.stmt = stmt
@@ -1572,7 +1593,7 @@ convert_stmt_to_body :: proc(p: ^Parser, stmt: ^ast.Stmt) -> ^ast.Stmt {
error(p, stmt.pos, "expected a non-empty statement")
}

bs := ast.new(ast.Block_Stmt, stmt.pos, stmt.end)
bs := ast.new(ast.Block_Stmt, stmt.pos, stmt)
bs.open = stmt.pos
bs.stmts = make([]^ast.Stmt, 1)
bs.stmts[0] = stmt
@@ -1741,7 +1762,7 @@ parse_var_type :: proc(p: ^Parser, flags: ast.Field_Flags) -> ^ast.Expr {
error(p, tok.pos, "variadic field missing type after '..'")
type = ast.new(ast.Bad_Expr, tok.pos, end_pos(tok))
}
e := ast.new(ast.Ellipsis, type.pos, type.end)
e := ast.new(ast.Ellipsis, type.pos, type)
e.expr = type
return e
}
@@ -1808,7 +1829,7 @@ parse_ident_list :: proc(p: ^Parser, allow_poly_names: bool) -> []^ast.Expr {
if is_blank_ident(ident) {
error(p, ident.pos, "invalid polymorphic type definition with a blank identifier")
}
poly_name := ast.new(ast.Poly_Type, tok.pos, ident.end)
poly_name := ast.new(ast.Poly_Type, tok.pos, ident)
poly_name.type = ident
append(&list, poly_name)
} else {
@@ -2092,7 +2113,7 @@ parse_proc_type :: proc(p: ^Parser, tok: tokenizer.Token) -> ^ast.Proc_Type {

expect_token(p, .Open_Paren)
params, _ := parse_field_list(p, .Close_Paren, ast.Field_Flags_Signature_Params)
expect_token(p, .Close_Paren)
expect_closing_parentheses_of_field_list(p)
results, diverging := parse_results(p)

is_generic := false
@@ -2154,7 +2175,7 @@ parse_inlining_operand :: proc(p: ^Parser, lhs: bool, tok: tokenizer.Token) -> ^
e.inlining = pi
case:
error(p, tok.pos, "'%s' must be followed by a procedure literal or call", tok.text)
return ast.new(ast.Bad_Expr, tok.pos, expr.end)
return ast.new(ast.Bad_Expr, tok.pos, expr)
}
return expr
}
@@ -2204,7 +2225,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
case .Distinct:
tok := advance_token(p)
type := parse_type(p)
dt := ast.new(ast.Distinct_Type, tok.pos, type.end)
dt := ast.new(ast.Distinct_Type, tok.pos, type)
dt.tok = tok.kind
dt.type = type
return dt
@@ -2215,7 +2236,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
switch name.text {
case "type":
type := parse_type(p)
hp := ast.new(ast.Helper_Type, tok.pos, type.end)
hp := ast.new(ast.Helper_Type, tok.pos, type)
hp.tok = tok.kind
hp.type = type
return hp
@@ -2319,7 +2340,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
tag_call := parse_call_expr(p, tag)
type := parse_type(p)

rt := ast.new(ast.Relative_Type, tok.pos, type.end)
rt := ast.new(ast.Relative_Type, tok.pos, type)
rt.tag = tag_call
rt.type = type
return rt
@@ -2328,7 +2349,8 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
return parse_inlining_operand(p, lhs, name)
case:
expr := parse_expr(p, lhs)
te := ast.new(ast.Tag_Expr, tok.pos, expr.pos)
end := expr.pos if expr != nil else end_pos(tok)
te := ast.new(ast.Tag_Expr, tok.pos, end)
te.op = tok
te.name = name.text
te.expr = expr
@@ -2456,7 +2478,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
case .Pointer:
tok := expect_token(p, .Pointer)
elem := parse_type(p)
ptr := ast.new(ast.Pointer_Type, tok.pos, elem.end)
ptr := ast.new(ast.Pointer_Type, tok.pos, elem)
ptr.pointer = tok.pos
ptr.elem = elem
return ptr
@@ -2470,7 +2492,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
tok := expect_token(p, .Pointer)
close := expect_token(p, .Close_Bracket)
elem := parse_type(p)
t := ast.new(ast.Multi_Pointer_Type, open.pos, elem.end)
t := ast.new(ast.Multi_Pointer_Type, open.pos, elem)
t.open = open.pos
t.pointer = tok.pos
t.close = close.pos
@@ -2480,7 +2502,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
tok := expect_token(p, .Dynamic)
close := expect_token(p, .Close_Bracket)
elem := parse_type(p)
da := ast.new(ast.Dynamic_Array_Type, open.pos, elem.end)
da := ast.new(ast.Dynamic_Array_Type, open.pos, elem)
da.open = open.pos
da.dynamic_pos = tok.pos
da.close = close.pos
@@ -2500,7 +2522,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
}
close := expect_token(p, .Close_Bracket)
elem := parse_type(p)
at := ast.new(ast.Array_Type, open.pos, elem.end)
at := ast.new(ast.Array_Type, open.pos, elem)
at.open = open.pos
at.len = count
at.close = close.pos
@@ -2514,7 +2536,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
expect_token(p, .Close_Bracket)
value := parse_type(p)

mt := ast.new(ast.Map_Type, tok.pos, value.end)
mt := ast.new(ast.Map_Type, tok.pos, value)
mt.tok_pos = tok.pos
mt.key = key
mt.value = value
@@ -2755,7 +2777,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
expect_token(p, .Close_Bracket)
elem := parse_type(p)

mt := ast.new(ast.Matrix_Type, tok.pos, elem.end)
mt := ast.new(ast.Matrix_Type, tok.pos, elem)
mt.tok_pos = tok.pos
mt.row_count = row_count
mt.column_count = column_count
@@ -2893,7 +2915,7 @@ parse_elem_list :: proc(p: ^Parser) -> []^ast.Expr {
eq := expect_token(p, .Eq)
value := parse_value(p)

fv := ast.new(ast.Field_Value, elem.pos, value.end)
fv := ast.new(ast.Field_Value, elem.pos, value)
fv.field = elem
fv.sep = eq.pos
fv.value = value
@@ -2962,7 +2984,7 @@ parse_call_expr :: proc(p: ^Parser, operand: ^ast.Expr) -> ^ast.Expr {
}

value := parse_value(p)
fv := ast.new(ast.Field_Value, arg.pos, value.end)
fv := ast.new(ast.Field_Value, arg.pos, value)
fv.field = arg
fv.sep = eq.pos
fv.value = value
@@ -2993,7 +3015,7 @@ parse_call_expr :: proc(p: ^Parser, operand: ^ast.Expr) -> ^ast.Expr {

o := ast.unparen_expr(operand)
if se, ok := o.derived.(^ast.Selector_Expr); ok && se.op.kind == .Arrow_Right {
sce := ast.new(ast.Selector_Call_Expr, ce.pos, ce.end)
sce := ast.new(ast.Selector_Call_Expr, ce.pos, ce)
sce.expr = o
sce.call = ce
return sce
@@ -3101,7 +3123,7 @@ parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^a
case .Ident:
field := parse_ident(p)

sel := ast.new(ast.Selector_Expr, operand.pos, field.end)
sel := ast.new(ast.Selector_Expr, operand.pos, field)
sel.expr = operand
sel.op = tok
sel.field = field
@@ -3127,7 +3149,7 @@ parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^a
type.op = question
type.expr = nil

ta := ast.new(ast.Type_Assertion, operand.pos, type.end)
ta := ast.new(ast.Type_Assertion, operand.pos, type)
ta.expr = operand
ta.type = type

@@ -3145,7 +3167,7 @@ parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^a
case .Ident:
field := parse_ident(p)

sel := ast.new(ast.Selector_Expr, operand.pos, field.end)
sel := ast.new(ast.Selector_Expr, operand.pos, field)
sel.expr = operand
sel.op = tok
sel.field = field
@@ -3225,7 +3247,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
close := expect_token(p, .Close_Paren)
expr := parse_unary_expr(p, lhs)

tc := ast.new(ast.Type_Cast, tok.pos, expr.end)
tc := ast.new(ast.Type_Cast, tok.pos, expr)
tc.tok = tok
tc.open = open.pos
tc.type = type
@@ -3237,7 +3259,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
op := advance_token(p)
expr := parse_unary_expr(p, lhs)

ac := ast.new(ast.Auto_Cast, op.pos, expr.end)
ac := ast.new(ast.Auto_Cast, op.pos, expr)
ac.op = op
ac.expr = expr
return ac
@@ -3247,8 +3269,8 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
.And:
op := advance_token(p)
expr := parse_unary_expr(p, lhs)

ue := ast.new(ast.Unary_Expr, op.pos, expr.end)

ue := ast.new(ast.Unary_Expr, op.pos, expr)
ue.op = op
ue.expr = expr
return ue
@@ -3258,7 +3280,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
error(p, op.pos, "unary '%s' operator is not supported", op.text)
expr := parse_unary_expr(p, lhs)

ue := ast.new(ast.Unary_Expr, op.pos, expr.end)
ue := ast.new(ast.Unary_Expr, op.pos, expr)
ue.op = op
ue.expr = expr
return ue
@@ -3266,7 +3288,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
case .Period:
op := advance_token(p)
field := parse_ident(p)
ise := ast.new(ast.Implicit_Selector_Expr, op.pos, field.end)
ise := ast.new(ast.Implicit_Selector_Expr, op.pos, field)
ise.field = field
return ise

@@ -3407,7 +3429,7 @@ parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt {
error(p, p.curr_tok.pos, "no right-hand side in assignment statement")
return ast.new(ast.Bad_Stmt, start_tok.pos, end_pos(p.curr_tok))
}
stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1].end)
stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1])
stmt.lhs = lhs
stmt.op = op
stmt.rhs = rhs
@@ -3424,7 +3446,7 @@ parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt {
rhs := make([]^ast.Expr, 1)
rhs[0] = expr

stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1].end)
stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1])
stmt.lhs = lhs
stmt.op = op
stmt.rhs = rhs
@@ -3466,7 +3488,7 @@ parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt {
error(p, op.pos, "postfix '%s' statement is not supported", op.text)
}

es := ast.new(ast.Expr_Stmt, lhs[0].pos, lhs[0].end)
es := ast.new(ast.Expr_Stmt, lhs[0].pos, lhs[0])
es.expr = lhs[0]
return es
}

@@ -149,7 +149,7 @@ read_console :: proc(handle: win32.HANDLE, b: []byte) -> (n: int, err: Errno) {
return
}

read :: proc(fd: Handle, data: []byte) -> (int, Errno) {
read :: proc(fd: Handle, data: []byte) -> (total_read: int, err: Errno) {
if len(data) == 0 {
return 0, ERROR_NONE
}
@@ -158,32 +158,32 @@ read :: proc(fd: Handle, data: []byte) -> (int, Errno) {

m: u32
is_console := win32.GetConsoleMode(handle, &m)

single_read_length: win32.DWORD
total_read: int
length := len(data)

// NOTE(Jeroen): `length` can't be cast to win32.DWORD here because it'll overflow if > 4 GiB and return 0 if exactly that.
to_read := min(i64(length), MAX_RW)

e: win32.BOOL
if is_console {
n, err := read_console(handle, data[total_read:][:to_read])
total_read += n
total_read, err = read_console(handle, data[total_read:][:to_read])
if err != 0 {
return int(total_read), err
return total_read, err
}
} else {
// NOTE(Jeroen): So we cast it here *after* we've ensured that `to_read` is at most MAX_RW (1 GiB)
e = win32.ReadFile(handle, &data[total_read], win32.DWORD(to_read), &single_read_length, nil)
bytes_read: win32.DWORD
if e := win32.ReadFile(handle, &data[total_read], win32.DWORD(to_read), &bytes_read, nil); e {
// Successful read can mean two things, including EOF, see:
// https://learn.microsoft.com/en-us/windows/win32/fileio/testing-for-the-end-of-a-file
if bytes_read == 0 {
return 0, ERROR_HANDLE_EOF
} else {
return int(bytes_read), ERROR_NONE
}
} else {
return 0, Errno(win32.GetLastError())
}
}
if single_read_length <= 0 || !e {
err := Errno(win32.GetLastError())
return int(total_read), err
}
total_read += int(single_read_length)

return int(total_read), ERROR_NONE
return total_read, ERROR_NONE
}

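With this change, a successful zero-byte `ReadFile` on Windows is reported as `ERROR_HANDLE_EOF` rather than a silent zero-byte success. A hypothetical read loop (assumes `example.txt` exists):

```odin
package example

import "core:fmt"
import "core:os"

main :: proc() {
	fd, open_err := os.open("example.txt")
	if open_err != os.ERROR_NONE {
		return
	}
	defer os.close(fd)

	buf: [4096]byte
	for {
		n, err := os.read(fd, buf[:])
		if n <= 0 || err != os.ERROR_NONE { // EOF now arrives as an error
			break
		}
		fmt.print(string(buf[:n]))
	}
}
```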
seek :: proc(fd: Handle, offset: i64, whence: int) -> (i64, Errno) {

@@ -210,15 +210,15 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
}
}

aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int) -> (new_memory: []byte, err: mem.Allocator_Error) {
aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int, zero_memory := true) -> (new_memory: []byte, err: mem.Allocator_Error) {
if p == nil {
return nil, nil
}

new_memory = aligned_alloc(new_size, new_alignment, p) or_return
new_memory = aligned_alloc(new_size, new_alignment, p, zero_memory) or_return

// NOTE: heap_resize does not zero the new memory, so we do it
if new_size > old_size {
if zero_memory && new_size > old_size {
new_region := mem.raw_data(new_memory[old_size:])
mem.zero(new_region, new_size - old_size)
}
@@ -235,16 +235,16 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
case .Free_All:
return nil, .Mode_Not_Implemented

case .Resize:
case .Resize, .Resize_Non_Zeroed:
if old_memory == nil {
return aligned_alloc(size, alignment)
return aligned_alloc(size, alignment, nil, mode == .Resize)
}
return aligned_resize(old_memory, old_size, size, alignment)
return aligned_resize(old_memory, old_size, size, alignment, mode == .Resize)

case .Query_Features:
set := (^mem.Allocator_Mode_Set)(old_memory)
if set != nil {
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Resize, .Query_Features}
set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Resize, .Resize_Non_Zeroed, .Query_Features}
}
return nil, nil


@@ -85,7 +85,7 @@ _heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
case .Free_All:
return nil, .Mode_Not_Implemented

case .Resize:
case .Resize, .Resize_Non_Zeroed:
if old_memory == nil {
return aligned_alloc(size, alignment, true)
}

@@ -263,26 +263,48 @@ Unix_File_Time :: struct {
nanoseconds: i64,
}

OS_Stat :: struct {
device_id: u64, // ID of device containing file
serial: u64, // File serial number
nlink: u64, // Number of hard links
mode: u32, // Mode of the file
uid: u32, // User ID of the file's owner
gid: u32, // Group ID of the file's group
_padding: i32, // 32 bits of padding
rdev: u64, // Device ID, if device
size: i64, // Size of the file, in bytes
block_size: i64, // Optimal bllocksize for I/O
blocks: i64, // Number of 512-byte blocks allocated
when ODIN_ARCH == .arm64 {
OS_Stat :: struct {
device_id: u64, // ID of device containing file
serial: u64, // File serial number
mode: u32, // Mode of the file
nlink: u32, // Number of hard links
uid: u32, // User ID of the file's owner
gid: u32, // Group ID of the file's group
rdev: u64, // Device ID, if device
_: u64, // Padding
size: i64, // Size of the file, in bytes
block_size: i32, // Optimal blocksize for I/O
_: i32, // Padding
blocks: i64, // Number of 512-byte blocks allocated

last_access: Unix_File_Time, // Time of last access
modified: Unix_File_Time, // Time of last modification
status_change: Unix_File_Time, // Time of last status change
last_access: Unix_File_Time, // Time of last access
modified: Unix_File_Time, // Time of last modification
status_change: Unix_File_Time, // Time of last status change

_reserve1,
_reserve2,
_reserve3: i64,
_reserved: [2]i32,
}
#assert(size_of(OS_Stat) == 128)
} else {
OS_Stat :: struct {
device_id: u64, // ID of device containing file
serial: u64, // File serial number
nlink: u64, // Number of hard links
mode: u32, // Mode of the file
uid: u32, // User ID of the file's owner
gid: u32, // Group ID of the file's group
_: i32, // 32 bits of padding
rdev: u64, // Device ID, if device
size: i64, // Size of the file, in bytes
block_size: i64, // Optimal blocksize for I/O
blocks: i64, // Number of 512-byte blocks allocated

last_access: Unix_File_Time, // Time of last access
modified: Unix_File_Time, // Time of last modification
status_change: Unix_File_Time, // Time of last status change

_reserved: [3]i64,
}
}

// NOTE(laleksic, 2021-01-21): Comment and rename these to match OS_Stat above

@@ -27,9 +27,7 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte,
case .Read:
n_int, os_err = read(fd, p)
n = i64(n_int)
if os_err != 0 {
err = .Unknown
}

case .Read_At:
when !(ODIN_OS == .FreeBSD || ODIN_OS == .OpenBSD) {
n_int, os_err = read_at(fd, p, offset)
@@ -57,6 +55,11 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte,
}
}
if err == nil && os_err != 0 {
when ODIN_OS == .Windows {
if os_err == ERROR_HANDLE_EOF {
return n, .EOF
}
}
err = .Unknown
}
return

@@ -18,6 +18,7 @@
// This could change at a later date if all these data structures are
// implemented within the compiler rather than in this "preload" file
//
//+no-instrumentation
package runtime

import "core:intrinsics"
@@ -306,6 +307,7 @@ Allocator_Mode :: enum byte {
Query_Features,
Query_Info,
Alloc_Non_Zeroed,
Resize_Non_Zeroed,
}

Allocator_Mode_Set :: distinct bit_set[Allocator_Mode]

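With `.Resize_Non_Zeroed` added to `Allocator_Mode`, an allocator advertises support for it through `.Query_Features`. A sketch using `core:mem`'s feature query (assuming the `query_features` helper):

```odin
package example

import "core:fmt"
import "core:mem"

main :: proc() {
	feats := mem.query_features(context.allocator)
	fmt.println("non-zeroed resize supported:", .Resize_Non_Zeroed in feats)
}
```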
@@ -109,7 +109,7 @@ remove_range :: proc(array: ^$D/[dynamic]$T, lo, hi: int, loc := #caller_locatio
|
||||
|
||||
// `pop` will remove and return the end value of dynamic array `array` and reduces the length of `array` by 1.
|
||||
//
|
||||
// Note: If the dynamic array as no elements (`len(array) == 0`), this procedure will panic.
|
||||
// Note: If the dynamic array has no elements (`len(array) == 0`), this procedure will panic.
|
||||
@builtin
|
||||
pop :: proc(array: ^$T/[dynamic]$E, loc := #caller_location) -> (res: E) #no_bounds_check {
|
||||
assert(len(array) > 0, loc=loc)
|
||||
@@ -169,10 +169,16 @@ clear :: proc{clear_dynamic_array, clear_map}
|
||||
@builtin
|
||||
reserve :: proc{reserve_dynamic_array, reserve_map}
|
||||
|
||||
// `resize` will try to resize memory of a passed dynamic array or map to the requested element count (setting the `len`, and possibly `cap`).
|
||||
@builtin
|
||||
non_zero_reserve :: proc{non_zero_reserve_dynamic_array}
|
||||
|
||||
// `resize` will try to resize memory of a passed dynamic array to the requested element count (setting the `len`, and possibly `cap`).
|
||||
@builtin
|
||||
resize :: proc{resize_dynamic_array}
|
||||
|
||||
@builtin
|
||||
non_zero_resize :: proc{non_zero_resize_dynamic_array}
|
||||
|
||||
// Shrinks the capacity of a dynamic array or map down to the current length, or the given capacity.
|
||||
@builtin
|
||||
shrink :: proc{shrink_dynamic_array, shrink_map}
|
||||
@@ -234,6 +240,8 @@ delete :: proc{
|
||||
delete_dynamic_array,
|
||||
delete_slice,
|
||||
delete_map,
|
||||
delete_soa_slice,
|
||||
delete_soa_dynamic_array,
|
||||
}
|
||||
|
||||
|
||||
@@ -346,7 +354,7 @@ make_multi_pointer :: proc($T: typeid/[^]$E, #any_int len: int, allocator := con
|
||||
//
|
||||
// Similar to `new`, the first argument is a type, not a value. Unlike new, make's return type is the same as the
|
||||
// type of its argument, not a pointer to it.
|
||||
// Make uses the specified allocator, default is context.allocator, default is context.allocator
|
||||
// Make uses the specified allocator, default is context.allocator.
|
||||
@builtin
|
||||
make :: proc{
|
||||
make_slice,
|
||||
@@ -404,10 +412,7 @@ delete_key :: proc(m: ^$T/map[$K]$V, key: K) -> (deleted_key: K, deleted_value:
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
|
||||
@builtin
|
||||
append_elem :: proc(array: ^$T/[dynamic]$E, arg: E, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
|
||||
_append_elem :: #force_inline proc(array: ^$T/[dynamic]$E, arg: E, should_zero: bool, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
|
||||
if array == nil {
|
||||
return 0, nil
|
||||
}
|
||||
@@ -418,7 +423,13 @@ append_elem :: proc(array: ^$T/[dynamic]$E, arg: E, loc := #caller_location) ->
|
||||
} else {
|
||||
if cap(array) < len(array)+1 {
|
||||
cap := 2 * cap(array) + max(8, 1)
|
||||
err = reserve(array, cap, loc) // do not 'or_return' here as it could be a partial success
|
||||
|
||||
// do not 'or_return' here as it could be a partial success
|
||||
if should_zero {
|
||||
err = reserve(array, cap, loc)
|
||||
} else {
|
||||
err = non_zero_reserve(array, cap, loc)
|
||||
}
|
||||
}
|
||||
if cap(array)-len(array) > 0 {
|
||||
a := (^Raw_Dynamic_Array)(array)
|
||||
@@ -435,7 +446,16 @@ append_elem :: proc(array: ^$T/[dynamic]$E, arg: E, loc := #caller_location) ->
|
||||
}
|
||||
|
||||
@builtin
|
||||
append_elems :: proc(array: ^$T/[dynamic]$E, args: ..E, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
|
||||
append_elem :: proc(array: ^$T/[dynamic]$E, arg: E, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
|
||||
return _append_elem(array, arg, true, loc=loc)
|
||||
}
|
||||
|
||||
@builtin
|
||||
non_zero_append_elem :: proc(array: ^$T/[dynamic]$E, arg: E, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
|
||||
return _append_elem(array, arg, false, loc=loc)
|
||||
}
|
||||

_append_elems :: #force_inline proc(array: ^$T/[dynamic]$E, should_zero: bool, loc := #caller_location, args: ..E) -> (n: int, err: Allocator_Error) #optional_allocator_error {
	if array == nil {
		return 0, nil
	}
@@ -452,7 +472,13 @@ append_elems :: proc(array: ^$T/[dynamic]$E, args: ..E, loc := #caller_location)
	} else {
		if cap(array) < len(array)+arg_len {
			cap := 2 * cap(array) + max(8, arg_len)
			err = reserve(array, cap, loc) // do not 'or_return' here as it could be a partial success

			// do not 'or_return' here as it could be a partial success
			if should_zero {
				err = reserve(array, cap, loc)
			} else {
				err = non_zero_reserve(array, cap, loc)
			}
		}
		arg_len = min(cap(array)-len(array), arg_len)
		if arg_len > 0 {
@@ -468,11 +494,33 @@ append_elems :: proc(array: ^$T/[dynamic]$E, args: ..E, loc := #caller_location)
	}
}

@builtin
append_elems :: proc(array: ^$T/[dynamic]$E, args: ..E, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
	return _append_elems(array, true, loc, ..args)
}

@builtin
non_zero_append_elems :: proc(array: ^$T/[dynamic]$E, args: ..E, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
	return _append_elems(array, false, loc, ..args)
}
// The append_string built-in procedure appends a string to the end of a [dynamic]u8 like type
_append_elem_string :: proc(array: ^$T/[dynamic]$E/u8, arg: $A/string, should_zero: bool, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
	args := transmute([]E)arg
	if should_zero {
		return append_elems(array, ..args, loc=loc)
	} else {
		return non_zero_append_elems(array, ..args, loc=loc)
	}
}

@builtin
append_elem_string :: proc(array: ^$T/[dynamic]$E/u8, arg: $A/string, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
	args := transmute([]E)arg
	return append_elems(array, ..args, loc=loc)
	return _append_elem_string(array, arg, true, loc)
}
@builtin
non_zero_append_elem_string :: proc(array: ^$T/[dynamic]$E/u8, arg: $A/string, loc := #caller_location) -> (n: int, err: Allocator_Error) #optional_allocator_error {
	return _append_elem_string(array, arg, false, loc)
}
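The string overload simply reinterprets the string's bytes as a []u8 and forwards to the element appenders; a hedged sketch of equivalent call sites:

	package demo

	import "core:fmt"

	main :: proc() {
		buf: [dynamic]u8
		defer delete(buf)

		append(&buf, "Hellope")   // via append_elem_string
		s := ", world"
		bytes := transmute([]u8)s
		append(&buf, ..bytes)     // what _append_elem_string does under the hood
		fmt.println(string(buf[:]))
	}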

@@ -492,6 +540,7 @@ append_string :: proc(array: ^$T/[dynamic]$E/u8, args: ..string, loc := #caller_

// The append built-in procedure appends elements to the end of a dynamic array
@builtin append :: proc{append_elem, append_elems, append_elem_string}
@builtin non_zero_append :: proc{non_zero_append_elem, non_zero_append_elems, non_zero_append_elem_string}
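Both groups grow the array identically; append zeroes freshly reserved backing memory, while non_zero_append leaves it uninitialized, trading safety for speed. A hedged sketch of how the groups dispatch:

	package demo

	main :: proc() {
		nums: [dynamic]f32
		defer delete(nums)
		append(&nums, 1.5)               // resolves to append_elem
		non_zero_append(&nums, 2.5, 3.5) // resolves to non_zero_append_elems

		bytes: [dynamic]u8
		defer delete(bytes)
		append(&bytes, "text")           // resolves to append_elem_string
	}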

@builtin
@@ -587,11 +636,14 @@ assign_at_elem :: proc(array: ^$T/[dynamic]$E, index: int, arg: E, loc := #calle

@builtin
assign_at_elems :: proc(array: ^$T/[dynamic]$E, index: int, args: ..E, loc := #caller_location) -> (ok: bool, err: Allocator_Error) #no_bounds_check #optional_allocator_error {
	if index+len(args) < len(array) {
	new_size := index + len(args)
	if len(args) == 0 {
		ok = true
	} else if new_size < len(array) {
		copy(array[index:], args)
		ok = true
	} else {
		resize(array, index+1+len(args), loc) or_return
		resize(array, new_size, loc) or_return
		copy(array[index:], args)
		ok = true
	}
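Two fixes land here: an early-out for empty args, and the resize target drops from index+1+len(args) to index+len(args). A worked trace of why the old bound over-allocated by one (illustrative numbers, not from the diff):

	// assign_at_elems(&arr, 5, 7, 8, 9) with len(arr) == 6:
	//   old: resize to 5 + 1 + 3 = 9  -> one spurious trailing element
	//   new: resize to 5 + 3     = 8  -> args exactly fill arr[5:8]
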
@@ -633,8 +685,7 @@ clear_dynamic_array :: proc "contextless" (array: ^$T/[dynamic]$E) {
// `reserve_dynamic_array` will try to reserve memory of a passed dynamic array or map to the requested element count (setting the `cap`).
//
// Note: Prefer the procedure group `reserve`.
@builtin
reserve_dynamic_array :: proc(array: ^$T/[dynamic]$E, capacity: int, loc := #caller_location) -> Allocator_Error {
_reserve_dynamic_array :: #force_inline proc(array: ^$T/[dynamic]$E, capacity: int, should_zero: bool, loc := #caller_location) -> Allocator_Error {
	if array == nil {
		return nil
	}
@@ -653,7 +704,12 @@ reserve_dynamic_array :: proc(array: ^$T/[dynamic]$E, capacity: int, loc := #cal
	new_size := capacity * size_of(E)
	allocator := a.allocator

	new_data := mem_resize(a.data, old_size, new_size, align_of(E), allocator, loc) or_return
	new_data: []byte
	if should_zero {
		new_data = mem_resize(a.data, old_size, new_size, align_of(E), allocator, loc) or_return
	} else {
		new_data = non_zero_mem_resize(a.data, old_size, new_size, align_of(E), allocator, loc) or_return
	}
	if new_data == nil && new_size > 0 {
		return .Out_Of_Memory
	}
@@ -663,11 +719,20 @@ reserve_dynamic_array :: proc(array: ^$T/[dynamic]$E, capacity: int, loc := #cal
	return nil
}

@builtin
reserve_dynamic_array :: proc(array: ^$T/[dynamic]$E, capacity: int, loc := #caller_location) -> Allocator_Error {
	return _reserve_dynamic_array(array, capacity, true, loc)
}

@builtin
non_zero_reserve_dynamic_array :: proc(array: ^$T/[dynamic]$E, capacity: int, loc := #caller_location) -> Allocator_Error {
	return _reserve_dynamic_array(array, capacity, false, loc)
}
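A hedged usage sketch of the two reserve flavours (prefer the reserve/non_zero_reserve procedure groups, per the note above):

	package demo

	main :: proc() {
		xs: [dynamic]int
		defer delete(xs)
		reserve(&xs, 1024)          // cap becomes 1024; fresh memory is zeroed
		non_zero_reserve(&xs, 4096) // cap grows again, skipping the zeroing
	}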

// `resize_dynamic_array` will try to resize memory of a passed dynamic array or map to the requested element count (setting the `len`, and possibly `cap`).
//
// Note: Prefer the procedure group `resize`.
@builtin
resize_dynamic_array :: proc(array: ^$T/[dynamic]$E, length: int, loc := #caller_location) -> Allocator_Error {
_resize_dynamic_array :: #force_inline proc(array: ^$T/[dynamic]$E, length: int, should_zero: bool, loc := #caller_location) -> Allocator_Error {
	if array == nil {
		return nil
	}
@@ -687,7 +752,12 @@ resize_dynamic_array :: proc(array: ^$T/[dynamic]$E, length: int, loc := #caller
	new_size := length * size_of(E)
	allocator := a.allocator

	new_data := mem_resize(a.data, old_size, new_size, align_of(E), allocator, loc) or_return
	new_data: []byte
	if should_zero {
		new_data = mem_resize(a.data, old_size, new_size, align_of(E), allocator, loc) or_return
	} else {
		new_data = non_zero_mem_resize(a.data, old_size, new_size, align_of(E), allocator, loc) or_return
	}
	if new_data == nil && new_size > 0 {
		return .Out_Of_Memory
	}
@@ -698,6 +768,16 @@ resize_dynamic_array :: proc(array: ^$T/[dynamic]$E, length: int, loc := #caller
	return nil
}

@builtin
resize_dynamic_array :: proc(array: ^$T/[dynamic]$E, length: int, loc := #caller_location) -> Allocator_Error {
	return _resize_dynamic_array(array, length, true, loc=loc)
}

@builtin
non_zero_resize_dynamic_array :: proc(array: ^$T/[dynamic]$E, length: int, loc := #caller_location) -> Allocator_Error {
	return _resize_dynamic_array(array, length, false, loc=loc)
}
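And the matching sketch for resize, assuming the corresponding resize/non_zero_resize procedure groups are wired up elsewhere in this commit:

	package demo

	import "core:fmt"

	main :: proc() {
		xs: [dynamic]int
		defer delete(xs)
		resize(&xs, 8)           // len becomes 8, new elements zeroed
		fmt.println(xs[7])       // prints 0, guaranteed by the zeroing path
		non_zero_resize(&xs, 16) // len becomes 16; elements 8..15 are uninitialized
	}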

/*
Shrinks the capacity of a dynamic array down to the current length, or the given capacity.

@@ -287,7 +287,7 @@ append_soa_elem :: proc(array: ^$T/#soa[dynamic]$E, arg: E, loc := #caller_locat
	footer := raw_soa_footer(array)

	if size_of(E) > 0 && cap(array)-len(array) > 0 {
		ti := type_info_of(typeid_of(T))
		ti := type_info_of(T)
		ti = type_info_base(ti)
		si := &ti.variant.(Type_Info_Struct)
		field_count: uintptr
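The SOA change drops a redundant wrapper: type_info_of takes a typeid, and a type converts to one implicitly, so both forms below should yield the same ^Type_Info (hedged illustration):

	package demo

	main :: proc() {
		a := type_info_of(int)
		b := type_info_of(typeid_of(int))
		assert(a == b)
	}
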
Some files were not shown because too many files have changed in this diff.