core/crypto: Documentation cleanups

Yawning Angel
2024-01-26 12:42:00 +09:00
parent 00ab3beed9
commit 899fab64d9
9 changed files with 239 additions and 72 deletions

View File

@@ -1,3 +1,10 @@
/*
package blake2b implements the BLAKE2b hash algorithm.
See:
- https://datatracker.ietf.org/doc/html/rfc7693
- https://www.blake2.net
*/
package blake2b
/*
@@ -6,34 +13,44 @@ package blake2b
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the vanilla BLAKE2b hashing algorithm.
*/
import "../_blake2"
// DIGEST_SIZE is the BLAKE2b digest size.
DIGEST_SIZE :: 64
// Context is a BLAKE2b instance.
Context :: _blake2.Blake2b_Context
// init initializes a Context with the default BLAKE2b config.
init :: proc(ctx: ^Context) {
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2B_SIZE
_blake2.init(ctx, &cfg)
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
_blake2.update(ctx, data)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_blake2.final(ctx, hash, finalize_clone)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
_blake2.clone(ctx, other)
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
_blake2.reset(ctx)
}
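
For orientation, a minimal one-shot hashing sketch against the interface above; the import path and helper name are illustrative assumptions, not part of this commit:

import "core:crypto/blake2b"

// blake2b_sum hashes msg with the default (64-byte digest) BLAKE2b
// configuration. Hypothetical helper for illustration.
blake2b_sum :: proc(msg: []byte) -> [blake2b.DIGEST_SIZE]byte {
	ctx: blake2b.Context
	digest: [blake2b.DIGEST_SIZE]byte

	blake2b.init(&ctx)
	blake2b.update(&ctx, msg)
	blake2b.final(&ctx, digest[:]) // final resets ctx.

	return digest
}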

View File

@@ -1,3 +1,10 @@
/*
package blake2s implements the BLAKE2s hash algorithm.
See:
- https://datatracker.ietf.org/doc/html/rfc7693
- https://www.blake2.net/
*/
package blake2s
/*
@@ -6,34 +13,44 @@ package blake2s
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the vanilla BLAKE2s hashing algorithm.
*/
import "../_blake2"
// DIGEST_SIZE is the BLAKE2s digest size.
DIGEST_SIZE :: 32
// Context is a BLAKE2s instance.
Context :: _blake2.Blake2s_Context
// init initializes a Context with the default BLAKE2s config.
init :: proc(ctx: ^Context) {
cfg: _blake2.Blake2_Config
cfg.size = _blake2.BLAKE2S_SIZE
_blake2.init(ctx, &cfg)
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
_blake2.update(ctx, data)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_blake2.final(ctx, hash, finalize_clone)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
_blake2.clone(ctx, other)
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
_blake2.reset(ctx)
}
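
To show what the finalize_clone parameter enables, a hedged sketch of a rolling digest over chunked input; the import path and names are assumptions:

import "core:crypto/blake2s"

// rolling_blake2s emits an intermediate digest after each chunk without
// disturbing the running state, then the digest over the whole stream.
rolling_blake2s :: proc(chunks: [][]byte) -> [blake2s.DIGEST_SIZE]byte {
	ctx: blake2s.Context
	digest: [blake2s.DIGEST_SIZE]byte

	blake2s.init(&ctx)
	for chunk in chunks {
		blake2s.update(&ctx, chunk)
		// finalize_clone = true: finalize a copy, ctx keeps absorbing.
		blake2s.final(&ctx, digest[:], true)
		// ... consume the intermediate digest here ...
	}
	blake2s.final(&ctx, digest[:]) // Final digest; ctx is reset.

	return digest
}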

View File

@@ -329,8 +329,8 @@ clone :: proc(ctx, other: ^Context, allocator := context.allocator) {
}
}
// reset sanitizes the Context and frees resources internal to the
// Context. The Context must be re-initialized to be used again.
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
switch impl in ctx._impl {
case ^blake2b.Context:

View File

@@ -1,3 +1,11 @@
/*
package keccak implements the Keccak hash algorithm family.
During the SHA-3 standardization process, the padding scheme was changed,
thus Keccak and SHA-3 produce different outputs. Most users should use
SHA-3 and/or SHAKE instead; however, the legacy algorithm is provided
for backward compatibility.
*/
package keccak
/*
@@ -6,37 +14,41 @@ package keccak
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the Keccak hashing algorithm. Most users will probably
want SHA-3 and/or SHAKE instead; however, the padding was changed by
NIST during the standardization process, thus the legacy Keccak
algorithm is provided.
*/
import "../../_sha3"
// DIGEST_SIZE_224 is the Keccak-224 digest size.
DIGEST_SIZE_224 :: 28
// DIGEST_SIZE_256 is the Keccak-256 digest size.
DIGEST_SIZE_256 :: 32
// DIGEST_SIZE_384 is the Keccak-384 digest size.
DIGEST_SIZE_384 :: 48
// DIGEST_SIZE_512 is the Keccak-512 digest size.
DIGEST_SIZE_512 :: 64
// Context is a Keccak instance.
Context :: distinct _sha3.Context
// init_224 initializes a Context for Keccak-224.
init_224 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_224
_init(ctx)
}
// init_256 initializes a Context for Keccak-256.
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_256
_init(ctx)
}
// init_384 initializes a Context for Keccak-384.
init_384 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_384
_init(ctx)
}
// init_512 initializes a Context for Keccak-512.
init_512 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_512
_init(ctx)
@@ -48,18 +60,27 @@ _init :: proc(ctx: ^Context) {
_sha3.init(transmute(^_sha3.Context)(ctx))
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(transmute(^_sha3.Context)(ctx), data)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}
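
A short usage sketch of the legacy interface; the import path follows the crypto/legacy/keccak location mentioned above and is otherwise an assumption:

import "core:crypto/legacy/keccak"

// keccak_256_sum computes Keccak-256 with the original (pre-NIST)
// padding; the output differs from SHA3-256 over the same input.
keccak_256_sum :: proc(msg: []byte) -> [keccak.DIGEST_SIZE_256]byte {
	ctx: keccak.Context
	digest: [keccak.DIGEST_SIZE_256]byte

	keccak.init_256(&ctx)
	keccak.update(&ctx, msg)
	keccak.final(&ctx, digest[:])

	return digest
}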

View File

@@ -1,3 +1,13 @@
/*
package md5 implements the MD5 hash algorithm.
WARNING: The MD5 algorithm is known to be insecure and should only be
used for interoperating with legacy applications.
See:
- https://eprint.iacr.org/2005/075
- https://datatracker.ietf.org/doc/html/rfc1321
*/
package md5
/*
@@ -6,16 +16,26 @@ package md5
List of contributors:
zhibog, dotbmp: Initial implementation.
Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
*/
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
// DIGEST_SIZE is the MD5 digest size.
DIGEST_SIZE :: 16
// Context is an MD5 instance.
Context :: struct {
data: [BLOCK_SIZE]byte,
state: [4]u32,
bitlen: u64,
datalen: u32,
is_initialized: bool,
}
// init initializes a Context.
init :: proc(ctx: ^Context) {
ctx.state[0] = 0x67452301
ctx.state[1] = 0xefcdab89
@@ -28,6 +48,7 @@ init :: proc(ctx: ^Context) {
ctx.is_initialized = true
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
@@ -42,6 +63,11 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
@@ -86,10 +112,13 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
}
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
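
Since clone copies other into ctx, one use is forking the state to hash two messages that share a prefix; a hedged sketch, with the import path and helper name as assumptions:

import "core:crypto/md5"

// md5_forked hashes prefix||a and prefix||b, absorbing the shared
// prefix only once. Hypothetical helper for illustration.
md5_forked :: proc(prefix, a, b: []byte) -> (da, db: [md5.DIGEST_SIZE]byte) {
	ctx, ctx2: md5.Context

	md5.init(&ctx)
	md5.update(&ctx, prefix)
	md5.clone(&ctx2, &ctx) // ctx2 now holds a copy of the state.

	md5.update(&ctx, a)
	md5.final(&ctx, da[:])

	md5.update(&ctx2, b)
	md5.final(&ctx2, db[:])
	return
}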
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
@@ -102,17 +131,9 @@ reset :: proc(ctx: ^$T) {
MD5 implementation
*/
@(private)
BLOCK_SIZE :: 64
Context :: struct {
data: [BLOCK_SIZE]byte,
state: [4]u32,
bitlen: u64,
datalen: u32,
is_initialized: bool,
}
/*
@note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
and II respectively, instead of declaring them separately.

View File

@@ -1,3 +1,14 @@
/*
package sha1 implements the SHA1 hash algorithm.
WARNING: The SHA1 algorithm is known to be insecure and should only be
used for interoperating with legacy applications.
See:
- https://eprint.iacr.org/2017/190
- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
- https://datatracker.ietf.org/doc/html/rfc3174
*/
package sha1
/*
@@ -6,16 +17,27 @@ package sha1
List of contributors:
zhibog, dotbmp: Initial implementation.
Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
*/
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
// DIGEST_SIZE is the SHA1 digest size.
DIGEST_SIZE :: 20
// Context is a SHA1 instance.
Context :: struct {
data: [BLOCK_SIZE]byte,
state: [5]u32,
k: [4]u32,
bitlen: u64,
datalen: u32,
is_initialized: bool,
}
// init initializes a Context.
init :: proc(ctx: ^Context) {
ctx.state[0] = 0x67452301
ctx.state[1] = 0xefcdab89
@@ -33,6 +55,7 @@ init :: proc(ctx: ^Context) {
ctx.is_initialized = true
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
@@ -47,6 +70,11 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
@@ -91,10 +119,13 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
}
}
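
Because final resets the Context, reusing one requires re-initialization; a minimal sketch under an assumed import path:

import "core:crypto/sha1"

// sha1_twice hashes two messages back to back with a single Context.
sha1_twice :: proc(a, b: []byte) -> (da, db: [sha1.DIGEST_SIZE]byte) {
	ctx: sha1.Context

	sha1.init(&ctx)
	sha1.update(&ctx, a)
	sha1.final(&ctx, da[:]) // Resets ctx ...

	sha1.init(&ctx) // ... so re-initialize before reuse.
	sha1.update(&ctx, b)
	sha1.final(&ctx, db[:])
	return
}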
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
@@ -107,18 +138,9 @@ reset :: proc(ctx: ^$T) {
SHA1 implementation
*/
@(private)
BLOCK_SIZE :: 64
Context :: struct {
data: [BLOCK_SIZE]byte,
state: [5]u32,
k: [4]u32,
bitlen: u64,
datalen: u32,
is_initialized: bool,
}
@(private)
transform :: proc "contextless" (ctx: ^Context, data: []byte) {
a, b, c, d, e, i, t: u32

View File

@@ -1,3 +1,10 @@
/*
package sha2 implements the SHA2 hash algorithm family.
See:
- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
- https://datatracker.ietf.org/doc/html/rfc3874
*/
package sha2
/*
@@ -6,41 +13,71 @@ package sha2
List of contributors:
zhibog, dotbmp: Initial implementation.
Implementation of the SHA2 hashing algorithm, as defined in <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf>
and in RFC 3874 <https://datatracker.ietf.org/doc/html/rfc3874>
*/
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
// DIGEST_SIZE_224 is the SHA-224 digest size.
DIGEST_SIZE_224 :: 28
// DIGEST_SIZE_256 is the SHA-256 digest size.
DIGEST_SIZE_256 :: 32
// DIGEST_SIZE_384 is the SHA-384 digest size.
DIGEST_SIZE_384 :: 48
// DIGEST_SIZE_512 is the SHA-512 digest size.
DIGEST_SIZE_512 :: 64
// DIGEST_SIZE_512_256 is the SHA-512/256 digest size.
DIGEST_SIZE_512_256 :: 32
// Context_256 is a SHA-224 or SHA-256 instance.
Context_256 :: struct {
block: [SHA256_BLOCK_SIZE]byte,
h: [8]u32,
bitlength: u64,
length: u64,
md_bits: int,
is_initialized: bool,
}
// Context_512 is a SHA-384, SHA-512 or SHA-512/256 instance.
Context_512 :: struct {
block: [SHA512_BLOCK_SIZE]byte,
h: [8]u64,
bitlength: u64,
length: u64,
md_bits: int,
is_initialized: bool,
}
// init_224 initializes a Context_256 for SHA-224.
init_224 :: proc(ctx: ^Context_256) {
ctx.md_bits = 224
_init(ctx)
}
// init_256 initializes a Context_256 for SHA-256.
init_256 :: proc(ctx: ^Context_256) {
ctx.md_bits = 256
_init(ctx)
}
// init_384 initializes a Context_512 for SHA-384.
init_384 :: proc(ctx: ^Context_512) {
ctx.md_bits = 384
_init(ctx)
}
// init_512 initializes a Context_512 for SHA-512.
init_512 :: proc(ctx: ^Context_512) {
ctx.md_bits = 512
_init(ctx)
}
// init_512_256 initializes a Context_512 for SHA-512/256.
init_512_256 :: proc(ctx: ^Context_512) {
ctx.md_bits = 256
_init(ctx)
@@ -114,6 +151,7 @@ _init :: proc(ctx: ^$T) {
ctx.is_initialized = true
}
// update adds more data to the Context.
update :: proc(ctx: ^$T, data: []byte) {
assert(ctx.is_initialized)
@@ -145,6 +183,11 @@ update :: proc(ctx: ^$T, data: []byte) {
}
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
@@ -203,10 +246,13 @@ final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
}
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
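
A hedged sketch of how the init_* procedures map onto the two Context types; the import path is an assumption:

import "core:crypto/sha2"

// sha2_variants: SHA-224/SHA-256 run on Context_256, while SHA-384,
// SHA-512, and SHA-512/256 run on Context_512.
sha2_variants :: proc(msg: []byte) {
	ctx_256: sha2.Context_256
	ctx_512: sha2.Context_512
	d224: [sha2.DIGEST_SIZE_224]byte
	d512_256: [sha2.DIGEST_SIZE_512_256]byte

	sha2.init_224(&ctx_256)
	sha2.update(&ctx_256, msg)
	sha2.final(&ctx_256, d224[:])

	sha2.init_512_256(&ctx_512)
	sha2.update(&ctx_512, msg)
	sha2.final(&ctx_512, d512_256[:])
}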
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
@@ -219,29 +265,11 @@ reset :: proc(ctx: ^$T) {
SHA2 implementation
*/
@(private)
SHA256_BLOCK_SIZE :: 64
@(private)
SHA512_BLOCK_SIZE :: 128
Context_256 :: struct {
block: [SHA256_BLOCK_SIZE]byte,
h: [8]u32,
bitlength: u64,
length: u64,
md_bits: int,
is_initialized: bool,
}
Context_512 :: struct {
block: [SHA512_BLOCK_SIZE]byte,
h: [8]u64,
bitlength: u64,
length: u64,
md_bits: int,
is_initialized: bool,
}
@(private)
sha256_k := [64]u32 {
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,

View File

@@ -1,3 +1,13 @@
/*
package sha3 implements the SHA3 hash algorithm family.
The SHAKE XOF can be found in crypto/shake. While discouraged, if the
pre-standardization Keccak algorithm is required, it can be found in
crypto/legacy/keccak.
See:
- https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.202.pdf
*/
package sha3
/*
@@ -6,36 +16,41 @@ package sha3
List of contributors:
zhibog, dotbmp: Initial implementation.
Interface for the SHA3 hashing algorithm. The SHAKE functionality can
be found in package shake. If you wish to compute a Keccak hash, use
the legacy/keccak package; it will use the original padding.
*/
import "../_sha3"
// DIGEST_SIZE_224 is the SHA3-224 digest size.
DIGEST_SIZE_224 :: 28
// DIGEST_SIZE_256 is the SHA3-256 digest size.
DIGEST_SIZE_256 :: 32
// DIGEST_SIZE_384 is the SHA3-384 digest size.
DIGEST_SIZE_384 :: 48
// DIGEST_SIZE_512 is the SHA3-512 digest size.
DIGEST_SIZE_512 :: 64
// Context is a SHA3 instance.
Context :: distinct _sha3.Context
// init_224 initializes a Context for SHA3-224.
init_224 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_224
_init(ctx)
}
// init_256 initializes a Context for SHA3-256.
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_256
_init(ctx)
}
// init_384 initializes a Context for SHA3-384.
init_384 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_384
_init(ctx)
}
// init_512 initializes a Context for SHA3-512.
init_512 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_512
_init(ctx)
@@ -46,18 +61,27 @@ _init :: proc(ctx: ^Context) {
_sha3.init(transmute(^_sha3.Context)(ctx))
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(transmute(^_sha3.Context)(ctx), data)
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}
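
For orientation, a minimal SHA3-256 sketch; the import path and helper name are assumptions:

import "core:crypto/sha3"

// sha3_256_sum computes the SHA3-256 digest of msg.
sha3_256_sum :: proc(msg: []byte) -> [sha3.DIGEST_SIZE_256]byte {
	ctx: sha3.Context
	digest: [sha3.DIGEST_SIZE_256]byte

	sha3.init_256(&ctx)
	sha3.update(&ctx, msg)
	sha3.final(&ctx, digest[:])

	return digest
}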

View File

@@ -1,3 +1,9 @@
/*
package sm3 implements the SM3 hash algorithm.
See:
- https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
*/
package sm3
/*
@@ -6,16 +12,26 @@ package sm3
List of contributors:
zhibog, dotbmp: Initial implementation.
Implementation of the SM3 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02>
*/
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
// DIGEST_SIZE is the SM3 digest size.
DIGEST_SIZE :: 32
// Context is an SM3 instance.
Context :: struct {
state: [8]u32,
x: [BLOCK_SIZE]byte,
bitlength: u64,
length: u64,
is_initialized: bool,
}
// init initializes a Context.
init :: proc(ctx: ^Context) {
ctx.state[0] = IV[0]
ctx.state[1] = IV[1]
@@ -32,6 +48,7 @@ init :: proc(ctx: ^Context) {
ctx.is_initialized = true
}
// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
@@ -57,6 +74,11 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
// final finalizes the Context, writes the digest to hash, and calls
// reset on the Context.
//
// Iff finalize_clone is set, final will work on a copy of the Context,
// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
@@ -92,10 +114,13 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
}
}
// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
ctx^ = other^
}
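
The SM3 interface follows the same pattern; a minimal sketch under an assumed import path:

import "core:crypto/sm3"

// sm3_sum computes the SM3 digest of msg.
sm3_sum :: proc(msg: []byte) -> [sm3.DIGEST_SIZE]byte {
	ctx: sm3.Context
	digest: [sm3.DIGEST_SIZE]byte

	sm3.init(&ctx)
	sm3.update(&ctx, msg)
	sm3.final(&ctx, digest[:])

	return digest
}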
// reset sanitizes the Context. The Context must be re-initialized to
// be used again.
reset :: proc(ctx: ^Context) {
if !ctx.is_initialized {
return
@@ -108,17 +133,9 @@ reset :: proc(ctx: ^Context) {
SM3 implementation
*/
@(private)
BLOCK_SIZE :: 64
Context :: struct {
state: [8]u32,
x: [BLOCK_SIZE]byte,
bitlength: u64,
length: u64,
is_initialized: bool,
}
@(private)
IV := [8]u32 {
0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600,