ab_blake3/
lib.rs

//! Optimized and more exotic APIs around BLAKE3
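//!
//! A minimal usage sketch of the single-block shortcut (the exact signature is an
//! assumption here; see the `single_block` module for the authoritative definition):
//!
//! ```ignore
//! // Assumed call shape: the input must fit in one 64-byte block, and longer inputs
//! // are expected to be rejected rather than hashed.
//! let digest = ab_blake3::single_block_hash(b"hello world");
//! ```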

#![no_std]
#![feature(array_chunks)]

mod const_fn;
mod platform;
mod portable;
mod single_block;
mod single_chunk;

pub use const_fn::{const_derive_key, const_hash, const_keyed_hash};
pub use platform::{le_bytes_from_words_32, words_from_le_bytes_32, words_from_le_bytes_64};
pub use single_block::{
    single_block_derive_key, single_block_hash, single_block_hash_portable_words,
    single_block_keyed_hash,
};
pub use single_chunk::{single_chunk_derive_key, single_chunk_hash, single_chunk_keyed_hash};

/// The number of bytes in a hash, 32.
pub const OUT_LEN: usize = 32;
/// The number of bytes in a key, 32.
pub const KEY_LEN: usize = 32;
/// The number of bytes in a block, 64.
pub const BLOCK_LEN: usize = 64;

/// The number of bytes in a chunk, 1024.
///
/// You don't usually need to think about this number, but it often comes up in benchmarks, because
/// the maximum degree of parallelism used by the implementation equals the number of chunks.
const CHUNK_LEN: usize = 1024;

// While iterating the compression function within a chunk, the CV is
// represented as words, to avoid doing two extra endianness conversions for
// each compression in the portable implementation. But the hash_many interface
// needs to hash both input bytes and parent nodes, so it's better for its
// output CVs to be represented as bytes.
type CVWords = [u32; 8];
type CVBytes = [u8; 32]; // little-endian
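// Conversions between the two CV representations are handled by the little-endian
// helpers re-exported above (`le_bytes_from_words_32`, `words_from_le_bytes_32`).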

type BlockBytes = [u8; BLOCK_LEN];
type BlockWords = [u32; 16];

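// The BLAKE3 IV is the same as SHA-256's: the first 32 bits of the fractional parts of
// the square roots of the first eight prime numbers (2, 3, 5, 7, 11, 13, 17, 19).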
const IV: &CVWords = &[
    0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19,
];

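// The message word schedule: row r lists, for each of the 16 message slots in the
// compression function, which word of the original 16-word block is fed in during
// round r.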
const MSG_SCHEDULE: [[usize; 16]; 7] = [
    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
    [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8],
    [3, 4, 10, 12, 13, 2, 7, 14, 6, 5, 9, 0, 11, 15, 8, 1],
    [10, 7, 12, 9, 14, 3, 13, 15, 4, 0, 11, 2, 5, 8, 1, 6],
    [12, 13, 9, 11, 15, 10, 14, 8, 7, 2, 5, 3, 0, 1, 6, 4],
    [9, 14, 11, 5, 8, 12, 15, 1, 13, 3, 0, 10, 2, 6, 4, 7],
    [11, 15, 5, 0, 1, 9, 8, 6, 14, 10, 2, 12, 3, 4, 7, 13],
];
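
// A compile-time sanity-check sketch of how the schedule is built: each row is the
// previous row composed with BLAKE3's message word permutation, so round r uses the
// permutation applied r times to the identity order. It runs during const evaluation,
// so it works under `no_std` and costs nothing at runtime.
const _: () = {
    // BLAKE3's message word permutation.
    const PERMUTATION: [usize; 16] = [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8];
    let mut r = 0;
    while r + 1 < MSG_SCHEDULE.len() {
        let mut j = 0;
        while j < 16 {
            assert!(MSG_SCHEDULE[r + 1][j] == MSG_SCHEDULE[r][PERMUTATION[j]]);
            j += 1;
        }
        r += 1;
    }
};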

// These are the internal flags that we use to domain separate root/non-root,
// chunk/parent, and chunk beginning/middle/end. These are set in the low bits
// of the block flags word in the compression function, so their values start
// at 1 and go up.
const CHUNK_START: u8 = 1 << 0;
const CHUNK_END: u8 = 1 << 1;
const PARENT: u8 = 1 << 2;
const ROOT: u8 = 1 << 3;
const KEYED_HASH: u8 = 1 << 4;
const DERIVE_KEY_CONTEXT: u8 = 1 << 5;
const DERIVE_KEY_MATERIAL: u8 = 1 << 6;
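
// For example, an input that fits in a single block is compressed once with
// CHUNK_START | CHUNK_END | ROOT all set, and KEYED_HASH or one of the
// DERIVE_KEY_* flags is OR'd in on top depending on the hashing mode.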