ab_proof_of_space/chiapos.rs

//! Chia proof of space reimplementation in Rust

mod constants;
mod table;
#[cfg(all(feature = "alloc", test))]
mod tests;
mod utils;

#[cfg(feature = "alloc")]
use crate::PosProofs;
use crate::chiapos::constants::NUM_TABLES;
#[cfg(feature = "alloc")]
pub use crate::chiapos::table::TablesCache;
#[cfg(feature = "alloc")]
use crate::chiapos::table::types::Position;
use crate::chiapos::table::types::{Metadata, X, Y};
use crate::chiapos::table::{
    COMPUTE_F1_SIMD_FACTOR, compute_f1, compute_fn, has_match, metadata_size_bytes, num_buckets,
};
#[cfg(feature = "alloc")]
use crate::chiapos::table::{PrunedTable, Table};
use crate::chiapos::utils::EvaluatableUsize;
#[cfg(feature = "alloc")]
use ab_core_primitives::pieces::Record;
#[cfg(feature = "alloc")]
use ab_core_primitives::pos::PosProof;
#[cfg(feature = "alloc")]
use ab_core_primitives::sectors::SBucket;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
use core::array;
use core::mem::MaybeUninit;
#[cfg(feature = "alloc")]
use core::mem::offset_of;
#[cfg(any(feature = "full-chiapos", test))]
use sha2::{Digest, Sha256};

mod private {
    pub trait Supported {}
}

/// Proof-of-space proofs
#[derive(Debug)]
#[cfg(feature = "alloc")]
#[repr(C)]
pub struct Proofs<const K: u8>
where
    [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
{
    /// S-buckets at which proofs were found.
    ///
    /// S-buckets are grouped by 8; within each `u8`, bits from right to left (starting at the
    /// LSB) indicate the presence of a proof for the corresponding s-bucket, so the whole array
    /// of bytes can be thought of as one large bit set.
    ///
    /// At most [`Record::NUM_CHUNKS`] proofs will be produced, i.e. at most that many bits will
    /// be set to `1`.
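    ///
    /// For example, whether s-bucket `i` has a proof can be checked with a bit test along these
    /// lines (a sketch, not part of the API):
    ///
    /// ```ignore
    /// let has_proof = proofs.found_proofs[i / 8] & (1u8 << (i % 8)) != 0;
    /// ```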
    pub found_proofs: [u8; Record::NUM_S_BUCKETS / u8::BITS as usize],
    /// [`Record::NUM_CHUNKS`] proofs, corresponding to set bits of `found_proofs`.
    pub proofs: [[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize];
        Record::NUM_CHUNKS],
}

#[cfg(feature = "alloc")]
impl From<Box<Proofs<{ PosProof::K }>>> for Box<PosProofs> {
    fn from(proofs: Box<Proofs<{ PosProof::K }>>) -> Self {
        // Statically ensure types are the same
        const {
            assert!(size_of::<Proofs<{ PosProof::K }>>() == size_of::<PosProofs>());
            assert!(align_of::<Proofs<{ PosProof::K }>>() == align_of::<PosProofs>());
            assert!(
                offset_of!(Proofs<{ PosProof::K }>, found_proofs)
                    == offset_of!(PosProofs, found_proofs)
            );
            assert!(offset_of!(Proofs<{ PosProof::K }>, proofs) == offset_of!(PosProofs, proofs));
        }
        // SAFETY: Both structs have an identical layout with `#[repr(C)]` internals
        unsafe { Box::from_raw(Box::into_raw(proofs).cast()) }
    }
}

#[cfg(feature = "alloc")]
impl<const K: u8> Proofs<K>
where
    [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
{
    /// Get the proof for the specified s-bucket (if it exists).
    ///
    /// Note that this is not the most efficient API possible, so prefer using the `proofs` field
    /// directly if the use case allows.
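    ///
    /// A usage sketch (with `proofs: Proofs<K>` and an `s_bucket: SBucket` in scope):
    ///
    /// ```ignore
    /// if let Some(proof) = proofs.for_s_bucket(s_bucket) {
    ///     // `proof` contains 64 `K`-bit `x` values packed into bytes
    /// }
    /// ```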
    #[inline]
    pub fn for_s_bucket(
        &self,
        s_bucket: SBucket,
    ) -> Option<[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]>
    {
        let proof_index = PosProofs::proof_index_for_s_bucket(self.found_proofs, s_bucket)?;

        Some(self.proofs[proof_index])
    }
}

type Seed = [u8; 32];
#[cfg(any(feature = "full-chiapos", test))]
type Challenge = [u8; 32];
#[cfg(any(feature = "full-chiapos", test))]
type Quality = [u8; 32];

/// Pick position in `table_number` based on challenge bits
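///
/// For example, with `last_5_challenge_bits = 0b00101`, table 2 takes the right branch (bit 0 is
/// set), table 3 the left branch (bit 1 is clear), and so on up to table 6.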
#[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
const fn pick_position(
    [left_position, right_position]: [Position; 2],
    last_5_challenge_bits: u8,
    table_number: u8,
) -> Position {
    if ((last_5_challenge_bits >> (table_number - 2)) & 1) == 0 {
        left_position
    } else {
        right_position
    }
}

/// Collection of Chia tables
#[derive(Debug)]
pub struct Tables<const K: u8>
where
    Self: private::Supported,
    EvaluatableUsize<{ metadata_size_bytes(K, 7) }>: Sized,
    [(); 1 << K]:,
    [(); num_buckets(K)]:,
    [(); num_buckets(K) - 1]:,
{
    #[cfg(feature = "alloc")]
    table_2: PrunedTable<K, 2>,
    #[cfg(feature = "alloc")]
    table_3: PrunedTable<K, 3>,
    #[cfg(feature = "alloc")]
    table_4: PrunedTable<K, 4>,
    #[cfg(feature = "alloc")]
    table_5: PrunedTable<K, 5>,
    #[cfg(feature = "alloc")]
    table_6: PrunedTable<K, 6>,
    #[cfg(feature = "alloc")]
    table_7: Table<K, 7>,
}

impl<const K: u8> Tables<K>
where
    Self: private::Supported,
    EvaluatableUsize<{ metadata_size_bytes(K, 1) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 2) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 3) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 4) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 5) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 6) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 7) }>: Sized,
    EvaluatableUsize<{ usize::from(K) * COMPUTE_F1_SIMD_FACTOR / u8::BITS as usize }>: Sized,
    EvaluatableUsize<
        { 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize },
    >: Sized,
    [(); 1 << K]:,
    [(); num_buckets(K)]:,
    [(); num_buckets(K) - 1]:,
{
    /// Create Chia proof of space tables.
    ///
    /// There is also `Self::create_parallel()` that can achieve higher performance and lower
    /// latency at the cost of lower CPU efficiency and higher memory usage.
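    ///
    /// A usage sketch (with a `Seed` and a [`TablesCache`] in scope):
    ///
    /// ```ignore
    /// let tables = Tables::<20>::create(seed, &cache);
    /// ```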
    #[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
    pub fn create(seed: Seed, cache: &TablesCache) -> Self {
        let table_1 = Table::<K, 1>::create(seed);
        let (table_2, _) = Table::<K, 2>::create(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create(table_5, cache);
        let (table_7, table_6) = Table::<K, 7>::create(table_6, cache);

        Self {
            table_2,
            table_3,
            table_4,
            table_5,
            table_6,
            table_7,
        }
    }

    /// Create proofs.
    ///
    /// This is an optimized combination of `Self::create()` and `Self::find_proof()`.
    ///
    /// There is also `Self::create_proofs_parallel()` that can achieve higher performance and
    /// lower latency at the cost of lower CPU efficiency and higher memory usage.
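    ///
    /// A usage sketch (with a `Seed` and a [`TablesCache`] in scope); for `K = PosProof::K` the
    /// result can be converted into [`PosProofs`] via the provided `From` impl:
    ///
    /// ```ignore
    /// let proofs = Tables::<{ PosProof::K }>::create_proofs(seed, &cache);
    /// let pos_proofs = Box::<PosProofs>::from(proofs);
    /// ```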
    #[cfg(feature = "alloc")]
    pub fn create_proofs(seed: Seed, cache: &TablesCache) -> Box<Proofs<K>>
    where
        [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
    {
        let table_1 = Table::<K, 1>::create(seed);
        let (table_2, _) = Table::<K, 2>::create(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create(table_5, cache);
        let (table_6_proof_targets, table_6) = Table::<K, 7>::create_proof_targets(table_6, cache);

        // TODO: Rewrite this more efficiently
        let mut proofs = Box::<Proofs<K>>::new_uninit();
        {
            let proofs_ptr = proofs.as_mut().as_mut_ptr();
            let found_proofs = unsafe {
                (&raw mut (*proofs_ptr).found_proofs)
                    .as_uninit_mut()
                    .expect("Not null; qed")
            };
            let found_proofs = found_proofs.write([0; _]);
            let proofs = unsafe {
                (&raw mut (*proofs_ptr).proofs)
                    .cast::<[MaybeUninit<_>; Record::NUM_CHUNKS]>()
                    .as_mut_unchecked()
            };

            let mut num_found_proofs = 0_usize;
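            // Each chunk of 8 proof targets corresponds to one byte of `found_proofs`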
            'outer: for (table_6_proof_targets, found_proofs) in table_6_proof_targets
                .as_chunks::<{ u8::BITS as usize }>()
                .0
                .iter()
                .zip(found_proofs)
            {
                // TODO: Find proofs with SIMD
                for (proof_offset, table_6_proof_targets) in
                    table_6_proof_targets.iter().enumerate()
                {
                    // Real right position is never zero
                    if table_6_proof_targets[1] != Position::ZERO {
                        let proof = Self::find_proof_raw_internal(
                            &table_2,
                            &table_3,
                            &table_4,
                            &table_5,
                            &table_6,
                            *table_6_proof_targets,
                        );

                        *found_proofs |= 1 << proof_offset;

                        proofs[num_found_proofs].write(proof);
                        num_found_proofs += 1;

                        if num_found_proofs == Record::NUM_CHUNKS {
                            break 'outer;
                        }
                    }
                }
            }

            // It is statically known that at least `Record::NUM_CHUNKS` proofs exist here, and
            // there is a test that checks this lower bound
            debug_assert_eq!(num_found_proofs, Record::NUM_CHUNKS);
        }

        // SAFETY: Fully initialized above
        unsafe { proofs.assume_init() }
    }

    /// Almost the same as [`Self::create()`], but uses parallelism internally for better
    /// performance and lower latency at the cost of lower CPU efficiency and higher memory usage
    #[cfg(all(feature = "parallel", any(feature = "full-chiapos", test)))]
    pub fn create_parallel(seed: Seed, cache: &TablesCache) -> Self {
        let table_1 = Table::<K, 1>::create_parallel(seed);
        let (table_2, _) = Table::<K, 2>::create_parallel(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create_parallel(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create_parallel(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create_parallel(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create_parallel(table_5, cache);
        let (table_7, table_6) = Table::<K, 7>::create_parallel(table_6, cache);

        Self {
            table_2,
            table_3,
            table_4,
            table_5,
            table_6,
            table_7,
        }
    }

    /// Almost the same as [`Self::create_proofs()`], but uses parallelism internally for better
    /// performance and lower latency at the cost of lower CPU efficiency and higher memory usage
    #[cfg(feature = "parallel")]
    pub fn create_proofs_parallel(seed: Seed, cache: &TablesCache) -> Box<Proofs<K>>
    where
        [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
    {
        let table_1 = Table::<K, 1>::create_parallel(seed);
        let (table_2, _) = Table::<K, 2>::create_parallel(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create_parallel(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create_parallel(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create_parallel(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create_parallel(table_5, cache);
        let (table_6_proof_targets, table_6) =
            Table::<K, 7>::create_proof_targets_parallel(table_6, cache);

        // TODO: Rewrite this more efficiently
        let mut proofs = Box::<Proofs<K>>::new_uninit();
        {
            let proofs_ptr = proofs.as_mut().as_mut_ptr();
            let found_proofs = unsafe {
                (&raw mut (*proofs_ptr).found_proofs)
                    .as_uninit_mut()
                    .expect("Not null; qed")
            };
            let found_proofs = found_proofs.write([0; _]);
            let proofs = unsafe {
                (&raw mut (*proofs_ptr).proofs)
                    .cast::<[MaybeUninit<_>; Record::NUM_CHUNKS]>()
                    .as_mut_unchecked()
            };

            let mut num_found_proofs = 0_usize;
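            // Each chunk of 8 proof targets corresponds to one byte of `found_proofs`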
            'outer: for (table_6_proof_targets, found_proofs) in table_6_proof_targets
                .as_chunks::<{ u8::BITS as usize }>()
                .0
                .iter()
                .zip(found_proofs)
            {
                // TODO: Find proofs with SIMD
                for (proof_offset, table_6_proof_targets) in
                    table_6_proof_targets.iter().enumerate()
                {
                    // Real right position is never zero
                    if table_6_proof_targets[1] != Position::ZERO {
                        let proof = Self::find_proof_raw_internal(
                            &table_2,
                            &table_3,
                            &table_4,
                            &table_5,
                            &table_6,
                            *table_6_proof_targets,
                        );

                        *found_proofs |= 1 << proof_offset;

                        proofs[num_found_proofs].write(proof);
                        num_found_proofs += 1;

                        if num_found_proofs == Record::NUM_CHUNKS {
                            break 'outer;
                        }
                    }
                }
            }

            // It is statically known that at least `Record::NUM_CHUNKS` proofs exist here, and
            // there is a test that checks this lower bound
            debug_assert_eq!(num_found_proofs, Record::NUM_CHUNKS);
        }

        // SAFETY: Fully initialized above
        unsafe { proofs.assume_init() }
    }

    /// Find proof of space quality for a given challenge.
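    ///
    /// For each table-7 entry whose `y` starts with the first `K` challenge bits, this walks back
    /// down to table 2, picking the left or right branch at each table from the last 5 challenge
    /// bits, and hashes the challenge together with the two resulting `x` values.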
    #[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
    pub fn find_quality<'a>(
        &'a self,
        challenge: &'a Challenge,
    ) -> impl Iterator<Item = Quality> + 'a {
        let last_5_challenge_bits = challenge[challenge.len() - 1] & 0b00011111;

        let first_k_challenge_bits = u32::from_be_bytes(
            challenge[..size_of::<u32>()]
                .try_into()
                .expect("Challenge is known to statically have enough bytes; qed"),
        ) >> (u32::BITS as usize - usize::from(K));

        // Iterate just over the elements that match the `first_k_challenge_bits` prefix
        self.table_7.buckets()[Y::bucket_range_from_first_k_bits(first_k_challenge_bits)]
            .iter()
            .flat_map(move |positions| {
                positions
                    .iter()
                    .take_while(|&&(position, _y)| position != Position::SENTINEL)
                    .filter(move |&&(_position, y)| y.first_k_bits() == first_k_challenge_bits)
            })
            .map(move |&(position, _y)| {
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe { self.table_7.position(position) };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_6
                        .position(pick_position(positions, last_5_challenge_bits, 6))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_5
                        .position(pick_position(positions, last_5_challenge_bits, 5))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_4
                        .position(pick_position(positions, last_5_challenge_bits, 4))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_3
                        .position(pick_position(positions, last_5_challenge_bits, 3))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let [left_position, right_position] = unsafe {
                    self.table_2
                        .position(pick_position(positions, last_5_challenge_bits, 2))
                };

                // The table-1 position is the `x` value itself
                let left_x = X::from(u32::from(left_position));
                let right_x = X::from(u32::from(right_position));

                let mut hasher = Sha256::new();
                hasher.update(challenge);
                let left_right_xs = (u64::from(left_x) << (u64::BITS as usize - usize::from(K)))
                    | (u64::from(right_x) << (u64::BITS as usize - usize::from(K * 2)));
                hasher.update(
                    &left_right_xs.to_be_bytes()
                        [..(usize::from(K) * 2).div_ceil(u8::BITS as usize)],
                );
                hasher.finalize().into()
            })
    }

    /// Similar to `Self::find_proof()`, but takes the first `k` challenge bits in the least
    /// significant bits of `u32` as a challenge instead
    #[cfg(feature = "alloc")]
    pub fn find_proof_raw<'a>(
        &'a self,
        first_k_challenge_bits: u32,
    ) -> impl Iterator<
        Item = [u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize],
    > + 'a {
        // Iterate just over the elements that match the `first_k_challenge_bits` prefix
        self.table_7.buckets()[Y::bucket_range_from_first_k_bits(first_k_challenge_bits)]
            .iter()
            .flat_map(move |positions| {
                positions
                    .iter()
                    .take_while(|&&(position, _y)| position != Position::SENTINEL)
                    .filter(move |&&(_position, y)| y.first_k_bits() == first_k_challenge_bits)
            })
            .map(move |&(position, _y)| {
                // SAFETY: Internally generated positions that come from the parent table
                let table_6_proof_targets = unsafe { self.table_7.position(position) };

                Self::find_proof_raw_internal(
                    &self.table_2,
                    &self.table_3,
                    &self.table_4,
                    &self.table_5,
                    &self.table_6,
                    table_6_proof_targets,
                )
            })
    }

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn find_proof_raw_internal(
        table_2: &PrunedTable<K, 2>,
        table_3: &PrunedTable<K, 3>,
        table_4: &PrunedTable<K, 4>,
        table_5: &PrunedTable<K, 5>,
        table_6: &PrunedTable<K, 6>,
        table_6_proof_targets: [Position; 2],
    ) -> [u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize] {
        let mut proof =
            [0u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize];
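
        // The proof is `2^(NUM_TABLES - 1) = 64` `x` values of `K` bits each, packed
        // back-to-back big-endian; e.g. for `K = 20` that is 64 * 20 / 8 = 160 bytes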

        // TODO: Optimize with SIMD
        table_6_proof_targets
            .into_iter()
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_6.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_5.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_4.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_3.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_2.position(position) }
            })
            .map(|position| {
                // The table-1 position is the `x` value itself
                X::from(u32::from(position))
            })
            .enumerate()
            .for_each(|(offset, x)| {
                let x_offset_in_bits = usize::from(K) * offset;
                // Collect bytes where bits of `x` will be written
                let proof_bytes = &mut proof[x_offset_in_bits / u8::BITS as usize..]
                    [..(x_offset_in_bits % u8::BITS as usize + usize::from(K))
                        .div_ceil(u8::BITS as usize)];

                // Bits of `x` already shifted to the correct location as they will appear
                // in `proof`
                let x_shifted = u32::from(x)
                    << (u32::BITS as usize
                        - (usize::from(K) + x_offset_in_bits % u8::BITS as usize));

                // TODO: Store proofs in words, like GPU version does
                // Copy `x` bits into proof
                x_shifted
                    .to_be_bytes()
                    .iter()
                    .zip(proof_bytes)
                    .for_each(|(from, to)| {
                        *to |= from;
                    });
            });

        proof
    }

    /// Find proof of space for a given challenge
    #[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
    pub fn find_proof<'a>(
        &'a self,
        first_challenge_bytes: [u8; 4],
    ) -> impl Iterator<
        Item = [u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize],
    > + 'a {
        let first_k_challenge_bits =
            u32::from_be_bytes(first_challenge_bytes) >> (u32::BITS as usize - usize::from(K));

        self.find_proof_raw(first_k_challenge_bits)
    }

    /// Similar to `Self::verify()`, but takes the first `k` challenge bits in the least significant
    /// bits of `u32` as a challenge instead and doesn't compute quality
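    ///
    /// Returns `true` if the proof's 64 `x` values fold back up through all 7 tables into a `y`
    /// whose first `K` bits equal `first_k_challenge_bits`.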
    pub fn verify_only_raw(
        seed: &Seed,
        first_k_challenge_bits: u32,
        proof_of_space: &[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K)
             / u8::BITS as usize],
    ) -> bool {
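        // Reconstruct the table-1 entries: extract all 64 `x` values from the proof and compute
        // `f1` for each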
        let ys_and_metadata = array::from_fn::<_, 64, _>(|offset| {
            let mut pre_x_bytes = 0u64.to_be_bytes();
            let offset_in_bits = usize::from(K) * offset;
            let bytes_to_copy =
                (offset_in_bits % u8::BITS as usize + usize::from(K)).div_ceil(u8::BITS as usize);
            // Copy full bytes that contain bits of `x`
            pre_x_bytes[..bytes_to_copy].copy_from_slice(
                &proof_of_space[offset_in_bits / u8::BITS as usize..][..bytes_to_copy],
            );
            // Extract `pre_x`, whose low `K` bits are `x` (possibly with extra bits above)
            let pre_x = u64::from_be_bytes(pre_x_bytes)
                >> (u64::BITS as usize - (usize::from(K) + offset_in_bits % u8::BITS as usize));
            // Convert to the desired type and clear extra bits
            let x = X::from(pre_x as u32 & (u32::MAX >> (u32::BITS as usize - usize::from(K))));

            let y = compute_f1::<K>(x, seed);

            (y, Metadata::from(x))
        });

        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<2, 1, 64>(&ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<3, 2, 32>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<4, 3, 16>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<5, 4, 8>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<6, 5, 4>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<7, 6, 2>(ys_and_metadata, &mut next_ys_and_metadata);

        let Some((y, _metadata)) = ys_and_metadata.first() else {
            return false;
        };

        // Check if the first K bits of `y` match
        y.first_k_bits() == first_k_challenge_bits
    }

    /// Verify proof of space for a given seed and challenge
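    ///
    /// A usage sketch; returns the 32-byte quality if the proof is valid:
    ///
    /// ```ignore
    /// if let Some(quality) = Tables::<20>::verify(&seed, &challenge, &proof) {
    ///     // The proof is valid for this challenge
    /// }
    /// ```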
    #[cfg(any(feature = "full-chiapos", test))]
    pub fn verify(
        seed: &Seed,
        challenge: &Challenge,
        proof_of_space: &[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K)
             / u8::BITS as usize],
    ) -> Option<Quality>
    where
        EvaluatableUsize<{ (usize::from(K) * 2).div_ceil(u8::BITS as usize) }>: Sized,
    {
        let first_k_challenge_bits =
            u32::from_be_bytes([challenge[0], challenge[1], challenge[2], challenge[3]])
                >> (u32::BITS as usize - usize::from(K));

        if !Self::verify_only_raw(seed, first_k_challenge_bits, proof_of_space) {
            return None;
        }

        let last_5_challenge_bits = challenge[challenge.len() - 1] & 0b00011111;

        let mut quality_index = 0_usize.to_le_bytes();
        quality_index[0] = last_5_challenge_bits;
        let quality_index = usize::from_le_bytes(quality_index);

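        // `quality_index` selects one of up to 32 adjacent `x` pairs in the proof, each
        // occupying `2 * K` bits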
        // NOTE: this works correctly, but the multiplication below may overflow if
        // `quality_index` ever stops being zero-initialized
        let left_right_xs_bit_offset = quality_index * usize::from(K * 2);
        // Collect `left_x` and `right_x` bits, potentially with extra bits at the beginning
        // and the end
        let left_right_xs_bytes = &proof_of_space[left_right_xs_bit_offset / u8::BITS as usize..]
            [..(left_right_xs_bit_offset % u8::BITS as usize + usize::from(K * 2))
                .div_ceil(u8::BITS as usize)];

        let mut left_right_xs = 0u64.to_be_bytes();
        left_right_xs[..left_right_xs_bytes.len()].copy_from_slice(left_right_xs_bytes);
        // Move `left_x` and `right_x` bits to most significant bits
        let left_right_xs =
            u64::from_be_bytes(left_right_xs) << (left_right_xs_bit_offset % u8::BITS as usize);
        // Clear extra bits
        let left_right_xs_mask = u64::MAX << (u64::BITS as usize - usize::from(K * 2));
        let left_right_xs = left_right_xs & left_right_xs_mask;

        let mut hasher = Sha256::new();
        hasher.update(challenge);
        hasher
            .update(&left_right_xs.to_be_bytes()[..usize::from(K * 2).div_ceil(u8::BITS as usize)]);
        Some(hasher.finalize().into())
    }

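    /// One verification folding step: pairs up adjacent `(y, metadata)` entries from the parent
    /// table, keeps only the pairs that match, and computes the next table's entries into
    /// `next_ys_and_metadata`, returning the initialized prefix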
    fn collect_ys_and_metadata<
        'a,
        const TABLE_NUMBER: u8,
        const PARENT_TABLE_NUMBER: u8,
        const N: usize,
    >(
        ys_and_metadata: &[(Y, Metadata<K, PARENT_TABLE_NUMBER>)],
        next_ys_and_metadata: &'a mut [MaybeUninit<(Y, Metadata<K, TABLE_NUMBER>)>; N],
    ) -> &'a [(Y, Metadata<K, TABLE_NUMBER>)]
    where
        EvaluatableUsize<{ metadata_size_bytes(K, TABLE_NUMBER) }>: Sized,
        EvaluatableUsize<{ metadata_size_bytes(K, PARENT_TABLE_NUMBER) }>: Sized,
    {
        let mut next_offset = 0_usize;
        for &[(left_y, left_metadata), (right_y, right_metadata)] in
            ys_and_metadata.as_chunks::<2>().0
        {
            if !has_match(left_y, right_y) {
                continue;
            }

            next_ys_and_metadata[next_offset].write(compute_fn::<
                K,
                TABLE_NUMBER,
                PARENT_TABLE_NUMBER,
            >(
                left_y, left_metadata, right_metadata
            ));
            next_offset += 1;
        }

        // SAFETY: Initialized `next_offset` elements
        unsafe { next_ys_and_metadata[..next_offset].assume_init_ref() }
    }
}

macro_rules! impl_supported {
    ($($k: expr$(,)? )*) => {
        $(
impl private::Supported for Tables<$k> {}
        )*
    }
}

// Only these `k` values are supported by the current implementation
#[cfg(feature = "full-chiapos")]
impl_supported!(15, 16, 18, 19, 21, 22, 23, 24, 25);
#[cfg(any(feature = "full-chiapos", test))]
impl_supported!(17);
impl_supported!(20);