ab_proof_of_space/
chiapos.rs

1//! Chia proof of space reimplementation in Rust
2
3mod constants;
4mod table;
5#[cfg(all(feature = "alloc", test))]
6mod tests;
7mod utils;
8
9#[cfg(feature = "alloc")]
10use crate::PosProofs;
11use crate::chiapos::constants::NUM_TABLES;
12#[cfg(feature = "alloc")]
13pub use crate::chiapos::table::TablesCache;
14#[cfg(feature = "alloc")]
15use crate::chiapos::table::types::Position;
16use crate::chiapos::table::types::{Metadata, X, Y};
17use crate::chiapos::table::{
18    COMPUTE_F1_SIMD_FACTOR, compute_f1, compute_fn, has_match, metadata_size_bytes, num_buckets,
19};
20#[cfg(feature = "alloc")]
21use crate::chiapos::table::{PrunedTable, Table};
22use crate::chiapos::utils::EvaluatableUsize;
23#[cfg(feature = "alloc")]
24use ab_core_primitives::pieces::Record;
25#[cfg(feature = "alloc")]
26use ab_core_primitives::pos::PosProof;
27#[cfg(feature = "alloc")]
28use ab_core_primitives::sectors::SBucket;
29#[cfg(feature = "alloc")]
30use alloc::boxed::Box;
31use core::array;
32use core::mem::MaybeUninit;
33#[cfg(feature = "alloc")]
34use core::mem::offset_of;
35#[cfg(any(feature = "full-chiapos", test))]
36use sha2::{Digest, Sha256};
37
mod private {
    /// Sealed-trait marker: it cannot be implemented outside this module, which restricts
    /// `Tables<K>` (bounded by `Self: private::Supported`) to the explicitly allowlisted
    /// `K` values at the bottom of this file.
    pub trait Supported {}
}
41
/// Proof-of-space proofs
///
/// The struct is `#[repr(C)]` so that its layout can be statically compared against
/// [`PosProofs`] and converted with a zero-copy pointer cast (see the `From` impl in this
/// file).
#[derive(Debug)]
#[cfg(feature = "alloc")]
#[repr(C)]
pub struct Proofs<const K: u8>
where
    [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
{
    /// S-buckets at which proofs were found.
    ///
    /// S-buckets are grouped by 8, within each `u8` bits right to left (LSB) indicate the presence
    /// of a proof for corresponding s-bucket, so that the whole array of bytes can be thought as a
    /// large set of bits.
    ///
    /// There will be at most [`Record::NUM_CHUNKS`] proofs produced/bits set to `1`.
    pub found_proofs: [u8; Record::NUM_S_BUCKETS / u8::BITS as usize],
    /// [`Record::NUM_CHUNKS`] proofs, corresponding to set bits of `found_proofs`.
    pub proofs: [[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize];
        Record::NUM_CHUNKS],
}
62
#[cfg(feature = "alloc")]
impl From<Box<Proofs<{ PosProof::K }>>> for Box<PosProofs> {
    // Zero-copy conversion: instead of copying the (large) proofs allocation, the box's
    // pointer is reinterpreted after layout equality is proven at compile time
    fn from(proofs: Box<Proofs<{ PosProof::K }>>) -> Self {
        // Statically ensure types are the same: size, alignment and the offset of every
        // field must all match for the pointer cast below to be sound
        const {
            assert!(size_of::<Proofs<{ PosProof::K }>>() == size_of::<PosProofs>());
            assert!(align_of::<Proofs<{ PosProof::K }>>() == align_of::<PosProofs>());
            assert!(
                offset_of!(Proofs<{ PosProof::K }>, found_proofs)
                    == offset_of!(PosProofs, found_proofs)
            );
            assert!(offset_of!(Proofs<{ PosProof::K }>, proofs) == offset_of!(PosProofs, proofs));
        }
        // SAFETY: Both structs have an identical layout with `#[repr(C)]` internals
        unsafe { Box::from_raw(Box::into_raw(proofs).cast()) }
    }
}
80
#[cfg(feature = "alloc")]
impl<const K: u8> Proofs<K>
where
    [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
{
    /// Look up the proof for the specified s-bucket, if one was found.
    ///
    /// This copies the proof out, so it is not the most efficient API possible; prefer
    /// reading the `proofs` field directly if the use case allows.
    #[inline]
    pub fn for_s_bucket(
        &self,
        s_bucket: SBucket,
    ) -> Option<[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]>
    {
        PosProofs::proof_index_for_s_bucket(self.found_proofs, s_bucket)
            .map(|proof_index| self.proofs[proof_index])
    }
}
101
/// Seed from which all table entries are derived (fed to `compute_f1` / table creation)
type Seed = [u8; 32];
/// Challenge against which qualities/proofs are found and verified
#[cfg(any(feature = "full-chiapos", test))]
type Challenge = [u8; 32];
/// SHA-256 digest of the challenge and the challenge-selected pair of `x`s
#[cfg(any(feature = "full-chiapos", test))]
type Quality = [u8; 32];
107
/// Pick position in `table_number` based on challenge bits
#[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
const fn pick_position(
    [left_position, right_position]: [Position; 2],
    last_5_challenge_bits: u8,
    table_number: u8,
) -> Position {
    // Bit `table_number - 2` of the challenge bits selects the branch: 0 → left, 1 → right
    match (last_5_challenge_bits >> (table_number - 2)) & 1 {
        0 => left_position,
        _ => right_position,
    }
}
121
/// Collection of Chia tables
///
/// Tables 2..=6 are stored in pruned form (retaining positions into the parent table),
/// while table 7 is kept in full so its `y`s can be matched against challenges. Table 1
/// is not stored at all: positions in table 2 correspond to `x` values directly.
#[derive(Debug)]
pub struct Tables<const K: u8>
where
    Self: private::Supported,
    EvaluatableUsize<{ metadata_size_bytes(K, 7) }>: Sized,
    [(); 1 << K]:,
    [(); num_buckets(K)]:,
    [(); num_buckets(K) - 1]:,
{
    #[cfg(feature = "alloc")]
    table_2: PrunedTable<K, 2>,
    #[cfg(feature = "alloc")]
    table_3: PrunedTable<K, 3>,
    #[cfg(feature = "alloc")]
    table_4: PrunedTable<K, 4>,
    #[cfg(feature = "alloc")]
    table_5: PrunedTable<K, 5>,
    #[cfg(feature = "alloc")]
    table_6: PrunedTable<K, 6>,
    #[cfg(feature = "alloc")]
    table_7: Table<K, 7>,
}
145
146impl<const K: u8> Tables<K>
147where
148    Self: private::Supported,
149    EvaluatableUsize<{ metadata_size_bytes(K, 1) }>: Sized,
150    EvaluatableUsize<{ metadata_size_bytes(K, 2) }>: Sized,
151    EvaluatableUsize<{ metadata_size_bytes(K, 3) }>: Sized,
152    EvaluatableUsize<{ metadata_size_bytes(K, 4) }>: Sized,
153    EvaluatableUsize<{ metadata_size_bytes(K, 5) }>: Sized,
154    EvaluatableUsize<{ metadata_size_bytes(K, 6) }>: Sized,
155    EvaluatableUsize<{ metadata_size_bytes(K, 7) }>: Sized,
156    EvaluatableUsize<{ usize::from(K) * COMPUTE_F1_SIMD_FACTOR / u8::BITS as usize }>: Sized,
157    EvaluatableUsize<
158        { 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize },
159    >: Sized,
160    [(); 1 << K]:,
161    [(); num_buckets(K)]:,
162    [(); num_buckets(K) - 1]:,
163{
    /// Create Chia proof of space tables.
    ///
    /// There is also `Self::create_parallel()` that can achieve higher performance and lower
    /// latency at the cost of lower CPU efficiency and higher memory usage.
    #[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
    pub fn create(seed: Seed, cache: &TablesCache) -> Self {
        // Each `create()` call consumes the parent table and returns it back in pruned
        // form alongside the newly built table, hence the rebinding pattern below
        let table_1 = Table::<K, 1>::create(seed);
        // Table 1 is not retained: positions in table 2 map to `x` values directly
        // (see `find_quality()`)
        let (table_2, _) = Table::<K, 2>::create(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create(table_5, cache);
        let (table_7, table_6) = Table::<K, 7>::create(table_6, cache);

        Self {
            table_2,
            table_3,
            table_4,
            table_5,
            table_6,
            table_7,
        }
    }
187
    /// Create proofs.
    ///
    /// This is an optimized combination of `Self::create()` and `Self::find_proof()`.
    ///
    /// There is also `Self::create_proofs_parallel()` that can achieve higher performance and lower
    /// latency at the cost of lower CPU efficiency and higher memory usage.
    #[cfg(feature = "alloc")]
    pub fn create_proofs(seed: Seed, cache: &TablesCache) -> Box<Proofs<K>>
    where
        [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
    {
        // Build tables 1..=6 the same way as `Self::create()`; each step returns the
        // parent table in pruned form alongside the newly created table
        let table_1 = Table::<K, 1>::create(seed);
        let (table_2, _) = Table::<K, 2>::create(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create(table_5, cache);
        // Instead of a full table 7, compute only pairs of table 6 positions
        // ("proof targets") that proofs are derived from
        let (table_6_proof_targets, table_6) = Table::<K, 7>::create_proof_targets(table_6, cache);

        // TODO: Rewrite this more efficiently
        // The output is allocated uninitialized and filled in place to avoid a large
        // temporary on the stack
        let mut proofs = Box::<Proofs<K>>::new_uninit();
        {
            let proofs_ptr = proofs.as_mut().as_mut_ptr();
            // SAFETY: This is the correct way to access uninit reference to the inner field
            let found_proofs = unsafe {
                (&raw mut (*proofs_ptr).found_proofs)
                    .as_uninit_mut()
                    .expect("Not null; qed")
            };
            // Zero-initialize the bitset of found proofs before setting individual bits
            let found_proofs = found_proofs.write([0; _]);
            // SAFETY: This is the correct way to access uninit reference to the inner field
            let proofs = unsafe {
                (&raw mut (*proofs_ptr).proofs)
                    .cast::<[MaybeUninit<_>; Record::NUM_CHUNKS]>()
                    .as_mut_unchecked()
            };

            let mut num_found_proofs = 0_usize;
            // Proof targets are processed in groups of 8, each group corresponding to
            // one byte of the `found_proofs` bitset
            'outer: for (table_6_proof_targets, found_proofs) in table_6_proof_targets
                .as_chunks::<{ u8::BITS as usize }>()
                .0
                .iter()
                .zip(found_proofs)
            {
                // TODO: Find proofs with SIMD
                for (proof_offset, table_6_proof_targets) in
                    table_6_proof_targets.iter().enumerate()
                {
                    // A pair of zero positions marks the absence of a proof target and
                    // is skipped
                    if table_6_proof_targets != &[Position::ZERO; 2] {
                        let proof = Self::find_proof_raw_internal(
                            &table_2,
                            &table_3,
                            &table_4,
                            &table_5,
                            &table_6,
                            *table_6_proof_targets,
                        );

                        // Mark the bit for this s-bucket (LSB-first within each byte)
                        *found_proofs |= 1 << proof_offset;

                        proofs[num_found_proofs].write(proof);
                        num_found_proofs += 1;

                        if num_found_proofs == Record::NUM_CHUNKS {
                            break 'outer;
                        }
                    }
                }
            }

            // It is statically known to be the case, and there is a test that checks the lower
            // bound
            // NOTE(review): `assume_init()` below is only sound if all `Record::NUM_CHUNKS`
            // proof slots were written; that is only checked by this `debug_assert_eq!` —
            // confirm the guarantee holds for every seed in release builds
            debug_assert_eq!(num_found_proofs, Record::NUM_CHUNKS);
        }

        // SAFETY: Fully initialized above
        unsafe { proofs.assume_init() }
    }
266
    /// Almost the same as [`Self::create()`], but uses parallelism internally for better
    /// performance and lower latency at the cost of lower CPU efficiency and higher memory usage
    #[cfg(all(feature = "parallel", any(feature = "full-chiapos", test)))]
    pub fn create_parallel(seed: Seed, cache: &TablesCache) -> Self {
        // Each `create_parallel()` call consumes the parent table and returns it back in
        // pruned form alongside the newly built table, hence the rebinding pattern below
        let table_1 = Table::<K, 1>::create_parallel(seed);
        // Table 1 is not retained: positions in table 2 map to `x` values directly
        let (table_2, _) = Table::<K, 2>::create_parallel(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create_parallel(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create_parallel(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create_parallel(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create_parallel(table_5, cache);
        let (table_7, table_6) = Table::<K, 7>::create_parallel(table_6, cache);

        Self {
            table_2,
            table_3,
            table_4,
            table_5,
            table_6,
            table_7,
        }
    }
288
    /// Almost the same as [`Self::create_proofs()`], but uses parallelism internally for better
    /// performance and lower latency at the cost of lower CPU efficiency and higher memory usage
    #[cfg(feature = "parallel")]
    pub fn create_proofs_parallel(seed: Seed, cache: &TablesCache) -> Box<Proofs<K>>
    where
        [(); 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize]:,
    {
        // Build tables 1..=6 in parallel; each step returns the parent table in pruned
        // form alongside the newly created table
        let table_1 = Table::<K, 1>::create_parallel(seed);
        let (table_2, _) = Table::<K, 2>::create_parallel(table_1, cache);
        let (table_3, table_2) = Table::<K, 3>::create_parallel(table_2, cache);
        let (table_4, table_3) = Table::<K, 4>::create_parallel(table_3, cache);
        let (table_5, table_4) = Table::<K, 5>::create_parallel(table_4, cache);
        let (table_6, table_5) = Table::<K, 6>::create_parallel(table_5, cache);
        // Instead of a full table 7, compute only pairs of table 6 positions
        // ("proof targets") that proofs are derived from
        let (table_6_proof_targets, table_6) =
            Table::<K, 7>::create_proof_targets_parallel(table_6, cache);

        // TODO: Rewrite this more efficiently
        // The output is allocated uninitialized and filled in place to avoid a large
        // temporary on the stack
        let mut proofs = Box::<Proofs<K>>::new_uninit();
        {
            let proofs_ptr = proofs.as_mut().as_mut_ptr();
            // SAFETY: This is the correct way to access uninit reference to the inner field
            let found_proofs = unsafe {
                (&raw mut (*proofs_ptr).found_proofs)
                    .as_uninit_mut()
                    .expect("Not null; qed")
            };
            // Zero-initialize the bitset of found proofs before setting individual bits
            let found_proofs = found_proofs.write([0; _]);
            // SAFETY: This is the correct way to access uninit reference to the inner field
            let proofs = unsafe {
                (&raw mut (*proofs_ptr).proofs)
                    .cast::<[MaybeUninit<_>; Record::NUM_CHUNKS]>()
                    .as_mut_unchecked()
            };

            let mut num_found_proofs = 0_usize;
            // Proof targets are processed in groups of 8, each group corresponding to
            // one byte of the `found_proofs` bitset
            'outer: for (table_6_proof_targets, found_proofs) in table_6_proof_targets
                .as_chunks::<{ u8::BITS as usize }>()
                .0
                .iter()
                .zip(found_proofs)
            {
                // TODO: Find proofs with SIMD
                for (proof_offset, table_6_proof_targets) in
                    table_6_proof_targets.iter().enumerate()
                {
                    // A pair of zero positions marks the absence of a proof target and
                    // is skipped
                    if table_6_proof_targets != &[Position::ZERO; 2] {
                        let proof = Self::find_proof_raw_internal(
                            &table_2,
                            &table_3,
                            &table_4,
                            &table_5,
                            &table_6,
                            *table_6_proof_targets,
                        );

                        // Mark the bit for this s-bucket (LSB-first within each byte)
                        *found_proofs |= 1 << proof_offset;

                        proofs[num_found_proofs].write(proof);
                        num_found_proofs += 1;

                        if num_found_proofs == Record::NUM_CHUNKS {
                            break 'outer;
                        }
                    }
                }
            }

            // It is statically known to be the case, and there is a test that checks the lower
            // bound
            // NOTE(review): `assume_init()` below is only sound if all `Record::NUM_CHUNKS`
            // proof slots were written; that is only checked by this `debug_assert_eq!` —
            // confirm the guarantee holds for every seed in release builds
            debug_assert_eq!(num_found_proofs, Record::NUM_CHUNKS);
        }

        // SAFETY: Fully initialized above
        unsafe { proofs.assume_init() }
    }
364
    /// Find proof of space quality for a given challenge
    #[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
    pub fn find_quality<'a>(
        &'a self,
        challenge: &'a Challenge,
    ) -> impl Iterator<Item = Quality> + 'a {
        // The last 5 bits of the challenge choose which branch (left/right) to follow at
        // each of tables 6..=2 (see `pick_position()`)
        let last_5_challenge_bits = challenge[challenge.len() - 1] & 0b00011111;

        // The first `K` bits of the challenge (big-endian) are matched against the first
        // `K` bits of `y`s in table 7
        let first_k_challenge_bits = u32::from_be_bytes(
            challenge[..size_of::<u32>()]
                .try_into()
                .expect("Challenge is known to statically have enough bytes; qed"),
        ) >> (u32::BITS as usize - usize::from(K));

        // Iterate just over elements that are matching `first_k_challenge_bits` prefix
        self.table_7.buckets()[Y::bucket_range_from_first_k_bits(first_k_challenge_bits)]
            .iter()
            .flat_map(move |positions| {
                positions
                    .iter()
                    // `Position::SENTINEL` terminates the occupied part of a bucket
                    .take_while(|&&(position, _y)| position != Position::SENTINEL)
                    .filter(move |&&(_position, y)| y.first_k_bits() == first_k_challenge_bits)
            })
            .map(move |&(position, _y)| {
                // Follow a single challenge-selected branch from table 7 back to table 2,
                // which ends with one pair of positions corresponding to two `x`s
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe { self.table_7.position(position) };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_6
                        .position(pick_position(positions, last_5_challenge_bits, 6))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_5
                        .position(pick_position(positions, last_5_challenge_bits, 5))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_4
                        .position(pick_position(positions, last_5_challenge_bits, 4))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let positions = unsafe {
                    self.table_3
                        .position(pick_position(positions, last_5_challenge_bits, 3))
                };
                // SAFETY: Internally generated positions that come from the parent table
                let [left_position, right_position] = unsafe {
                    self.table_2
                        .position(pick_position(positions, last_5_challenge_bits, 2))
                };

                // X matches position
                let left_x = X::from(u32::from(left_position));
                let right_x = X::from(u32::from(right_position));

                // Quality is `sha256(challenge || left_x || right_x)` with both `x`s
                // bit-packed (`K` bits each) into the most significant bits of a `u64`
                let mut hasher = Sha256::new();
                hasher.update(challenge);
                let left_right_xs = (u64::from(left_x) << (u64::BITS as usize - usize::from(K)))
                    | (u64::from(right_x) << (u64::BITS as usize - usize::from(K * 2)));
                // Only hash the bytes that actually contain bits of the two `x`s
                hasher.update(
                    &left_right_xs.to_be_bytes()
                        [..(usize::from(K) * 2).div_ceil(u8::BITS as usize)],
                );
                hasher.finalize().into()
            })
    }
432
    /// Similar to `Self::find_proof()`, but takes the first `k` challenge bits in the least
    /// significant bits of `u32` as a challenge instead
    #[cfg(feature = "alloc")]
    pub fn find_proof_raw<'a>(
        &'a self,
        first_k_challenge_bits: u32,
    ) -> impl Iterator<
        Item = [u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize],
    > + 'a {
        // Iterate just over elements that are matching `first_k_challenge_bits` prefix
        self.table_7.buckets()[Y::bucket_range_from_first_k_bits(first_k_challenge_bits)]
            .iter()
            .flat_map(move |positions| {
                positions
                    .iter()
                    // `Position::SENTINEL` terminates the occupied part of a bucket
                    .take_while(|&&(position, _y)| position != Position::SENTINEL)
                    // A bucket may contain other `y` prefixes, keep only exact matches
                    .filter(move |&&(_position, y)| y.first_k_bits() == first_k_challenge_bits)
            })
            .map(move |&(position, _y)| {
                // SAFETY: Internally generated positions that come from the parent table
                let table_6_proof_targets = unsafe { self.table_7.position(position) };

                Self::find_proof_raw_internal(
                    &self.table_2,
                    &self.table_3,
                    &self.table_4,
                    &self.table_5,
                    &self.table_6,
                    table_6_proof_targets,
                )
            })
    }
465
    /// Reconstruct a single proof from a pair of table 6 positions.
    ///
    /// Positions are expanded through tables 6..=2 (doubling at every step), yielding
    /// `2^(NUM_TABLES - 1)` `x` values that are bit-packed back to back (`K` bits each,
    /// big-endian bit order) into the returned proof.
    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn find_proof_raw_internal(
        table_2: &PrunedTable<K, 2>,
        table_3: &PrunedTable<K, 3>,
        table_4: &PrunedTable<K, 4>,
        table_5: &PrunedTable<K, 5>,
        table_6: &PrunedTable<K, 6>,
        table_6_proof_targets: [Position; 2],
    ) -> [u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize] {
        let mut proof =
            [0u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize];

        // TODO: Optimize with SIMD
        table_6_proof_targets
            .into_iter()
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_6.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_5.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_4.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_3.position(position) }
            })
            .flat_map(|position| {
                // SAFETY: Internally generated positions that come from the parent table
                unsafe { table_2.position(position) }
            })
            .map(|position| {
                // X matches position
                X::from(u32::from(position))
            })
            .enumerate()
            .for_each(|(offset, x)| {
                // Bit offset of this `x` within the packed proof
                let x_offset_in_bits = usize::from(K) * offset;
                // Collect bytes where bits of `x` will be written
                let proof_bytes = &mut proof[x_offset_in_bits / u8::BITS as usize..]
                    [..(x_offset_in_bits % u8::BITS as usize + usize::from(K))
                        .div_ceil(u8::BITS as usize)];

                // Bits of `x` already shifted to the correct location as they will appear
                // in `proof`
                let x_shifted = u32::from(x)
                    << (u32::BITS as usize
                        - (usize::from(K) + x_offset_in_bits % u8::BITS as usize));

                // TODO: Store proofs in words, like GPU version does
                // Copy `x` bits into proof
                x_shifted
                    .to_be_bytes()
                    .iter()
                    .zip(proof_bytes)
                    .for_each(|(from, to)| {
                        // OR keeps the bits of neighboring `x`s already written
                        *to |= from;
                    });
            });

        proof
    }
533
534    /// Find proof of space for a given challenge
535    #[cfg(all(feature = "alloc", any(feature = "full-chiapos", test)))]
536    pub fn find_proof<'a>(
537        &'a self,
538        first_challenge_bytes: [u8; 4],
539    ) -> impl Iterator<
540        Item = [u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K) / u8::BITS as usize],
541    > + 'a {
542        let first_k_challenge_bits =
543            u32::from_be_bytes(first_challenge_bytes) >> (u32::BITS as usize - usize::from(K));
544
545        self.find_proof_raw(first_k_challenge_bits)
546    }
547
    /// Similar to `Self::verify()`, but takes the first `k` challenge bits in the least significant
    /// bits of `u32` as a challenge instead and doesn't compute quality
    pub fn verify_only_raw(
        seed: &Seed,
        first_k_challenge_bits: u32,
        proof_of_space: &[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K)
             / u8::BITS as usize],
    ) -> bool {
        // Unpack the 64 (`2^(NUM_TABLES - 1)`) `x` values from the proof; each occupies
        // `K` bits, packed back to back in big-endian bit order, and is mapped to its
        // table 1 `(y, metadata)` pair
        let ys_and_metadata = array::from_fn::<_, 64, _>(|offset| {
            let mut pre_x_bytes = 0u64.to_be_bytes();
            let offset_in_bits = usize::from(K) * offset;
            let bytes_to_copy =
                (offset_in_bits % u8::BITS as usize + usize::from(K)).div_ceil(u8::BITS as usize);
            // Copy full bytes that contain bits of `x`
            pre_x_bytes[..bytes_to_copy].copy_from_slice(
                &proof_of_space[offset_in_bits / u8::BITS as usize..][..bytes_to_copy],
            );
            // Extract `pre_x` whose last `K` bits start with `x`
            let pre_x = u64::from_be_bytes(pre_x_bytes)
                >> (u64::BITS as usize - (usize::from(K) + offset_in_bits % u8::BITS as usize));
            // Convert to the desired type and clear extra bits
            let x = X::from(pre_x as u32 & (u32::MAX >> (u32::BITS as usize - usize::from(K))));

            let y = compute_f1::<K>(x, seed);

            (y, Metadata::from(x))
        });

        // Fold matching pairs upwards through tables 2..=7; each step at most halves the
        // number of entries, reusing a fresh stack buffer per step
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<2, 1, 64>(&ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<3, 2, 32>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<4, 3, 16>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<5, 4, 8>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<6, 5, 4>(ys_and_metadata, &mut next_ys_and_metadata);
        let mut next_ys_and_metadata = [MaybeUninit::uninit(); _];
        let ys_and_metadata =
            Self::collect_ys_and_metadata::<7, 6, 2>(ys_and_metadata, &mut next_ys_and_metadata);

        // A valid proof retains at least one entry after the table 7 step
        let Some((y, _metadata)) = ys_and_metadata.first() else {
            return false;
        };

        // Check if the first K bits of `y` match
        y.first_k_bits() == first_k_challenge_bits
    }
602
603    /// Verify proof of space for a given seed and challenge
604    #[cfg(any(feature = "full-chiapos", test))]
605    pub fn verify(
606        seed: &Seed,
607        challenge: &Challenge,
608        proof_of_space: &[u8; 2_usize.pow(u32::from(NUM_TABLES - 1)) * usize::from(K)
609             / u8::BITS as usize],
610    ) -> Option<Quality>
611    where
612        EvaluatableUsize<{ (usize::from(K) * 2).div_ceil(u8::BITS as usize) }>: Sized,
613    {
614        let first_k_challenge_bits =
615            u32::from_be_bytes([challenge[0], challenge[1], challenge[2], challenge[3]])
616                >> (u32::BITS as usize - usize::from(K));
617
618        if !Self::verify_only_raw(seed, first_k_challenge_bits, proof_of_space) {
619            return None;
620        }
621
622        let last_5_challenge_bits = challenge[challenge.len() - 1] & 0b00011111;
623
624        let mut quality_index = 0_usize.to_be_bytes();
625        quality_index[0] = last_5_challenge_bits;
626        let quality_index = usize::from_be_bytes(quality_index);
627
628        // NOTE: this works correctly but may overflow if `quality_index` is changed to
629        // not be zero-initialized anymore
630        let left_right_xs_bit_offset = quality_index * usize::from(K * 2);
631        // Collect `left_x` and `right_x` bits, potentially with extra bits at the beginning
632        // and the end
633        let left_right_xs_bytes = &proof_of_space[left_right_xs_bit_offset / u8::BITS as usize..]
634            [..(left_right_xs_bit_offset % u8::BITS as usize + usize::from(K * 2))
635                .div_ceil(u8::BITS as usize)];
636
637        let mut left_right_xs = 0u64.to_be_bytes();
638        left_right_xs[..left_right_xs_bytes.len()].copy_from_slice(left_right_xs_bytes);
639        // Move `left_x` and `right_x` bits to most significant bits
640        let left_right_xs =
641            u64::from_be_bytes(left_right_xs) << (left_right_xs_bit_offset % u8::BITS as usize);
642        // Clear extra bits
643        let left_right_xs_mask = u64::MAX << (u64::BITS as usize - usize::from(K * 2));
644        let left_right_xs = left_right_xs & left_right_xs_mask;
645
646        let mut hasher = Sha256::new();
647        hasher.update(challenge);
648        hasher
649            .update(&left_right_xs.to_be_bytes()[..usize::from(K * 2).div_ceil(u8::BITS as usize)]);
650        Some(hasher.finalize().into())
651    }
652
    /// A single backwards verification step used by `Self::verify_only_raw()`.
    ///
    /// Pairs up adjacent `(y, metadata)` entries of the parent table, keeps only the
    /// matching pairs and computes the next table's `(y, metadata)` for each of them.
    /// Returns the initialized prefix of `next_ys_and_metadata` (at most `N` entries).
    fn collect_ys_and_metadata<
        'a,
        const TABLE_NUMBER: u8,
        const PARENT_TABLE_NUMBER: u8,
        const N: usize,
    >(
        ys_and_metadata: &[(Y, Metadata<K, PARENT_TABLE_NUMBER>)],
        next_ys_and_metadata: &'a mut [MaybeUninit<(Y, Metadata<K, TABLE_NUMBER>)>; N],
    ) -> &'a [(Y, Metadata<K, TABLE_NUMBER>)]
    where
        EvaluatableUsize<{ metadata_size_bytes(K, TABLE_NUMBER) }>: Sized,
        EvaluatableUsize<{ metadata_size_bytes(K, PARENT_TABLE_NUMBER) }>: Sized,
    {
        let mut next_offset = 0_usize;
        // Entries are consumed in fixed adjacent pairs: (0, 1), (2, 3), ...
        for &[(left_y, left_metadata), (right_y, right_metadata)] in
            ys_and_metadata.as_chunks::<2>().0
        {
            // Non-matching pairs are dropped and produce no entry in the next table
            if !has_match(left_y, right_y) {
                continue;
            }

            next_ys_and_metadata[next_offset].write(compute_fn::<
                K,
                TABLE_NUMBER,
                PARENT_TABLE_NUMBER,
            >(
                left_y, left_metadata, right_metadata
            ));
            next_offset += 1;
        }

        // SAFETY: Initialized `next_offset` elements
        unsafe { next_ys_and_metadata[..next_offset].assume_init_ref() }
    }
687}
688
// Accepts a comma-separated list of `K` values (optional trailing comma) and emits a
// sealed-trait impl for each, allowlisting `Tables<K>`.
//
// The previous matcher `$($k: expr$(,)? )*` placed the optional comma inside the
// repetition with no separator, allowing an `expr` fragment to be immediately followed
// by another `expr` fragment — which violates the follow-set rules for `expr` fragments
// in macros-by-example. The conventional `),* $(,)?` form expresses the same lists
// unambiguously.
macro_rules! impl_supported {
    ($($k:expr),* $(,)?) => {
        $(
            impl private::Supported for Tables<$k> {}
        )*
    };
}
696
// Only these k values are supported by the current implementation
#[cfg(feature = "full-chiapos")]
impl_supported!(15, 16, 18, 19, 21, 22, 23, 24, 25);
// K=17 is additionally enabled for tests
#[cfg(any(feature = "full-chiapos", test))]
impl_supported!(17);
// K=20 is supported unconditionally
impl_supported!(20);