// ab_core_primitives/solutions.rs

1//! Solutions-related data structures and functions.
2
3use crate::block::BlockNumber;
4use crate::hashes::Blake3Hash;
5use crate::pieces::{PieceOffset, Record, RecordChunk, RecordProof, RecordRoot};
6use crate::pos::{PosProof, PosSeed};
7use crate::pot::{PotOutput, SlotNumber};
8use crate::sectors::{SectorId, SectorIndex, SectorSlotChallenge};
9use crate::segments::{HistorySize, SegmentIndex, SegmentRoot};
10use ab_io_type::trivial_type::TrivialType;
11use ab_merkle_tree::balanced::BalancedMerkleTree;
12use blake3::OUT_LEN;
13use core::fmt;
14use core::simd::Simd;
15use derive_more::{Add, AddAssign, Deref, DerefMut, Display, From, Into, Sub, SubAssign};
16#[cfg(feature = "scale-codec")]
17use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
18#[cfg(feature = "scale-codec")]
19use scale_info::TypeInfo;
20#[cfg(feature = "serde")]
21use serde::{Deserialize, Serialize};
22#[cfg(feature = "serde")]
23use serde::{Deserializer, Serializer};
24#[cfg(feature = "serde")]
25use serde_big_array::BigArray;
26
/// Solution distance
///
/// Measures how far an audit chunk is from the global challenge (see
/// [`SolutionDistance::calculate()`]). A solution is eligible when this distance falls within
/// half of the solution range (see [`SolutionDistance::is_within()`]).
#[derive(
    Debug, Display, Default, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionDistance(u64);
38
39impl SolutionDistance {
40    /// Maximum value
41    pub const MAX: Self = Self(u64::MAX / 2);
42
43    // TODO: Remove once `From` is stable
44    /// Create new instance
45    #[inline(always)]
46    pub const fn from_u64(n: u64) -> Self {
47        Self(n)
48    }
49
50    /// Calculate solution distance for given parameters.
51    ///
52    /// Typically used as a primitive to check whether solution distance is within solution range
53    /// (see [`Self::is_within()`]).
54    pub fn calculate(
55        global_challenge: &Blake3Hash,
56        chunk: &[u8; 32],
57        sector_slot_challenge: &SectorSlotChallenge,
58    ) -> Self {
59        // TODO: Is keyed hash really needed here?
60        let audit_chunk = *blake3::keyed_hash(sector_slot_challenge, chunk).as_bytes();
61        let audit_chunk_as_solution_range: SolutionRange = SolutionRange::from_bytes([
62            audit_chunk[0],
63            audit_chunk[1],
64            audit_chunk[2],
65            audit_chunk[3],
66            audit_chunk[4],
67            audit_chunk[5],
68            audit_chunk[6],
69            audit_chunk[7],
70        ]);
71        let global_challenge_as_solution_range: SolutionRange = SolutionRange::from_bytes(
72            *global_challenge
73                .array_chunks::<{ SolutionRange::SIZE }>()
74                .next()
75                .expect("Solution range is smaller in size than global challenge; qed"),
76        );
77
78        global_challenge_as_solution_range.bidirectional_distance(audit_chunk_as_solution_range)
79    }
80
81    /// Check if solution distance is within the provided solution range
82    pub const fn is_within(self, solution_range: SolutionRange) -> bool {
83        self.0 <= solution_range.as_u64() / 2
84    }
85}
86
/// Solution range
///
/// Defines how close (in terms of [`SolutionDistance`]) an audit chunk must be to the global
/// challenge for a solution to be eligible. Derived from the number of pieces plotted on the
/// network and the target slot probability (see [`SolutionRange::from_pieces()`]), and adjusted
/// each era (see [`SolutionRange::derive_next()`]).
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionRange(u64);
114
impl SolutionRange {
    /// Size in bytes
    pub const SIZE: usize = size_of::<u64>();
    /// Minimum value
    pub const MIN: Self = Self(u64::MIN);
    /// Maximum value
    pub const MAX: Self = Self(u64::MAX);

    // TODO: Remove once `From` is stable
    /// Create new instance
    #[inline(always)]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    // TODO: Remove once `From` is stable
    /// Get internal representation
    #[inline(always)]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Convert to little-endian byte representation
    #[inline(always)]
    pub fn to_bytes(self) -> [u8; 8] {
        self.0.to_le_bytes()
    }

    /// Create a new instance from little-endian bytes
    #[inline(always)]
    pub fn from_bytes(bytes: [u8; 8]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Computes the following:
    /// ```text
    /// MAX * slot_probability / chunks * s_buckets / pieces
    /// ```
    ///
    /// I.e. derives the solution range that, for the given number of `pieces` on the network,
    /// targets `slot_probability.0` winning solutions every `slot_probability.1` slots.
    ///
    /// # Panics
    /// Divides by `pieces` and `slot_probability.1`, so both must be non-zero.
    #[inline]
    pub const fn from_pieces(pieces: u64, slot_probability: (u64, u64)) -> Self {
        let solution_range = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Now take the probability of hitting occupied s-bucket in a piece into account
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take the number of pieces into account
        Self(solution_range / pieces)
    }

    /// Computes the following:
    /// ```text
    /// MAX * slot_probability / chunks * s_buckets / solution_range
    /// ```
    ///
    /// Inverse of [`Self::from_pieces()`]: recovers the number of pieces this solution range
    /// corresponds to.
    ///
    /// # Panics
    /// Divides by the inner value, so a zero solution range (e.g. [`Self::MIN`]) panics.
    #[inline]
    pub const fn to_pieces(self, slot_probability: (u64, u64)) -> u64 {
        let pieces = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Now take the probability of hitting occupied s-bucket in sector into account
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take solution range into account
        pieces / self.0
    }

    /// Bidirectional distance between two solution ranges
    ///
    /// Computed modulo 2^64 in both directions, returning the smaller of the two — this is why
    /// [`SolutionDistance::MAX`] is `u64::MAX / 2`.
    #[inline]
    pub const fn bidirectional_distance(self, other: Self) -> SolutionDistance {
        let a = self.0;
        let b = other.0;
        let diff = a.wrapping_sub(b);
        let diff2 = b.wrapping_sub(a);
        // Find smaller diff between 2 directions
        SolutionDistance::from_u64(if diff < diff2 { diff } else { diff2 })
    }

    /// Derives next solution range from the number of slots consumed by the last era
    /// (`slots_in_last_era`), the target `slot_probability` and the era length in blocks
    /// (`era_duration`). The result is clamped to within 1/4x..=4x of the current range.
    ///
    /// # Panics
    /// Divides by `era_duration` and `slot_probability.1`, so both must be non-zero.
    #[inline]
    pub fn derive_next(
        self,
        slots_in_last_era: SlotNumber,
        slot_probability: (u64, u64),
        era_duration: BlockNumber,
    ) -> Self {
        // The idea here is to keep block production at the same pace while space pledged on the
        // network changes. For this, we adjust the previous solution range according to actual and
        // expected number of blocks per era.
        //
        // Below is code analogous to the following, but without using floats:
        // ```rust
        // let actual_slots_per_block = slots_in_last_era as f64 / era_duration as f64;
        // let expected_slots_per_block =
        //     slot_probability.1 as f64 / slot_probability.0 as f64;
        // let adjustment_factor =
        //     (actual_slots_per_block / expected_slots_per_block).clamp(0.25, 4.0);
        //
        // next_solution_range =
        //     (solution_ranges.current as f64 * adjustment_factor).round() as u64;
        // ```
        let current_solution_range = self.0;
        // Intermediate math in u128 so the product cannot overflow before the divisions
        let next_solution_range = u64::try_from(
            u128::from(current_solution_range)
                .saturating_mul(u128::from(slots_in_last_era))
                .saturating_mul(u128::from(slot_probability.0))
                / u128::from(u64::from(era_duration))
                / u128::from(slot_probability.1),
        );

        // Saturate to `u64::MAX` on overflow, then clamp the adjustment to 1/4x..=4x
        Self(next_solution_range.unwrap_or(u64::MAX).clamp(
            current_solution_range / 4,
            current_solution_range.saturating_mul(4),
        ))
    }
}
232
// Compile-time sanity check that `from_pieces` and `to_pieces` are inverses of each other for
// representative piece counts and a 1-in-6 slot probability
const _: () = {
    assert!(SolutionRange::from_pieces(1, (1, 6)).to_pieces((1, 6)) == 1);
    assert!(SolutionRange::from_pieces(3, (1, 6)).to_pieces((1, 6)) == 3);
    assert!(SolutionRange::from_pieces(5, (1, 6)).to_pieces((1, 6)) == 5);
};
239
/// Proof for chunk contained within a record.
///
/// A fixed-size sequence of `NUM_HASHES` BLAKE3 hashes, used as the Merkle path when verifying a
/// chunk against a [`RecordRoot`] (see [`Solution::verify()`]).
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct ChunkProof([[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);
248
249impl fmt::Debug for ChunkProof {
250    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
251        write!(f, "[")?;
252        for hash in self.0 {
253            for byte in hash {
254                write!(f, "{byte:02x}")?;
255            }
256            write!(f, ", ")?;
257        }
258        write!(f, "]")?;
259        Ok(())
260    }
261}
262
/// Serde helper: binary (non-human-readable) representation of [`ChunkProof`], serializing the
/// fixed-size array of hashes via `BigArray`.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofBinary(#[serde(with = "BigArray")] [[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);
267
/// Serde helper: a single hash of a [`ChunkProof`], hex-encoded in human-readable formats.
///
/// `#[repr(transparent)]` guarantees this newtype has the same memory layout as `[u8; OUT_LEN]`.
/// The `transmute` calls in the [`ChunkProof`] serde impls rely on this guarantee; without an
/// explicit repr the layout of a `repr(Rust)` struct is unspecified and the transmutes would be
/// unsound.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
#[repr(transparent)]
struct ChunkProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);
272
/// Serde helper: human-readable representation of [`ChunkProof`] as an array of hex-encoded
/// hashes (see [`ChunkProofHexHash`]).
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHex([ChunkProofHexHash; ChunkProof::NUM_HASHES]);
277
#[cfg(feature = "serde")]
impl Serialize for ChunkProof {
    /// Serializes as a hex array for human-readable formats, or as the raw byte arrays otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // SAFETY: `ChunkProofHexHash` is a newtype over `[u8; OUT_LEN]` and is relied upon to
            // have the same memory layout. NOTE(review): this is only guaranteed with an explicit
            // `#[repr(transparent)]`/`#[repr(C)]` on `ChunkProofHexHash` — confirm the attribute
            // is present on that struct.
            ChunkProofHex(unsafe {
                core::mem::transmute::<
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            ChunkProofBinary(self.0).serialize(serializer)
        }
    }
}
300
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ChunkProof {
    /// Deserializes from a hex array in human-readable formats, or from raw byte arrays otherwise
    /// (mirrors the `Serialize` impl).
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // SAFETY: `ChunkProofHexHash` is a newtype over `[u8; OUT_LEN]` and is relied upon to
            // have the same memory layout. NOTE(review): this is only guaranteed with an explicit
            // `#[repr(transparent)]`/`#[repr(C)]` on `ChunkProofHexHash` — confirm the attribute
            // is present on that struct.
            unsafe {
                core::mem::transmute::<
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                >(ChunkProofHex::deserialize(deserializer)?.0)
            }
        } else {
            ChunkProofBinary::deserialize(deserializer)?.0
        }))
    }
}
322
323impl Default for ChunkProof {
324    #[inline]
325    fn default() -> Self {
326        Self([[0; OUT_LEN]; ChunkProof::NUM_HASHES])
327    }
328}
329
330impl AsRef<[u8]> for ChunkProof {
331    #[inline]
332    fn as_ref(&self) -> &[u8] {
333        self.0.as_flattened()
334    }
335}
336
337impl AsMut<[u8]> for ChunkProof {
338    #[inline]
339    fn as_mut(&mut self) -> &mut [u8] {
340        self.0.as_flattened_mut()
341    }
342}
343
impl ChunkProof {
    /// Size of chunk proof in bytes.
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    /// Number of hashes in a proof: `log2(Record::NUM_S_BUCKETS)`, matching the depth of the
    /// balanced Merkle tree the proof is verified against in [`Solution::verify()`].
    const NUM_HASHES: usize = Record::NUM_S_BUCKETS.ilog2() as usize;
}
349
350/// Solution verification errors
351#[derive(Debug, Eq, PartialEq, thiserror::Error)]
352pub enum SolutionVerifyError {
353    /// Invalid piece offset
354    #[error("Piece verification failed")]
355    InvalidPieceOffset {
356        /// Index of the piece that failed verification
357        piece_offset: u16,
358        /// How many pieces one sector is supposed to contain (max)
359        max_pieces_in_sector: u16,
360    },
361    /// History size is in the future
362    #[error("History size {solution} is in the future, current is {current}")]
363    FutureHistorySize {
364        /// Current history size
365        current: HistorySize,
366        /// History size solution was created for
367        solution: HistorySize,
368    },
369    /// Sector expired
370    #[error("Sector expired")]
371    SectorExpired {
372        /// Expiration history size
373        expiration_history_size: HistorySize,
374        /// Current history size
375        current_history_size: HistorySize,
376    },
377    /// Piece verification failed
378    #[error("Piece verification failed")]
379    InvalidPiece,
380    /// Solution is outside the solution range
381    #[error("Solution distance {solution_distance} is outside of solution range {solution_range}")]
382    OutsideSolutionRange {
383        /// Solution range
384        solution_range: SolutionRange,
385        /// Solution distance
386        solution_distance: SolutionDistance,
387    },
388    /// Invalid proof of space
389    #[error("Invalid proof of space")]
390    InvalidProofOfSpace,
391    /// Invalid audit chunk offset
392    #[error("Invalid audit chunk offset")]
393    InvalidAuditChunkOffset,
394    /// Invalid chunk proof
395    #[error("Invalid chunk proof")]
396    InvalidChunkProof,
397    /// Invalid history size
398    #[error("Invalid history size")]
399    InvalidHistorySize,
400}
401
/// Parameters for checking piece validity
///
/// Consumed by the optional piece-validity stage of [`Solution::verify()`].
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyPieceCheckParams {
    /// How many pieces one sector is supposed to contain (max)
    pub max_pieces_in_sector: u16,
    /// Segment root of the segment to which piece belongs
    pub segment_root: SegmentRoot,
    /// Number of latest archived segments that are considered "recent history"
    pub recent_segments: HistorySize,
    /// Fraction of pieces from the "recent history" (`recent_segments`) in each sector
    pub recent_history_fraction: (HistorySize, HistorySize),
    /// Minimum lifetime of a plotted sector, measured in archived segments
    pub min_sector_lifetime: HistorySize,
    /// Current size of the history
    pub current_history_size: HistorySize,
    /// Segment root at `min_sector_lifetime` from sector creation (if exists); when `Some`, the
    /// sector-expiration check in [`Solution::verify()`] is performed
    pub sector_expiration_check_segment_root: Option<SegmentRoot>,
}
421
/// Parameters for solution verification
///
/// Passed to [`Solution::verify()`].
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyParams {
    /// Proof of time for which solution is built
    pub proof_of_time: PotOutput,
    /// Solution range
    pub solution_range: SolutionRange,
    /// Parameters for checking piece validity.
    ///
    /// If `None`, piece validity check will be skipped.
    pub piece_check_params: Option<SolutionVerifyPieceCheckParams>,
}
435
/// Proof verifier to be used in [`Solution::verify()`]
///
/// NOTE(review): despite the `Pot` (proof-of-time) in the name, the method operates on
/// proof-of-*space* types (`PosSeed`/`PosProof`) and gates
/// `SolutionVerifyError::InvalidProofOfSpace` — confirm the intended naming upstream.
pub trait SolutionPotVerifier {
    /// Check whether proof created earlier is valid
    fn is_proof_valid(seed: &PosSeed, challenge_index: u32, proof: &PosProof) -> bool;
}
441
/// Farmer solution for slot challenge.
#[derive(Clone, Copy, Debug, Eq, PartialEq, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct Solution {
    /// Hash of the public key of the farmer that created the solution
    pub public_key_hash: Blake3Hash,
    /// Record root that can be used to verify that piece was included in blockchain history
    pub record_root: RecordRoot,
    /// Proof for above record root
    pub record_proof: RecordProof,
    /// Chunk of the record at the audited s-bucket, proven against `record_root` by `chunk_proof`
    /// during verification
    pub chunk: RecordChunk,
    /// Proof for the above chunk
    pub chunk_proof: ChunkProof,
    /// Proof of space for piece offset
    pub proof_of_space: PosProof,
    /// Size of the blockchain history at the time of sector creation
    pub history_size: HistorySize,
    /// Index of the sector where the solution was found
    pub sector_index: SectorIndex,
    /// Piece offset within sector
    pub piece_offset: PieceOffset,
    /// Padding for data structure alignment
    pub padding: [u8; 4],
}
473
impl Solution {
    /// Fake solution for the genesis block
    ///
    /// All fields are default/zero placeholders. NOTE(review): presumably never passed through
    /// [`Self::verify()`] — confirm genesis handling at call sites.
    pub fn genesis_solution() -> Self {
        Self {
            public_key_hash: Blake3Hash::default(),
            record_root: RecordRoot::default(),
            record_proof: RecordProof::default(),
            chunk: RecordChunk::default(),
            chunk_proof: ChunkProof::default(),
            proof_of_space: PosProof::default(),
            history_size: HistorySize::from(SegmentIndex::ZERO),
            sector_index: SectorIndex::ZERO,
            piece_offset: PieceOffset::default(),
            padding: [0; _],
        }
    }

    /// Check solution validity
    ///
    /// Verification steps, in order:
    /// 1. the proof of space is valid for the audited s-bucket,
    /// 2. the solution distance of the masked chunk is within `solution_range`,
    /// 3. the chunk belongs to the record (Merkle proof against `record_root`),
    /// 4. optionally (when `piece_check_params` is `Some`): history size is not in the future,
    ///    piece offset fits into the sector, the sector has not expired, and the piece is part of
    ///    blockchain history.
    ///
    /// # Errors
    /// Returns the [`SolutionVerifyError`] corresponding to the first check that fails.
    pub fn verify<PotVerifier>(
        &self,
        slot: SlotNumber,
        params: &SolutionVerifyParams,
    ) -> Result<(), SolutionVerifyError>
    where
        PotVerifier: SolutionPotVerifier,
    {
        let SolutionVerifyParams {
            proof_of_time,
            solution_range,
            piece_check_params,
        } = params;

        // Sector identity is derived from the farmer's public key hash, the sector index and the
        // history size at plotting time
        let sector_id = SectorId::new(&self.public_key_hash, self.sector_index, self.history_size);

        let global_challenge = proof_of_time.derive_global_challenge(slot);
        let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
        let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

        // Check that proof of space is valid
        if !PotVerifier::is_proof_valid(
            &sector_id.derive_evaluation_seed(self.piece_offset),
            s_bucket_audit_index.into(),
            &self.proof_of_space,
        ) {
            return Err(SolutionVerifyError::InvalidProofOfSpace);
        };

        // XOR the chunk with the hash of the proof of space; presumably this mirrors a mask
        // applied at plotting time — confirm against the plotting implementation
        let masked_chunk =
            (Simd::from(*self.chunk) ^ Simd::from(*self.proof_of_space.hash())).to_array();

        let solution_distance =
            SolutionDistance::calculate(&global_challenge, &masked_chunk, &sector_slot_challenge);

        if !solution_distance.is_within(*solution_range) {
            return Err(SolutionVerifyError::OutsideSolutionRange {
                solution_range: *solution_range,
                solution_distance,
            });
        }

        // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866 that
        //  allows the code to compile. Constant 65536 is hardcoded here and below for compilation
        //  to succeed.
        const _: () = {
            assert!(Record::NUM_S_BUCKETS == 65536);
        };
        // Check that chunk belongs to the record: the chunk is the Merkle leaf at the audited
        // s-bucket index, proven against the record root
        if !BalancedMerkleTree::<65536>::verify(
            &self.record_root,
            &self.chunk_proof,
            usize::from(s_bucket_audit_index),
            *self.chunk,
        ) {
            return Err(SolutionVerifyError::InvalidChunkProof);
        }

        if let Some(SolutionVerifyPieceCheckParams {
            max_pieces_in_sector,
            segment_root,
            recent_segments,
            recent_history_fraction,
            min_sector_lifetime,
            current_history_size,
            sector_expiration_check_segment_root,
        }) = piece_check_params
        {
            // A solution cannot be created for history that does not exist yet
            if &self.history_size > current_history_size {
                return Err(SolutionVerifyError::FutureHistorySize {
                    current: *current_history_size,
                    solution: self.history_size,
                });
            }

            // Piece offset must fit into the sector
            if u16::from(self.piece_offset) >= *max_pieces_in_sector {
                return Err(SolutionVerifyError::InvalidPieceOffset {
                    piece_offset: u16::from(self.piece_offset),
                    max_pieces_in_sector: *max_pieces_in_sector,
                });
            }

            // When expiration information is available, reject solutions from expired sectors
            if let Some(sector_expiration_check_segment_root) = sector_expiration_check_segment_root
            {
                let expiration_history_size = match sector_id.derive_expiration_history_size(
                    self.history_size,
                    sector_expiration_check_segment_root,
                    *min_sector_lifetime,
                ) {
                    Some(expiration_history_size) => expiration_history_size,
                    None => {
                        return Err(SolutionVerifyError::InvalidHistorySize);
                    }
                };

                if expiration_history_size <= *current_history_size {
                    return Err(SolutionVerifyError::SectorExpired {
                        expiration_history_size,
                        current_history_size: *current_history_size,
                    });
                }
            }

            // Position of the piece, derived deterministically from the sector id and the
            // plotting parameters
            let position = sector_id
                .derive_piece_index(
                    self.piece_offset,
                    self.history_size,
                    *max_pieces_in_sector,
                    *recent_segments,
                    *recent_history_fraction,
                )
                .position();

            // Check that piece is part of the blockchain history
            if !self
                .record_root
                .is_valid(segment_root, &self.record_proof, position)
            {
                return Err(SolutionVerifyError::InvalidPiece);
            }
        }

        Ok(())
    }
}