use crate::block::BlockNumber;
use crate::hashes::Blake3Hash;
use crate::pieces::{PieceOffset, Record, RecordChunk, RecordProof, RecordRoot};
use crate::pos::{PosProof, PosSeed};
use crate::pot::{PotOutput, SlotNumber};
use crate::sectors::{SectorId, SectorIndex, SectorSlotChallenge};
use crate::segments::{HistorySize, SegmentIndex, SegmentRoot};
use ab_blake3::single_block_keyed_hash;
use ab_io_type::trivial_type::TrivialType;
use ab_merkle_tree::balanced::BalancedMerkleTree;
use blake3::OUT_LEN;
use core::fmt;
use core::simd::Simd;
use derive_more::{Add, AddAssign, Deref, DerefMut, Display, From, Into, Sub, SubAssign};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "scale-codec")]
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

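/// Solution distance: the bidirectional distance between the audited chunk and the global
/// challenge, used to decide whether a solution falls within the current solution range.
///
/// Illustrative sketch (not a doctest): `is_within` compares the distance against half of the
/// solution range, so a distance of 10 fits a range of 20 but not a range of 19.
///
/// ```ignore
/// let distance = SolutionDistance::from_u64(10);
/// assert!(distance.is_within(SolutionRange::new(20)));
/// assert!(!distance.is_within(SolutionRange::new(19)));
/// ```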
#[derive(
    Debug, Display, Default, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionDistance(u64);

impl SolutionDistance {
    /// Maximum solution distance
    pub const MAX: Self = Self(u64::MAX / 2);

    /// Create a new instance from a raw `u64`
    #[inline(always)]
    pub const fn from_u64(n: u64) -> Self {
        Self(n)
    }

    /// Calculate the solution distance of a chunk for the given global challenge and sector slot
    /// challenge
    pub fn calculate(
        global_challenge: &Blake3Hash,
        chunk: &[u8; 32],
        sector_slot_challenge: &SectorSlotChallenge,
    ) -> Self {
        let audit_chunk = single_block_keyed_hash(sector_slot_challenge, chunk)
            .expect("Less than a single block worth of bytes; qed");
        let audit_chunk_as_solution_range: SolutionRange = SolutionRange::from_bytes([
            audit_chunk[0],
            audit_chunk[1],
            audit_chunk[2],
            audit_chunk[3],
            audit_chunk[4],
            audit_chunk[5],
            audit_chunk[6],
            audit_chunk[7],
        ]);
        let global_challenge_as_solution_range: SolutionRange =
            SolutionRange::from_bytes(global_challenge.as_chunks().0[0]);

        global_challenge_as_solution_range.bidirectional_distance(audit_chunk_as_solution_range)
    }

    /// Check whether this solution distance is within the provided solution range
    pub const fn is_within(self, solution_range: SolutionRange) -> bool {
        self.0 <= solution_range.as_u64() / 2
    }
}

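/// Solution range: the size of the target window around the global challenge within which a
/// solution is considered valid. A smaller range means a harder target.
///
/// Illustrative sketch (not a doctest): `from_pieces`/`to_pieces` round-trip a piece count for a
/// given slot probability, and `bidirectional_distance` measures wrapping distance. The values
/// below mirror the compile-time assertions further down in this module.
///
/// ```ignore
/// let slot_probability = (1, 6);
/// let range = SolutionRange::from_pieces(3, slot_probability);
/// assert_eq!(range.to_pieces(slot_probability), 3);
///
/// let a = SolutionRange::new(10);
/// let b = SolutionRange::new(u64::MAX - 5);
/// // The distance wraps around, so it is 16 rather than `u64::MAX - 15`
/// assert_eq!(u64::from(a.bidirectional_distance(b)), 16);
/// ```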
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionRange(u64);

impl SolutionRange {
    /// Size in bytes
    pub const SIZE: usize = size_of::<u64>();
    /// Minimum solution range
    pub const MIN: Self = Self(u64::MIN);
    /// Maximum solution range
    pub const MAX: Self = Self(u64::MAX);

    /// Create a new instance from a raw `u64`
    #[inline(always)]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get the internal representation as a `u64`
    #[inline(always)]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Convert to little-endian bytes
    #[inline(always)]
    pub fn to_bytes(self) -> [u8; 8] {
        self.0.to_le_bytes()
    }

    /// Create from little-endian bytes
    #[inline(always)]
    pub fn from_bytes(bytes: [u8; 8]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Compute the solution range that corresponds to the given number of pieces for the given
    /// slot probability
    #[inline]
    pub const fn from_pieces(pieces: u64, slot_probability: (u64, u64)) -> Self {
        let solution_range = u64::MAX
            // Account for the slot probability
            / slot_probability.1 * slot_probability.0
            // Account for the probability of hitting an occupied s-bucket: a record has
            // `NUM_CHUNKS` chunks spread across `NUM_S_BUCKETS` s-buckets
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Finally, adjust for the target number of pieces
        Self(solution_range / pieces)
    }

    /// Compute the number of pieces that corresponds to this solution range for the given slot
    /// probability; the inverse of [`Self::from_pieces`]
    #[inline]
    pub const fn to_pieces(self, slot_probability: (u64, u64)) -> u64 {
        let pieces = u64::MAX
            // Account for the slot probability
            / slot_probability.1 * slot_probability.0
            // Account for the probability of hitting an occupied s-bucket
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        pieces / self.0
    }

    /// Bidirectional (wrapping) distance between two solution ranges
    #[inline]
    pub const fn bidirectional_distance(self, other: Self) -> SolutionDistance {
        let a = self.0;
        let b = other.0;
        let diff = a.wrapping_sub(b);
        let diff2 = b.wrapping_sub(a);
        SolutionDistance::from_u64(if diff < diff2 { diff } else { diff2 })
    }

    /// Derive the solution range for the next era: the current range is scaled by the ratio of
    /// actual to expected slots per block in the last era, and the change is clamped to at most
    /// 4x in either direction
    #[inline]
    pub fn derive_next(
        self,
        slots_in_last_era: SlotNumber,
        slot_probability: (u64, u64),
        era_duration: BlockNumber,
    ) -> Self {
        let current_solution_range = self.0;
        let next_solution_range = u64::try_from(
            u128::from(current_solution_range)
                .saturating_mul(u128::from(slots_in_last_era))
                .saturating_mul(u128::from(slot_probability.0))
                / u128::from(u64::from(era_duration))
                / u128::from(slot_probability.1),
        );

        Self(next_solution_range.unwrap_or(u64::MAX).clamp(
            current_solution_range / 4,
            current_solution_range.saturating_mul(4),
        ))
    }
}

const _: () = {
    // Statically check that converting a piece count to a solution range and back round-trips
    assert!(SolutionRange::from_pieces(1, (1, 6)).to_pieces((1, 6)) == 1);
    assert!(SolutionRange::from_pieces(3, (1, 6)).to_pieces((1, 6)) == 3);
    assert!(SolutionRange::from_pieces(5, (1, 6)).to_pieces((1, 6)) == 5);
};

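/// Merkle proof for a chunk at a particular s-bucket index within a record.
///
/// The proof is a path of `NUM_HASHES = log2(NUM_S_BUCKETS)` BLAKE3 hashes. As a rough size check
/// (illustrative sketch, not a doctest), with the 2^16 s-buckets asserted in [`Solution::verify()`]
/// and 32-byte hashes this works out to 512 bytes:
///
/// ```ignore
/// assert_eq!(ChunkProof::SIZE, 32 * 16);
/// ```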
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct ChunkProof([[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

impl fmt::Debug for ChunkProof {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[")?;
        for hash in self.0 {
            for byte in hash {
                write!(f, "{byte:02x}")?;
            }
            write!(f, ", ")?;
        }
        write!(f, "]")?;
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofBinary(#[serde(with = "BigArray")] [[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHex([ChunkProofHexHash; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for ChunkProof {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            ChunkProofHex(unsafe {
                // SAFETY: `ChunkProofHexHash` is a single-field newtype around `[u8; OUT_LEN]`,
                // assumed to have the same layout as the inner byte array
                core::mem::transmute::<
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            ChunkProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ChunkProof {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            unsafe {
                // SAFETY: `ChunkProofHexHash` is a single-field newtype around `[u8; OUT_LEN]`,
                // assumed to have the same layout as the inner byte array
                core::mem::transmute::<
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                >(ChunkProofHex::deserialize(deserializer)?.0)
            }
        } else {
            ChunkProofBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for ChunkProof {
    #[inline]
    fn default() -> Self {
        Self([[0; OUT_LEN]; ChunkProof::NUM_HASHES])
    }
}

impl AsRef<[u8]> for ChunkProof {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for ChunkProof {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl ChunkProof {
    /// Size of the chunk proof in bytes
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    const NUM_HASHES: usize = Record::NUM_S_BUCKETS.ilog2() as usize;
}

/// Solution verification error
#[derive(Debug, Eq, PartialEq, thiserror::Error)]
pub enum SolutionVerifyError {
    /// Invalid piece offset
    #[error("Piece offset {piece_offset} is invalid, must be below {max_pieces_in_sector}")]
    InvalidPieceOffset {
        /// Piece offset provided in the solution
        piece_offset: u16,
        /// Max number of pieces in a sector
        max_pieces_in_sector: u16,
    },
    /// History size in the solution is in the future
    #[error("History size {solution} is in the future, current is {current}")]
    FutureHistorySize {
        /// Current history size
        current: HistorySize,
        /// History size in the solution
        solution: HistorySize,
    },
    /// Sector expired
    #[error("Sector expired")]
    SectorExpired {
        /// History size at which the sector expires
        expiration_history_size: HistorySize,
        /// Current history size
        current_history_size: HistorySize,
    },
    /// Piece verification failed
    #[error("Piece verification failed")]
    InvalidPiece,
    /// Solution is outside the solution range
    #[error("Solution distance {solution_distance} is outside of solution range {solution_range}")]
    OutsideSolutionRange {
        /// Solution range
        solution_range: SolutionRange,
        /// Solution distance
        solution_distance: SolutionDistance,
    },
    /// Invalid proof of space
    #[error("Invalid proof of space")]
    InvalidProofOfSpace,
    /// Invalid audit chunk offset
    #[error("Invalid audit chunk offset")]
    InvalidAuditChunkOffset,
    /// Invalid chunk proof
    #[error("Invalid chunk proof")]
    InvalidChunkProof,
    /// Invalid history size
    #[error("Invalid history size")]
    InvalidHistorySize,
}

/// Parameters for checking piece validity
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyPieceCheckParams {
    /// How many pieces one sector is supposed to contain (max)
    pub max_pieces_in_sector: u16,
    /// Segment root of the segment to which the piece belongs
    pub segment_root: SegmentRoot,
    /// Number of latest archived segments that are considered "recent history"
    pub recent_segments: HistorySize,
    /// Fraction of pieces from the "recent history" (`recent_segments`) in each sector
    pub recent_history_fraction: (HistorySize, HistorySize),
    /// Minimum lifetime of a plotted sector, measured in archived segments
    pub min_sector_lifetime: HistorySize,
    /// Current size of the history
    pub current_history_size: HistorySize,
    /// Segment root at `min_sector_lifetime` from sector creation (if it exists)
    pub sector_expiration_check_segment_root: Option<SegmentRoot>,
}

/// Parameters for solution verification
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyParams {
    /// Proof of time for which the solution was built
    pub proof_of_time: PotOutput,
    /// Solution range that the solution distance must fall within
    pub solution_range: SolutionRange,
    /// Parameters for checking piece validity.
    ///
    /// If `None`, piece validity is not checked.
    pub piece_check_params: Option<SolutionVerifyPieceCheckParams>,
}

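/// Verifier for the proof-of-space proof embedded in a [`Solution`], used by
/// [`Solution::verify()`].
///
/// Illustrative sketch (not a doctest) of a stub implementation, e.g. for tests; a real verifier
/// would check the proof against the proof-of-space table derived from `seed`:
///
/// ```ignore
/// struct AcceptEverything;
///
/// impl SolutionPotVerifier for AcceptEverything {
///     fn is_proof_valid(_seed: &PosSeed, _challenge_index: u32, _proof: &PosProof) -> bool {
///         // Unconditionally accept; do not use outside of tests
///         true
///     }
/// }
/// ```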
pub trait SolutionPotVerifier {
    fn is_proof_valid(seed: &PosSeed, challenge_index: u32, proof: &PosProof) -> bool;
}

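/// Farmer's solution for the slot challenge.
///
/// Illustrative verification sketch (not a doctest); `solution`, `slot`, `proof_of_time`,
/// `solution_range` and `MyPotVerifier` are assumed to be provided by the caller, and leaving
/// `piece_check_params` as `None` skips the piece validity checks:
///
/// ```ignore
/// let params = SolutionVerifyParams {
///     proof_of_time,
///     solution_range,
///     piece_check_params: None,
/// };
/// solution.verify::<MyPotVerifier>(slot, &params)?;
/// ```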
#[derive(Clone, Copy, Debug, Eq, PartialEq, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct Solution {
    /// Hash of the public key being farmed
    pub public_key_hash: Blake3Hash,
    /// Record root that can be used to verify that the piece was included in blockchain history
    pub record_root: RecordRoot,
    /// Proof for the above record root
    pub record_proof: RecordProof,
    /// Chunk at the audited s-bucket index
    pub chunk: RecordChunk,
    /// Merkle proof of the above chunk against the record root
    pub chunk_proof: ChunkProof,
    /// Proof of space for the piece offset
    pub proof_of_space: PosProof,
    /// Size of the blockchain history at the time of sector creation
    pub history_size: HistorySize,
    /// Index of the sector where the solution was found
    pub sector_index: SectorIndex,
    /// Piece offset within the sector
    pub piece_offset: PieceOffset,
    /// Padding for alignment
    pub padding: [u8; 4],
}

impl Solution {
    /// Dummy solution for the genesis block
    pub fn genesis_solution() -> Self {
        Self {
            public_key_hash: Blake3Hash::default(),
            record_root: RecordRoot::default(),
            record_proof: RecordProof::default(),
            chunk: RecordChunk::default(),
            chunk_proof: ChunkProof::default(),
            proof_of_space: PosProof::default(),
            history_size: HistorySize::from(SegmentIndex::ZERO),
            sector_index: SectorIndex::ZERO,
            piece_offset: PieceOffset::default(),
            padding: [0; _],
        }
    }

    /// Check solution validity
    pub fn verify<PotVerifier>(
        &self,
        slot: SlotNumber,
        params: &SolutionVerifyParams,
    ) -> Result<(), SolutionVerifyError>
    where
        PotVerifier: SolutionPotVerifier,
    {
        let SolutionVerifyParams {
            proof_of_time,
            solution_range,
            piece_check_params,
        } = params;

        let sector_id = SectorId::new(&self.public_key_hash, self.sector_index, self.history_size);

        let global_challenge = proof_of_time.derive_global_challenge(slot);
        let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
        let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

        // Check that the proof of space is valid for the audited s-bucket
        if !PotVerifier::is_proof_valid(
            &sector_id.derive_evaluation_seed(self.piece_offset),
            s_bucket_audit_index.into(),
            &self.proof_of_space,
        ) {
            return Err(SolutionVerifyError::InvalidProofOfSpace);
        };

        let masked_chunk =
            (Simd::from(*self.chunk) ^ Simd::from(*self.proof_of_space.hash())).to_array();

        let solution_distance =
            SolutionDistance::calculate(&global_challenge, &masked_chunk, &sector_slot_challenge);

        if !solution_distance.is_within(*solution_range) {
            return Err(SolutionVerifyError::OutsideSolutionRange {
                solution_range: *solution_range,
                solution_distance,
            });
        }

        // Check that the chunk belongs to the record
        const _: () = {
            // The Merkle tree size below is a literal because the const generic parameter must
            // be one; make sure it stays in sync with `Record::NUM_S_BUCKETS`
            assert!(Record::NUM_S_BUCKETS == 65536);
        };
        if !BalancedMerkleTree::<65536>::verify(
            &self.record_root,
            &self.chunk_proof,
            usize::from(s_bucket_audit_index),
            *self.chunk,
        ) {
            return Err(SolutionVerifyError::InvalidChunkProof);
        }

        if let Some(SolutionVerifyPieceCheckParams {
            max_pieces_in_sector,
            segment_root,
            recent_segments,
            recent_history_fraction,
            min_sector_lifetime,
            current_history_size,
            sector_expiration_check_segment_root,
        }) = piece_check_params
        {
            if &self.history_size > current_history_size {
                return Err(SolutionVerifyError::FutureHistorySize {
                    current: *current_history_size,
                    solution: self.history_size,
                });
            }

            if u16::from(self.piece_offset) >= *max_pieces_in_sector {
                return Err(SolutionVerifyError::InvalidPieceOffset {
                    piece_offset: u16::from(self.piece_offset),
                    max_pieces_in_sector: *max_pieces_in_sector,
                });
            }

            if let Some(sector_expiration_check_segment_root) = sector_expiration_check_segment_root
            {
                let expiration_history_size = match sector_id.derive_expiration_history_size(
                    self.history_size,
                    sector_expiration_check_segment_root,
                    *min_sector_lifetime,
                ) {
                    Some(expiration_history_size) => expiration_history_size,
                    None => {
                        return Err(SolutionVerifyError::InvalidHistorySize);
                    }
                };

                if expiration_history_size <= *current_history_size {
                    return Err(SolutionVerifyError::SectorExpired {
                        expiration_history_size,
                        current_history_size: *current_history_size,
                    });
                }
            }

            let position = sector_id
                .derive_piece_index(
                    self.piece_offset,
                    self.history_size,
                    *max_pieces_in_sector,
                    *recent_segments,
                    *recent_history_fraction,
                )
                .position();

            // Check that the piece is part of the archived history under the given segment root
            if !self
                .record_root
                .is_valid(segment_root, &self.record_proof, position)
            {
                return Err(SolutionVerifyError::InvalidPiece);
            }
        }

        Ok(())
    }
}