use crate::block::BlockNumber;
use crate::hashes::Blake3Hash;
use crate::pieces::{PieceOffset, Record, RecordChunk, RecordProof, RecordRoot};
use crate::pos::{PosProof, PosSeed};
use crate::pot::{PotOutput, SlotNumber};
use crate::sectors::{SectorId, SectorIndex, SectorSlotChallenge};
use crate::segments::{HistorySize, SegmentIndex, SegmentRoot};
use ab_io_type::trivial_type::TrivialType;
use ab_merkle_tree::balanced::BalancedMerkleTree;
use blake3::OUT_LEN;
use core::fmt;
use core::simd::Simd;
use derive_more::{Add, AddAssign, Deref, DerefMut, Display, From, Into, Sub, SubAssign};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "scale-codec")]
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Bidirectional distance between an audit chunk and the global challenge.
#[derive(
    Debug, Display, Default, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionDistance(u64);

impl SolutionDistance {
    /// Maximum solution distance.
    pub const MAX: Self = Self(u64::MAX / 2);

    /// Create a new instance from a raw `u64`.
    #[inline(always)]
    pub const fn from_u64(n: u64) -> Self {
        Self(n)
    }

    /// Calculate the solution distance between the global challenge and the audit chunk derived
    /// from `chunk` and the sector slot challenge.
    pub fn calculate(
        global_challenge: &Blake3Hash,
        chunk: &[u8; 32],
        sector_slot_challenge: &SectorSlotChallenge,
    ) -> Self {
        let audit_chunk = *blake3::keyed_hash(sector_slot_challenge, chunk).as_bytes();
        let audit_chunk_as_solution_range: SolutionRange = SolutionRange::from_bytes([
            audit_chunk[0],
            audit_chunk[1],
            audit_chunk[2],
            audit_chunk[3],
            audit_chunk[4],
            audit_chunk[5],
            audit_chunk[6],
            audit_chunk[7],
        ]);
        let global_challenge_as_solution_range: SolutionRange = SolutionRange::from_bytes(
            *global_challenge
                .array_chunks::<{ SolutionRange::SIZE }>()
                .next()
                .expect("Solution range is smaller in size than global challenge; qed"),
        );

        global_challenge_as_solution_range.bidirectional_distance(audit_chunk_as_solution_range)
    }

    /// Check whether this solution distance is within the given solution range.
    pub const fn is_within(self, solution_range: SolutionRange) -> bool {
        self.0 <= solution_range.as_u64() / 2
    }
}
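
// Editor's sketch (not part of the original file): a tiny illustrative unit test of the
// relationship above, namely that `is_within` accepts distances up to half of the solution
// range. Assumes unit tests can live inline in this crate.
#[cfg(test)]
mod solution_distance_example {
    use super::*;

    #[test]
    fn is_within_accepts_up_to_half_of_the_range() {
        // 10 / 2 == 5, so a distance of 5 is still inside the range, while 6 is not
        assert!(SolutionDistance::from_u64(5).is_within(SolutionRange::new(10)));
        assert!(!SolutionDistance::from_u64(6).is_within(SolutionRange::new(10)));
    }
}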

/// Solution range.
///
/// A solution is only eligible when its [`SolutionDistance`] is within half of the current
/// solution range (see [`SolutionDistance::is_within`]).
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionRange(u64);

impl SolutionRange {
    /// Size of the solution range in bytes.
    pub const SIZE: usize = size_of::<u64>();
    /// Minimum solution range.
    pub const MIN: Self = Self(u64::MIN);
    /// Maximum solution range.
    pub const MAX: Self = Self(u64::MAX);

    /// Create a new instance from a raw `u64`.
    #[inline(always)]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get the internal representation as `u64`.
    #[inline(always)]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Convert to little-endian bytes.
    #[inline(always)]
    pub fn to_bytes(self) -> [u8; 8] {
        self.0.to_le_bytes()
    }

    /// Create a new instance from little-endian bytes.
    #[inline(always)]
    pub fn from_bytes(bytes: [u8; 8]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Derive the solution range from the number of pieces plotted on the network and the slot
    /// probability.
    #[inline]
    pub const fn from_pieces(pieces: u64, slot_probability: (u64, u64)) -> Self {
        let solution_range = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Take the probability of hitting an occupied s-bucket in a piece into account
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take the number of pieces into account
        Self(solution_range / pieces)
    }

    /// Inverse of [`Self::from_pieces`]: estimate the number of pieces corresponding to this
    /// solution range for the given slot probability.
    #[inline]
    pub const fn to_pieces(self, slot_probability: (u64, u64)) -> u64 {
        let pieces = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Take the probability of hitting an occupied s-bucket in a piece into account
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take the solution range into account
        pieces / self.0
    }

    /// Bidirectional (wrapping) distance between two solution ranges.
    #[inline]
    pub const fn bidirectional_distance(self, other: Self) -> SolutionDistance {
        let a = self.0;
        let b = other.0;
        // Compute the distance in both directions around the `u64` ring and take the smaller one
        let diff = a.wrapping_sub(b);
        let diff2 = b.wrapping_sub(a);
        SolutionDistance::from_u64(if diff < diff2 { diff } else { diff2 })
    }

    /// Derive the solution range for the next era.
    ///
    /// The current solution range is scaled by the ratio of the number of slots the last era
    /// actually took (`slots_in_last_era`) to the number of slots it was expected to take
    /// (`era_duration * slot_probability.1 / slot_probability.0`), clamped to at most a 4x change
    /// in either direction.
    #[inline]
    pub fn derive_next(
        self,
        slots_in_last_era: SlotNumber,
        slot_probability: (u64, u64),
        era_duration: BlockNumber,
    ) -> Self {
        let current_solution_range = self.0;
        let next_solution_range = u64::try_from(
            u128::from(current_solution_range)
                .saturating_mul(u128::from(slots_in_last_era))
                .saturating_mul(u128::from(slot_probability.0))
                / u128::from(u64::from(era_duration))
                / u128::from(slot_probability.1),
        );

        Self(next_solution_range.unwrap_or(u64::MAX).clamp(
            current_solution_range / 4,
            current_solution_range.saturating_mul(4),
        ))
    }
}

// Basic sanity check that conversion between pieces and solution range is reversible
const _: () = {
    assert!(SolutionRange::from_pieces(1, (1, 6)).to_pieces((1, 6)) == 1);
    assert!(SolutionRange::from_pieces(3, (1, 6)).to_pieces((1, 6)) == 3);
    assert!(SolutionRange::from_pieces(5, (1, 6)).to_pieces((1, 6)) == 5);
};
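
// Editor's sketch (not part of the original file): small illustrative unit tests of the wrapping
// distance and byte round-trip helpers above. Assumes unit tests can live inline in this crate.
#[cfg(test)]
mod solution_range_example {
    use super::*;

    #[test]
    fn bidirectional_distance_wraps_around() {
        // Going "backwards" across the u64 boundary is only one step away
        assert_eq!(
            SolutionRange::new(0).bidirectional_distance(SolutionRange::new(u64::MAX)),
            SolutionDistance::from_u64(1)
        );
        // The distance is symmetric
        assert_eq!(
            SolutionRange::new(10).bidirectional_distance(SolutionRange::new(7)),
            SolutionRange::new(7).bidirectional_distance(SolutionRange::new(10))
        );
    }

    #[test]
    fn bytes_round_trip() {
        let range = SolutionRange::new(42);
        assert_eq!(SolutionRange::from_bytes(range.to_bytes()), range);
    }
}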

/// Merkle proof for a chunk at a particular s-bucket position within a record.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct ChunkProof([[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

impl fmt::Debug for ChunkProof {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[")?;
        for hash in self.0 {
            for byte in hash {
                write!(f, "{byte:02x}")?;
            }
            write!(f, ", ")?;
        }
        write!(f, "]")?;
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofBinary(#[serde(with = "BigArray")] [[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHex([ChunkProofHexHash; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for ChunkProof {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // SAFETY: `ChunkProofHexHash` is a newtype wrapper around `[u8; OUT_LEN]` with the
            // same size and layout
            ChunkProofHex(unsafe {
                core::mem::transmute::<
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            ChunkProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ChunkProof {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // SAFETY: `ChunkProofHexHash` is a newtype wrapper around `[u8; OUT_LEN]` with the
            // same size and layout
            unsafe {
                core::mem::transmute::<
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                >(ChunkProofHex::deserialize(deserializer)?.0)
            }
        } else {
            ChunkProofBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for ChunkProof {
    #[inline]
    fn default() -> Self {
        Self([[0; OUT_LEN]; ChunkProof::NUM_HASHES])
    }
}

impl AsRef<[u8]> for ChunkProof {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for ChunkProof {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl ChunkProof {
    /// Size of the chunk proof in bytes.
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    const NUM_HASHES: usize = Record::NUM_S_BUCKETS.ilog2() as usize;
}
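
// Editor's sketch (not part of the original file): the flattened byte view exposed through
// `AsRef<[u8]>` should always match the declared `ChunkProof::SIZE`. Assumes unit tests can
// live inline in this crate.
#[cfg(test)]
mod chunk_proof_example {
    use super::*;

    #[test]
    fn default_proof_matches_declared_size() {
        assert_eq!(ChunkProof::default().as_ref().len(), ChunkProof::SIZE);
    }
}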

/// Errors encountered during solution verification.
#[derive(Debug, Eq, PartialEq, thiserror::Error)]
pub enum SolutionVerifyError {
    /// Piece offset is outside of what the sector can contain
    #[error("Invalid piece offset {piece_offset}, max pieces in sector is {max_pieces_in_sector}")]
    InvalidPieceOffset {
        /// Piece offset from the solution
        piece_offset: u16,
        /// Max number of pieces in a sector
        max_pieces_in_sector: u16,
    },
    /// History size from the solution is ahead of the current history size
    #[error("History size {solution} is in the future, current is {current}")]
    FutureHistorySize {
        /// Current history size
        current: HistorySize,
        /// History size from the solution
        solution: HistorySize,
    },
    /// Sector expired
    #[error("Sector expired")]
    SectorExpired {
        /// History size at which the sector expires
        expiration_history_size: HistorySize,
        /// Current history size
        current_history_size: HistorySize,
    },
    /// Piece verification failed
    #[error("Piece verification failed")]
    InvalidPiece,
    /// Solution distance is outside of the solution range
    #[error("Solution distance {solution_distance} is outside of solution range {solution_range}")]
    OutsideSolutionRange {
        /// Current solution range
        solution_range: SolutionRange,
        /// Solution distance from the solution
        solution_distance: SolutionDistance,
    },
    /// Invalid proof of space
    #[error("Invalid proof of space")]
    InvalidProofOfSpace,
    /// Invalid audit chunk offset
    #[error("Invalid audit chunk offset")]
    InvalidAuditChunkOffset,
    /// Invalid chunk proof
    #[error("Invalid chunk proof")]
    InvalidChunkProof,
    /// Invalid history size
    #[error("Invalid history size")]
    InvalidHistorySize,
}

/// Parameters for checking that the piece in a solution is a valid piece of archived history.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyPieceCheckParams {
    /// Maximum number of pieces in a sector
    pub max_pieces_in_sector: u16,
    /// Segment root of the segment to which the piece belongs
    pub segment_root: SegmentRoot,
    /// Number of latest archived segments that are considered "recent history"
    pub recent_segments: HistorySize,
    /// Fraction of pieces from the "recent history" (`recent_segments`) in each sector
    pub recent_history_fraction: (HistorySize, HistorySize),
    /// Minimum lifetime of a plotted sector, measured in archived segments
    pub min_sector_lifetime: HistorySize,
    /// Current history size
    pub current_history_size: HistorySize,
    /// Segment root at `min_sector_lifetime` from sector creation (if known)
    pub sector_expiration_check_segment_root: Option<SegmentRoot>,
}

/// Parameters for solution verification.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyParams {
    /// Proof of time for which the solution was created
    pub proof_of_time: PotOutput,
    /// Solution range in effect
    pub solution_range: SolutionRange,
    /// Parameters for checking piece validity.
    ///
    /// If `None`, piece validity is not checked.
    pub piece_check_params: Option<SolutionVerifyPieceCheckParams>,
}

/// Verifier of the proof of space carried by a solution.
pub trait SolutionPotVerifier {
    /// Check whether the proof is valid for the given seed and challenge index.
    fn is_proof_valid(seed: &PosSeed, challenge_index: u32, proof: &PosProof) -> bool;
}
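
// Editor's sketch (hypothetical, not part of the original file): the simplest possible
// `SolutionPotVerifier` implementation, accepting every proof of space. Something like this
// could be plugged into `Solution::verify::<AcceptAllPotVerifier>(..)` in tests; a real
// implementation is expected to actually verify the proof against the seed and challenge index.
#[cfg(test)]
#[allow(dead_code)]
struct AcceptAllPotVerifier;

#[cfg(test)]
impl SolutionPotVerifier for AcceptAllPotVerifier {
    fn is_proof_valid(_seed: &PosSeed, _challenge_index: u32, _proof: &PosProof) -> bool {
        true
    }
}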

/// Farmer solution for the slot challenge.
#[derive(Clone, Copy, Debug, Eq, PartialEq, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct Solution {
    /// Hash of the public key of the farmer that created the solution
    pub public_key_hash: Blake3Hash,
    /// Record root that can be used to verify that the piece was included in blockchain history
    pub record_root: RecordRoot,
    /// Proof for the above record root
    pub record_proof: RecordProof,
    /// Chunk at the audited offset
    pub chunk: RecordChunk,
    /// Proof for the above chunk
    pub chunk_proof: ChunkProof,
    /// Proof of space for the piece offset
    pub proof_of_space: PosProof,
    /// Size of the blockchain history at the time of sector creation
    pub history_size: HistorySize,
    /// Index of the sector where the solution was found
    pub sector_index: SectorIndex,
    /// Piece offset within the sector
    pub piece_offset: PieceOffset,
    /// Padding for data structure alignment
    pub padding: [u8; 4],
}

impl Solution {
    /// Dummy solution for the genesis block.
    pub fn genesis_solution() -> Self {
        Self {
            public_key_hash: Blake3Hash::default(),
            record_root: RecordRoot::default(),
            record_proof: RecordProof::default(),
            chunk: RecordChunk::default(),
            chunk_proof: ChunkProof::default(),
            proof_of_space: PosProof::default(),
            history_size: HistorySize::from(SegmentIndex::ZERO),
            sector_index: SectorIndex::ZERO,
            piece_offset: PieceOffset::default(),
            padding: [0; _],
        }
    }

    /// Check whether the solution is valid for the given slot and verification parameters.
    pub fn verify<PotVerifier>(
        &self,
        slot: SlotNumber,
        params: &SolutionVerifyParams,
    ) -> Result<(), SolutionVerifyError>
    where
        PotVerifier: SolutionPotVerifier,
    {
        let SolutionVerifyParams {
            proof_of_time,
            solution_range,
            piece_check_params,
        } = params;

        let sector_id = SectorId::new(&self.public_key_hash, self.sector_index, self.history_size);

        let global_challenge = proof_of_time.derive_global_challenge(slot);
        let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
        let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

        // Check that the proof of space is valid for the audited s-bucket
        if !PotVerifier::is_proof_valid(
            &sector_id.derive_evaluation_seed(self.piece_offset),
            s_bucket_audit_index.into(),
            &self.proof_of_space,
        ) {
            return Err(SolutionVerifyError::InvalidProofOfSpace);
        }

        // Apply the proof-of-space mask to the chunk before computing the solution distance
        let masked_chunk =
            (Simd::from(*self.chunk) ^ Simd::from(*self.proof_of_space.hash())).to_array();

        let solution_distance =
            SolutionDistance::calculate(&global_challenge, &masked_chunk, &sector_slot_challenge);

        if !solution_distance.is_within(*solution_range) {
            return Err(SolutionVerifyError::OutsideSolutionRange {
                solution_range: *solution_range,
                solution_distance,
            });
        }

        // The Merkle tree size below is hardcoded, make sure it stays in sync with the actual
        // number of s-buckets
        const _: () = {
            assert!(Record::NUM_S_BUCKETS == 65536);
        };
        // Check that the chunk is included in the record at the audited s-bucket
        if !BalancedMerkleTree::<65536>::verify(
            &self.record_root,
            &self.chunk_proof,
            usize::from(s_bucket_audit_index),
            *self.chunk,
        ) {
            return Err(SolutionVerifyError::InvalidChunkProof);
        }

        if let Some(SolutionVerifyPieceCheckParams {
            max_pieces_in_sector,
            segment_root,
            recent_segments,
            recent_history_fraction,
            min_sector_lifetime,
            current_history_size,
            sector_expiration_check_segment_root,
        }) = piece_check_params
        {
            if &self.history_size > current_history_size {
                return Err(SolutionVerifyError::FutureHistorySize {
                    current: *current_history_size,
                    solution: self.history_size,
                });
            }

            if u16::from(self.piece_offset) >= *max_pieces_in_sector {
                return Err(SolutionVerifyError::InvalidPieceOffset {
                    piece_offset: u16::from(self.piece_offset),
                    max_pieces_in_sector: *max_pieces_in_sector,
                });
            }

            if let Some(sector_expiration_check_segment_root) = sector_expiration_check_segment_root
            {
                let expiration_history_size = match sector_id.derive_expiration_history_size(
                    self.history_size,
                    sector_expiration_check_segment_root,
                    *min_sector_lifetime,
                ) {
                    Some(expiration_history_size) => expiration_history_size,
                    None => {
                        return Err(SolutionVerifyError::InvalidHistorySize);
                    }
                };

                if expiration_history_size <= *current_history_size {
                    return Err(SolutionVerifyError::SectorExpired {
                        expiration_history_size,
                        current_history_size: *current_history_size,
                    });
                }
            }

            let position = sector_id
                .derive_piece_index(
                    self.piece_offset,
                    self.history_size,
                    *max_pieces_in_sector,
                    *recent_segments,
                    *recent_history_fraction,
                )
                .position();

            // Check that the piece is part of the archived blockchain history
            if !self
                .record_root
                .is_valid(segment_root, &self.record_proof, position)
            {
                return Err(SolutionVerifyError::InvalidPiece);
            }
        }

        Ok(())
    }
}
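
// Editor's sketch (not part of the original file): `genesis_solution` is expected to produce an
// all-default solution with zeroed padding. Assumes unit tests can live inline in this crate.
#[cfg(test)]
mod solution_example {
    use super::*;

    #[test]
    fn genesis_solution_is_defaulted() {
        let solution = Solution::genesis_solution();
        assert_eq!(solution.public_key_hash, Blake3Hash::default());
        assert_eq!(solution.piece_offset, PieceOffset::default());
        assert_eq!(solution.padding, [0; 4]);
    }
}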