use crate::block::BlockNumber;
use crate::ed25519::Ed25519PublicKey;
use crate::hashes::Blake3Hash;
use crate::pieces::{PieceOffset, Record, RecordChunk, RecordProof, RecordRoot};
use crate::pos::{PosProof, PosSeed};
use crate::pot::{PotOutput, SlotNumber};
use crate::sectors::{SBucket, SectorId, SectorIndex, SectorSlotChallenge};
use crate::segments::{HistorySize, SegmentIndex, SegmentRoot};
use ab_blake3::single_block_keyed_hash;
use ab_io_type::trivial_type::TrivialType;
use ab_merkle_tree::balanced::BalancedMerkleTree;
use blake3::OUT_LEN;
use core::fmt;
use core::simd::Simd;
use derive_more::{Add, AddAssign, Deref, DerefMut, Display, From, Into, Sub, SubAssign};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "scale-codec")]
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Solution distance, the bidirectional distance between the audited chunk and the global
/// challenge
#[derive(
    Debug, Display, Default, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionDistance(u64);

impl SolutionDistance {
    /// Maximum value
    pub const MAX: Self = Self(u64::MAX / 2);

    /// Create a new instance from a raw `u64`
    #[inline(always)]
    pub const fn from_u64(n: u64) -> Self {
        Self(n)
    }

    /// Calculate the solution distance between the global challenge and a chunk audited under
    /// the sector slot challenge
    pub fn calculate(
        global_challenge: &Blake3Hash,
        chunk: &[u8; 32],
        sector_slot_challenge: &SectorSlotChallenge,
    ) -> Self {
        // Hash the chunk with the sector slot challenge as the key
        let audit_chunk = single_block_keyed_hash(sector_slot_challenge, chunk)
            .expect("Less than a single block worth of bytes; qed");
        let audit_chunk_as_solution_range: SolutionRange = SolutionRange::from_bytes([
            audit_chunk[0],
            audit_chunk[1],
            audit_chunk[2],
            audit_chunk[3],
            audit_chunk[4],
            audit_chunk[5],
            audit_chunk[6],
            audit_chunk[7],
        ]);
        let global_challenge_as_solution_range: SolutionRange =
            SolutionRange::from_bytes(global_challenge.as_chunks().0[0]);

        global_challenge_as_solution_range.bidirectional_distance(audit_chunk_as_solution_range)
    }

    /// Check if the distance is within the provided solution range
    pub const fn is_within(self, solution_range: SolutionRange) -> bool {
        // The range covers `solution_range / 2` on either side of the global challenge
        self.0 <= solution_range.as_u64() / 2
    }
}
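
// Example (illustrative sketch, not part of the original API): auditing a masked chunk against
// the global challenge for a slot. `global_challenge`, `masked_chunk`, `sector_slot_challenge`
// and `solution_range` are assumed to be produced elsewhere (see `Solution::verify` below).
//
//     let solution_distance = SolutionDistance::calculate(
//         &global_challenge,
//         &masked_chunk,
//         &sector_slot_challenge,
//     );
//     if solution_distance.is_within(solution_range) {
//         // The chunk is eligible to be turned into a solution for this slot
//     }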

/// Solution range within which solution distances are considered valid (see
/// [`SolutionDistance::is_within`])
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionRange(u64);

impl SolutionRange {
    /// Size in bytes
    pub const SIZE: usize = size_of::<u64>();
    /// Minimum value
    pub const MIN: Self = Self(u64::MIN);
    /// Maximum value
    pub const MAX: Self = Self(u64::MAX);

    /// Create a new instance
    #[inline(always)]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get the internal representation
    #[inline(always)]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Convert to little-endian bytes
    #[inline(always)]
    pub fn to_bytes(self) -> [u8; 8] {
        self.0.to_le_bytes()
    }

    /// Create from little-endian bytes
    #[inline(always)]
    pub fn from_bytes(bytes: [u8; 8]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Computes the solution range corresponding to the specified number of pieces:
    ///
    /// ```text
    /// MAX * slot_probability / chunks * s_buckets / pieces
    /// ```
    #[inline]
    pub const fn from_pieces(pieces: u64, slot_probability: (u64, u64)) -> Self {
        let solution_range = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Now take the probability of hitting an occupied s-bucket in a sector into account
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take the number of pieces into account
        Self(solution_range / pieces)
    }

    /// Computes the number of pieces corresponding to this solution range (inverse of
    /// [`Self::from_pieces`]):
    ///
    /// ```text
    /// MAX * slot_probability / chunks * s_buckets / solution_range
    /// ```
    #[inline]
    pub const fn to_pieces(self, slot_probability: (u64, u64)) -> u64 {
        let pieces = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Now take the probability of hitting an occupied s-bucket in a sector into account
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take the solution range into account
        pieces / self.0
    }

    /// Bidirectional distance between two solution ranges (the smaller of the two wrapping
    /// differences)
    #[inline]
    pub const fn bidirectional_distance(self, other: Self) -> SolutionDistance {
        let a = self.0;
        let b = other.0;
        let diff = a.wrapping_sub(b);
        let diff2 = b.wrapping_sub(a);
        SolutionDistance::from_u64(if diff < diff2 { diff } else { diff2 })
    }

    /// Derive the solution range for the next retargeting interval from the number of slots that
    /// actually elapsed while producing the last `retarget_interval` blocks, keeping block
    /// production aligned with `slot_probability`
    #[inline]
    pub fn derive_next(
        self,
        slots_in_last_interval: SlotNumber,
        slot_probability: (u64, u64),
        retarget_interval: BlockNumber,
    ) -> Self {
        let current_solution_range = self.0;
        // Next solution range is proportional to the ratio of slots that actually elapsed to the
        // number of slots expected for `retarget_interval` blocks at the given slot probability
        let next_solution_range = u64::try_from(
            u128::from(current_solution_range)
                .saturating_mul(u128::from(slots_in_last_interval))
                .saturating_mul(u128::from(slot_probability.0))
                / u128::from(u64::from(retarget_interval))
                / u128::from(slot_probability.1),
        );

        // Clamp the adjustment to at most a 4x change in either direction
        Self(next_solution_range.unwrap_or(u64::MAX).clamp(
            current_solution_range / 4,
            current_solution_range.saturating_mul(4),
        ))
    }
}

const _: () = {
    // Check that the piece <-> solution range conversions above are reversible
    assert!(SolutionRange::from_pieces(1, (1, 6)).to_pieces((1, 6)) == 1);
    assert!(SolutionRange::from_pieces(3, (1, 6)).to_pieces((1, 6)) == 3);
    assert!(SolutionRange::from_pieces(5, (1, 6)).to_pieces((1, 6)) == 5);
};
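
// Example (illustrative sketch): a typical retargeting flow. `slots_in_last_interval` and
// `retarget_interval` are assumed to come from chain state; the piece count is hypothetical.
//
//     let initial_range = SolutionRange::from_pieces(1000, (1, 6));
//     // After `retarget_interval` blocks, adjust the range based on how many slots it actually
//     // took to produce them; the result is clamped to at most a 4x change in either direction
//     let next_range =
//         initial_range.derive_next(slots_in_last_interval, (1, 6), retarget_interval);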

/// Proof for a chunk contained within a record
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct ChunkProof([[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

impl fmt::Debug for ChunkProof {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[")?;
        for hash in self.0 {
            for byte in hash {
                write!(f, "{byte:02x}")?;
            }
            write!(f, ", ")?;
        }
        write!(f, "]")?;
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofBinary(#[serde(with = "BigArray")] [[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHex([ChunkProofHexHash; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for ChunkProof {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // Relies on `ChunkProofHexHash` being a single-field new-type wrapper around
            // `[u8; OUT_LEN]` with the same layout
            ChunkProofHex(unsafe {
                core::mem::transmute::<
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            ChunkProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ChunkProof {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // Same layout argument as in `Serialize` above, in the opposite direction
            unsafe {
                core::mem::transmute::<
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                >(ChunkProofHex::deserialize(deserializer)?.0)
            }
        } else {
            ChunkProofBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for ChunkProof {
    #[inline]
    fn default() -> Self {
        Self([[0; OUT_LEN]; ChunkProof::NUM_HASHES])
    }
}

impl AsRef<[u8]> for ChunkProof {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for ChunkProof {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl ChunkProof {
    /// Size of chunk proof in bytes
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    // Number of hashes in the Merkle path, one per level of the tree over s-buckets
    const NUM_HASHES: usize = Record::NUM_S_BUCKETS.ilog2() as usize;
}
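
// Size sketch (derived from the definitions above and the `Record::NUM_S_BUCKETS == 65536`
// assertion in `Solution::verify` below): a chunk proof is a Merkle path of
// `log2(65536) == 16` hashes of `OUT_LEN == 32` bytes each, i.e. 512 bytes total, e.g.:
//
//     const _: () = {
//         assert!(ChunkProof::SIZE == 512);
//     };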

/// Errors encountered by [`Solution::verify`]
#[derive(Debug, Eq, PartialEq, thiserror::Error)]
pub enum SolutionVerifyError {
    /// Invalid piece offset
    #[error("Invalid piece offset {piece_offset}, max pieces in sector is {max_pieces_in_sector}")]
    InvalidPieceOffset {
        /// Index of the piece that failed verification
        piece_offset: u16,
        /// How many pieces one sector is supposed to contain (max)
        max_pieces_in_sector: u16,
    },
    /// History size in the solution is in the future
    #[error("History size {solution} is in the future, current is {current}")]
    FutureHistorySize {
        /// Current history size
        current: HistorySize,
        /// History size in the solution
        solution: HistorySize,
    },
    /// Sector expired
    #[error("Sector expired")]
    SectorExpired {
        /// History size at which the sector expired
        expiration_history_size: HistorySize,
        /// Current history size
        current_history_size: HistorySize,
    },
    /// Piece verification failed
    #[error("Piece verification failed")]
    InvalidPiece,
    /// Solution is outside of the solution range
    #[error("Solution distance {solution_distance} is outside of solution range {solution_range}")]
    OutsideSolutionRange {
        /// Solution range
        solution_range: SolutionRange,
        /// Solution distance
        solution_distance: SolutionDistance,
    },
    /// Invalid proof of space
    #[error("Invalid proof of space")]
    InvalidProofOfSpace,
    /// Invalid chunk proof
    #[error("Invalid chunk proof")]
    InvalidChunkProof,
    /// Invalid history size
    #[error("Invalid history size")]
    InvalidHistorySize,
}

/// Parameters for checking piece validity
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyPieceCheckParams {
    /// How many pieces one sector is supposed to contain (max)
    pub max_pieces_in_sector: u16,
    /// Segment root of the segment to which the piece belongs
    pub segment_root: SegmentRoot,
    /// Number of the latest archived segments that are considered "recent history"
    pub recent_segments: HistorySize,
    /// Fraction of pieces from the "recent history" (`recent_segments`) in each sector
    pub recent_history_fraction: (HistorySize, HistorySize),
    /// Minimum lifetime of a plotted sector, measured in archived segments
    pub min_sector_lifetime: HistorySize,
    /// Current size of the history
    pub current_history_size: HistorySize,
    /// Segment root at `min_sector_lifetime` from sector creation (if one exists)
    pub sector_expiration_check_segment_root: Option<SegmentRoot>,
}

/// Parameters for solution verification
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyParams {
    /// Proof of time for which the solution was created
    pub proof_of_time: PotOutput,
    /// Solution range
    pub solution_range: SolutionRange,
    /// Parameters for checking piece validity.
    ///
    /// If `None`, piece validity check will be skipped.
    pub piece_check_params: Option<SolutionVerifyPieceCheckParams>,
}

/// Trait used by [`Solution::verify`] to check that the proof of space in a solution is valid
pub trait SolutionPotVerifier {
    /// Check whether the proof for the given seed and s-bucket is valid
    fn is_proof_valid(seed: &PosSeed, s_bucket: SBucket, proof: &PosProof) -> bool;
}
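
// Example (illustrative sketch): a no-op verifier that could be plugged into `Solution::verify`
// in tests. A real implementation would check `proof` against a proof-of-space table built from
// `seed` and `s_bucket`.
//
//     struct AcceptAllPotVerifier;
//
//     impl SolutionPotVerifier for AcceptAllPotVerifier {
//         fn is_proof_valid(_seed: &PosSeed, _s_bucket: SBucket, _proof: &PosProof) -> bool {
//             true
//         }
//     }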

/// Farmer solution for a slot challenge
#[derive(Clone, Copy, Debug, Eq, PartialEq, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct Solution {
    /// Hash of the public key of the farmer that created the solution
    pub public_key_hash: Blake3Hash,
    /// Record root that can be used to verify that the piece was included in blockchain history
    pub record_root: RecordRoot,
    /// Proof for the above record root
    pub record_proof: RecordProof,
    /// Chunk of the record at the audited s-bucket
    pub chunk: RecordChunk,
    /// Proof for the above chunk
    pub chunk_proof: ChunkProof,
    /// Proof of space for the piece offset
    pub proof_of_space: PosProof,
    /// Size of the blockchain history at the time of sector creation
    pub history_size: HistorySize,
    /// Index of the sector where the solution was found
    pub sector_index: SectorIndex,
    /// Piece offset within the sector
    pub piece_offset: PieceOffset,
    /// Padding for data structure alignment
    pub padding: [u8; 4],
}

impl Solution {
    /// Dummy solution for the genesis block
    pub fn genesis_solution() -> Self {
        Self {
            public_key_hash: Ed25519PublicKey::default().hash(),
            record_root: RecordRoot::default(),
            record_proof: RecordProof::default(),
            chunk: RecordChunk::default(),
            chunk_proof: ChunkProof::default(),
            proof_of_space: PosProof::default(),
            history_size: HistorySize::from(SegmentIndex::ZERO),
            sector_index: SectorIndex::ZERO,
            piece_offset: PieceOffset::default(),
            padding: [0; _],
        }
    }

    /// Check if the solution is valid, returning an error describing the failed check otherwise
    pub fn verify<PotVerifier>(
        &self,
        slot: SlotNumber,
        params: &SolutionVerifyParams,
    ) -> Result<(), SolutionVerifyError>
    where
        PotVerifier: SolutionPotVerifier,
    {
        let SolutionVerifyParams {
            proof_of_time,
            solution_range,
            piece_check_params,
        } = params;

        let sector_id = SectorId::new(&self.public_key_hash, self.sector_index, self.history_size);

        let global_challenge = proof_of_time.derive_global_challenge(slot);
        let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
        let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

        // Check that the proof of space is valid
        if !PotVerifier::is_proof_valid(
            &sector_id.derive_evaluation_seed(self.piece_offset),
            s_bucket_audit_index,
            &self.proof_of_space,
        ) {
            return Err(SolutionVerifyError::InvalidProofOfSpace);
        };

        let masked_chunk =
            (Simd::from(*self.chunk) ^ Simd::from(*self.proof_of_space.hash())).to_array();

        let solution_distance =
            SolutionDistance::calculate(&global_challenge, &masked_chunk, &sector_slot_challenge);

        if !solution_distance.is_within(*solution_range) {
            return Err(SolutionVerifyError::OutsideSolutionRange {
                solution_range: *solution_range,
                solution_distance,
            });
        }

        // Ensure the hardcoded tree size below stays in sync with `Record::NUM_S_BUCKETS`
        const _: () = {
            assert!(Record::NUM_S_BUCKETS == 65536);
        };
        // Check that the chunk belongs to the record
        if !BalancedMerkleTree::<65536>::verify(
            &self.record_root,
            &self.chunk_proof,
            usize::from(s_bucket_audit_index),
            *self.chunk,
        ) {
            return Err(SolutionVerifyError::InvalidChunkProof);
        }

        if let Some(SolutionVerifyPieceCheckParams {
            max_pieces_in_sector,
            segment_root,
            recent_segments,
            recent_history_fraction,
            min_sector_lifetime,
            current_history_size,
            sector_expiration_check_segment_root,
        }) = piece_check_params
        {
            if &self.history_size > current_history_size {
                return Err(SolutionVerifyError::FutureHistorySize {
                    current: *current_history_size,
                    solution: self.history_size,
                });
            }

            if u16::from(self.piece_offset) >= *max_pieces_in_sector {
                return Err(SolutionVerifyError::InvalidPieceOffset {
                    piece_offset: u16::from(self.piece_offset),
                    max_pieces_in_sector: *max_pieces_in_sector,
                });
            }

            if let Some(sector_expiration_check_segment_root) = sector_expiration_check_segment_root
            {
                let expiration_history_size = match sector_id.derive_expiration_history_size(
                    self.history_size,
                    sector_expiration_check_segment_root,
                    *min_sector_lifetime,
                ) {
                    Some(expiration_history_size) => expiration_history_size,
                    None => {
                        return Err(SolutionVerifyError::InvalidHistorySize);
                    }
                };

                if expiration_history_size <= *current_history_size {
                    return Err(SolutionVerifyError::SectorExpired {
                        expiration_history_size,
                        current_history_size: *current_history_size,
                    });
                }
            }

            let position = sector_id
                .derive_piece_index(
                    self.piece_offset,
                    self.history_size,
                    *max_pieces_in_sector,
                    *recent_segments,
                    *recent_history_fraction,
                )
                .position();

            // Check that the piece is part of the archived blockchain history
            if !self
                .record_root
                .is_valid(segment_root, &self.record_proof, position)
            {
                return Err(SolutionVerifyError::InvalidPiece);
            }
        }

        Ok(())
    }
}
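
// Example (illustrative sketch): verifying a solution against consensus parameters. `solution`,
// `slot`, `pot_output` and `solution_range` are assumed to come from the block being verified,
// and `AcceptAllPotVerifier` is the hypothetical verifier sketched above.
//
//     let params = SolutionVerifyParams {
//         proof_of_time: pot_output,
//         solution_range,
//         // `None` skips piece checks (e.g. for a light pre-check without history access)
//         piece_check_params: None,
//     };
//     solution.verify::<AcceptAllPotVerifier>(slot, &params)?;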