1use crate::block::BlockNumber;
4use crate::hashes::{Blake3Hash, blake3_hash_with_key};
5use crate::pieces::{PieceOffset, Record, RecordChunk, RecordProof, RecordRoot};
6use crate::pos::{PosProof, PosSeed};
7use crate::pot::{PotOutput, SlotNumber};
8use crate::sectors::{SectorId, SectorIndex, SectorSlotChallenge};
9use crate::segments::{HistorySize, SegmentIndex, SegmentRoot};
10use ab_merkle_tree::balanced_hashed::BalancedHashedMerkleTree;
11use blake3::OUT_LEN;
12use core::fmt;
13use core::simd::Simd;
14use derive_more::{Add, AddAssign, Deref, DerefMut, Display, From, Into, Sub, SubAssign};
15#[cfg(feature = "scale-codec")]
16use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
17#[cfg(feature = "scale-codec")]
18use scale_info::TypeInfo;
19#[cfg(feature = "serde")]
20use serde::{Deserialize, Serialize};
21#[cfg(feature = "serde")]
22use serde::{Deserializer, Serializer};
23#[cfg(feature = "serde")]
24use serde_big_array::BigArray;
25
/// Solution distance.
///
/// Wrapping distance between an audit chunk and the global challenge (see
/// [`SolutionDistance::calculate`]); a solution is valid when this distance is within half of
/// the target [`SolutionRange`] (see [`SolutionDistance::is_within`]).
#[derive(
    Debug, Display, Default, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct SolutionDistance(u64);
37
38impl SolutionDistance {
39 pub const MAX: Self = Self(u64::MAX / 2);
41
42 #[inline(always)]
45 pub const fn from_u64(n: u64) -> Self {
46 Self(n)
47 }
48
49 pub fn calculate(
54 global_challenge: &Blake3Hash,
55 chunk: &[u8; 32],
56 sector_slot_challenge: &SectorSlotChallenge,
57 ) -> Self {
58 let audit_chunk = blake3_hash_with_key(sector_slot_challenge, chunk);
59 let audit_chunk_as_solution_range: SolutionRange = SolutionRange::from_bytes(
60 *audit_chunk
61 .array_chunks::<{ SolutionRange::SIZE }>()
62 .next()
63 .expect("Solution range is smaller in size than global challenge; qed"),
64 );
65 let global_challenge_as_solution_range: SolutionRange = SolutionRange::from_bytes(
66 *global_challenge
67 .array_chunks::<{ SolutionRange::SIZE }>()
68 .next()
69 .expect("Solution range is smaller in size than global challenge; qed"),
70 );
71
72 global_challenge_as_solution_range.bidirectional_distance(audit_chunk_as_solution_range)
73 }
74
75 pub const fn is_within(self, solution_range: SolutionRange) -> bool {
77 self.0 <= solution_range.to_u64() / 2
78 }
79}
80
/// Solution range.
///
/// Newtype over `u64` defining how close a solution distance must be to the global challenge
/// for a solution to be valid; larger range means more valid solutions. Supports arithmetic
/// via the derived `Add`/`Sub` (and assign) impls.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct SolutionRange(u64);
107
impl SolutionRange {
    /// Size in bytes of the internal representation (a `u64`).
    pub const SIZE: usize = size_of::<u64>();
    /// Minimum solution range.
    pub const MIN: Self = Self(u64::MIN);
    /// Maximum solution range.
    pub const MAX: Self = Self(u64::MAX);

    /// Creates a new instance from a raw `u64`.
    #[inline(always)]
    pub const fn from_u64(n: u64) -> Self {
        Self(n)
    }

    /// Returns the internal `u64` representation.
    #[inline(always)]
    pub const fn to_u64(self) -> u64 {
        self.0
    }

    /// Creates a new instance from little-endian bytes.
    #[inline(always)]
    pub fn from_bytes(bytes: [u8; 8]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Computes the solution range for `pieces` pieces at the given slot probability
    /// (a `(numerator, denominator)` pair).
    ///
    /// Inverse of [`Self::to_pieces`]; see the compile-time round-trip checks below.
    #[inline]
    pub const fn from_pieces(pieces: u64, slot_probability: (u64, u64)) -> Self {
        // Operation order matters: dividing before multiplying keeps every intermediate
        // value within `u64` at the cost of a little precision
        let solution_range = u64::MAX
            // Scale by per-slot success probability
            / slot_probability.1 * slot_probability.0
            // Scale from record chunks to s-buckets
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Spread the range across the number of pieces
        Self(solution_range / pieces)
    }

    /// Computes the number of pieces corresponding to this solution range at the given
    /// slot probability.
    ///
    /// Inverse of [`Self::from_pieces`]. Note: divides by the range, so a zero range panics.
    #[inline]
    pub const fn to_pieces(self, slot_probability: (u64, u64)) -> u64 {
        // Same overflow-conscious operation order as `from_pieces`
        let pieces = u64::MAX
            / slot_probability.1 * slot_probability.0
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        pieces / self.0
    }

    /// Bidirectional distance between two ranges: the smaller of the two wrapping
    /// differences, i.e. the shorter way around the `u64` circle.
    #[inline]
    pub const fn bidirectional_distance(self, other: Self) -> SolutionDistance {
        let a = self.0;
        let b = other.0;
        // The two wrapping differences sum to 2^64 (mod 2^64); the minimum is the
        // circular distance. This is NOT the same as `abs_diff` for far-apart values.
        let diff = a.wrapping_sub(b);
        let diff2 = b.wrapping_sub(a);
        SolutionDistance::from_u64(if diff < diff2 { diff } else { diff2 })
    }

    /// Derives the next solution range from the era's actual slot usage.
    ///
    /// Scales the current range by `era_slot_count * slot_probability / era_duration`
    /// (the ratio of actual to expected slots over the era), then clamps the result to
    /// `[current / 4, current * 4]` to bound per-era adjustment.
    pub fn derive_next(
        self,
        start_slot: SlotNumber,
        current_slot: SlotNumber,
        slot_probability: (u64, u64),
        era_duration: BlockNumber,
    ) -> Self {
        // Slots elapsed during the era; assumes `current_slot >= start_slot` —
        // NOTE(review): confirm `SlotNumber` subtraction semantics on underflow
        let era_slot_count = current_slot - start_slot;

        // Intermediate math in `u128` with saturating multiplication so extreme inputs
        // cannot overflow; `try_from` fails only if the quotient exceeds `u64`, handled
        // by the `unwrap_or(u64::MAX)` below
        let current_solution_range = self.0;
        let next_solution_range = u64::try_from(
            u128::from(current_solution_range)
                .saturating_mul(u128::from(era_slot_count))
                .saturating_mul(u128::from(slot_probability.0))
                / u128::from(u64::from(era_duration))
                / u128::from(slot_probability.1),
        );

        Self(next_solution_range.unwrap_or(u64::MAX).clamp(
            current_solution_range / 4,
            current_solution_range.saturating_mul(4),
        ))
    }
}
222
// Compile-time sanity checks: `from_pieces`/`to_pieces` must round-trip for representative
// piece counts at the production slot probability
const _: () = {
    assert!(SolutionRange::from_pieces(1, (1, 6)).to_pieces((1, 6)) == 1);
    assert!(SolutionRange::from_pieces(3, (1, 6)).to_pieces((1, 6)) == 3);
    assert!(SolutionRange::from_pieces(5, (1, 6)).to_pieces((1, 6)) == 5);
};
229
/// Merkle proof for a chunk: [`ChunkProof::NUM_HASHES`] sibling hashes of `blake3::OUT_LEN`
/// bytes each, verified against a record root (see `Solution::verify`).
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(transparent)]
pub struct ChunkProof([[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);
238
239impl fmt::Debug for ChunkProof {
240 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
241 write!(f, "[")?;
242 for hash in self.0 {
243 for byte in hash {
244 write!(f, "{byte:02x}")?;
245 }
246 write!(f, ", ")?;
247 }
248 write!(f, "]")?;
249 Ok(())
250 }
251}
252
/// Serde helper for the compact binary (non-human-readable) representation of
/// [`ChunkProof`]; uses `BigArray` for the (de)serialization of the nested array.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofBinary(#[serde(with = "BigArray")] [[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);
257
/// Serde helper rendering a single hash as a hex string in human-readable formats.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
// `repr(transparent)` guarantees this newtype has exactly the layout of `[u8; OUT_LEN]`.
// The `Serialize`/`Deserialize` impls for `ChunkProof` transmute between
// `[[u8; OUT_LEN]; NUM_HASHES]` and `[ChunkProofHexHash; NUM_HASHES]`; without this
// attribute the default `repr(Rust)` gives no formal layout guarantee for that transmute.
#[repr(transparent)]
struct ChunkProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);
262
/// Serde helper for the human-readable representation of [`ChunkProof`]: an array of
/// per-hash hex strings (see [`ChunkProofHexHash`]).
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHex([ChunkProofHexHash; ChunkProof::NUM_HASHES]);
267
#[cfg(feature = "serde")]
impl Serialize for ChunkProof {
    /// Serializes as an array of hex strings for human-readable formats, and as raw bytes
    /// (via [`ChunkProofBinary`]) otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // SAFETY: `ChunkProofHexHash` is a newtype wrapping `[u8; OUT_LEN]`, so the two
            // array types have identical size; the transmute relies on the newtype having
            // the same memory layout as the raw array (guaranteed only if the type is
            // `#[repr(transparent)]` — verify that attribute is present on the helper).
            ChunkProofHex(unsafe {
                core::mem::transmute::<
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            ChunkProofBinary(self.0).serialize(serializer)
        }
    }
}
290
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ChunkProof {
    /// Mirror of the `Serialize` impl: hex strings for human-readable formats, raw bytes
    /// otherwise.
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // SAFETY: `ChunkProofHexHash` is a newtype wrapping `[u8; OUT_LEN]`, so the two
            // array types have identical size; the transmute relies on the newtype having
            // the same memory layout as the raw array (guaranteed only if the type is
            // `#[repr(transparent)]` — verify that attribute is present on the helper).
            unsafe {
                core::mem::transmute::<
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                >(ChunkProofHex::deserialize(deserializer)?.0)
            }
        } else {
            ChunkProofBinary::deserialize(deserializer)?.0
        }))
    }
}
312
313impl Default for ChunkProof {
314 #[inline]
315 fn default() -> Self {
316 Self([[0; OUT_LEN]; ChunkProof::NUM_HASHES])
317 }
318}
319
320impl AsRef<[u8]> for ChunkProof {
321 #[inline]
322 fn as_ref(&self) -> &[u8] {
323 self.0.as_flattened()
324 }
325}
326
327impl AsMut<[u8]> for ChunkProof {
328 #[inline]
329 fn as_mut(&mut self) -> &mut [u8] {
330 self.0.as_flattened_mut()
331 }
332}
333
impl ChunkProof {
    /// Total size of the proof in bytes.
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    // Number of hashes in a proof: the depth of a balanced Merkle tree over
    // `Record::NUM_S_BUCKETS` leaves (log2 of the leaf count)
    const NUM_HASHES: usize = Record::NUM_S_BUCKETS.ilog2() as usize;
}
339
340#[derive(Debug, Eq, PartialEq, thiserror::Error)]
342pub enum SolutionVerifyError {
343 #[error("Piece verification failed")]
345 InvalidPieceOffset {
346 piece_offset: u16,
348 max_pieces_in_sector: u16,
350 },
351 #[error("Sector expired")]
353 SectorExpired {
354 expiration_history_size: HistorySize,
356 current_history_size: HistorySize,
358 },
359 #[error("Piece verification failed")]
361 InvalidPiece,
362 #[error("Solution distance {solution_distance} is outside of solution range {solution_range}")]
364 OutsideSolutionRange {
365 solution_range: SolutionRange,
367 solution_distance: SolutionDistance,
369 },
370 #[error("Invalid proof of space")]
372 InvalidProofOfSpace,
373 #[error("Invalid audit chunk offset")]
375 InvalidAuditChunkOffset,
376 #[error("Invalid chunk proof")]
378 InvalidChunkProof,
379 #[error("Invalid history size")]
381 InvalidHistorySize,
382}
383
/// Parameters for the optional piece-validity portion of solution verification
/// (see `Solution::verify`).
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyPieceCheckParams {
    /// Max number of pieces in a sector; solutions with a piece offset at or beyond this
    /// are rejected.
    pub max_pieces_in_sector: u16,
    /// Segment root the solution's record proof is checked against.
    pub segment_root: SegmentRoot,
    /// Forwarded to piece-index derivation — presumably the number of recent archived
    /// segments considered "recent history"; confirm against `SectorId::derive_piece_index`.
    pub recent_segments: HistorySize,
    /// Forwarded to piece-index derivation — presumably the fraction of sector pieces drawn
    /// from recent history; confirm against `SectorId::derive_piece_index`.
    pub recent_history_fraction: (HistorySize, HistorySize),
    /// Minimum sector lifetime, used when deriving the sector's expiration history size.
    pub min_sector_lifetime: HistorySize,
    /// Current history size, compared against the derived expiration history size.
    pub current_history_size: HistorySize,
    /// Segment root used for the sector-expiration check; when `None`, that check is
    /// skipped.
    pub sector_expiration_check_segment_root: Option<SegmentRoot>,
}
403
/// Parameters for solution verification (see `Solution::verify`).
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyParams {
    /// Proof of time from which the global challenge for the slot is derived.
    pub proof_of_time: PotOutput,
    /// Solution range the solution distance must fall within.
    pub solution_range: SolutionRange,
    /// Piece check parameters; when `None`, the record-proof and expiration checks are
    /// skipped entirely.
    pub piece_check_params: Option<SolutionVerifyPieceCheckParams>,
}
417
/// Proof-of-space verifier used by `Solution::verify`.
pub trait SolutionPotVerifier {
    /// Checks whether `proof` is a valid proof of space for `seed` at `challenge_index`.
    fn is_proof_valid(seed: &PosSeed, challenge_index: u32, proof: &PosProof) -> bool;
}
423
/// Farmer's solution to a slot challenge; validated by [`Solution::verify`].
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub struct Solution {
    /// Blake3 hash of the solution owner's public key; part of the sector id derivation.
    pub public_key_hash: Blake3Hash,
    /// Index of the sector where the solution was found; part of the sector id derivation.
    pub sector_index: SectorIndex,
    /// History size used to derive the sector id and its expiration.
    pub history_size: HistorySize,
    /// Offset of the piece within the sector.
    pub piece_offset: PieceOffset,
    /// Record root, checked against the segment root via `record_proof`.
    pub record_root: RecordRoot,
    /// Proof for the above record root.
    pub record_proof: RecordProof,
    /// Record chunk at the audited s-bucket index.
    pub chunk: RecordChunk,
    /// Merkle proof tying `chunk` to `record_root`.
    pub chunk_proof: ChunkProof,
    /// Proof of space for this piece offset.
    pub proof_of_space: PosProof,
}
449
impl Solution {
    /// Dummy solution for the genesis block: every field is zero/default.
    pub fn genesis_solution() -> Self {
        Self {
            public_key_hash: Blake3Hash::default(),
            sector_index: SectorIndex::ZERO,
            history_size: HistorySize::from(SegmentIndex::ZERO),
            piece_offset: PieceOffset::default(),
            record_root: RecordRoot::default(),
            record_proof: RecordProof::default(),
            chunk: RecordChunk::default(),
            chunk_proof: ChunkProof::default(),
            proof_of_space: PosProof::default(),
        }
    }

    /// Checks solution validity for `slot` against `params`.
    ///
    /// Verification steps, in order:
    /// 1. proof of space (via `PotVerifier`),
    /// 2. solution distance within the solution range,
    /// 3. chunk Merkle proof against the record root,
    /// 4. only when `piece_check_params` is `Some`: piece offset bounds, optional sector
    ///    expiration, and the record proof against the segment root.
    ///
    /// # Errors
    ///
    /// Returns the [`SolutionVerifyError`] corresponding to the first failed check.
    pub fn verify<PotVerifier>(
        &self,
        slot: SlotNumber,
        params: &SolutionVerifyParams,
    ) -> Result<(), SolutionVerifyError>
    where
        PotVerifier: SolutionPotVerifier,
    {
        let SolutionVerifyParams {
            proof_of_time,
            solution_range,
            piece_check_params,
        } = params;

        let sector_id = SectorId::new(&self.public_key_hash, self.sector_index, self.history_size);

        // Derive this slot's challenges: global from proof of time, then sector-specific
        let global_challenge = proof_of_time.derive_global_challenge(slot);
        let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
        let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

        // Check that the proof of space is valid for the audited s-bucket
        if !PotVerifier::is_proof_valid(
            &sector_id.derive_evaluation_seed(self.piece_offset),
            s_bucket_audit_index.into(),
            &self.proof_of_space,
        ) {
            return Err(SolutionVerifyError::InvalidProofOfSpace);
        };

        // XOR the chunk with the hash of the proof of space (reverses the masking applied
        // at plotting time — confirm against the plotting code)
        let masked_chunk =
            (Simd::from(*self.chunk) ^ Simd::from(*self.proof_of_space.hash())).to_array();

        let solution_distance =
            SolutionDistance::calculate(&global_challenge, &masked_chunk, &sector_slot_challenge);

        if !solution_distance.is_within(*solution_range) {
            return Err(SolutionVerifyError::OutsideSolutionRange {
                solution_range: *solution_range,
                solution_distance,
            });
        }

        // The Merkle tree arity below is hard-coded; this compile-time check keeps it in
        // sync with `Record::NUM_S_BUCKETS`
        const _: () = {
            assert!(Record::NUM_S_BUCKETS == 65536);
        };
        // Check that the chunk belongs to the record root at the audited s-bucket index
        if !BalancedHashedMerkleTree::<65536>::verify(
            &self.record_root,
            &self.chunk_proof,
            usize::from(s_bucket_audit_index),
            *self.chunk,
        ) {
            return Err(SolutionVerifyError::InvalidChunkProof);
        }

        if let Some(SolutionVerifyPieceCheckParams {
            max_pieces_in_sector,
            segment_root,
            recent_segments,
            recent_history_fraction,
            min_sector_lifetime,
            current_history_size,
            sector_expiration_check_segment_root,
        }) = piece_check_params
        {
            // Piece offset must be within the sector
            if u16::from(self.piece_offset) >= *max_pieces_in_sector {
                return Err(SolutionVerifyError::InvalidPieceOffset {
                    piece_offset: u16::from(self.piece_offset),
                    max_pieces_in_sector: *max_pieces_in_sector,
                });
            }
            // Optional expiration check: the sector must not yet have expired
            if let Some(sector_expiration_check_segment_root) = sector_expiration_check_segment_root
            {
                let expiration_history_size = match sector_id.derive_expiration_history_size(
                    self.history_size,
                    sector_expiration_check_segment_root,
                    *min_sector_lifetime,
                ) {
                    Some(expiration_history_size) => expiration_history_size,
                    None => {
                        return Err(SolutionVerifyError::InvalidHistorySize);
                    }
                };

                if expiration_history_size <= *current_history_size {
                    return Err(SolutionVerifyError::SectorExpired {
                        expiration_history_size,
                        current_history_size: *current_history_size,
                    });
                }
            }

            // Position of the piece within its segment, needed to verify the record proof
            let position = sector_id
                .derive_piece_index(
                    self.piece_offset,
                    self.history_size,
                    *max_pieces_in_sector,
                    *recent_segments,
                    *recent_history_fraction,
                )
                .position();

            // Check that the record root is valid within the segment
            if !self
                .record_root
                .is_valid(segment_root, &self.record_proof, position)
            {
                return Err(SolutionVerifyError::InvalidPiece);
            }
        }

        Ok(())
    }
}
583}