use crate::block::BlockNumber;
use crate::ed25519::Ed25519PublicKey;
use crate::hashes::Blake3Hash;
use crate::pieces::{PieceOffset, Record, RecordChunk, RecordProof, RecordRoot};
use crate::pos::{PosProof, PosSeed};
use crate::pot::{PotOutput, SlotNumber};
use crate::sectors::{SBucket, SectorId, SectorIndex, SectorSlotChallenge};
use crate::segments::{HistorySize, SegmentIndex, SegmentRoot};
use crate::shard::{NumShards, RealShardKind, ShardIndex, ShardKind};
use ab_blake3::single_block_keyed_hash;
use ab_io_type::trivial_type::TrivialType;
use ab_merkle_tree::balanced::BalancedMerkleTree;
use blake3::{Hash, OUT_LEN};
use core::simd::Simd;
use core::{fmt, mem};
use derive_more::{
    Add, AddAssign, AsMut, AsRef, Deref, DerefMut, Display, From, Into, Sub, SubAssign,
};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

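/// Solution distance: how far the audit chunk is from the global challenge in the wrap-around
/// `u64` challenge space. A solution is only valid if this distance falls within the current
/// solution range (see [`SolutionDistance::is_within`]).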
#[derive(
    Debug, Display, Default, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionDistance(u64);

impl SolutionDistance {
    /// Maximum solution distance
    pub const MAX: Self = Self(u64::MAX / 2);

    /// Create a distance from a raw `u64`
    #[inline(always)]
    pub const fn from_u64(n: u64) -> Self {
        Self(n)
    }

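    /// Calculates the solution distance for an audit: the chunk is keyed-hashed with the sector
    /// slot challenge, and the first 8 bytes of that hash and of the global challenge are
    /// interpreted as little-endian `u64`s whose bidirectional distance is returned.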
    pub fn calculate(
        global_challenge: &Blake3Hash,
        chunk: &[u8; 32],
        sector_slot_challenge: &SectorSlotChallenge,
    ) -> Self {
        let audit_chunk = single_block_keyed_hash(sector_slot_challenge, chunk)
            .expect("Less than a single block worth of bytes; qed");
        let audit_chunk_as_solution_range: SolutionRange = SolutionRange::from_bytes([
            audit_chunk[0],
            audit_chunk[1],
            audit_chunk[2],
            audit_chunk[3],
            audit_chunk[4],
            audit_chunk[5],
            audit_chunk[6],
            audit_chunk[7],
        ]);
        let global_challenge_as_solution_range: SolutionRange =
            SolutionRange::from_bytes(global_challenge.as_chunks().0[0]);

        global_challenge_as_solution_range.bidirectional_distance(audit_chunk_as_solution_range)
    }

    /// Checks whether this distance falls within the given solution range
    pub const fn is_within(self, solution_range: SolutionRange) -> bool {
        self.0 <= solution_range.as_u64() / 2
    }
}

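/// Solution range: the size of the window around the global challenge within which an audit
/// chunk is accepted as a valid solution.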
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SolutionRange(u64);

impl SolutionRange {
    /// Size in bytes
    pub const SIZE: usize = size_of::<u64>();
    /// Minimum solution range
    pub const MIN: Self = Self(u64::MIN);
    /// Maximum solution range
    pub const MAX: Self = Self(u64::MAX);

    /// Create a new instance from a raw `u64`
    #[inline(always)]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get the internal representation
    #[inline(always)]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Convert to little-endian bytes
    #[inline(always)]
    pub fn to_bytes(self) -> [u8; 8] {
        self.0.to_le_bytes()
    }

    /// Create from little-endian bytes
    #[inline(always)]
    pub fn from_bytes(bytes: [u8; 8]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

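    /// Computes the solution range corresponding to the given number of pieces and slot
    /// probability: the full `u64` space is scaled by the slot probability and by the
    /// `NUM_S_BUCKETS / NUM_CHUNKS` ratio, then divided by the number of pieces, so the range
    /// shrinks proportionally as more pieces are pledged.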
    #[inline]
    pub const fn from_pieces(pieces: u64, slot_probability: (u64, u64)) -> Self {
        let solution_range = u64::MAX
            // Account for slot probability
            / slot_probability.1 * slot_probability.0
            // Account for the probability of hitting an occupied s-bucket in a piece
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        // Take the number of pieces into account
        Self(solution_range / pieces)
    }

    /// Computes the number of pieces that corresponds to this solution range; the inverse of
    /// [`Self::from_pieces`] for the same slot probability.
    #[inline]
    pub const fn to_pieces(self, slot_probability: (u64, u64)) -> u64 {
        let pieces = u64::MAX
            / slot_probability.1 * slot_probability.0
            / Record::NUM_CHUNKS as u64
            * Record::NUM_S_BUCKETS as u64;

        pieces / self.0
    }

    /// Scales the solution range by the number of leaf shards (used when verifying solutions on
    /// a leaf shard)
    #[inline]
    pub const fn to_leaf_shard(self, num_shards: NumShards) -> Self {
        Self(
            self.0
                .saturating_mul(u64::from(num_shards.leaf_shards().get())),
        )
    }

    /// Scales the solution range by the number of intermediate shards (used when verifying
    /// solutions on an intermediate shard)
    #[inline]
    pub const fn to_intermediate_shard(self, num_shards: NumShards) -> Self {
        Self(
            self.0
                .saturating_mul(u64::from(num_shards.intermediate_shards().get())),
        )
    }

    /// Bidirectional distance between two solution ranges, i.e. the smaller of the two
    /// wrap-around differences in the `u64` space
    #[inline]
    pub const fn bidirectional_distance(self, other: Self) -> SolutionDistance {
        let a = self.0;
        let b = other.0;
        let diff = a.wrapping_sub(b);
        let diff2 = b.wrapping_sub(a);
        SolutionDistance::from_u64(if diff < diff2 { diff } else { diff2 })
    }

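    /// Derives the next solution range from the current one based on how many slots the last
    /// retarget interval actually took:
    ///
    /// `next = current * slots_in_last_interval * slot_probability.0 / retarget_interval /
    /// slot_probability.1`
    ///
    /// The result is clamped to stay within a factor of 4 of the current solution range.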
    #[inline]
    pub fn derive_next(
        self,
        slots_in_last_interval: SlotNumber,
        slot_probability: (u64, u64),
        retarget_interval: BlockNumber,
    ) -> Self {
        let current_solution_range = self.0;
        let next_solution_range = u64::try_from(
            u128::from(current_solution_range)
                .saturating_mul(u128::from(slots_in_last_interval))
                .saturating_mul(u128::from(slot_probability.0))
                / u128::from(u64::from(retarget_interval))
                / u128::from(slot_probability.1),
        );

        // Clamp the adjustment to a factor of 4 in either direction
        Self(next_solution_range.unwrap_or(u64::MAX).clamp(
            current_solution_range / 4,
            current_solution_range.saturating_mul(4),
        ))
    }
}

// Check that `from_pieces` and `to_pieces` are each other's inverse for a few piece counts
const _: () = {
    assert!(SolutionRange::from_pieces(1, (1, 6)).to_pieces((1, 6)) == 1);
    assert!(SolutionRange::from_pieces(3, (1, 6)).to_pieces((1, 6)) == 3);
    assert!(SolutionRange::from_pieces(5, (1, 6)).to_pieces((1, 6)) == 5);
};

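/// Merkle proof that a chunk is included in a record at a particular s-bucket position (one hash
/// per level of the tree over all s-buckets)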
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct ChunkProof([[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

impl fmt::Debug for ChunkProof {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[")?;
        for hash in self.0 {
            for byte in hash {
                write!(f, "{byte:02x}")?;
            }
            write!(f, ", ")?;
        }
        write!(f, "]")?;
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofBinary(#[serde(with = "BigArray")] [[u8; OUT_LEN]; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ChunkProofHex([ChunkProofHexHash; ChunkProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for ChunkProof {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            ChunkProofHex(unsafe {
                // SAFETY: `ChunkProofHexHash` is a single-field newtype around `[u8; OUT_LEN]`,
                // so the two array types have the same layout
                mem::transmute::<
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            ChunkProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ChunkProof {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            unsafe {
                // SAFETY: `ChunkProofHexHash` is a single-field newtype around `[u8; OUT_LEN]`,
                // so the two array types have the same layout
                mem::transmute::<
                    [ChunkProofHexHash; ChunkProof::NUM_HASHES],
                    [[u8; OUT_LEN]; ChunkProof::NUM_HASHES],
                >(ChunkProofHex::deserialize(deserializer)?.0)
            }
        } else {
            ChunkProofBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for ChunkProof {
    #[inline]
    fn default() -> Self {
        Self([[0; OUT_LEN]; ChunkProof::NUM_HASHES])
    }
}

impl AsRef<[u8]> for ChunkProof {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for ChunkProof {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl ChunkProof {
    /// Size of the chunk proof in bytes
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    // One hash per level of the Merkle tree over all s-buckets of a record
    const NUM_HASHES: usize = Record::NUM_S_BUCKETS.ilog2() as usize;
}

#[derive(Debug, Eq, PartialEq, thiserror::Error)]
pub enum SolutionVerifyError {
    #[error("Piece offset {piece_offset} is invalid, must be below {max_pieces_in_sector}")]
    InvalidPieceOffset {
        piece_offset: u16,
        max_pieces_in_sector: u16,
    },
    #[error("History size {solution} is in the future, current is {current}")]
    FutureHistorySize {
        current: HistorySize,
        solution: HistorySize,
    },
    #[error("Sector expired")]
    SectorExpired {
        expiration_history_size: HistorySize,
        current_history_size: HistorySize,
    },
    #[error("Piece verification failed")]
    InvalidPiece,
    #[error("Solution distance {solution_distance} is outside of solution range {solution_range}")]
    OutsideSolutionRange {
        solution_range: SolutionRange,
        solution_distance: SolutionDistance,
    },
    #[error("Invalid proof of space")]
    InvalidProofOfSpace,
    #[error("Invalid shard commitment")]
    InvalidShardCommitment,
    #[error("Invalid input shard {shard_index} ({shard_kind:?})")]
    InvalidInputShard {
        shard_index: ShardIndex,
        shard_kind: Option<ShardKind>,
    },
    #[error(
        "Invalid solution shard {solution_shard_index} (parent {solution_parent_shard_index:?}), \
        expected shard {expected_shard_index} ({expected_shard_kind:?})"
    )]
    InvalidSolutionShard {
        solution_shard_index: ShardIndex,
        solution_parent_shard_index: Option<ShardIndex>,
        expected_shard_index: ShardIndex,
        expected_shard_kind: RealShardKind,
    },
    #[error("Invalid chunk proof")]
    InvalidChunkProof,
    #[error("Invalid history size")]
    InvalidHistorySize,
}

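/// Parameters for checking the piece referenced by a solution against archived history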
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyPieceCheckParams {
    pub max_pieces_in_sector: u16,
    pub segment_root: SegmentRoot,
    pub recent_segments: HistorySize,
    pub recent_history_fraction: (HistorySize, HistorySize),
    pub min_sector_lifetime: HistorySize,
    pub current_history_size: HistorySize,
    pub sector_expiration_check_segment_root: Option<SegmentRoot>,
}

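/// Parameters for solution verification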
#[derive(Debug, Clone)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct SolutionVerifyParams {
    pub shard_index: ShardIndex,
    pub proof_of_time: PotOutput,
    pub solution_range: SolutionRange,
    pub shard_membership_entropy: ShardMembershipEntropy,
    pub num_shards: NumShards,
    pub piece_check_params: Option<SolutionVerifyPieceCheckParams>,
}

/// Hook used by [`Solution::verify`] to check the solution's proof of space
pub trait SolutionPotVerifier {
    /// Check whether the proof is valid for the given seed and s-bucket
    fn is_proof_valid(seed: &PosSeed, s_bucket: SBucket, proof: &PosProof) -> bool;
}

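/// Entropy that is mixed into the derivation of the shard (and shard commitment index) a
/// farmer's solution belongs to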
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct ShardMembershipEntropy([u8; ShardMembershipEntropy::SIZE]);

impl fmt::Display for ShardMembershipEntropy {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ShardMembershipEntropyBinary([u8; ShardMembershipEntropy::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ShardMembershipEntropyHex(#[serde(with = "hex")] [u8; ShardMembershipEntropy::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for ShardMembershipEntropy {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            ShardMembershipEntropyHex(self.0).serialize(serializer)
        } else {
            ShardMembershipEntropyBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ShardMembershipEntropy {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            ShardMembershipEntropyHex::deserialize(deserializer)?.0
        } else {
            ShardMembershipEntropyBinary::deserialize(deserializer)?.0
        }))
    }
}

impl fmt::Debug for ShardMembershipEntropy {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl AsRef<[u8]> for ShardMembershipEntropy {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for ShardMembershipEntropy {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl ShardMembershipEntropy {
    /// Size in bytes
    pub const SIZE: usize = PotOutput::SIZE;

    /// Create a new instance from raw bytes
    #[inline(always)]
    pub const fn new(bytes: [u8; Self::SIZE]) -> Self {
        Self(bytes)
    }

    /// Get the internal byte representation
    #[inline(always)]
    pub const fn as_bytes(&self) -> &[u8; Self::SIZE] {
        &self.0
    }

    /// Convert a slice of the underlying representation into a slice of `Self`
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `ShardMembershipEntropy` is `#[repr(C)]` and contains only `[u8; Self::SIZE]`,
        // so the layouts are identical
        unsafe { mem::transmute(value) }
    }

    /// Convert a slice of `Self` into a slice of the underlying representation
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `ShardMembershipEntropy` is `#[repr(C)]` and contains only `[u8; Self::SIZE]`,
        // so the layouts are identical
        unsafe { mem::transmute(value) }
    }
}

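/// Hash used for nodes of the shard commitment Merkle tree (root, proof elements and leaves)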
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct ShardCommitmentHash([u8; ShardCommitmentHash::SIZE]);

impl fmt::Display for ShardCommitmentHash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ShardCommitmentHashBinary([u8; ShardCommitmentHash::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct ShardCommitmentHashHex(#[serde(with = "hex")] [u8; ShardCommitmentHash::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for ShardCommitmentHash {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            ShardCommitmentHashHex(self.0).serialize(serializer)
        } else {
            ShardCommitmentHashBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for ShardCommitmentHash {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            ShardCommitmentHashHex::deserialize(deserializer)?.0
        } else {
            ShardCommitmentHashBinary::deserialize(deserializer)?.0
        }))
    }
}

impl fmt::Debug for ShardCommitmentHash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl AsRef<[u8]> for ShardCommitmentHash {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for ShardCommitmentHash {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<Hash> for ShardCommitmentHash {
    #[inline(always)]
    fn from(value: Hash) -> Self {
        let bytes = value.as_bytes();
        Self(*bytes)
    }
}

impl ShardCommitmentHash {
    /// Size in bytes
    pub const SIZE: usize = 32;

    /// Create a new instance from raw bytes
    #[inline(always)]
    pub const fn new(hash: [u8; Self::SIZE]) -> Self {
        Self(hash)
    }

    /// Get the internal byte representation
    #[inline(always)]
    pub const fn as_bytes(&self) -> &[u8; Self::SIZE] {
        &self.0
    }

    /// Convert a slice of the underlying representation into a slice of `Self`
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `ShardCommitmentHash` is `#[repr(C)]` and contains only `[u8; Self::SIZE]`,
        // so the layouts are identical
        unsafe { mem::transmute(value) }
    }

    /// Convert an array of the underlying representation into an array of `Self`
    #[inline(always)]
    pub const fn array_from_repr<const N: usize>(value: [[u8; Self::SIZE]; N]) -> [Self; N] {
        // SAFETY: `ShardCommitmentHash` is `#[repr(C)]` and contains only `[u8; Self::SIZE]`,
        // so the layouts are identical (`transmute_copy` is used because `transmute` cannot be
        // applied to const-generic array sizes)
        unsafe { mem::transmute_copy(&value) }
    }

    /// Convert a slice of `Self` into a slice of the underlying representation
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `ShardCommitmentHash` is `#[repr(C)]` and contains only `[u8; Self::SIZE]`,
        // so the layouts are identical
        unsafe { mem::transmute(value) }
    }

    /// Convert an array of `Self` into an array of the underlying representation
    #[inline(always)]
    pub const fn repr_from_array<const N: usize>(value: [Self; N]) -> [[u8; Self::SIZE]; N] {
        // SAFETY: `ShardCommitmentHash` is `#[repr(C)]` and contains only `[u8; Self::SIZE]`,
        // so the layouts are identical
        unsafe { mem::transmute_copy(&value) }
    }
}

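/// Shard commitment included with a solution: the committed Merkle root together with the proof
/// and leaf for the commitment index derived during verification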
#[derive(Clone, Copy, Debug, Eq, PartialEq, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SolutionShardCommitment {
    /// Merkle root of the shard commitment
    pub root: ShardCommitmentHash,
    /// Merkle proof for `leaf`
    pub proof: [ShardCommitmentHash; SolutionShardCommitment::NUM_LEAVES.ilog2() as usize],
    /// Committed leaf
    pub leaf: ShardCommitmentHash,
}

impl SolutionShardCommitment {
    /// Number of leaves in the shard commitment Merkle tree (2^20)
    pub const NUM_LEAVES: usize = 2_u32.pow(20) as usize;
}

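/// Farmer's solution for the slot challenge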
#[derive(Clone, Copy, Debug, Eq, PartialEq, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct Solution {
    /// Hash of the public key of the farmer that created the solution
    pub public_key_hash: Blake3Hash,
    /// Shard commitment of the farmer
    pub shard_commitment: SolutionShardCommitment,
    /// Record root of the piece, used to verify its inclusion in blockchain history
    pub record_root: RecordRoot,
    /// Proof for the above record root against the segment root
    pub record_proof: RecordProof,
    /// Audited chunk of the record
    pub chunk: RecordChunk,
    /// Merkle proof of the chunk against the record root
    pub chunk_proof: ChunkProof,
    /// Proof of space for this solution
    pub proof_of_space: PosProof,
    /// Size of the blockchain history at the time the sector was created
    pub history_size: HistorySize,
    /// Index of the sector where the solution was found
    pub sector_index: SectorIndex,
    /// Offset of the piece within the sector
    pub piece_offset: PieceOffset,
    /// Explicit padding to avoid implicit padding bytes in the `#[repr(C)]` layout
    pub padding: [u8; 4],
}

impl Solution {
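    /// Dummy solution for the genesis block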
    pub fn genesis_solution() -> Self {
        Self {
            public_key_hash: Ed25519PublicKey::default().hash(),
            shard_commitment: SolutionShardCommitment {
                root: Default::default(),
                proof: [Default::default(); _],
                leaf: Default::default(),
            },
            record_root: RecordRoot::default(),
            record_proof: RecordProof::default(),
            chunk: RecordChunk::default(),
            chunk_proof: ChunkProof::default(),
            proof_of_space: PosProof::default(),
            history_size: HistorySize::from(SegmentIndex::ZERO),
            sector_index: SectorIndex::ZERO,
            piece_offset: PieceOffset::default(),
            padding: [0; _],
        }
    }

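    /// Check whether the solution is valid under the given parameters.
    ///
    /// Verification proceeds roughly as follows: derive the shard and shard commitment index for
    /// this solution and check them against the input shard, verify the shard commitment Merkle
    /// proof, verify the proof of space, check that the solution distance of the masked chunk is
    /// within the (shard-adjusted) solution range, verify the chunk proof against the record
    /// root and, when piece check parameters are provided, verify the piece itself (history
    /// size, piece offset, sector expiration and the record proof against the segment root).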
    pub fn verify<PotVerifier>(
        &self,
        slot: SlotNumber,
        params: &SolutionVerifyParams,
    ) -> Result<(), SolutionVerifyError>
    where
        PotVerifier: SolutionPotVerifier,
    {
        let SolutionVerifyParams {
            shard_index,
            proof_of_time,
            solution_range,
            shard_membership_entropy,
            num_shards,
            piece_check_params,
        } = params;

        let shard_kind = shard_index
            .shard_kind()
            .and_then(|shard_kind| shard_kind.to_real())
            .ok_or(SolutionVerifyError::InvalidInputShard {
                shard_index: *shard_index,
                shard_kind: shard_index.shard_kind(),
            })?;

        let (solution_shard_index, shard_commitment_index) = num_shards
            .derive_shard_index_and_shard_commitment_index(
                &self.public_key_hash,
                &self.shard_commitment.root,
                shard_membership_entropy,
                self.history_size,
            );

        // Adjust the solution range to the kind of shard the solution is being verified against
        let solution_range = match shard_kind {
            RealShardKind::BeaconChain => *solution_range,
            RealShardKind::IntermediateShard => {
                if solution_shard_index.parent_shard() != Some(*shard_index) {
                    return Err(SolutionVerifyError::InvalidSolutionShard {
                        solution_shard_index,
                        solution_parent_shard_index: solution_shard_index.parent_shard(),
                        expected_shard_index: *shard_index,
                        expected_shard_kind: RealShardKind::IntermediateShard,
                    });
                }

                solution_range.to_intermediate_shard(*num_shards)
            }
            RealShardKind::LeafShard => {
                if solution_shard_index != *shard_index {
                    return Err(SolutionVerifyError::InvalidSolutionShard {
                        solution_shard_index,
                        solution_parent_shard_index: solution_shard_index.parent_shard(),
                        expected_shard_index: *shard_index,
                        expected_shard_kind: RealShardKind::LeafShard,
                    });
                }

                solution_range.to_leaf_shard(*num_shards)
            }
        };

        // The tree width below is a literal, make sure it stays in sync with `NUM_LEAVES`
        const {
            assert!(SolutionShardCommitment::NUM_LEAVES == 1048576);
        }
        if !BalancedMerkleTree::<1048576>::verify(
            &self.shard_commitment.root,
            &ShardCommitmentHash::repr_from_array(self.shard_commitment.proof),
            shard_commitment_index as usize,
            *self.shard_commitment.leaf,
        ) {
            return Err(SolutionVerifyError::InvalidShardCommitment);
        }

        let sector_id = SectorId::new(
            &self.public_key_hash,
            &self.shard_commitment.root,
            self.sector_index,
            self.history_size,
        );

        let global_challenge = proof_of_time.derive_global_challenge(slot);
        let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
        let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

        // Check that the proof of space is valid for the audited s-bucket
        if !PotVerifier::is_proof_valid(
            &sector_id.derive_evaluation_seed(self.piece_offset),
            s_bucket_audit_index,
            &self.proof_of_space,
        ) {
            return Err(SolutionVerifyError::InvalidProofOfSpace);
        };

        let masked_chunk =
            (Simd::from(*self.chunk) ^ Simd::from(*self.proof_of_space.hash())).to_array();

        let solution_distance =
            SolutionDistance::calculate(&global_challenge, &masked_chunk, &sector_slot_challenge);

        if !solution_distance.is_within(solution_range) {
            return Err(SolutionVerifyError::OutsideSolutionRange {
                solution_range,
                solution_distance,
            });
        }

        // The tree width below is a literal, make sure it stays in sync with `NUM_S_BUCKETS`
        const {
            assert!(Record::NUM_S_BUCKETS == 65536);
        }
        // Check that the audited chunk belongs to the record
        if !BalancedMerkleTree::<65536>::verify(
            &self.record_root,
            &self.chunk_proof,
            usize::from(s_bucket_audit_index),
            *self.chunk,
        ) {
            return Err(SolutionVerifyError::InvalidChunkProof);
        }

        if let Some(SolutionVerifyPieceCheckParams {
            max_pieces_in_sector,
            segment_root,
            recent_segments,
            recent_history_fraction,
            min_sector_lifetime,
            current_history_size,
            sector_expiration_check_segment_root,
        }) = piece_check_params
        {
            if &self.history_size > current_history_size {
                return Err(SolutionVerifyError::FutureHistorySize {
                    current: *current_history_size,
                    solution: self.history_size,
                });
            }

            if u16::from(self.piece_offset) >= *max_pieces_in_sector {
                return Err(SolutionVerifyError::InvalidPieceOffset {
                    piece_offset: u16::from(self.piece_offset),
                    max_pieces_in_sector: *max_pieces_in_sector,
                });
            }

            if let Some(sector_expiration_check_segment_root) = sector_expiration_check_segment_root
            {
                let expiration_history_size = match sector_id.derive_expiration_history_size(
                    self.history_size,
                    sector_expiration_check_segment_root,
                    *min_sector_lifetime,
                ) {
                    Some(expiration_history_size) => expiration_history_size,
                    None => {
                        return Err(SolutionVerifyError::InvalidHistorySize);
                    }
                };

                if expiration_history_size <= *current_history_size {
                    return Err(SolutionVerifyError::SectorExpired {
                        expiration_history_size,
                        current_history_size: *current_history_size,
                    });
                }
            }

            let position = sector_id
                .derive_piece_index(
                    self.piece_offset,
                    self.history_size,
                    *max_pieces_in_sector,
                    *recent_segments,
                    *recent_history_fraction,
                )
                .position();

            // Check that the piece is part of the archived history under `segment_root`
            if !self
                .record_root
                .is_valid(segment_root, &self.record_proof, position)
            {
                return Err(SolutionVerifyError::InvalidPiece);
            }
        }

        Ok(())
    }
}
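
// Minimal sanity checks for the solution range/distance arithmetic above (an illustrative
// sketch that only exercises APIs defined in this module, not an exhaustive test suite).
#[cfg(test)]
mod solution_range_tests {
    use super::*;

    #[test]
    fn bidirectional_distance_is_symmetric_and_wraps() {
        let a = SolutionRange::new(10);
        let b = SolutionRange::new(u64::MAX - 10);
        // The distance is the same regardless of operand order
        assert_eq!(a.bidirectional_distance(b), b.bidirectional_distance(a));
        // The distance wraps around the `u64` ring: 10 and `u64::MAX - 10` are 21 apart
        assert_eq!(a.bidirectional_distance(b), SolutionDistance::from_u64(21));
    }

    #[test]
    fn solution_distance_within_range() {
        let range = SolutionRange::new(100);
        // A distance qualifies when it is at most half of the solution range
        assert!(SolutionDistance::from_u64(50).is_within(range));
        assert!(!SolutionDistance::from_u64(51).is_within(range));
    }
}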