// ab_core_primitives/segments.rs

1//! Segments-related data structures
2
3#[cfg(feature = "alloc")]
4mod archival_history_segment;
5
6use crate::block::BlockNumber;
7use crate::hashes::Blake3Hash;
8use crate::pieces::{PieceIndex, Record, SegmentProof};
9#[cfg(feature = "alloc")]
10pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
11use crate::shard::ShardIndex;
12use ab_blake3::{single_block_hash, single_chunk_hash};
13use ab_io_type::trivial_type::TrivialType;
14use ab_io_type::unaligned::Unaligned;
15use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
16#[cfg(feature = "alloc")]
17use alloc::boxed::Box;
18#[cfg(feature = "alloc")]
19use alloc::sync::Arc as StdArc;
20use blake3::{CHUNK_LEN, OUT_LEN};
21use core::iter::Step;
22use core::num::{NonZeroU32, NonZeroU64};
23use core::{fmt, mem};
24use derive_more::{
25    Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
26    SubAssign,
27};
28#[cfg(feature = "scale-codec")]
29use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
30#[cfg(feature = "serde")]
31use serde::{Deserialize, Deserializer, Serialize, Serializer};
32#[cfg(feature = "serde")]
33use serde_big_array::BigArray;
34
/// Super segment index
///
/// Monotonic counter of super segments; newtype over `u64`. The arithmetic derives
/// (`Add`, `Sub`, `Mul`, `Div`, ...) operate directly on the wrapped integer.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SuperSegmentIndex(u64);
61
62impl Step for SuperSegmentIndex {
63    #[inline]
64    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
65        u64::steps_between(&start.0, &end.0)
66    }
67
68    #[inline]
69    fn forward_checked(start: Self, count: usize) -> Option<Self> {
70        u64::forward_checked(start.0, count).map(Self)
71    }
72
73    #[inline]
74    fn backward_checked(start: Self, count: usize) -> Option<Self> {
75        u64::backward_checked(start.0, count).map(Self)
76    }
77}
78
// Zero-cost const conversion from the underlying `u64`
impl const From<u64> for SuperSegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}
85
// Zero-cost const conversion into the underlying `u64`
impl const From<SuperSegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: SuperSegmentIndex) -> Self {
        value.0
    }
}
92
93impl SuperSegmentIndex {
94    /// Super segment index 0
95    pub const ZERO: Self = Self(0);
96    /// Super segment index 1
97    pub const ONE: Self = Self(1);
98
99    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred
100    #[inline]
101    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
102        self.0.checked_sub(rhs.0).map(Self)
103    }
104
105    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
106    /// occurred
107    #[inline]
108    pub const fn saturating_sub(self, rhs: Self) -> Self {
109        Self(self.0.saturating_sub(rhs.0))
110    }
111}
112
/// Super segment root contained within a beacon chain block
///
/// 32-byte Merkle root over the segment roots included in a super segment.
// `Default` is not derived; a `const Default` impl is provided manually below
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SuperSegmentRoot([u8; SuperSegmentRoot::SIZE]);
118
119impl fmt::Debug for SuperSegmentRoot {
120    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
121        for byte in self.0 {
122            write!(f, "{byte:02x}")?;
123        }
124        Ok(())
125    }
126}
127
// All-zero root usable in const context
impl const Default for SuperSegmentRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}
134
// Raw-byte serde form used for non-human-readable serializers
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootBinary(#[serde(with = "BigArray")] [u8; SuperSegmentRoot::SIZE]);
139
// Hex-string serde form used for human-readable serializers (e.g. JSON)
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootHex(#[serde(with = "hex")] [u8; SuperSegmentRoot::SIZE]);
144
#[cfg(feature = "serde")]
impl Serialize for SuperSegmentRoot {
    // Human-readable formats get hex, binary formats get raw bytes
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            SuperSegmentRootHex(self.0).serialize(serializer)
        } else {
            SuperSegmentRootBinary(self.0).serialize(serializer)
        }
    }
}
159
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SuperSegmentRoot {
    // Mirrors `Serialize`: hex for human-readable formats, raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            SuperSegmentRootHex::deserialize(deserializer)?.0
        } else {
            SuperSegmentRootBinary::deserialize(deserializer)?.0
        }))
    }
}
174
// Borrow the root as raw bytes
impl AsRef<[u8]> for SuperSegmentRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
181
// Mutably borrow the root as raw bytes
impl AsMut<[u8]> for SuperSegmentRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
188
impl SuperSegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;
    /// The maximum number of segments in a super segment's Merkle Tree.
    ///
    /// `-1` to minimize the number of bits needed to represent it (exactly 20).
    // 2^20 - 1 = 1048575; this value is hardcoded in `SuperSegment` due to a compiler workaround
    pub const MAX_SEGMENTS: u32 = 2u32.pow(20) - 1;
}
197
/// Segment position in a super segment
///
/// Used as the Merkle leaf index when producing and verifying segment proofs.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentPosition(u32);
218
// Lossless widening conversion of the wrapped `u32`
impl From<SegmentPosition> for u64 {
    #[inline]
    fn from(original: SegmentPosition) -> Self {
        Self::from(original.0)
    }
}
225
impl SegmentPosition {
    /// Zero position
    ///
    /// The position of the first segment in a super segment
    pub const ZERO: Self = Self(0);
}
230
/// Shard segment root with position
///
/// NOTE: the `#[repr(C)]` byte layout of this struct is hashed directly as a Merkle Tree leaf
/// (see [`Self::hash`]), so field order is significant.
#[derive(Debug, Clone, Copy, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ShardSegmentRootWithPosition {
    /// Shard index
    pub shard_index: ShardIndex,
    /// Position of the segment in the super segment
    pub segment_position: SegmentPosition,
    /// Local segment index
    pub local_segment_index: LocalSegmentIndex,
    /// Segment root
    pub segment_root: SegmentRoot,
}
247
impl ShardSegmentRootWithPosition {
    /// Hash for super segment creation
    ///
    /// Used as a Merkle Tree leaf when deriving the super segment root and in proof
    /// verification, so both sides must use this exact hashing.
    #[inline(always)]
    pub fn hash(&self) -> [u8; OUT_LEN] {
        // The struct's raw bytes fit into a single BLAKE3 block, hence `single_block_hash`
        single_block_hash(self.as_bytes()).expect("Less than a single block worth of bytes; qed")
    }
}
255
/// Super segment header
///
/// Headers form a hash chain via `prev_super_segment_header_hash`.
// NOTE(review): `Unaligned` wrappers presumably keep the `#[repr(C)]` layout free of padding
// so `as_bytes()` hashing is stable — confirm against `ab_io_type::unaligned` docs
#[derive(Debug, Clone, Copy, Eq, PartialEq, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SuperSegmentHeader {
    /// Super segment index
    pub index: Unaligned<SuperSegmentIndex>,
    /// Super segment root
    pub root: SuperSegmentRoot,
    /// Hash of the previous super segment header
    pub prev_super_segment_header_hash: Blake3Hash,
    /// Max index of the segment in the super segment
    pub max_segment_index: Unaligned<SegmentIndex>,
    /// Target beacon chain block number for the super segment
    pub target_beacon_chain_block_number: Unaligned<BlockNumber>,
    // TODO: New type?
    /// Number of segments in the super segment
    pub num_segments: u32,
}
277
/// Super segment
///
/// Pairs a derived header with the segment roots it commits to. `Arc`-backed so clones are cheap.
#[cfg(feature = "alloc")]
#[derive(Debug, Clone)]
// TODO: Implement SCALE serialization/deserialization manually (if necessary at all)
// #[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub struct SuperSegment {
    /// Super segment header
    pub header: SuperSegmentHeader,
    /// Segment roots that are included in the super segment
    pub segment_roots: StdArc<[ShardSegmentRootWithPosition]>,
}
291
#[cfg(feature = "alloc")]
impl SuperSegment {
    /// Create a new instance and derive super segment root.
    ///
    /// Returns `None` if the list of segment roots is empty or there are too many of them.
    pub fn new(
        previous_header: &SuperSegmentHeader,
        target_beacon_chain_block_number: BlockNumber,
        segment_roots: StdArc<[ShardSegmentRootWithPosition]>,
    ) -> Option<Self> {
        let num_segments = u32::try_from(segment_roots.len()).ok()?;
        // The max segment index advances by the number of segments included here
        let max_segment_index = SegmentIndex::from(
            u64::from(previous_header.max_segment_index.as_inner()) + u64::from(num_segments),
        );

        // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866 that
        //  allows the code to compile. Constant 1048575 is hardcoded here and below for compilation
        //  to succeed.
        const {
            assert!(SuperSegmentRoot::MAX_SEGMENTS == 1048575);
        }
        // TODO: Keyed hash
        // Leaves are produced by `ShardSegmentRootWithPosition::hash`; proof generation and
        // verification must hash leaves identically
        let maybe_super_segment_root = UnbalancedMerkleTree::compute_root_only::<1048575, _, _>(
            segment_roots.iter().map(ShardSegmentRootWithPosition::hash),
        )?;

        Some(Self {
            header: SuperSegmentHeader {
                index: (previous_header.index.as_inner() + SuperSegmentIndex::ONE).into(),
                root: SuperSegmentRoot::from(maybe_super_segment_root),
                // Chain to the previous header by hashing its raw bytes
                prev_super_segment_header_hash: Blake3Hash::from(
                    single_chunk_hash(previous_header.as_bytes())
                        .expect("Less than a single chunk worth of bytes; qed"),
                ),
                max_segment_index: max_segment_index.into(),
                target_beacon_chain_block_number: target_beacon_chain_block_number.into(),
                num_segments,
            },
            segment_roots,
        })
    }

    /// Produce a proof for a segment in the super segment at a given position
    ///
    /// Returns `None` if the position is not covered by this super segment.
    pub fn proof_for_segment(&self, segment_position: SegmentPosition) -> Option<SegmentProof> {
        // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866 that
        //  allows the code to compile. Constant 1048575 is hardcoded here and below for compilation
        //  to succeed.
        const {
            assert!(SuperSegmentRoot::MAX_SEGMENTS == 1048575);
        }
        // TODO: Keyed hash
        let mut segment_proof = SegmentProof::default();
        // Hash leaves with the same shared method as `new` so proofs verify against the root
        UnbalancedMerkleTree::compute_root_and_proof_in::<1048575, _, _>(
            self.segment_roots
                .iter()
                .map(ShardSegmentRootWithPosition::hash),
            u32::from(segment_position) as usize,
            segment_proof.as_uninit_repr(),
        )?;

        Some(segment_proof)
    }
}
356
/// Local segment index of a shard
///
/// Counts segments within a single shard; newtype over `u64` with arithmetic derives
/// operating on the wrapped integer.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct LocalSegmentIndex(u64);
383
384impl Step for LocalSegmentIndex {
385    #[inline]
386    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
387        u64::steps_between(&start.0, &end.0)
388    }
389
390    #[inline]
391    fn forward_checked(start: Self, count: usize) -> Option<Self> {
392        u64::forward_checked(start.0, count).map(Self)
393    }
394
395    #[inline]
396    fn backward_checked(start: Self, count: usize) -> Option<Self> {
397        u64::backward_checked(start.0, count).map(Self)
398    }
399}
400
// Zero-cost const conversion from the underlying `u64`
impl const From<u64> for LocalSegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}
407
// Zero-cost const conversion into the underlying `u64`
impl const From<LocalSegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: LocalSegmentIndex) -> Self {
        value.0
    }
}
414
415impl LocalSegmentIndex {
416    /// Local segment index 0
417    pub const ZERO: Self = Self(0);
418    /// Local segment index 1
419    pub const ONE: Self = Self(1);
420
421    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred
422    #[inline]
423    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
424        self.0.checked_sub(rhs.0).map(Self)
425    }
426
427    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
428    /// occurred
429    #[inline]
430    pub const fn saturating_sub(self, rhs: Self) -> Self {
431        Self(self.0.saturating_sub(rhs.0))
432    }
433}
434
/// Segment index
///
/// Global (cross-shard) segment counter; newtype over `u64` with arithmetic derives
/// operating on the wrapped integer.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);
461
462impl Step for SegmentIndex {
463    #[inline]
464    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
465        u64::steps_between(&start.0, &end.0)
466    }
467
468    #[inline]
469    fn forward_checked(start: Self, count: usize) -> Option<Self> {
470        u64::forward_checked(start.0, count).map(Self)
471    }
472
473    #[inline]
474    fn backward_checked(start: Self, count: usize) -> Option<Self> {
475        u64::backward_checked(start.0, count).map(Self)
476    }
477}
478
// Zero-cost const conversion from the underlying `u64`
impl const From<u64> for SegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}
485
// Zero-cost const conversion into the underlying `u64`
impl const From<SegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: SegmentIndex) -> Self {
        value.0
    }
}
492
493impl SegmentIndex {
494    /// Segment index 0
495    pub const ZERO: Self = Self(0);
496    /// Segment index 1
497    pub const ONE: Self = Self(1);
498
499    /// Get the first piece index in this segment
500    #[inline]
501    pub const fn first_piece_index(&self) -> PieceIndex {
502        PieceIndex::from(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
503    }
504
505    /// Get the last piece index in this segment
506    #[inline]
507    pub const fn last_piece_index(&self) -> PieceIndex {
508        PieceIndex::from((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
509    }
510
511    /// List of piece indexes that belong to this segment
512    #[inline]
513    pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
514        let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
515        (self.first_piece_index()..=self.last_piece_index())
516            .zip(&mut piece_indices)
517            .for_each(|(input, output)| {
518                *output = input;
519            });
520
521        piece_indices
522    }
523
524    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred
525    #[inline]
526    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
527        self.0.checked_sub(rhs.0).map(Self)
528    }
529
530    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
531    /// occurred
532    #[inline]
533    pub const fn saturating_sub(self, rhs: Self) -> Self {
534        Self(self.0.saturating_sub(rhs.0))
535    }
536}
537
/// Segment root contained within a segment
///
/// 32-byte Merkle root over the records of a single segment.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);
543
impl SegmentRoot {
    /// Check whether a segment root is a part of the super segment
    ///
    /// Reconstructs the Merkle Tree leaf from the provided metadata and verifies it against
    /// `super_segment_root` using `segment_proof`.
    pub fn is_valid(
        &self,
        shard_index: ShardIndex,
        local_segment_index: LocalSegmentIndex,
        segment_position: SegmentPosition,
        segment_proof: &SegmentProof,
        num_segments: u32,
        super_segment_root: &SuperSegmentRoot,
    ) -> bool {
        // Rebuild the exact leaf struct that was hashed during super segment creation
        let shard_segment_root = ShardSegmentRootWithPosition {
            shard_index,
            segment_position,
            local_segment_index,
            segment_root: *self,
        };
        // The proof is fixed size and contains zero padding elements, which must be skipped for
        // verification purposes
        // NOTE(review): truncates at the first all-zero hash — assumes a genuine proof element
        // is never all zeros; confirm with the Merkle tree implementation
        let segment_proof = segment_proof
            .split_once(|hash| hash == &[0; _])
            .map_or(segment_proof.as_slice(), |(before, _after)| before);
        UnbalancedMerkleTree::verify(
            super_segment_root,
            segment_proof,
            u64::from(segment_position),
            shard_segment_root.hash(),
            u64::from(num_segments),
        )
    }
}
575
576impl fmt::Debug for SegmentRoot {
577    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
578        for byte in self.0 {
579            write!(f, "{byte:02x}")?;
580        }
581        Ok(())
582    }
583}
584
// Raw-byte serde form used for non-human-readable serializers
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);
589
// Hex-string serde form used for human-readable serializers (e.g. JSON)
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);
594
#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    // Human-readable formats get hex, binary formats get raw bytes
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            SegmentRootHex(self.0).serialize(serializer)
        } else {
            SegmentRootBinary(self.0).serialize(serializer)
        }
    }
}
609
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    // Mirrors `Serialize`: hex for human-readable formats, raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            SegmentRootHex::deserialize(deserializer)?.0
        } else {
            SegmentRootBinary::deserialize(deserializer)?.0
        }))
    }
}
624
625impl Default for SegmentRoot {
626    #[inline(always)]
627    fn default() -> Self {
628        Self([0; Self::SIZE])
629    }
630}
631
// Borrow the root as raw bytes
impl AsRef<[u8]> for SegmentRoot {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
638
// Mutably borrow the root as raw bytes
impl AsMut<[u8]> for SegmentRoot {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
645
impl SegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from a slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` and guaranteed to have the same memory layout
        // as its single `[u8; Self::SIZE]` field, so the slice types are layout-identical
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion to a slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` and guaranteed to have the same memory layout
        // as its single `[u8; Self::SIZE]` field, so the slice types are layout-identical
        unsafe { mem::transmute(value) }
    }
}
664
/// Size of blockchain history in segments
///
/// Offset-by-one encoding: the wrapped `SegmentIndex` stores `size - 1`, which guarantees the
/// external value is always non-zero (see `new`/`as_non_zero_u64`).
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
// Storing `SegmentIndex` to make all invariants valid
pub struct HistorySize(SegmentIndex);
687
688impl HistorySize {
689    /// History size of one
690    pub const ONE: Self = Self(SegmentIndex::ZERO);
691
692    /// Create a new instance
693    #[inline(always)]
694    pub const fn new(value: NonZeroU64) -> Self {
695        Self(SegmentIndex::from(value.get() - 1))
696    }
697
698    /// Get internal representation
699    pub const fn as_segment_index(&self) -> SegmentIndex {
700        self.0
701    }
702
703    /// Get internal representation
704    pub const fn as_non_zero_u64(&self) -> NonZeroU64 {
705        NonZeroU64::new(u64::from(self.0).saturating_add(1)).expect("Not zero; qed")
706    }
707
708    /// Size of blockchain history in pieces
709    #[inline(always)]
710    pub const fn in_pieces(&self) -> NonZeroU64 {
711        NonZeroU64::new(
712            u64::from(self.0)
713                .saturating_add(1)
714                .saturating_mul(RecordedHistorySegment::NUM_PIECES as u64),
715        )
716        .expect("Not zero; qed")
717    }
718
719    /// Segment index that corresponds to this history size
720    #[inline(always)]
721    pub fn segment_index(&self) -> SegmentIndex {
722        self.0
723    }
724
725    /// History size at which expiration check for a sector happens.
726    ///
727    /// Returns `None` on overflow.
728    #[inline(always)]
729    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
730        self.as_non_zero_u64()
731            .checked_add(min_sector_lifetime.as_non_zero_u64().get())
732            .map(Self::new)
733    }
734}
735
/// Progress of an archived block.
///
/// `bytes == 0` encodes a fully archived block; any non-zero value is a partial byte count
/// (see `new_complete`/`new_partial`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    /// Number of partially archived bytes of a block, `0` for a full block
    bytes: u32,
}
746
impl Default for ArchivedBlockProgress {
    /// We assume a block can always fit into the segment initially, but it is definitely possible
    /// to be transitioned into the partial state after some overflow checking.
    #[inline(always)]
    fn default() -> Self {
        Self::new_complete()
    }
}
755
impl ArchivedBlockProgress {
    /// Block is archived fully
    ///
    /// Complete progress is encoded as `bytes == 0`
    #[inline(always)]
    pub const fn new_complete() -> Self {
        Self { bytes: 0 }
    }

    /// Block is partially archived with the provided number of bytes
    ///
    /// `NonZeroU32` keeps the partial state distinguishable from the complete (`0`) encoding
    #[inline(always)]
    pub const fn new_partial(new_partial: NonZeroU32) -> Self {
        Self {
            bytes: new_partial.get(),
        }
    }

    /// Return the number of partially archived bytes if the progress is not complete
    #[inline(always)]
    pub const fn partial(&self) -> Option<NonZeroU32> {
        // `None` when `bytes == 0`, i.e. when the block is complete
        NonZeroU32::new(self.bytes)
    }
}
777
/// Last archived block
///
/// Identifies the newest block covered by a segment and how much of it was archived.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct LastArchivedBlock {
    /// Block number
    pub number: Unaligned<BlockNumber>,
    /// Progress of an archived block
    pub archived_progress: ArchivedBlockProgress,
}
790
impl LastArchivedBlock {
    /// Returns the number of partially archived bytes for a block
    ///
    /// `None` means the block was archived fully
    #[inline(always)]
    pub fn partial_archived(&self) -> Option<NonZeroU32> {
        self.archived_progress.partial()
    }

    /// Sets the number of partially archived bytes if block progress was archived partially
    #[inline(always)]
    pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
        self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
    }

    /// Indicate the last archived block was archived fully
    #[inline(always)]
    pub fn set_complete(&mut self) {
        self.archived_progress = ArchivedBlockProgress::new_complete();
    }

    /// Get the block number (unwrap `Unaligned`)
    // Copies the value out of the `Unaligned` wrapper
    pub const fn number(&self) -> BlockNumber {
        self.number.as_inner()
    }
}
815
/// Segment header for a specific segment of a shard
///
/// Headers form a hash chain via `prev_segment_header_hash` (see [`Self::hash`]).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    /// Local segment index
    pub index: Unaligned<LocalSegmentIndex>,
    /// Root of roots of all records in a segment.
    pub root: SegmentRoot,
    /// Hash of the segment header of the previous segment
    pub prev_segment_header_hash: Blake3Hash,
    /// Last archived block
    pub last_archived_block: LastArchivedBlock,
}
832
impl SegmentHeader {
    /// Hash of the whole segment header
    ///
    /// Hashes the raw `#[repr(C)]` bytes of the header with BLAKE3.
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        // Compile-time guarantee that the header's raw bytes fit into a single BLAKE3 chunk,
        // which `single_chunk_hash` requires
        const {
            assert!(size_of::<Self>() <= CHUNK_LEN);
        }
        Blake3Hash::new(
            single_chunk_hash(self.as_bytes())
                .expect("Less than a single chunk worth of bytes; qed"),
        )
    }
}
846
/// Recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
/// Use [`Self::new_boxed`] to allocate it on the heap instead.
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);
853
impl fmt::Debug for RecordedHistorySegment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Intentionally omits the record contents: the payload is far too large to print
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}
860
// Borrow the whole segment as one contiguous byte slice
impl AsRef<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        // Two `as_flattened` calls: records -> nested byte arrays -> flat bytes
        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
    }
}
867
// Mutably borrow the whole segment as one contiguous byte slice
impl AsMut<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        // Two `as_flattened_mut` calls: records -> nested byte arrays -> flat bytes
        Record::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}
876
impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate for records during the archiving process
    ///
    /// `(1, 2)` means every source record yields one extra parity record
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Number of pieces in one segment of archived history (taking erasure coding rate into
    /// account)
    // 128 * 2 / 1 = 256 pieces per segment
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of recorded history segment in bytes.
    ///
    /// It includes half of the records (just source records) that will later be erasure coded and
    /// together with corresponding roots and proofs will result in
    /// [`Self::NUM_PIECES`] `Piece`s of archival history.
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}