ab_core_primitives/
segments.rs

1//! Segments-related data structures
2
3#[cfg(feature = "alloc")]
4mod archival_history_segment;
5
6use crate::block::BlockNumber;
7use crate::hashes::Blake3Hash;
8use crate::pieces::{PieceIndex, Record};
9#[cfg(feature = "alloc")]
10pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
11use ab_blake3::single_chunk_hash;
12use ab_io_type::trivial_type::TrivialType;
13use ab_io_type::unaligned::Unaligned;
14#[cfg(feature = "alloc")]
15use alloc::boxed::Box;
16use blake3::CHUNK_LEN;
17use core::iter::Step;
18use core::num::{NonZeroU32, NonZeroU64};
19use core::{fmt, mem};
20use derive_more::{
21    Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
22    SubAssign,
23};
24#[cfg(feature = "scale-codec")]
25use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
26#[cfg(feature = "serde")]
27use serde::{Deserialize, Deserializer, Serialize, Serializer};
28#[cfg(feature = "serde")]
29use serde_big_array::BigArray;
30
/// Super segment root contained within a beacon chain block
///
/// A 32-byte root (see [`SuperSegmentRoot::SIZE`]) wrapping a plain byte array; `Deref`/`From`
/// conversions are derived for convenient access to the underlying bytes
// `#[repr(C)]` + `TrivialType` allow treating the struct as raw bytes
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SuperSegmentRoot([u8; SuperSegmentRoot::SIZE]);
36
37impl fmt::Debug for SuperSegmentRoot {
38    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
39        for byte in self.0 {
40            write!(f, "{byte:02x}")?;
41        }
42        Ok(())
43    }
44}
45
// Private serde mirror types: binary formats get the raw byte array, human-readable formats get
// a hex string. `#[serde(transparent)]` keeps the wire format identical to the inner array.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootBinary(#[serde(with = "BigArray")] [u8; SuperSegmentRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootHex(#[serde(with = "hex")] [u8; SuperSegmentRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for SuperSegmentRoot {
    /// Hex string for human-readable serializers, raw bytes otherwise
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match serializer.is_human_readable() {
            true => SuperSegmentRootHex(self.0).serialize(serializer),
            false => SuperSegmentRootBinary(self.0).serialize(serializer),
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SuperSegmentRoot {
    /// Mirrors [`Serialize`]: hex string for human-readable formats, raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = match deserializer.is_human_readable() {
            true => SuperSegmentRootHex::deserialize(deserializer)?.0,
            false => SuperSegmentRootBinary::deserialize(deserializer)?.0,
        };
        Ok(Self(bytes))
    }
}
85
86impl Default for SuperSegmentRoot {
87    #[inline]
88    fn default() -> Self {
89        Self([0; Self::SIZE])
90    }
91}
92
93impl AsRef<[u8]> for SuperSegmentRoot {
94    #[inline]
95    fn as_ref(&self) -> &[u8] {
96        &self.0
97    }
98}
99
100impl AsMut<[u8]> for SuperSegmentRoot {
101    #[inline]
102    fn as_mut(&mut self) -> &mut [u8] {
103        &mut self.0
104    }
105}
106
impl SuperSegmentRoot {
    /// Size of the root in bytes
    pub const SIZE: usize = 32;
}
111
/// Local segment index of a shard
///
/// Thin wrapper around `u64`; the derived arithmetic operators behave exactly like the underlying
/// integer (including its overflow semantics)
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct LocalSegmentIndex(u64);
140
141impl Step for LocalSegmentIndex {
142    #[inline]
143    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
144        u64::steps_between(&start.0, &end.0)
145    }
146
147    #[inline]
148    fn forward_checked(start: Self, count: usize) -> Option<Self> {
149        u64::forward_checked(start.0, count).map(Self)
150    }
151
152    #[inline]
153    fn backward_checked(start: Self, count: usize) -> Option<Self> {
154        u64::backward_checked(start.0, count).map(Self)
155    }
156}
157
158// TODO: This is a massive hack that is only present temporarily before super segments are really a
159//  thing
160impl From<LocalSegmentIndex> for SegmentIndex {
161    fn from(value: LocalSegmentIndex) -> Self {
162        Self(value.0)
163    }
164}
165
166// TODO: This is a massive hack that is only present temporarily before super segments are really a
167//  thing
168impl From<SegmentIndex> for LocalSegmentIndex {
169    fn from(value: SegmentIndex) -> Self {
170        Self(value.0)
171    }
172}
173
174impl LocalSegmentIndex {
175    /// Local segment index 0
176    pub const ZERO: Self = Self(0);
177    /// Local segment index 1
178    pub const ONE: Self = Self(1);
179
180    /// Create a new instance
181    #[inline]
182    pub const fn new(n: u64) -> Self {
183        Self(n)
184    }
185
186    /// Get internal representation
187    #[inline(always)]
188    pub const fn as_u64(self) -> u64 {
189        self.0
190    }
191
192    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred
193    #[inline]
194    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
195        self.0.checked_sub(rhs.0).map(Self)
196    }
197
198    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
199    /// occurred
200    #[inline]
201    pub const fn saturating_sub(self, rhs: Self) -> Self {
202        Self(self.0.saturating_sub(rhs.0))
203    }
204}
205
/// Segment index
///
/// Thin wrapper around `u64`; the derived arithmetic operators behave exactly like the underlying
/// integer (including its overflow semantics)
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);
234
235impl Step for SegmentIndex {
236    #[inline]
237    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
238        u64::steps_between(&start.0, &end.0)
239    }
240
241    #[inline]
242    fn forward_checked(start: Self, count: usize) -> Option<Self> {
243        u64::forward_checked(start.0, count).map(Self)
244    }
245
246    #[inline]
247    fn backward_checked(start: Self, count: usize) -> Option<Self> {
248        u64::backward_checked(start.0, count).map(Self)
249    }
250}
251
252impl SegmentIndex {
253    /// Segment index 0
254    pub const ZERO: Self = Self(0);
255    /// Segment index 1
256    pub const ONE: Self = Self(1);
257
258    /// Create a new instance
259    #[inline]
260    pub const fn new(n: u64) -> Self {
261        Self(n)
262    }
263
264    /// Get internal representation
265    #[inline(always)]
266    pub const fn as_u64(self) -> u64 {
267        self.0
268    }
269
270    /// Get the first piece index in this segment
271    #[inline]
272    pub const fn first_piece_index(&self) -> PieceIndex {
273        PieceIndex::new(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
274    }
275
276    /// Get the last piece index in this segment
277    #[inline]
278    pub const fn last_piece_index(&self) -> PieceIndex {
279        PieceIndex::new((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
280    }
281
282    /// List of piece indexes that belong to this segment
283    #[inline]
284    pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
285        let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
286        (self.first_piece_index()..=self.last_piece_index())
287            .zip(&mut piece_indices)
288            .for_each(|(input, output)| {
289                *output = input;
290            });
291
292        piece_indices
293    }
294
295    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred
296    #[inline]
297    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
298        self.0.checked_sub(rhs.0).map(Self)
299    }
300
301    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
302    /// occurred
303    #[inline]
304    pub const fn saturating_sub(self, rhs: Self) -> Self {
305        Self(self.0.saturating_sub(rhs.0))
306    }
307}
308
/// Segment root contained within a segment
///
/// A 32-byte root (see [`SegmentRoot::SIZE`]) wrapping a plain byte array; `#[repr(C)]` together
/// with `TrivialType` allows treating it as raw bytes (see `slice_from_repr()`/`repr_from_slice()`)
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);
314
315impl fmt::Debug for SegmentRoot {
316    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
317        for byte in self.0 {
318            write!(f, "{byte:02x}")?;
319        }
320        Ok(())
321    }
322}
323
// Private serde mirror types: binary formats get the raw byte array, human-readable formats get
// a hex string. `#[serde(transparent)]` keeps the wire format identical to the inner array.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    /// Hex string for human-readable serializers, raw bytes otherwise
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match serializer.is_human_readable() {
            true => SegmentRootHex(self.0).serialize(serializer),
            false => SegmentRootBinary(self.0).serialize(serializer),
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    /// Mirrors [`Serialize`]: hex string for human-readable formats, raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = match deserializer.is_human_readable() {
            true => SegmentRootHex::deserialize(deserializer)?.0,
            false => SegmentRootBinary::deserialize(deserializer)?.0,
        };
        Ok(Self(bytes))
    }
}
363
364impl Default for SegmentRoot {
365    #[inline(always)]
366    fn default() -> Self {
367        Self([0; Self::SIZE])
368    }
369}
370
371impl AsRef<[u8]> for SegmentRoot {
372    #[inline(always)]
373    fn as_ref(&self) -> &[u8] {
374        &self.0
375    }
376}
377
378impl AsMut<[u8]> for SegmentRoot {
379    #[inline(always)]
380    fn as_mut(&mut self) -> &mut [u8] {
381        &mut self.0
382    }
383}
384
impl SegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from a slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` over a single `[u8; Self::SIZE]` field, so it has
        // the same size, alignment and memory layout as the array; a slice of one can therefore
        // be reinterpreted as a slice of the other
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion to a slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` over a single `[u8; Self::SIZE]` field, so it has
        // the same size, alignment and memory layout as the array; a slice of one can therefore
        // be reinterpreted as a slice of the other
        unsafe { mem::transmute(value) }
    }
}
403
/// Size of blockchain history in segments
///
/// The size is always non-zero: internally the value is stored as `size - 1` (a `SegmentIndex`),
/// so every bit pattern of the wrapper is a valid history size (see `new()` and
/// `as_non_zero_u64()` for the conversion in each direction)
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
// Storing `SegmentIndex` to make all invariants valid
pub struct HistorySize(SegmentIndex);
426
impl HistorySize {
    /// History size of one
    // Internal representation is `size - 1`, hence `SegmentIndex::ZERO` encodes a size of one
    pub const ONE: Self = Self(SegmentIndex::ZERO);

    /// Create a new instance
    ///
    /// The non-zero size is stored internally as `value - 1`, which makes the all-zero bit
    /// pattern a valid value
    #[inline(always)]
    pub const fn new(value: NonZeroU64) -> Self {
        Self(SegmentIndex::new(value.get() - 1))
    }

    /// Get internal representation
    pub const fn as_segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// Get internal representation
    ///
    /// Converts back from the `size - 1` encoding; uses saturating addition, so a stored index of
    /// `u64::MAX` yields `u64::MAX` rather than overflowing
    pub const fn as_non_zero_u64(&self) -> NonZeroU64 {
        NonZeroU64::new(self.0.as_u64().saturating_add(1)).expect("Not zero; qed")
    }

    /// Size of blockchain history in pieces
    #[inline(always)]
    pub const fn in_pieces(&self) -> NonZeroU64 {
        // `(index + 1) * NUM_PIECES`, with saturating arithmetic so extreme sizes clamp instead
        // of panicking
        NonZeroU64::new(
            self.0
                .as_u64()
                .saturating_add(1)
                .saturating_mul(RecordedHistorySegment::NUM_PIECES as u64),
        )
        .expect("Not zero; qed")
    }

    /// Segment index that corresponds to this history size
    #[inline(always)]
    pub fn segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// History size at which expiration check for a sector happens.
    ///
    /// Returns `None` on overflow.
    #[inline(always)]
    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
        // `self + min_sector_lifetime` as non-zero u64s; `checked_add` yields `None` if the sum
        // would exceed `u64::MAX`
        self.as_non_zero_u64()
            .checked_add(min_sector_lifetime.as_non_zero_u64().get())
            .map(Self::new)
    }
}
475
/// Progress of an archived block.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    /// Number of partially archived bytes of a block, `0` for a full block
    // `0` doubles as the "complete" sentinel — see `new_complete()` and `partial()`
    bytes: u32,
}
486
487impl Default for ArchivedBlockProgress {
488    /// We assume a block can always fit into the segment initially, but it is definitely possible
489    /// to be transitioned into the partial state after some overflow checking.
490    #[inline(always)]
491    fn default() -> Self {
492        Self::new_complete()
493    }
494}
495
496impl ArchivedBlockProgress {
497    /// Block is archived fully
498    #[inline(always)]
499    pub const fn new_complete() -> Self {
500        Self { bytes: 0 }
501    }
502
503    /// Block is partially archived with the provided number of bytes
504    #[inline(always)]
505    pub const fn new_partial(new_partial: NonZeroU32) -> Self {
506        Self {
507            bytes: new_partial.get(),
508        }
509    }
510
511    /// Return the number of partially archived bytes if the progress is not complete
512    #[inline(always)]
513    pub const fn partial(&self) -> Option<NonZeroU32> {
514        NonZeroU32::new(self.bytes)
515    }
516}
517
/// Last archived block
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct LastArchivedBlock {
    /// Block number
    // NOTE(review): the `Unaligned` wrapper suggests this field may not be naturally aligned
    // within the `#[repr(C)]` layout — unwrap via `number()`
    pub number: Unaligned<BlockNumber>,
    /// Progress of an archived block
    pub archived_progress: ArchivedBlockProgress,
}
530
531impl LastArchivedBlock {
532    /// Returns the number of partially archived bytes for a block
533    #[inline(always)]
534    pub fn partial_archived(&self) -> Option<NonZeroU32> {
535        self.archived_progress.partial()
536    }
537
538    /// Sets the number of partially archived bytes if block progress was archived partially
539    #[inline(always)]
540    pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
541        self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
542    }
543
544    /// Indicate the last archived block was archived fully
545    #[inline(always)]
546    pub fn set_complete(&mut self) {
547        self.archived_progress = ArchivedBlockProgress::new_complete();
548    }
549
550    /// Get the block number (unwrap `Unaligned`)
551    pub const fn number(&self) -> BlockNumber {
552        self.number.as_inner()
553    }
554}
555
/// Segment header for a specific segment of a shard
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    /// Local segment index
    // NOTE(review): the `Unaligned` wrapper suggests this field may not be naturally aligned
    // within the `#[repr(C)]` layout — unwrap via `local_segment_index()`
    pub segment_index: Unaligned<LocalSegmentIndex>,
    /// Root of roots of all records in a segment.
    pub segment_root: SegmentRoot,
    /// Hash of the segment header of the previous segment
    // Chains headers together; see `hash()` for how a header's own hash is computed
    pub prev_segment_header_hash: Blake3Hash,
    /// Last archived block
    pub last_archived_block: LastArchivedBlock,
}
572
impl SegmentHeader {
    /// Hash of the whole segment header
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        // Compile-time proof that the whole header fits into a single BLAKE3 chunk; this is what
        // makes the `single_chunk_hash()` call below infallible
        const {
            assert!(size_of::<Self>() <= CHUNK_LEN);
        }
        Blake3Hash::new(
            single_chunk_hash(self.as_bytes())
                .expect("Less than a single chunk worth of bytes; qed"),
        )
    }

    /// Get local segment index (unwrap `Unaligned`)
    #[inline(always)]
    pub const fn local_segment_index(&self) -> LocalSegmentIndex {
        self.segment_index.as_inner()
    }
}
592
/// Recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
// Prefer `new_boxed()` to construct directly on the heap
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);
599
600impl fmt::Debug for RecordedHistorySegment {
601    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
602        f.debug_struct("RecordedHistorySegment")
603            .finish_non_exhaustive()
604    }
605}
606
607impl Default for RecordedHistorySegment {
608    #[inline]
609    fn default() -> Self {
610        Self([Record::default(); Self::NUM_RAW_RECORDS])
611    }
612}
613
614impl AsRef<[u8]> for RecordedHistorySegment {
615    #[inline]
616    fn as_ref(&self) -> &[u8] {
617        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
618    }
619}
620
621impl AsMut<[u8]> for RecordedHistorySegment {
622    #[inline]
623    fn as_mut(&mut self) -> &mut [u8] {
624        Record::slice_mut_to_repr(&mut self.0)
625            .as_flattened_mut()
626            .as_flattened_mut()
627    }
628}
629
impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate for records during the archiving process
    // `(numerator, denominator)` — used below as `* .1 / .0`
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Number of pieces in one segment of archived history (taking erasure coding rate into
    /// account)
    // With the current constants: 128 * 2 / 1 = 256 pieces per archived segment
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of recorded history segment in bytes.
    ///
    /// It includes half of the records (just source records) that will later be erasure coded and
    /// together with corresponding roots and proofs will result in
    /// [`Self::NUM_PIECES`] `Piece`s of archival history.
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant; allocating zeroed on the
        // heap avoids ever materializing this large value on the stack
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}