ab_core_primitives/
segments.rs

1//! Segments-related data structures.
2
3#[cfg(feature = "alloc")]
4mod archival_history_segment;
5
6use crate::block::BlockNumber;
7use crate::hashes::Blake3Hash;
8use crate::pieces::{PieceIndex, Record};
9#[cfg(feature = "alloc")]
10pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
11use ab_blake3::single_chunk_hash;
12use ab_io_type::trivial_type::TrivialType;
13use ab_io_type::unaligned::Unaligned;
14#[cfg(feature = "alloc")]
15use alloc::boxed::Box;
16use blake3::CHUNK_LEN;
17use core::iter::Step;
18use core::num::{NonZeroU32, NonZeroU64};
19use core::{fmt, mem};
20use derive_more::{
21    Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
22    SubAssign,
23};
24#[cfg(feature = "scale-codec")]
25use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
26#[cfg(feature = "serde")]
27use serde::{Deserialize, Deserializer, Serialize, Serializer};
28#[cfg(feature = "serde")]
29use serde_big_array::BigArray;
30
/// Super segment root contained within beacon chain block.
///
/// Newtype over a fixed [`SuperSegmentRoot::SIZE`]-byte array. `Debug` (and, with the `serde`
/// feature, `Serialize`/`Deserialize` for human-readable formats) render the bytes as hex; see
/// the manual impls below.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SuperSegmentRoot([u8; SuperSegmentRoot::SIZE]);
36
37impl fmt::Debug for SuperSegmentRoot {
38    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
39        for byte in self.0 {
40            write!(f, "{byte:02x}")?;
41        }
42        Ok(())
43    }
44}
45
// Wire-format helper: raw byte-array representation used for non-human-readable serializers
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootBinary(#[serde(with = "BigArray")] [u8; SuperSegmentRoot::SIZE]);

// Wire-format helper: hex-string representation used for human-readable serializers (e.g. JSON)
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootHex(#[serde(with = "hex")] [u8; SuperSegmentRoot::SIZE]);
55
#[cfg(feature = "serde")]
impl Serialize for SuperSegmentRoot {
    /// Serializes as a hex string for human-readable formats and as raw bytes otherwise
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let bytes = self.0;
        if serializer.is_human_readable() {
            SuperSegmentRootHex(bytes).serialize(serializer)
        } else {
            SuperSegmentRootBinary(bytes).serialize(serializer)
        }
    }
}
70
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SuperSegmentRoot {
    /// Mirrors `Serialize`: expects a hex string for human-readable formats, raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            SuperSegmentRootHex::deserialize(deserializer)?.0
        } else {
            SuperSegmentRootBinary::deserialize(deserializer)?.0
        };
        Ok(Self(bytes))
    }
}
85
impl Default for SuperSegmentRoot {
    /// All-zero root
    // NOTE(review): the analogous impls for `SegmentRoot` below use `#[inline(always)]`; the
    // weaker `#[inline]` here looks like an accidental inconsistency — confirm which is intended
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl AsRef<[u8]> for SuperSegmentRoot {
    /// Borrow the raw bytes of the root
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SuperSegmentRoot {
    /// Mutably borrow the raw bytes of the root
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl SuperSegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;
}
111
/// Segment index type.
///
/// Newtype over `u64`; the arithmetic derives (`Add`, `Sub`, `Mul`, `Div` and the `*Assign`
/// forms) delegate to the inner `u64`, so they carry plain `u64` overflow semantics.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);
140
// Delegates all `Step` operations to the inner `u64` so that `SegmentIndex` can be used in
// ranges (`a..b`, `a..=b`) and iterated over
impl Step for SegmentIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}
157
158impl SegmentIndex {
159    /// Segment index 0.
160    pub const ZERO: SegmentIndex = SegmentIndex(0);
161    /// Segment index 1.
162    pub const ONE: SegmentIndex = SegmentIndex(1);
163
164    /// Create a new instance
165    #[inline]
166    pub const fn new(n: u64) -> Self {
167        Self(n)
168    }
169
170    /// Get internal representation
171    #[inline(always)]
172    pub const fn as_u64(self) -> u64 {
173        self.0
174    }
175
176    /// Get the first piece index in this segment.
177    #[inline]
178    pub const fn first_piece_index(&self) -> PieceIndex {
179        PieceIndex::new(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
180    }
181
182    /// Get the last piece index in this segment.
183    #[inline]
184    pub const fn last_piece_index(&self) -> PieceIndex {
185        PieceIndex::new((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
186    }
187
188    /// List of piece indexes that belong to this segment.
189    #[inline]
190    pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
191        let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
192        (self.first_piece_index()..=self.last_piece_index())
193            .zip(&mut piece_indices)
194            .for_each(|(input, output)| {
195                *output = input;
196            });
197
198        piece_indices
199    }
200
201    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred.
202    #[inline]
203    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
204        self.0.checked_sub(rhs.0).map(Self)
205    }
206
207    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
208    /// occurred.
209    #[inline]
210    pub const fn saturating_sub(self, rhs: Self) -> Self {
211        Self(self.0.saturating_sub(rhs.0))
212    }
213}
214
/// Segment root contained within segment header.
///
/// Newtype over a fixed [`SegmentRoot::SIZE`]-byte array, mirroring [`SuperSegmentRoot`]:
/// `Debug` and human-readable serde render the bytes as hex via the manual impls below.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);
220
221impl fmt::Debug for SegmentRoot {
222    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
223        for byte in self.0 {
224            write!(f, "{byte:02x}")?;
225        }
226        Ok(())
227    }
228}
229
// Wire-format helper: raw byte-array representation used for non-human-readable serializers
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);

// Wire-format helper: hex-string representation used for human-readable serializers (e.g. JSON)
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);
239
#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    /// Serializes as a hex string for human-readable formats and as raw bytes otherwise
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let bytes = self.0;
        if serializer.is_human_readable() {
            SegmentRootHex(bytes).serialize(serializer)
        } else {
            SegmentRootBinary(bytes).serialize(serializer)
        }
    }
}
254
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    /// Mirrors `Serialize`: expects a hex string for human-readable formats, raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            SegmentRootHex::deserialize(deserializer)?.0
        } else {
            SegmentRootBinary::deserialize(deserializer)?.0
        };
        Ok(Self(bytes))
    }
}
269
impl Default for SegmentRoot {
    /// All-zero root
    #[inline(always)]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl AsRef<[u8]> for SegmentRoot {
    /// Borrow the raw bytes of the root
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SegmentRoot {
    /// Mutably borrow the raw bytes of the root
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
290
impl SegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `SegmentRoot` is a `#[repr(C)]` newtype over `[u8; Self::SIZE]` and is
        // guaranteed to have the same memory layout (size and alignment) as the inner array, so
        // reinterpreting a slice of arrays as a slice of `SegmentRoot` is sound
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion to slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `SegmentRoot` is a `#[repr(C)]` newtype over `[u8; Self::SIZE]` and is
        // guaranteed to have the same memory layout (size and alignment) as the inner array, so
        // the reverse reinterpretation is equally sound
        unsafe { mem::transmute(value) }
    }
}
309
/// Size of blockchain history in segments.
///
/// The stored value is `size - 1` (as a [`SegmentIndex`]), which makes a zero history size
/// unrepresentable — see [`HistorySize::new`] and [`HistorySize::as_non_zero_u64`].
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
// Storing `SegmentIndex` to make all invariants valid
pub struct HistorySize(SegmentIndex);
332
333impl HistorySize {
334    /// History size of one
335    pub const ONE: Self = Self(SegmentIndex::ZERO);
336
337    /// Create a new instance
338    #[inline(always)]
339    pub const fn new(value: NonZeroU64) -> Self {
340        Self(SegmentIndex::new(value.get() - 1))
341    }
342
343    /// Get internal representation
344    pub const fn as_segment_index(&self) -> SegmentIndex {
345        self.0
346    }
347
348    /// Get internal representation
349    pub const fn as_non_zero_u64(&self) -> NonZeroU64 {
350        NonZeroU64::new(self.0.as_u64().saturating_add(1)).expect("Not zero; qed")
351    }
352
353    /// Size of blockchain history in pieces.
354    #[inline(always)]
355    pub const fn in_pieces(&self) -> NonZeroU64 {
356        NonZeroU64::new(
357            self.0
358                .as_u64()
359                .saturating_add(1)
360                .saturating_mul(RecordedHistorySegment::NUM_PIECES as u64),
361        )
362        .expect("Not zero; qed")
363    }
364
365    /// Segment index that corresponds to this history size.
366    #[inline(always)]
367    pub fn segment_index(&self) -> SegmentIndex {
368        self.0
369    }
370
371    /// History size at which expiration check for sector happens.
372    ///
373    /// Returns `None` on overflow.
374    #[inline(always)]
375    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
376        self.as_non_zero_u64()
377            .checked_add(min_sector_lifetime.as_non_zero_u64().get())
378            .map(Self::new)
379    }
380}
381
/// Progress of an archived block.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    /// Number of partially archived bytes of a block, `0` for full block
    bytes: u32,
}

impl Default for ArchivedBlockProgress {
    /// We assume a block can always fit into the segment initially, but it is definitely possible
    /// to be transitioned into the partial state after some overflow checking.
    #[inline(always)]
    fn default() -> Self {
        Self::new_complete()
    }
}
401
402impl ArchivedBlockProgress {
403    /// Block is archived fully
404    #[inline(always)]
405    pub const fn new_complete() -> Self {
406        Self { bytes: 0 }
407    }
408
409    /// Block is partially archived with provided number of bytes
410    #[inline(always)]
411    pub const fn new_partial(new_partial: NonZeroU32) -> Self {
412        Self {
413            bytes: new_partial.get(),
414        }
415    }
416
417    /// Return the number of partially archived bytes if the progress is not complete
418    #[inline(always)]
419    pub const fn partial(&self) -> Option<NonZeroU32> {
420        NonZeroU32::new(self.bytes)
421    }
422}
423
/// Last archived block
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct LastArchivedBlock {
    /// Block number
    // `Unaligned` because this lives inside `#[repr(C)]` `SegmentHeader`, whose byte layout must
    // not depend on `BlockNumber`'s alignment
    pub number: Unaligned<BlockNumber>,
    /// Progress of an archived block.
    pub archived_progress: ArchivedBlockProgress,
}
436
437impl LastArchivedBlock {
438    /// Returns the number of partially archived bytes for a block.
439    #[inline(always)]
440    pub fn partial_archived(&self) -> Option<NonZeroU32> {
441        self.archived_progress.partial()
442    }
443
444    /// Sets the number of partially archived bytes if block progress was archived partially
445    #[inline(always)]
446    pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
447        self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
448    }
449
450    /// Indicate last archived block was archived fully
451    #[inline(always)]
452    pub fn set_complete(&mut self) {
453        self.archived_progress = ArchivedBlockProgress::new_complete();
454    }
455
456    /// Get block number (unwrap `Unaligned`)
457    pub const fn number(&self) -> BlockNumber {
458        self.number.as_inner()
459    }
460}
461
/// Segment header for a specific segment.
///
/// Each segment will have corresponding [`SegmentHeader`] included as the first item in the next
/// segment. Each `SegmentHeader` includes hash of the previous one and all together form a chain of
/// segment headers that is used for quick and efficient verification that some `Piece`
/// corresponds to the actual archival history of the blockchain.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    /// Segment index
    // `Unaligned` keeps the `#[repr(C)]` layout independent of `SegmentIndex`'s alignment
    pub segment_index: Unaligned<SegmentIndex>,
    /// Root of roots of all records in a segment.
    pub segment_root: SegmentRoot,
    /// Hash of the segment header of the previous segment
    pub prev_segment_header_hash: Blake3Hash,
    /// Last archived block
    pub last_archived_block: LastArchivedBlock,
}
483
impl SegmentHeader {
    /// Hash of the whole segment header
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        // Compile-time guarantee that the whole header fits into a single BLAKE3 chunk, which
        // `single_chunk_hash` requires (it returns `None` for larger inputs, hence the `expect`
        // below can never fire)
        const {
            assert!(size_of::<Self>() <= CHUNK_LEN);
        }
        Blake3Hash::new(
            single_chunk_hash(self.as_bytes())
                .expect("Less than a single chunk worth of bytes; qed"),
        )
    }

    /// Get segment index (unwrap `Unaligned`)
    #[inline(always)]
    pub const fn segment_index(&self) -> SegmentIndex {
        self.segment_index.as_inner()
    }
}
503
/// Recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
/// Prefer [`RecordedHistorySegment::new_boxed`] (with the `alloc` feature) for heap allocation.
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);

impl fmt::Debug for RecordedHistorySegment {
    // Deliberately omits the record payload — printing all raw records would be enormous
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}

impl Default for RecordedHistorySegment {
    // NOTE(review): this constructs the whole segment on the stack before any move; use
    // `new_boxed()` where stack space is a concern
    #[inline]
    fn default() -> Self {
        Self([Record::default(); Self::NUM_RAW_RECORDS])
    }
}

impl AsRef<[u8]> for RecordedHistorySegment {
    /// View the whole segment as one contiguous byte slice
    #[inline]
    fn as_ref(&self) -> &[u8] {
        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
    }
}

impl AsMut<[u8]> for RecordedHistorySegment {
    /// View the whole segment as one contiguous mutable byte slice
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        Record::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}
540
impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history.
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate for records during archiving process.
    // `(source, total)` shares: 1/2 rate means every source record yields one parity record
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Number of pieces in one segment of archived history (taking erasure coding rate into
    /// account)
    // With the current 1/2 rate: 128 * 2 / 1 = 256 pieces
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of recorded history segment in bytes.
    ///
    /// It includes half of the records (just source records) that will later be erasure coded and
    /// together with corresponding roots and proofs will result in
    /// [`Self::NUM_PIECES`] `Piece`s of archival history.
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}
565}