// ab_core_primitives/segments.rs

//! Segments-related data structures.

#[cfg(feature = "alloc")]
mod archival_history_segment;

use crate::block::BlockNumber;
use crate::hashes::Blake3Hash;
use crate::pieces::{PieceIndex, Record};
#[cfg(feature = "alloc")]
pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
use ab_io_type::trivial_type::TrivialType;
use ab_io_type::unaligned::Unaligned;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
use core::iter::Step;
use core::num::{NonZeroU32, NonZeroU64};
use core::{fmt, mem};
use derive_more::{
    Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
    SubAssign,
};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "scale-codec")]
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Deserializer, Serialize, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Super segment root contained within a beacon chain block
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct SuperSegmentRoot([u8; SuperSegmentRoot::SIZE]);

impl fmt::Debug for SuperSegmentRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootBinary(#[serde(with = "BigArray")] [u8; SuperSegmentRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootHex(#[serde(with = "hex")] [u8; SuperSegmentRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for SuperSegmentRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            SuperSegmentRootHex(self.0).serialize(serializer)
        } else {
            SuperSegmentRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SuperSegmentRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            SuperSegmentRootHex::deserialize(deserializer)?.0
        } else {
            SuperSegmentRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for SuperSegmentRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl AsRef<[u8]> for SuperSegmentRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SuperSegmentRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl SuperSegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;
}
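
// Illustrative sketch (not from the original source): `SuperSegmentRoot` is a plain 32-byte
// wrapper, so the byte view always has `SIZE` bytes and `Default` is all zeroes.
#[cfg(test)]
#[test]
fn super_segment_root_sketch() {
    let root = SuperSegmentRoot::default();
    assert_eq!(root.as_ref().len(), SuperSegmentRoot::SIZE);
    assert!(root.as_ref().iter().all(|&byte| byte == 0));
}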

/// Segment index type.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);

impl Step for SegmentIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}
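
// Illustrative sketch (not from the original source): the `Step` impl above is what lets
// `SegmentIndex` be used directly in ranges, which `segment_piece_indexes()` below relies on.
#[cfg(test)]
#[test]
fn segment_index_range_sketch() {
    // Three segment indices: 0, 1, 2
    assert_eq!((SegmentIndex::ZERO..SegmentIndex::new(3)).count(), 3);
    // The range is half-open, just like ranges over plain integers
    let mut range = SegmentIndex::ZERO..SegmentIndex::new(2);
    assert_eq!(range.next(), Some(SegmentIndex::ZERO));
    assert_eq!(range.next(), Some(SegmentIndex::ONE));
    assert_eq!(range.next(), None);
}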

impl SegmentIndex {
    /// Segment index 0.
    pub const ZERO: SegmentIndex = SegmentIndex(0);
    /// Segment index 1.
    pub const ONE: SegmentIndex = SegmentIndex(1);

    /// Create a new instance
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get internal representation
    #[inline(always)]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Get the first piece index in this segment.
    #[inline]
    pub const fn first_piece_index(&self) -> PieceIndex {
        PieceIndex::new(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
    }

    /// Get the last piece index in this segment.
    #[inline]
    pub const fn last_piece_index(&self) -> PieceIndex {
        PieceIndex::new((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
    }

    /// List of piece indexes that belong to this segment.
    #[inline]
    pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
        let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
        (self.first_piece_index()..=self.last_piece_index())
            .zip(&mut piece_indices)
            .for_each(|(input, output)| {
                *output = input;
            });

        piece_indices
    }

    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred.
    #[inline]
    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
        self.0.checked_sub(rhs.0).map(Self)
    }

    /// Saturating integer subtraction. Computes `self - rhs`, returning zero if underflow
    /// occurred.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self(self.0.saturating_sub(rhs.0))
    }
}
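
// Illustrative sketch (not from the original source): relationship between a segment index and
// the piece indices it covers. It assumes `PieceIndex` derives `Debug`/`PartialEq` like the
// other primitives in this module.
#[cfg(test)]
#[test]
fn segment_piece_index_sketch() {
    let num_pieces = RecordedHistorySegment::NUM_PIECES as u64;
    let segment_index = SegmentIndex::ONE;

    // Segment `n` covers pieces `n * NUM_PIECES..=(n + 1) * NUM_PIECES - 1`
    assert_eq!(segment_index.first_piece_index(), PieceIndex::new(num_pieces));
    assert_eq!(
        segment_index.last_piece_index(),
        PieceIndex::new(2 * num_pieces - 1)
    );

    // `segment_piece_indexes()` enumerates that whole range in order
    let piece_indexes = segment_index.segment_piece_indexes();
    assert_eq!(piece_indexes.len(), RecordedHistorySegment::NUM_PIECES);
    assert_eq!(piece_indexes[0], segment_index.first_piece_index());
    assert_eq!(
        piece_indexes[RecordedHistorySegment::NUM_PIECES - 1],
        segment_index.last_piece_index()
    );
}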

/// Segment root contained within the segment header.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);

impl fmt::Debug for SegmentRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            SegmentRootHex(self.0).serialize(serializer)
        } else {
            SegmentRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            SegmentRootHex::deserialize(deserializer)?.0
        } else {
            SegmentRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for SegmentRoot {
    #[inline(always)]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl AsRef<[u8]> for SegmentRoot {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SegmentRoot {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl SegmentRoot {
    /// Size in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion to slice of underlying representation for efficiency purposes
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}
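
// Illustrative sketch (not from the original source): `slice_from_repr`/`repr_from_slice` are
// zero-copy casts between `&[SegmentRoot]` and `&[[u8; 32]]`, useful when handling many roots at
// once without re-allocating.
#[cfg(test)]
#[test]
fn segment_root_repr_sketch() {
    let raw = [[0u8; SegmentRoot::SIZE]; 2];
    let roots: &[SegmentRoot] = SegmentRoot::slice_from_repr(&raw);
    assert_eq!(roots.len(), 2);

    // And back again, still without copying the data
    let raw_again: &[[u8; SegmentRoot::SIZE]] = SegmentRoot::repr_from_slice(roots);
    assert_eq!(raw_again.len(), 2);
}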

/// Size of blockchain history in segments.
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
// Storing `SegmentIndex` to make all invariants valid
pub struct HistorySize(SegmentIndex);

impl HistorySize {
    /// History size of one
    pub const ONE: Self = Self(SegmentIndex::ZERO);

    /// Create a new instance
    #[inline(always)]
    pub const fn new(value: NonZeroU64) -> Self {
        Self(SegmentIndex::new(value.get() - 1))
    }

    /// Get internal representation
    pub const fn as_segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// Get the history size as a `NonZeroU64`
    pub const fn as_non_zero_u64(&self) -> NonZeroU64 {
        NonZeroU64::new(self.0.as_u64().saturating_add(1)).expect("Not zero; qed")
    }

    /// Size of blockchain history in pieces.
    #[inline(always)]
    pub const fn in_pieces(&self) -> NonZeroU64 {
        NonZeroU64::new(
            self.0
                .as_u64()
                .saturating_add(1)
                .saturating_mul(RecordedHistorySegment::NUM_PIECES as u64),
        )
        .expect("Not zero; qed")
    }

    /// Segment index that corresponds to this history size.
    #[inline(always)]
    pub fn segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// History size at which expiration check for sector happens.
    ///
    /// Returns `None` on overflow.
    #[inline(always)]
    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
        self.as_non_zero_u64()
            .checked_add(min_sector_lifetime.as_non_zero_u64().get())
            .map(Self::new)
    }
}
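
// Illustrative sketch (not from the original source): `HistorySize` is one-based while
// `SegmentIndex` is zero-based, so a history of one segment corresponds to segment index 0.
#[cfg(test)]
#[test]
fn history_size_sketch() {
    let history_size = HistorySize::new(NonZeroU64::new(1).expect("Not zero; qed"));
    assert_eq!(history_size, HistorySize::ONE);
    assert_eq!(history_size.segment_index(), SegmentIndex::ZERO);
    assert_eq!(history_size.as_non_zero_u64().get(), 1);
    // One segment of history contains `NUM_PIECES` pieces after erasure coding
    assert_eq!(
        history_size.in_pieces().get(),
        RecordedHistorySegment::NUM_PIECES as u64
    );
}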

/// Progress of an archived block.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    /// Number of partially archived bytes of a block, `0` for full block
    bytes: u32,
}

impl Default for ArchivedBlockProgress {
    /// We assume a block can always fit into the segment initially, but it can transition into
    /// the partial state after overflow checks.
    #[inline(always)]
    fn default() -> Self {
        Self::new_complete()
    }
}

impl ArchivedBlockProgress {
    /// Block is archived fully
    #[inline(always)]
    pub const fn new_complete() -> Self {
        Self { bytes: 0 }
    }

    /// Block is partially archived with provided number of bytes
    #[inline(always)]
    pub const fn new_partial(new_partial: NonZeroU32) -> Self {
        Self {
            bytes: new_partial.get(),
        }
    }

    /// Return the number of partially archived bytes if the progress is not complete
    #[inline(always)]
    pub const fn partial(&self) -> Option<NonZeroU32> {
        NonZeroU32::new(self.bytes)
    }
}
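
// Illustrative sketch (not from the original source): `bytes == 0` encodes a fully archived
// block, anything else is the number of partially archived bytes.
#[cfg(test)]
#[test]
fn archived_block_progress_sketch() {
    let complete = ArchivedBlockProgress::new_complete();
    assert_eq!(complete.partial(), None);
    assert_eq!(complete, ArchivedBlockProgress::default());

    let partial_bytes = NonZeroU32::new(1024).expect("Not zero; qed");
    let partial = ArchivedBlockProgress::new_partial(partial_bytes);
    assert_eq!(partial.partial(), Some(partial_bytes));
}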

/// Last archived block
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct LastArchivedBlock {
    /// Block number
    pub number: Unaligned<BlockNumber>,
    /// Progress of an archived block.
    pub archived_progress: ArchivedBlockProgress,
}

impl LastArchivedBlock {
    /// Returns the number of partially archived bytes for a block.
    #[inline(always)]
    pub fn partial_archived(&self) -> Option<NonZeroU32> {
        self.archived_progress.partial()
    }

    /// Sets the number of partially archived bytes, marking the block as partially archived
    #[inline(always)]
    pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
        self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
    }

    /// Indicate last archived block was archived fully
    #[inline(always)]
    pub fn set_complete(&mut self) {
        self.archived_progress = ArchivedBlockProgress::new_complete();
    }

    /// Get block number (unwrap `Unaligned`)
    pub const fn number(&self) -> BlockNumber {
        self.number.as_inner()
    }
}
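
// Illustrative sketch (not from the original source): how the partial/complete transitions on an
// existing `LastArchivedBlock` are expected to be used, e.g. when a block does not fully fit into
// the current segment.
#[cfg(test)]
#[allow(dead_code)]
fn last_archived_block_sketch(
    last_archived_block: &mut LastArchivedBlock,
    archived_bytes: Option<NonZeroU32>,
) {
    match archived_bytes {
        // Only part of the block fit into the segment
        Some(bytes) => last_archived_block.set_partial_archived(bytes),
        // The whole block was archived
        None => last_archived_block.set_complete(),
    }
    assert_eq!(last_archived_block.partial_archived(), archived_bytes);
}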

/// Segment header for a specific segment.
///
/// Each segment will have a corresponding [`SegmentHeader`] included as the first item in the
/// next segment. Each `SegmentHeader` includes the hash of the previous one, and together they
/// form a chain of segment headers that is used for quick and efficient verification that some
/// `Piece` corresponds to the actual archival history of the blockchain.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    /// Segment index
    pub segment_index: Unaligned<SegmentIndex>,
    /// Root of roots of all records in a segment.
    pub segment_root: SegmentRoot,
    /// Hash of the segment header of the previous segment
    pub prev_segment_header_hash: Blake3Hash,
    /// Last archived block
    pub last_archived_block: LastArchivedBlock,
}

impl SegmentHeader {
    /// Hash of the whole segment header
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        blake3::hash(self.as_bytes()).into()
    }

    /// Get segment index (unwrap `Unaligned`)
    #[inline(always)]
    pub const fn segment_index(&self) -> SegmentIndex {
        self.segment_index.as_inner()
    }
}
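
// Illustrative sketch (not from the original source): how the header chain described above can be
// checked, assuming the caller already has two consecutive segment headers (`Blake3Hash`
// comparison is available because `SegmentHeader` derives `PartialEq`).
#[cfg(test)]
#[allow(dead_code)]
fn segment_headers_are_linked(prev: &SegmentHeader, next: &SegmentHeader) -> bool {
    // The next header commits to the hash of the previous one
    next.prev_segment_header_hash == prev.hash()
}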

/// Recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);

impl fmt::Debug for RecordedHistorySegment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}

impl Default for RecordedHistorySegment {
    #[inline]
    fn default() -> Self {
        Self([Record::default(); Self::NUM_RAW_RECORDS])
    }
}

impl AsRef<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
    }
}

impl AsMut<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        Record::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}

impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history.
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate for records during archiving process.
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Number of pieces in one segment of archived history (taking erasure coding rate into
    /// account)
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of recorded history segment in bytes.
    ///
    /// It includes only the source records (half of the eventual pieces), which will later be
    /// erasure coded and, together with corresponding roots and proofs, will result in
    /// [`Self::NUM_PIECES`] `Piece`s of archival history.
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}
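
// Illustrative sketch (not from the original source): the constants above imply 128 source
// records are erasure coded at a 1/2 rate into 256 pieces per segment.
#[cfg(test)]
#[test]
fn recorded_history_segment_constants_sketch() {
    assert_eq!(RecordedHistorySegment::NUM_RAW_RECORDS, 128);
    assert_eq!(RecordedHistorySegment::ERASURE_CODING_RATE, (1, 2));
    assert_eq!(
        RecordedHistorySegment::NUM_PIECES,
        RecordedHistorySegment::NUM_RAW_RECORDS * 2
    );
    assert_eq!(
        RecordedHistorySegment::SIZE,
        Record::SIZE * RecordedHistorySegment::NUM_RAW_RECORDS
    );
}

// Illustrative sketch (not from the original source): prefer `new_boxed()` over
// `RecordedHistorySegment::default()` when the value should live on the heap, since the segment
// is large enough to overflow the stack.
#[cfg(feature = "alloc")]
#[test]
fn recorded_history_segment_new_boxed_sketch() {
    let segment = RecordedHistorySegment::new_boxed();
    // A zeroed segment exposes `SIZE` bytes through `AsRef<[u8]>`
    let bytes = AsRef::<[u8]>::as_ref(&*segment);
    assert_eq!(bytes.len(), RecordedHistorySegment::SIZE);
}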