ab_core_primitives/segments.rs

//! Segments-related data structures.

#[cfg(feature = "alloc")]
mod archival_history_segment;

use crate::block::BlockNumber;
use crate::hashes::{Blake3Hash, blake3_hash};
use crate::pieces::{PieceIndex, Record};
#[cfg(feature = "alloc")]
pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
use ab_io_type::trivial_type::TrivialType;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
use core::array::TryFromSliceError;
use core::fmt;
use core::iter::Step;
use core::num::{NonZeroU32, NonZeroU64};
use derive_more::{
    Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
    SubAssign,
};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "scale-codec")]
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Segment index type.
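///
/// A minimal usage sketch (not compiled as a doctest; all names are from this module):
///
/// ```ignore
/// // Construction and the derived arithmetic operators
/// let third_segment = SegmentIndex::new(2);
/// assert_eq!(third_segment, SegmentIndex::ONE + SegmentIndex::ONE);
/// // Checked subtraction returns `None` instead of underflowing
/// assert_eq!(SegmentIndex::ZERO.checked_sub(SegmentIndex::ONE), None);
/// ```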
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);

impl Step for SegmentIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}

impl SegmentIndex {
    /// Segment index 0.
    pub const ZERO: SegmentIndex = SegmentIndex(0);
    /// Segment index 1.
    pub const ONE: SegmentIndex = SegmentIndex(1);

    /// Create new instance.
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get the first piece index in this segment.
    #[inline]
    pub const fn first_piece_index(&self) -> PieceIndex {
        PieceIndex::new(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
    }

    /// Get the last piece index in this segment.
    #[inline]
    pub const fn last_piece_index(&self) -> PieceIndex {
        PieceIndex::new((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
    }

    /// List of piece indexes that belong to this segment.
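    ///
    /// Illustrative sketch of the returned array (not compiled as a doctest):
    ///
    /// ```ignore
    /// let piece_indexes = SegmentIndex::ONE.segment_piece_indexes();
    /// assert_eq!(piece_indexes.len(), RecordedHistorySegment::NUM_PIECES);
    /// assert_eq!(piece_indexes[0], SegmentIndex::ONE.first_piece_index());
    /// assert_eq!(piece_indexes[piece_indexes.len() - 1], SegmentIndex::ONE.last_piece_index());
    /// ```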
    #[inline]
    pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
        let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
        (self.first_piece_index()..=self.last_piece_index())
            .zip(&mut piece_indices)
            .for_each(|(input, output)| {
                *output = input;
            });

        piece_indices
    }

    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if overflow occurred.
    #[inline]
    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
        self.0.checked_sub(rhs.0).map(Self)
    }
}

/// Segment root contained within segment header.
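///
/// A rough usage sketch (not compiled as a doctest):
///
/// ```ignore
/// // Fallible conversion from a byte slice of length `SegmentRoot::SIZE`
/// let segment_root = SegmentRoot::try_from([0u8; SegmentRoot::SIZE].as_slice()).unwrap();
/// assert_eq!(segment_root.as_ref().len(), SegmentRoot::SIZE);
/// // `Debug` renders the root as lowercase hex
/// assert_eq!(format!("{segment_root:?}"), "00".repeat(SegmentRoot::SIZE));
/// ```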
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);

impl fmt::Debug for SegmentRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            SegmentRootHex(self.0).serialize(serializer)
        } else {
            SegmentRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            SegmentRootHex::deserialize(deserializer)?.0
        } else {
            SegmentRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for SegmentRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for SegmentRoot {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for SegmentRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SegmentRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl SegmentRoot {
    /// Size of segment root in bytes.
    pub const SIZE: usize = 32;
}

/// Size of blockchain history in segments.
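///
/// A minimal sketch of how history size relates to segment indices and pieces (not compiled as a
/// doctest):
///
/// ```ignore
/// // History that contains segment 0 has a size of one segment...
/// assert_eq!(HistorySize::from(SegmentIndex::ZERO), HistorySize::ONE);
/// // ...and maps back to the same segment index
/// assert_eq!(HistorySize::ONE.segment_index(), SegmentIndex::ZERO);
/// // Each segment contributes `RecordedHistorySegment::NUM_PIECES` pieces
/// assert_eq!(
///     HistorySize::ONE.in_pieces().get(),
///     RecordedHistorySegment::NUM_PIECES as u64
/// );
/// ```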
#[derive(
    Debug, Display, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, From, Into, Deref, DerefMut,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct HistorySize(NonZeroU64);

impl From<SegmentIndex> for HistorySize {
    #[inline]
    fn from(value: SegmentIndex) -> Self {
        Self(NonZeroU64::new(value.0 + 1).expect("Not zero; qed"))
    }
}

impl HistorySize {
    /// History size of one segment.
    pub const ONE: Self = Self(NonZeroU64::new(1).expect("Not zero; qed"));

    /// Create new instance.
    #[inline(always)]
    pub const fn new(value: NonZeroU64) -> Self {
        Self(value)
    }

    /// Size of blockchain history in pieces.
    #[inline(always)]
    pub const fn in_pieces(&self) -> NonZeroU64 {
        self.0.saturating_mul(
            NonZeroU64::new(RecordedHistorySegment::NUM_PIECES as u64).expect("Not zero; qed"),
        )
    }

    /// Segment index that corresponds to this history size.
    #[inline(always)]
    pub fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::from(self.0.get() - 1)
    }

    /// History size at which the expiration check for a sector happens.
    ///
    /// Returns `None` on overflow.
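    ///
    /// Sketch (not compiled as a doctest), using a hypothetical minimum sector lifetime of 4
    /// segments:
    ///
    /// ```ignore
    /// let min_sector_lifetime = HistorySize::new(NonZeroU64::new(4).unwrap());
    /// // The expiration check happens once history reaches 1 + 4 = 5 segments
    /// assert_eq!(
    ///     HistorySize::ONE.sector_expiration_check(min_sector_lifetime),
    ///     Some(HistorySize::new(NonZeroU64::new(5).unwrap()))
    /// );
    /// ```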
    #[inline(always)]
    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
        self.0.checked_add(min_sector_lifetime.0.get()).map(Self)
    }
}

/// Progress of an archived block.
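///
/// A small sketch of the two states (not compiled as a doctest):
///
/// ```ignore
/// // A fully archived block reports no partial progress
/// assert_eq!(ArchivedBlockProgress::new_complete().partial(), None);
/// // A partially archived block reports the number of archived bytes
/// let progress = ArchivedBlockProgress::new_partial(NonZeroU32::new(1024).unwrap());
/// assert_eq!(progress.partial(), NonZeroU32::new(1024));
/// ```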
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    /// Number of partially archived bytes of a block, `0` for a fully archived block
    bytes: u32,
}

impl Default for ArchivedBlockProgress {
    /// We assume a block can always fit into the segment initially, but it may be transitioned
    /// into the partial state later, after overflow checking.
    #[inline(always)]
    fn default() -> Self {
        Self::new_complete()
    }
}

impl ArchivedBlockProgress {
    /// Block is archived fully
    #[inline(always)]
    pub const fn new_complete() -> Self {
        Self { bytes: 0 }
    }

    /// Block is partially archived with the provided number of bytes
    #[inline(always)]
    pub const fn new_partial(new_partial: NonZeroU32) -> Self {
        Self {
            bytes: new_partial.get(),
        }
    }

    /// Return the number of partially archived bytes if the progress is not complete
    #[inline(always)]
    pub const fn partial(&self) -> Option<NonZeroU32> {
        NonZeroU32::new(self.bytes)
    }
}

/// Last archived block
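///
/// A rough sketch (not compiled as a doctest; `BlockNumber::new` is assumed here purely for
/// illustration and may not match the real constructor):
///
/// ```ignore
/// let mut last_archived_block = LastArchivedBlock {
///     number: BlockNumber::new(42), // hypothetical constructor
///     archived_progress: ArchivedBlockProgress::new_complete(),
///     padding: [0; 4],
/// };
/// assert!(last_archived_block.partial_archived().is_none());
/// last_archived_block.set_partial_archived(NonZeroU32::new(1024).unwrap());
/// assert!(last_archived_block.partial_archived().is_some());
/// last_archived_block.set_complete();
/// assert!(last_archived_block.partial_archived().is_none());
/// ```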
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct LastArchivedBlock {
    /// Block number
    pub number: BlockNumber,
    /// Progress of an archived block.
    pub archived_progress: ArchivedBlockProgress,
    // TODO: Figure out a way to avoid this padding
    /// Not used and must be set to `0`
    pub padding: [u8; 4],
}

impl LastArchivedBlock {
    /// Returns the number of partially archived bytes if the block was archived partially.
    #[inline(always)]
    pub fn partial_archived(&self) -> Option<NonZeroU32> {
        self.archived_progress.partial()
    }

    /// Sets the number of partially archived bytes, marking the block as partially archived.
    #[inline(always)]
    pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
        self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
    }

    /// Indicates that the last archived block was archived fully.
    #[inline(always)]
    pub fn set_complete(&mut self) {
        self.archived_progress = ArchivedBlockProgress::new_complete();
    }
}

/// Segment header for a specific segment.
///
/// Each segment has a corresponding [`SegmentHeader`] included as the first item in the next
/// segment. Each `SegmentHeader` includes the hash of the previous one, and together they form a
/// chain of segment headers that is used for quick and efficient verification that some `Piece`
/// corresponds to the actual archival history of the blockchain.
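///
/// A sketch of that chaining property (not compiled as a doctest; assumes `Blake3Hash` implements
/// `PartialEq`):
///
/// ```ignore
/// // Consecutive segment headers link via `prev_segment_header_hash`
/// fn verify_link(prev: &SegmentHeader, next: &SegmentHeader) -> bool {
///     next.prev_segment_header_hash == prev.hash()
/// }
/// ```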
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    /// Segment index
    pub segment_index: SegmentIndex,
    /// Root of roots of all records in a segment.
    pub segment_root: SegmentRoot,
    /// Hash of the segment header of the previous segment
    pub prev_segment_header_hash: Blake3Hash,
    /// Last archived block
    pub last_archived_block: LastArchivedBlock,
}

impl SegmentHeader {
    /// Hash of the whole segment header
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        blake3_hash(self.as_bytes())
    }
}

/// Recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);

impl fmt::Debug for RecordedHistorySegment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}

impl Default for RecordedHistorySegment {
    #[inline]
    fn default() -> Self {
        Self([Record::default(); Self::NUM_RAW_RECORDS])
    }
}

impl AsRef<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
    }
}

impl AsMut<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        Record::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}

impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history.
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate for records during archiving process.
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Number of pieces in one segment of archived history (taking erasure coding rate into
    /// account)
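    ///
    /// With the constants above this works out to `128 * 2 / 1 = 256` pieces per segment.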
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of recorded history segment in bytes.
    ///
    /// It includes only the source records (half of the records after erasure coding), which will
    /// later be erasure coded and, together with corresponding roots and proofs, will result in
    /// [`Self::NUM_PIECES`] `Piece`s of archival history.
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Create a boxed value without hitting stack overflow
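    ///
    /// Sketch of allocating a segment on the heap (not compiled as a doctest):
    ///
    /// ```ignore
    /// let segment: Box<RecordedHistorySegment> = RecordedHistorySegment::new_boxed();
    /// // The zero-initialized segment covers the full recorded history segment size
    /// assert_eq!(AsRef::<[u8]>::as_ref(&*segment).len(), RecordedHistorySegment::SIZE);
    /// ```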
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}