// ab_core_primitives/pieces.rs
1//! Pieces-related data structures.
2
3#[cfg(feature = "alloc")]
4mod cow_bytes;
5#[cfg(feature = "alloc")]
6mod flat_pieces;
7#[cfg(feature = "alloc")]
8mod piece;
9
10#[cfg(feature = "alloc")]
11pub use crate::pieces::flat_pieces::FlatPieces;
12#[cfg(feature = "alloc")]
13pub use crate::pieces::piece::Piece;
14use crate::segments::{
15    LocalSegmentIndex, RecordedHistorySegment, SegmentIndex, SegmentPosition, SegmentRoot,
16    SuperSegmentIndex, SuperSegmentRoot,
17};
18use crate::shard::ShardIndex;
19#[cfg(feature = "serde")]
20use ::serde::{Deserialize, Deserializer, Serialize, Serializer};
21use ab_io_type::trivial_type::TrivialType;
22use ab_io_type::unaligned::Unaligned;
23use ab_merkle_tree::balanced::BalancedMerkleTree;
24#[cfg(feature = "alloc")]
25use alloc::boxed::Box;
26#[cfg(feature = "alloc")]
27use alloc::vec::Vec;
28use blake3::OUT_LEN;
29use core::array::TryFromSliceError;
30use core::hash::Hash;
31use core::iter::Step;
32use core::mem::MaybeUninit;
33#[cfg(feature = "alloc")]
34use core::slice;
35use core::{fmt, mem};
36use derive_more::{
37    Add, AddAssign, AsMut, AsRef, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul,
38    MulAssign, Sub, SubAssign,
39};
40#[cfg(feature = "scale-codec")]
41use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
42#[cfg(feature = "serde")]
43use serde_big_array::BigArray;
44
/// Piece index.
///
/// Newtype over `u64` identifying a piece across the whole recorded history; arithmetic
/// operators are derived so indices can be offset/scaled directly.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct PieceIndex(u64);
70
71impl Step for PieceIndex {
72    #[inline]
73    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
74        u64::steps_between(&start.0, &end.0)
75    }
76
77    #[inline]
78    fn forward_checked(start: Self, count: usize) -> Option<Self> {
79        u64::forward_checked(start.0, count).map(Self)
80    }
81
82    #[inline]
83    fn backward_checked(start: Self, count: usize) -> Option<Self> {
84        u64::backward_checked(start.0, count).map(Self)
85    }
86}
87
88impl const From<u64> for PieceIndex {
89    #[inline(always)]
90    fn from(value: u64) -> Self {
91        Self(value)
92    }
93}
94
95impl const From<PieceIndex> for u64 {
96    #[inline(always)]
97    fn from(value: PieceIndex) -> Self {
98        value.0
99    }
100}
101
impl PieceIndex {
    /// Size in bytes.
    pub const SIZE: usize = size_of::<u64>();
    /// Piece index 0.
    pub const ZERO: PieceIndex = PieceIndex(0);
    /// Piece index 1.
    pub const ONE: PieceIndex = PieceIndex(1);

    /// Create a piece index from little-endian bytes.
    #[inline]
    pub const fn from_bytes(bytes: [u8; Self::SIZE]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Convert a piece index to little-endian bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; Self::SIZE] {
        self.0.to_le_bytes()
    }

    /// Segment index piece index corresponds to
    #[inline]
    pub const fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::from(self.0 / RecordedHistorySegment::NUM_PIECES as u64)
    }

    /// Position of a piece in a segment
    #[inline]
    pub fn position(&self) -> PiecePosition {
        // The `as u8` cast is lossless: the const assert below pins
        // `NUM_PIECES` to exactly `u8::MAX + 1`, so the remainder always fits
        PiecePosition::from((self.0 % RecordedHistorySegment::NUM_PIECES as u64) as u8)
    }
}

const {
    // Assert that `u8` represents `PiecePosition` perfectly; `PieceIndex::position()`
    // relies on this when truncating the remainder to `u8`
    assert!(RecordedHistorySegment::NUM_PIECES == usize::from(u8::MAX) + 1);
}
139
/// Piece position in a segment.
///
/// Newtype over `u8`; this is sufficient because a recorded history segment contains
/// exactly `u8::MAX + 1` pieces (const-asserted above).
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct PiecePosition(u8);
160
161impl Step for PiecePosition {
162    #[inline]
163    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
164        u8::steps_between(&start.0, &end.0)
165    }
166
167    #[inline]
168    fn forward_checked(start: Self, count: usize) -> Option<Self> {
169        u8::forward_checked(start.0, count).map(Self)
170    }
171
172    #[inline]
173    fn backward_checked(start: Self, count: usize) -> Option<Self> {
174        u8::backward_checked(start.0, count).map(Self)
175    }
176}
177
178impl From<PiecePosition> for u16 {
179    #[inline]
180    fn from(original: PiecePosition) -> Self {
181        Self::from(original.0)
182    }
183}
184
185impl From<PiecePosition> for u32 {
186    #[inline]
187    fn from(original: PiecePosition) -> Self {
188        Self::from(original.0)
189    }
190}
191
192impl From<PiecePosition> for u64 {
193    #[inline]
194    fn from(original: PiecePosition) -> Self {
195        Self::from(original.0)
196    }
197}
198
199impl From<PiecePosition> for usize {
200    #[inline]
201    fn from(original: PiecePosition) -> Self {
202        usize::from(original.0)
203    }
204}
205
/// Piece offset in a sector.
///
/// Newtype over `u16`; arithmetic operators are derived so offsets can be
/// computed directly.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct PieceOffset(u16);
234
235impl Step for PieceOffset {
236    #[inline]
237    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
238        u16::steps_between(&start.0, &end.0)
239    }
240
241    #[inline]
242    fn forward_checked(start: Self, count: usize) -> Option<Self> {
243        u16::forward_checked(start.0, count).map(Self)
244    }
245
246    #[inline]
247    fn backward_checked(start: Self, count: usize) -> Option<Self> {
248        u16::backward_checked(start.0, count).map(Self)
249    }
250}
251
252impl From<PieceOffset> for u32 {
253    #[inline]
254    fn from(original: PieceOffset) -> Self {
255        Self::from(original.0)
256    }
257}
258
259impl From<PieceOffset> for u64 {
260    #[inline]
261    fn from(original: PieceOffset) -> Self {
262        Self::from(original.0)
263    }
264}
265
266impl From<PieceOffset> for usize {
267    #[inline]
268    fn from(original: PieceOffset) -> Self {
269        usize::from(original.0)
270    }
271}
272
273impl PieceOffset {
274    /// Piece index 0
275    pub const ZERO: Self = Self(0);
276    /// Piece index 1
277    pub const ONE: Self = Self(1);
278    /// Size in bytes
279    pub const SIZE: usize = size_of::<u16>();
280
281    /// Convert piece offset to bytes
282    #[inline]
283    pub const fn to_bytes(self) -> [u8; size_of::<u16>()] {
284        self.0.to_le_bytes()
285    }
286}
287
/// Chunk contained in a record.
///
/// Fixed-size 32-byte newtype; `#[repr(C)]` guarantees the same layout as the inner
/// `[u8; RecordChunk::SIZE]`, which the `slice_*_repr` conversions rely on.
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
#[repr(C)]
pub struct RecordChunk([u8; RecordChunk::SIZE]);
311
312impl fmt::Debug for RecordChunk {
313    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
314        for byte in self.0 {
315            write!(f, "{byte:02x}")?;
316        }
317        Ok(())
318    }
319}
320
impl RecordChunk {
    /// Size of the chunk in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from slice to underlying representation for efficiency purposes
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; RecordChunk::SIZE]] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` newtype over `[u8; SIZE]` and guaranteed to
        // have the same memory layout, so a slice of one is a valid view of the other
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation for efficiency purposes
    #[inline]
    pub fn slice_from_repr(value: &[[u8; RecordChunk::SIZE]]) -> &[Self] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice to underlying representation for efficiency
    /// purposes
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; RecordChunk::SIZE]] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation for efficiency
    /// purposes
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; RecordChunk::SIZE]]) -> &mut [Self] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}
355
/// Record contained within a piece.
///
/// Stored as `NUM_CHUNKS` chunk-sized byte arrays (1 MiB total); `#[repr(C)]` guarantees
/// the layout matches the nested-array representation used by the `*_repr` conversions.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut, TrivialType)]
#[repr(C)]
pub struct Record([[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]);
362
363impl fmt::Debug for Record {
364    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
365        for byte in self.0.as_flattened() {
366            write!(f, "{byte:02x}")?;
367        }
368        Ok(())
369    }
370}
371
372impl AsRef<[u8]> for Record {
373    #[inline]
374    fn as_ref(&self) -> &[u8] {
375        self.0.as_flattened()
376    }
377}
378
379impl AsMut<[u8]> for Record {
380    #[inline]
381    fn as_mut(&mut self) -> &mut [u8] {
382        self.0.as_flattened_mut()
383    }
384}
385
// Zero-cost reference conversions between `Record` and its underlying nested-array
// representation; all rely on the `#[repr(C)]` layout guarantee.

impl From<&Record> for &[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}
417
418impl Record {
419    /// Number of chunks within one record.
420    pub const NUM_CHUNKS: usize = 2_usize.pow(15);
421    /// Number of s-buckets contained within one sector record.
422    ///
423    /// Essentially we chunk records and erasure code them.
424    pub const NUM_S_BUCKETS: usize = Record::NUM_CHUNKS
425        * RecordedHistorySegment::ERASURE_CODING_RATE.1
426        / RecordedHistorySegment::ERASURE_CODING_RATE.0;
427    /// Size of a segment record, it is guaranteed to be a multiple of [`RecordChunk::SIZE`]
428    pub const SIZE: usize = RecordChunk::SIZE * Record::NUM_CHUNKS;
429
430    /// Create boxed value without hitting stack overflow
431    #[inline]
432    #[cfg(feature = "alloc")]
433    pub fn new_boxed() -> Box<Self> {
434        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
435        // SAFETY: Data structure filled with zeroes is a valid invariant
436        unsafe { Box::new_zeroed().assume_init() }
437    }
438
439    /// Create vector filled with zeroed records without hitting stack overflow
440    #[inline]
441    #[cfg(feature = "alloc")]
442    pub fn new_zero_vec(length: usize) -> Vec<Self> {
443        // TODO: Should have been just `vec![Self::default(); length]`, but
444        //  https://github.com/rust-lang/rust/issues/53827
445        let mut records = Vec::with_capacity(length);
446        {
447            let slice = records.spare_capacity_mut();
448            // SAFETY: Same memory layout due to `#[repr(C)]` on `Record` and
449            // `MaybeUninit<[[T; M]; N]>` is guaranteed to have the same layout as
450            // `[[MaybeUninit<T>; M]; N]`
451            let slice = unsafe {
452                slice::from_raw_parts_mut(
453                    slice
454                        .as_mut_ptr()
455                        .cast::<[[mem::MaybeUninit<u8>; RecordChunk::SIZE]; Record::NUM_CHUNKS]>(),
456                    length,
457                )
458            };
459            for byte in slice.as_flattened_mut().as_flattened_mut() {
460                byte.write(0);
461            }
462        }
463        // SAFETY: All values are initialized above.
464        unsafe {
465            records.set_len(records.capacity());
466        }
467
468        records
469    }
470
471    /// Convenient conversion from slice of record to underlying representation for efficiency
472    /// purposes.
473    #[inline(always)]
474    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]] {
475        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
476        unsafe { mem::transmute(value) }
477    }
478
479    /// Convenient conversion from slice of underlying representation to record for efficiency
480    /// purposes.
481    #[inline(always)]
482    pub fn slice_from_repr(value: &[[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]]) -> &[Self] {
483        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
484        unsafe { mem::transmute(value) }
485    }
486
487    /// Convenient conversion from mutable slice of record to underlying representation for
488    /// efficiency purposes.
489    #[inline(always)]
490    pub fn slice_mut_to_repr(
491        value: &mut [Self],
492    ) -> &mut [[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]] {
493        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
494        unsafe { mem::transmute(value) }
495    }
496
497    /// Convenient conversion from mutable slice of underlying representation to record for
498    /// efficiency purposes.
499    #[inline(always)]
500    pub fn slice_mut_from_repr(
501        value: &mut [[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]],
502    ) -> &mut [Self] {
503        // SAFETY: `Record` is `#[repr(C)]` and guaranteed to have the same memory layout
504        unsafe { mem::transmute(value) }
505    }
506
507    /// Derive source chunks root on-demand
508    #[inline(always)]
509    pub fn source_chunks_root(&self) -> RecordChunksRoot {
510        RecordChunksRoot(BalancedMerkleTree::compute_root_only(self))
511    }
512}
513
/// Root of the record contained within a piece.
///
/// This is a Merkle Tree root of the roots of source and parity record chunks.
/// 32-byte `#[repr(C)]` newtype, so reference conversions to/from the inner array are sound.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct RecordRoot([u8; RecordRoot::SIZE]);
521
522impl fmt::Debug for RecordRoot {
523    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
524        for byte in self.0 {
525            write!(f, "{byte:02x}")?;
526        }
527        Ok(())
528    }
529}
530
// Private helper wrappers picking the wire format for `RecordRoot`: raw bytes for
// binary serializers, hex string for human-readable ones.

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordRootBinary(#[serde(with = "BigArray")] [u8; RecordRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordRootHex(#[serde(with = "hex")] [u8; RecordRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordRoot {
    /// Serializes as hex for human-readable formats, raw bytes otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordRootHex(self.0).serialize(serializer)
        } else {
            RecordRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordRoot {
    /// Mirrors `Serialize`: expects hex for human-readable formats, raw bytes otherwise.
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordRootHex::deserialize(deserializer)?.0
        } else {
            RecordRootBinary::deserialize(deserializer)?.0
        }))
    }
}
570
571impl Default for RecordRoot {
572    #[inline]
573    fn default() -> Self {
574        Self([0; Self::SIZE])
575    }
576}
577
578impl TryFrom<&[u8]> for RecordRoot {
579    type Error = TryFromSliceError;
580
581    #[inline]
582    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
583        <[u8; Self::SIZE]>::try_from(slice).map(Self)
584    }
585}
586
587impl AsRef<[u8]> for RecordRoot {
588    #[inline]
589    fn as_ref(&self) -> &[u8] {
590        &self.0
591    }
592}
593
594impl AsMut<[u8]> for RecordRoot {
595    #[inline]
596    fn as_mut(&mut self) -> &mut [u8] {
597        &mut self.0
598    }
599}
600
// Zero-cost reference conversions between `RecordRoot` and its inner byte array;
// all rely on the `#[repr(C)]` layout guarantee.

impl From<&RecordRoot> for &[u8; RecordRoot::SIZE] {
    #[inline]
    fn from(value: &RecordRoot) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordRoot::SIZE]> for &RecordRoot {
    #[inline]
    fn from(value: &[u8; RecordRoot::SIZE]) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordRoot> for &mut [u8; RecordRoot::SIZE] {
    #[inline]
    fn from(value: &mut RecordRoot) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordRoot::SIZE]> for &mut RecordRoot {
    #[inline]
    fn from(value: &mut [u8; RecordRoot::SIZE]) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordRoot {
    /// Size of record root in bytes.
    pub const SIZE: usize = 32;

    /// Validate record root hash produced by the archiver.
    ///
    /// Verifies the Merkle proof `record_proof` for this root as the leaf at `position`
    /// against `segment_root` over a tree of `NUM_PIECES` leaves.
    pub fn is_valid(
        &self,
        segment_root: &SegmentRoot,
        record_proof: &RecordProof,
        position: PiecePosition,
    ) -> bool {
        BalancedMerkleTree::<{ RecordedHistorySegment::NUM_PIECES }>::verify(
            segment_root,
            record_proof,
            usize::from(position),
            self.0,
        )
    }
}
656
/// Root of source or parity record chunks.
///
/// 32-byte `#[repr(C)]` newtype, same layout as the inner byte array.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct RecordChunksRoot([u8; RecordChunksRoot::SIZE]);
662
663impl fmt::Debug for RecordChunksRoot {
664    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
665        for byte in self.0 {
666            write!(f, "{byte:02x}")?;
667        }
668        Ok(())
669    }
670}
671
// Private helper wrappers picking the wire format for `RecordChunksRoot`: raw bytes
// for binary serializers, hex string for human-readable ones.

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordChunksRootBinary(#[serde(with = "BigArray")] [u8; RecordChunksRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordChunksRootHex(#[serde(with = "hex")] [u8; RecordChunksRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordChunksRoot {
    /// Serializes as hex for human-readable formats, raw bytes otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordChunksRootHex(self.0).serialize(serializer)
        } else {
            RecordChunksRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordChunksRoot {
    /// Mirrors `Serialize`: expects hex for human-readable formats, raw bytes otherwise.
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordChunksRootHex::deserialize(deserializer)?.0
        } else {
            RecordChunksRootBinary::deserialize(deserializer)?.0
        }))
    }
}
711
712impl Default for RecordChunksRoot {
713    #[inline]
714    fn default() -> Self {
715        Self([0; Self::SIZE])
716    }
717}
718
719impl TryFrom<&[u8]> for RecordChunksRoot {
720    type Error = TryFromSliceError;
721
722    #[inline]
723    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
724        <[u8; Self::SIZE]>::try_from(slice).map(Self)
725    }
726}
727
728impl AsRef<[u8]> for RecordChunksRoot {
729    #[inline]
730    fn as_ref(&self) -> &[u8] {
731        &self.0
732    }
733}
734
735impl AsMut<[u8]> for RecordChunksRoot {
736    #[inline]
737    fn as_mut(&mut self) -> &mut [u8] {
738        &mut self.0
739    }
740}
741
// Zero-cost reference conversions between `RecordChunksRoot` and its inner byte array;
// all rely on the `#[repr(C)]` layout guarantee.

impl From<&RecordChunksRoot> for &[u8; RecordChunksRoot::SIZE] {
    #[inline]
    fn from(value: &RecordChunksRoot) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordChunksRoot::SIZE]> for &RecordChunksRoot {
    #[inline]
    fn from(value: &[u8; RecordChunksRoot::SIZE]) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordChunksRoot> for &mut [u8; RecordChunksRoot::SIZE] {
    #[inline]
    fn from(value: &mut RecordChunksRoot) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordChunksRoot::SIZE]> for &mut RecordChunksRoot {
    #[inline]
    fn from(value: &mut [u8; RecordChunksRoot::SIZE]) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordChunksRoot {
    /// Size of record chunks root in bytes.
    pub const SIZE: usize = 32;
}
782
/// Proof that the record (root) belongs to a segment.
///
/// A fixed-length Merkle path of `NUM_HASHES` sibling hashes; `#[repr(C)]`, so the
/// reference conversions to/from the nested byte array are sound.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct RecordProof([[u8; OUT_LEN]; RecordProof::NUM_HASHES]);
788
789impl fmt::Debug for RecordProof {
790    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
791        write!(f, "[")?;
792        for hash in self.0 {
793            for byte in hash {
794                write!(f, "{byte:02x}")?;
795            }
796            write!(f, ", ")?;
797        }
798        write!(f, "]")?;
799        Ok(())
800    }
801}
802
// Private helper wrappers picking the wire format for `RecordProof`: raw hash arrays
// for binary serializers, an array of hex strings for human-readable ones.

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofBinary([[u8; OUT_LEN]; RecordProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofHex([RecordProofHexHash; RecordProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for RecordProof {
    /// Serializes as hex strings for human-readable formats, raw bytes otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // SAFETY: `RecordProofHexHash` is a transparent-by-layout `#[repr(C)]`-compatible
            // wrapper over `[u8; OUT_LEN]` and guaranteed to have the same memory layout
            RecordProofHex(unsafe {
                mem::transmute::<
                    [[u8; OUT_LEN]; RecordProof::NUM_HASHES],
                    [RecordProofHexHash; RecordProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            RecordProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordProof {
    /// Mirrors `Serialize`: expects hex strings for human-readable formats, raw bytes otherwise.
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // SAFETY: `RecordProofHexHash` is `#[repr(C)]` and guaranteed to have the
            // same memory layout
            unsafe {
                mem::transmute::<
                    [RecordProofHexHash; RecordProof::NUM_HASHES],
                    [[u8; OUT_LEN]; RecordProof::NUM_HASHES],
                >(RecordProofHex::deserialize(deserializer)?.0)
            }
        } else {
            RecordProofBinary::deserialize(deserializer)?.0
        }))
    }
}
862
863impl Default for RecordProof {
864    #[inline]
865    fn default() -> Self {
866        Self([[0; OUT_LEN]; RecordProof::NUM_HASHES])
867    }
868}
869
870impl AsRef<[u8]> for RecordProof {
871    #[inline]
872    fn as_ref(&self) -> &[u8] {
873        self.0.as_flattened()
874    }
875}
876
877impl AsMut<[u8]> for RecordProof {
878    #[inline]
879    fn as_mut(&mut self) -> &mut [u8] {
880        self.0.as_flattened_mut()
881    }
882}
883
// Zero-cost reference conversions between `RecordProof` and a flat byte array of the
// same total size; all rely on the `#[repr(C)]` layout guarantee.

impl From<&RecordProof> for &[u8; RecordProof::SIZE] {
    #[inline]
    fn from(value: &RecordProof) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordProof::SIZE]> for &RecordProof {
    #[inline]
    fn from(value: &[u8; RecordProof::SIZE]) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordProof> for &mut [u8; RecordProof::SIZE] {
    #[inline]
    fn from(value: &mut RecordProof) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordProof::SIZE]> for &mut RecordProof {
    #[inline]
    fn from(value: &mut [u8; RecordProof::SIZE]) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordProof {
    /// Size of record proof in bytes
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    // Depth of the balanced Merkle tree over `NUM_PIECES` leaves (one sibling hash per level)
    const NUM_HASHES: usize = RecordedHistorySegment::NUM_PIECES.ilog2() as usize;
}
925
/// Proof that the segment belongs to a super segment.
///
/// A fixed-length Merkle path of `NUM_HASHES` sibling hashes; `#[repr(C)]`, so the
/// reference conversions to/from the nested byte array are sound.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SegmentProof([[u8; OUT_LEN]; SegmentProof::NUM_HASHES]);
931
932impl fmt::Debug for SegmentProof {
933    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
934        write!(f, "[")?;
935        for hash in self.0 {
936            for byte in hash {
937                write!(f, "{byte:02x}")?;
938            }
939            write!(f, ", ")?;
940        }
941        write!(f, "]")?;
942        Ok(())
943    }
944}
945
// Private helper wrappers picking the wire format for `SegmentProof`: raw hash arrays
// for binary serializers, an array of hex strings for human-readable ones.

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentProofBinary([[u8; OUT_LEN]; SegmentProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentProofHex([SegmentProofHexHash; SegmentProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for SegmentProof {
    /// Serializes as hex strings for human-readable formats, raw bytes otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // SAFETY: `SegmentProofHexHash` is `#[repr(C)]` and guaranteed to have the
            // same memory layout
            SegmentProofHex(unsafe {
                mem::transmute::<
                    [[u8; OUT_LEN]; SegmentProof::NUM_HASHES],
                    [SegmentProofHexHash; SegmentProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            SegmentProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentProof {
    /// Mirrors `Serialize`: expects hex strings for human-readable formats, raw bytes otherwise.
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // SAFETY: `SegmentProofHexHash` is `#[repr(C)]` and guaranteed to have the
            // same memory layout
            unsafe {
                mem::transmute::<
                    [SegmentProofHexHash; SegmentProof::NUM_HASHES],
                    [[u8; OUT_LEN]; SegmentProof::NUM_HASHES],
                >(SegmentProofHex::deserialize(deserializer)?.0)
            }
        } else {
            SegmentProofBinary::deserialize(deserializer)?.0
        }))
    }
}
1005
1006impl Default for SegmentProof {
1007    #[inline]
1008    fn default() -> Self {
1009        Self([[0; OUT_LEN]; SegmentProof::NUM_HASHES])
1010    }
1011}
1012
1013impl AsRef<[u8]> for SegmentProof {
1014    #[inline]
1015    fn as_ref(&self) -> &[u8] {
1016        self.0.as_flattened()
1017    }
1018}
1019
1020impl AsMut<[u8]> for SegmentProof {
1021    #[inline]
1022    fn as_mut(&mut self) -> &mut [u8] {
1023        self.0.as_flattened_mut()
1024    }
1025}
1026
// Zero-cost reference conversions between `SegmentProof` and a flat byte array of the
// same total size; all rely on the `#[repr(C)]` layout guarantee.

impl From<&SegmentProof> for &[u8; SegmentProof::SIZE] {
    #[inline]
    fn from(value: &SegmentProof) -> Self {
        // SAFETY: `SegmentProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; SegmentProof::SIZE]> for &SegmentProof {
    #[inline]
    fn from(value: &[u8; SegmentProof::SIZE]) -> Self {
        // SAFETY: `SegmentProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut SegmentProof> for &mut [u8; SegmentProof::SIZE] {
    #[inline]
    fn from(value: &mut SegmentProof) -> Self {
        // SAFETY: `SegmentProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; SegmentProof::SIZE]> for &mut SegmentProof {
    #[inline]
    fn from(value: &mut [u8; SegmentProof::SIZE]) -> Self {
        // SAFETY: `SegmentProof` is `#[repr(C)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl SegmentProof {
    /// Size of segment proof in bytes
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    // Depth of the Merkle tree over `MAX_SEGMENTS` leaves rounded up to a power of two
    const NUM_HASHES: usize = SuperSegmentRoot::MAX_SEGMENTS.next_power_of_two().ilog2() as usize;

    /// Returns a mutable reference to an internal array as uninitialized memory.
    ///
    /// This is a convenience method for proof generation.
    // NOTE(review): the returned view aliases fully-initialized bytes, so reading through it
    // requires `assume_init`-style care on the caller side; writing truly uninitialized values
    // back through it would leave `self` holding uninit bytes — confirm callers always
    // overwrite with initialized data
    pub fn as_uninit_repr(
        &mut self,
    ) -> &mut [MaybeUninit<[u8; OUT_LEN]>; SegmentProof::NUM_HASHES] {
        // SAFETY: Casting initialized memory into uninitialized memory of the same size is safe
        unsafe { mem::transmute(&mut self.0) }
    }
}
1078
/// Header for a piece of archival history.
///
/// Primarily contains information needed for piece verification.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct PieceHeader {
    /// Shard index (wrapped in `Unaligned` so the struct keeps alignment 1)
    pub shard_index: Unaligned<ShardIndex>,
    /// Local segment index
    pub local_segment_index: Unaligned<LocalSegmentIndex>,
    /// Super segment index
    pub super_segment_index: Unaligned<SuperSegmentIndex>,
    /// Position of the segment in the super segment
    pub segment_position: Unaligned<SegmentPosition>,
    /// Root of the segment this piece belongs to
    pub segment_root: SegmentRoot,
    /// Proof that the segment root belongs to the super segment
    pub segment_proof: SegmentProof,
    /// Root of parity record chunks.
    ///
    /// Technically redundant, but helps to avoid repeating erasure coding during verification.
    pub parity_chunks_root: RecordChunksRoot,
    /// Proof that the record (root) belongs to a segment
    pub record_proof: RecordProof,
}
1107
const {
    // Must have minimal alignment (1) so that references can be freely converted to/from byte
    // representations without violating alignment requirements
    assert!(align_of::<PieceHeader>() == 1);
}
1112
/// A piece of archival history.
///
/// This version is allocated on the stack, for a heap-allocated piece that can be moved around
/// efficiently, see [`Piece`].
///
/// Internally, a piece contains a record together with a header holding supplementary roots and
/// proofs that can be used to verify that the piece belongs to the actual archival history of
/// the blockchain.
#[derive(Debug, Copy, Clone, Eq, PartialEq, TrivialType)]
#[repr(C)]
pub struct InnerPiece {
    /// Piece header
    pub header: PieceHeader,
    /// Record contained within a piece
    pub record: Record,
}
1129
const {
    // Must have minimal alignment (1) so that references can be freely converted to/from byte
    // representations without violating alignment requirements
    assert!(align_of::<InnerPiece>() == 1);
}
1134
impl InnerPiece {
    /// Size of a piece (in bytes)
    pub const SIZE: usize = size_of::<Self>();

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }

    /// Check whether the piece is valid against the matching super segment root.
    ///
    /// Verification is two-step: first the segment root is checked against `super_segment_root`
    /// using the segment proof, then the re-derived record root is checked against the segment
    /// root using the record proof at `position`.
    pub fn is_valid(
        &self,
        super_segment_root: &SuperSegmentRoot,
        num_segments: u32,
        position: PiecePosition,
    ) -> bool {
        // Step 1: segment root must belong to the super segment
        if !self.header.segment_root.is_valid(
            self.header.shard_index.as_inner(),
            self.header.local_segment_index.as_inner(),
            self.header.segment_position.as_inner(),
            &self.header.segment_proof,
            num_segments,
            super_segment_root,
        ) {
            return false;
        }
        // Step 2: record root (recomputed from the record contents) must belong to the segment
        self.record_root().is_valid(
            &self.header.segment_root,
            &self.header.record_proof,
            position,
        )
    }

    /// Root of the record contained within a piece.
    ///
    /// It is re-derived on every call of this function: a two-leaf balanced Merkle tree is
    /// computed over the source chunks root and the stored parity chunks root.
    #[inline]
    pub fn record_root(&self) -> RecordRoot {
        let record_merkle_tree_root = BalancedMerkleTree::compute_root_only(&[
            *self.record.source_chunks_root(),
            *self.header.parity_chunks_root,
        ]);

        RecordRoot::from(record_merkle_tree_root)
    }

    /// Convenient conversion from slice of piece array to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `InnerPiece` is `#[repr(C)]` with alignment 1 (const-asserted above) and
        // guaranteed to have the same memory layout as its byte representation
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to piece array for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `InnerPiece` is `#[repr(C)]` with alignment 1 (const-asserted above) and
        // guaranteed to have the same memory layout as its byte representation
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of piece array to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; Self::SIZE]] {
        // SAFETY: `InnerPiece` is `#[repr(C)]` with alignment 1 (const-asserted above) and
        // guaranteed to have the same memory layout as its byte representation
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to piece array for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; Self::SIZE]]) -> &mut [Self] {
        // SAFETY: `InnerPiece` is `#[repr(C)]` with alignment 1 (const-asserted above) and
        // guaranteed to have the same memory layout as its byte representation
        unsafe { mem::transmute(value) }
    }
}
1221
#[cfg(feature = "alloc")]
impl From<Box<InnerPiece>> for Vec<u8> {
    /// Reinterpret a heap-allocated piece as a byte vector without copying.
    fn from(value: Box<InnerPiece>) -> Self {
        // `Box::into_raw` transfers ownership of the allocation to the raw pointer while
        // retaining full allocation provenance, which `Vec::from_raw_parts` requires (a pointer
        // derived through an intermediate reference is not guaranteed to carry it)
        let ptr = Box::into_raw(value).cast::<u8>();
        // SAFETY: `InnerPiece` has alignment 1 (const-asserted above) and size
        // `InnerPiece::SIZE`, so the allocation's layout exactly matches that of a `Vec<u8>`
        // with length and capacity equal to `InnerPiece::SIZE`
        unsafe { Vec::from_raw_parts(ptr, InnerPiece::SIZE, InnerPiece::SIZE) }
    }
}