ab_core_primitives/
pieces.rs

//! Pieces-related data structures.

#[cfg(feature = "alloc")]
mod cow_bytes;
#[cfg(feature = "alloc")]
mod flat_pieces;
#[cfg(feature = "alloc")]
mod piece;

#[cfg(feature = "alloc")]
pub use crate::pieces::flat_pieces::FlatPieces;
#[cfg(feature = "alloc")]
pub use crate::pieces::piece::Piece;
use crate::segments::{RecordedHistorySegment, SegmentIndex, SegmentRoot};
#[cfg(feature = "serde")]
use ::serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use ::serde::{Deserializer, Serializer};
use ab_merkle_tree::balanced_hashed::BalancedHashedMerkleTree;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use blake3::OUT_LEN;
use core::array::TryFromSliceError;
use core::hash::Hash;
use core::iter::Step;
#[cfg(feature = "alloc")]
use core::slice;
use core::{fmt, mem};
use derive_more::{
    Add, AddAssign, AsMut, AsRef, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul,
    MulAssign, Sub, SubAssign,
};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "scale-codec")]
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Piece index
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct PieceIndex(u64);

impl Step for PieceIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}

impl PieceIndex {
    /// Size in bytes.
    pub const SIZE: usize = size_of::<u64>();
    /// Piece index 0.
    pub const ZERO: PieceIndex = PieceIndex(0);
    /// Piece index 1.
    pub const ONE: PieceIndex = PieceIndex(1);

    /// Create new instance
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Create piece index from bytes.
    #[inline]
    pub const fn from_bytes(bytes: [u8; Self::SIZE]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Convert piece index to bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; Self::SIZE] {
        self.0.to_le_bytes()
    }

    /// Segment index this piece index corresponds to
    #[inline]
    pub const fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::new(self.0 / RecordedHistorySegment::NUM_PIECES as u64)
    }

    /// Position of a piece in a segment
    #[inline]
    pub const fn position(&self) -> u32 {
        // Position is statically guaranteed to fit into u32
        (self.0 % RecordedHistorySegment::NUM_PIECES as u64) as u32
    }

    /// Is this piece index a source piece?
    #[inline]
    pub const fn is_source(&self) -> bool {
        self.position() < RecordedHistorySegment::NUM_RAW_RECORDS as u32
    }

    /// Returns the next source piece index.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn next_source_index(&self) -> Self {
        if self.position() + 1 < RecordedHistorySegment::NUM_RAW_RECORDS as u32 {
            // Same segment
            Self(self.0 + 1)
        } else {
            // Next segment
            Self(self.0 + RecordedHistorySegment::NUM_RAW_RECORDS as u64)
        }
    }
}
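
// Illustrative usage sketch, not part of the original module: shows how a `PieceIndex`
// decomposes into a `SegmentIndex` plus an in-segment position, relying only on the constants
// exported by `RecordedHistorySegment`. It assumes `SegmentIndex` derives `Eq` and `Debug` the
// same way `PieceIndex` does.
#[cfg(test)]
mod piece_index_examples {
    use super::*;

    #[test]
    fn decomposes_into_segment_and_position() {
        let pieces_per_segment = RecordedHistorySegment::NUM_PIECES as u64;

        // The first piece of segment 1 immediately follows the last piece of segment 0
        let piece_index = PieceIndex::new(pieces_per_segment);
        assert_eq!(piece_index.segment_index(), SegmentIndex::new(1));
        assert_eq!(piece_index.position(), 0);
        // Source pieces occupy the first `NUM_RAW_RECORDS` positions of every segment
        assert!(piece_index.is_source());
    }
}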

/// Piece offset in sector
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, MaxEncodedLen, TypeInfo)
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct PieceOffset(u16);

impl Step for PieceOffset {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u16::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u16::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u16::backward_checked(start.0, count).map(Self)
    }
}

impl From<PieceOffset> for u32 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for u64 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for usize {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        usize::from(original.0)
    }
}

impl PieceOffset {
    /// Piece offset 0.
    pub const ZERO: PieceOffset = PieceOffset(0);
    /// Piece offset 1.
    pub const ONE: PieceOffset = PieceOffset(1);

    /// Convert piece offset to bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; mem::size_of::<u16>()] {
        self.0.to_le_bytes()
    }
}

/// Chunk contained in a record
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, TypeInfo))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
#[repr(C)]
pub struct RecordChunk([u8; RecordChunk::SIZE]);

impl fmt::Debug for RecordChunk {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl RecordChunk {
    /// Size of the chunk in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from slice to underlying representation for efficiency purposes
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; RecordChunk::SIZE]] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation for efficiency purposes
    #[inline]
    pub fn slice_from_repr(value: &[[u8; RecordChunk::SIZE]]) -> &[Self] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice to underlying representation for efficiency
    /// purposes
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; RecordChunk::SIZE]] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation for efficiency
    /// purposes
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; RecordChunk::SIZE]]) -> &mut [Self] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}
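
// Illustrative usage sketch, not part of the original module: the `slice_*_repr` helpers on
// `RecordChunk` reinterpret slices in place via `transmute`, so lengths are preserved and no
// bytes are copied.
#[cfg(test)]
mod record_chunk_examples {
    use super::*;

    #[test]
    fn repr_conversion_preserves_length() {
        let chunks = [RecordChunk::default(); 4];

        let raw: &[[u8; RecordChunk::SIZE]] = RecordChunk::slice_to_repr(&chunks);
        assert_eq!(raw.len(), chunks.len());

        // And back again, still without copying
        let chunks_again: &[RecordChunk] = RecordChunk::slice_from_repr(raw);
        assert_eq!(chunks_again.len(), chunks.len());
    }
}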

/// Record contained within a piece.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct Record([[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]);

impl fmt::Debug for Record {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0.as_flattened() {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl Default for Record {
    #[inline]
    fn default() -> Self {
        Self([Default::default(); Record::NUM_CHUNKS])
    }
}

impl AsRef<[u8]> for Record {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for Record {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl From<&Record> for &[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&Record> for &[u8; Record::SIZE] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; Record::SIZE]> for &Record {
    #[inline]
    fn from(value: &[u8; Record::SIZE]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [u8; Record::SIZE] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; Record::SIZE]> for &mut Record {
    #[inline]
    fn from(value: &mut [u8; Record::SIZE]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl Record {
    /// Number of chunks within one record.
    pub const NUM_CHUNKS: usize = 2_usize.pow(15);
    /// Number of s-buckets contained within one sector record.
    ///
    /// Essentially we chunk records and erasure code them.
    pub const NUM_S_BUCKETS: usize = Record::NUM_CHUNKS
        * RecordedHistorySegment::ERASURE_CODING_RATE.1
        / RecordedHistorySegment::ERASURE_CODING_RATE.0;
    /// Size of a segment record; guaranteed to be a multiple of [`RecordChunk::SIZE`]
    pub const SIZE: usize = RecordChunk::SIZE * Record::NUM_CHUNKS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::new_zeroed().assume_init() }
    }

    /// Create vector filled with zeroed records without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_zero_vec(length: usize) -> Vec<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        let mut records = Vec::with_capacity(length);
        {
            let slice = records.spare_capacity_mut();
            // SAFETY: Same memory layout due to `#[repr(transparent)]` on `Record` and
            // `MaybeUninit<[[T; M]; N]>` is guaranteed to have the same layout as
            // `[[MaybeUninit<T>; M]; N]`
            let slice = unsafe {
                slice::from_raw_parts_mut(
                    slice.as_mut_ptr()
                        as *mut [[mem::MaybeUninit<u8>; RecordChunk::SIZE]; Record::NUM_CHUNKS],
                    length,
                )
            };
            for byte in slice.as_flattened_mut().as_flattened_mut() {
                byte.write(0);
            }
        }
        // SAFETY: All values are initialized above.
        unsafe {
            records.set_len(records.capacity());
        }

        records
    }

    /// Convenient conversion from slice of record to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to record for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]]) -> &[Self] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of record to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(
        value: &mut [Self],
    ) -> &mut [[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to record for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(
        value: &mut [[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]],
    ) -> &mut [Self] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}
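
// Illustrative usage sketch, not part of the original module: a `Record` is large, so it should
// be created on the heap via the constructors above rather than on the stack.
#[cfg(all(feature = "alloc", test))]
mod record_examples {
    use super::*;

    #[test]
    fn heap_constructors_produce_zeroed_records() {
        // A single zeroed record allocated directly on the heap
        let record = Record::new_boxed();
        assert!(record.as_flattened().iter().all(|&byte| byte == 0));

        // A vector of zeroed records, also created without going through the stack
        let records = Record::new_zero_vec(2);
        assert_eq!(records.len(), 2);
    }
}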

/// Record root contained within a piece.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(transparent)]
pub struct RecordRoot([u8; RecordRoot::SIZE]);

impl fmt::Debug for RecordRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordRootBinary(#[serde(with = "BigArray")] [u8; RecordRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordRootHex(#[serde(with = "hex")] [u8; RecordRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordRootHex(self.0).serialize(serializer)
        } else {
            RecordRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordRootHex::deserialize(deserializer)?.0
        } else {
            RecordRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordRoot {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordRoot> for &[u8; RecordRoot::SIZE] {
    #[inline]
    fn from(value: &RecordRoot) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordRoot::SIZE]> for &RecordRoot {
    #[inline]
    fn from(value: &[u8; RecordRoot::SIZE]) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordRoot> for &mut [u8; RecordRoot::SIZE] {
    #[inline]
    fn from(value: &mut RecordRoot) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordRoot::SIZE]> for &mut RecordRoot {
    #[inline]
    fn from(value: &mut [u8; RecordRoot::SIZE]) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordRoot {
    /// Size of record root in bytes.
    pub const SIZE: usize = 32;

    /// Validate record root hash produced by the archiver
    pub fn is_valid(
        &self,
        segment_root: &SegmentRoot,
        record_proof: &RecordProof,
        position: u32,
    ) -> bool {
        BalancedHashedMerkleTree::<{ RecordedHistorySegment::NUM_PIECES }>::verify(
            segment_root,
            record_proof,
            position as usize,
            self.0,
        )
    }
}

/// Record chunks root (source or parity) contained within a piece.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(transparent)]
pub struct RecordChunksRoot([u8; RecordChunksRoot::SIZE]);

impl fmt::Debug for RecordChunksRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordChunksRootBinary(#[serde(with = "BigArray")] [u8; RecordChunksRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordChunksRootHex(#[serde(with = "hex")] [u8; RecordChunksRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordChunksRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordChunksRootHex(self.0).serialize(serializer)
        } else {
            RecordChunksRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordChunksRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordChunksRootHex::deserialize(deserializer)?.0
        } else {
            RecordChunksRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordChunksRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordChunksRoot {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordChunksRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordChunksRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordChunksRoot> for &[u8; RecordChunksRoot::SIZE] {
    #[inline]
    fn from(value: &RecordChunksRoot) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordChunksRoot::SIZE]> for &RecordChunksRoot {
    #[inline]
    fn from(value: &[u8; RecordChunksRoot::SIZE]) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordChunksRoot> for &mut [u8; RecordChunksRoot::SIZE] {
    #[inline]
    fn from(value: &mut RecordChunksRoot) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordChunksRoot::SIZE]> for &mut RecordChunksRoot {
    #[inline]
    fn from(value: &mut [u8; RecordChunksRoot::SIZE]) -> Self {
        // SAFETY: `RecordChunksRoot` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordChunksRoot {
    /// Size of record chunks root in bytes.
    pub const SIZE: usize = 32;
}

/// Record proof contained within a piece.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into)]
#[cfg_attr(
    feature = "scale-codec",
    derive(Encode, Decode, TypeInfo, MaxEncodedLen)
)]
#[repr(transparent)]
pub struct RecordProof([[u8; OUT_LEN]; RecordProof::NUM_HASHES]);

impl fmt::Debug for RecordProof {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[")?;
        for hash in self.0 {
            for byte in hash {
                write!(f, "{byte:02x}")?;
            }
            write!(f, ", ")?;
        }
        write!(f, "]")?;
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofBinary([[u8; OUT_LEN]; RecordProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
#[repr(transparent)]
struct RecordProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofHex([RecordProofHexHash; RecordProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for RecordProof {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // SAFETY: `RecordProofHexHash` is `#[repr(transparent)]` and guaranteed to have the
            // same memory layout
            RecordProofHex(unsafe {
                mem::transmute::<
                    [[u8; OUT_LEN]; RecordProof::NUM_HASHES],
                    [RecordProofHexHash; RecordProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            RecordProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordProof {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            // SAFETY: `RecordProofHexHash` is `#[repr(transparent)]` and guaranteed to have the
            // same memory layout
            unsafe {
                mem::transmute::<
                    [RecordProofHexHash; RecordProof::NUM_HASHES],
                    [[u8; OUT_LEN]; RecordProof::NUM_HASHES],
                >(RecordProofHex::deserialize(deserializer)?.0)
            }
        } else {
            RecordProofBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordProof {
    #[inline]
    fn default() -> Self {
        Self([[0; OUT_LEN]; RecordProof::NUM_HASHES])
    }
}

impl AsRef<[u8]> for RecordProof {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for RecordProof {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl From<&RecordProof> for &[u8; RecordProof::SIZE] {
    #[inline]
    fn from(value: &RecordProof) -> Self {
        // SAFETY: `RecordProof` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordProof::SIZE]> for &RecordProof {
    #[inline]
    fn from(value: &[u8; RecordProof::SIZE]) -> Self {
        // SAFETY: `RecordProof` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordProof> for &mut [u8; RecordProof::SIZE] {
    #[inline]
    fn from(value: &mut RecordProof) -> Self {
        // SAFETY: `RecordProof` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordProof::SIZE]> for &mut RecordProof {
    #[inline]
    fn from(value: &mut [u8; RecordProof::SIZE]) -> Self {
        // SAFETY: `RecordProof` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordProof {
    /// Size of record proof in bytes.
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    const NUM_HASHES: usize = RecordedHistorySegment::NUM_PIECES.ilog2() as usize;
}
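
// Illustrative sketch, not part of the original module: a record proof stores one BLAKE3 hash
// per level of the segment's balanced Merkle tree, so its byte length is `OUT_LEN` multiplied by
// `log2(RecordedHistorySegment::NUM_PIECES)`.
#[cfg(test)]
mod record_proof_examples {
    use super::*;

    #[test]
    fn proof_is_one_hash_per_tree_level() {
        let proof = RecordProof::default();
        let tree_depth = RecordedHistorySegment::NUM_PIECES.ilog2() as usize;

        // The flattened byte view exposed through `AsRef<[u8]>` has exactly that length
        assert_eq!(proof.as_ref().len(), OUT_LEN * tree_depth);
        assert_eq!(proof.as_ref().len(), RecordProof::SIZE);
    }
}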

/// A piece of archival history.
///
/// This version is allocated on the stack; for a heap-allocated piece see [`Piece`].
///
/// Internally a piece contains a record, followed by the record root, a supplementary record
/// chunks root and a proof that can be used to verify that the piece belongs to the actual
/// archival history of the blockchain.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Deref, DerefMut, AsRef, AsMut)]
#[repr(transparent)]
pub struct PieceArray([u8; PieceArray::SIZE]);

impl fmt::Debug for PieceArray {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl Default for PieceArray {
    #[inline]
    fn default() -> Self {
        Self([0u8; Self::SIZE])
    }
}

impl AsRef<[u8]> for PieceArray {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for PieceArray {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&PieceArray> for &[u8; PieceArray::SIZE] {
    #[inline]
    fn from(value: &PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; PieceArray::SIZE]> for &PieceArray {
    #[inline]
    fn from(value: &[u8; PieceArray::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut PieceArray> for &mut [u8; PieceArray::SIZE] {
    #[inline]
    fn from(value: &mut PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; PieceArray::SIZE]> for &mut PieceArray {
    #[inline]
    fn from(value: &mut [u8; PieceArray::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl PieceArray {
    /// Size of a piece (in bytes).
    pub const SIZE: usize =
        Record::SIZE + RecordRoot::SIZE + RecordChunksRoot::SIZE + RecordProof::SIZE;

    /// Create boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }

    /// Validate proof embedded within a piece produced by the archiver
    pub fn is_valid(&self, segment_root: &SegmentRoot, position: u32) -> bool {
        let (record, &record_root, parity_chunks_root, record_proof) = self.split();

        let source_record_merkle_tree_root = BalancedHashedMerkleTree::compute_root_only(record);
        let record_merkle_tree_root = BalancedHashedMerkleTree::compute_root_only(&[
            source_record_merkle_tree_root,
            **parity_chunks_root,
        ]);

        if record_merkle_tree_root != *record_root {
            return false;
        }

        record_root.is_valid(segment_root, record_proof, position)
    }

    /// Split piece into underlying components.
    #[inline]
    pub fn split(&self) -> (&Record, &RecordRoot, &RecordChunksRoot, &RecordProof) {
        let (record, extra) = self.0.split_at(Record::SIZE);
        let (root, extra) = extra.split_at(RecordRoot::SIZE);
        let (parity_chunks_root, proof) = extra.split_at(RecordChunksRoot::SIZE);

        let record = <&[u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let root = <&[u8; RecordRoot::SIZE]>::try_from(root)
            .expect("Slice of memory has correct length; qed");
        let parity_chunks_root = <&[u8; RecordChunksRoot::SIZE]>::try_from(parity_chunks_root)
            .expect("Slice of memory has correct length; qed");
        let proof = <&[u8; RecordProof::SIZE]>::try_from(proof)
            .expect("Slice of memory has correct length; qed");

        (
            record.into(),
            root.into(),
            parity_chunks_root.into(),
            proof.into(),
        )
    }

    /// Split piece into underlying mutable components.
    #[inline]
    pub fn split_mut(
        &mut self,
    ) -> (
        &mut Record,
        &mut RecordRoot,
        &mut RecordChunksRoot,
        &mut RecordProof,
    ) {
        let (record, extra) = self.0.split_at_mut(Record::SIZE);
        let (root, extra) = extra.split_at_mut(RecordRoot::SIZE);
        let (parity_chunks_root, proof) = extra.split_at_mut(RecordChunksRoot::SIZE);

        let record = <&mut [u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let root = <&mut [u8; RecordRoot::SIZE]>::try_from(root)
            .expect("Slice of memory has correct length; qed");
        let parity_chunks_root = <&mut [u8; RecordChunksRoot::SIZE]>::try_from(parity_chunks_root)
            .expect("Slice of memory has correct length; qed");
        let proof = <&mut [u8; RecordProof::SIZE]>::try_from(proof)
            .expect("Slice of memory has correct length; qed");

        (
            record.into(),
            root.into(),
            parity_chunks_root.into(),
            proof.into(),
        )
    }

    /// Record contained within a piece.
    #[inline]
    pub fn record(&self) -> &Record {
        self.split().0
    }

    /// Mutable record contained within a piece.
    #[inline]
    pub fn record_mut(&mut self) -> &mut Record {
        self.split_mut().0
    }

    /// Root contained within a piece.
    #[inline]
    pub fn root(&self) -> &RecordRoot {
        self.split().1
    }

    /// Mutable root contained within a piece.
    #[inline]
    pub fn root_mut(&mut self) -> &mut RecordRoot {
        self.split_mut().1
    }

    /// Parity chunks root contained within a piece.
    #[inline]
    pub fn parity_chunks_root(&self) -> &RecordChunksRoot {
        self.split().2
    }

    /// Mutable parity chunks root contained within a piece.
    #[inline]
    pub fn parity_chunks_root_mut(&mut self) -> &mut RecordChunksRoot {
        self.split_mut().2
    }

    /// Proof contained within a piece.
    #[inline]
    pub fn proof(&self) -> &RecordProof {
        self.split().3
    }

    /// Mutable proof contained within a piece.
    #[inline]
    pub fn proof_mut(&mut self) -> &mut RecordProof {
        self.split_mut().3
    }

    /// Convenient conversion from slice of piece array to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to piece array for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of piece array to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; Self::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to piece array for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; Self::SIZE]]) -> &mut [Self] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}
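
// Illustrative usage sketch, not part of the original module: a piece is a flat byte array laid
// out as record || record root || record chunks root || record proof, which is exactly what
// `split()` exposes.
#[cfg(all(feature = "alloc", test))]
mod piece_array_examples {
    use super::*;

    #[test]
    fn split_matches_declared_layout() {
        // Keep the piece on the heap, since it is large (about 1 MiB given the constants above)
        let piece = PieceArray::new_boxed();
        let (record, root, parity_chunks_root, proof) = piece.split();

        assert_eq!(record.as_ref().len(), Record::SIZE);
        assert_eq!(root.as_ref().len(), RecordRoot::SIZE);
        assert_eq!(parity_chunks_root.as_ref().len(), RecordChunksRoot::SIZE);
        assert_eq!(proof.as_ref().len(), RecordProof::SIZE);
        assert_eq!(
            PieceArray::SIZE,
            Record::SIZE + RecordRoot::SIZE + RecordChunksRoot::SIZE + RecordProof::SIZE
        );
    }
}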

#[cfg(feature = "alloc")]
impl From<Box<PieceArray>> for Vec<u8> {
    fn from(value: Box<PieceArray>) -> Self {
        let mut value = mem::ManuallyDrop::new(value);
        // SAFETY: Always contains fixed allocation of bytes
        unsafe { Vec::from_raw_parts(value.as_mut_ptr(), PieceArray::SIZE, PieceArray::SIZE) }
    }
}
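
// Illustrative usage sketch, not part of the original module: converting `Box<PieceArray>` into
// `Vec<u8>` reuses the existing heap allocation, so the resulting vector has both length and
// capacity equal to `PieceArray::SIZE`.
#[cfg(all(feature = "alloc", test))]
mod piece_array_into_vec_examples {
    use super::*;

    #[test]
    fn boxed_piece_converts_into_byte_vec() {
        let piece = PieceArray::new_boxed();
        let bytes = Vec::<u8>::from(piece);

        assert_eq!(bytes.len(), PieceArray::SIZE);
        assert_eq!(bytes.capacity(), PieceArray::SIZE);
    }
}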