1#[cfg(feature = "alloc")]
4mod archival_history_segment;
5
6use crate::block::BlockNumber;
7use crate::hashes::Blake3Hash;
8use crate::pieces::{PieceIndex, Record, SegmentProof};
9#[cfg(feature = "alloc")]
10pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
11use crate::shard::ShardIndex;
12use ab_blake3::{single_block_hash, single_chunk_hash};
13use ab_io_type::trivial_type::TrivialType;
14use ab_io_type::unaligned::Unaligned;
15use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
16#[cfg(feature = "alloc")]
17use alloc::boxed::Box;
18#[cfg(feature = "alloc")]
19use alloc::sync::Arc as StdArc;
20use blake3::{CHUNK_LEN, OUT_LEN};
21use core::iter::Step;
22use core::num::{NonZeroU32, NonZeroU64};
23use core::{fmt, mem};
24use derive_more::{
25 Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
26 SubAssign,
27};
28#[cfg(feature = "scale-codec")]
29use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
30#[cfg(feature = "serde")]
31use serde::{Deserialize, Deserializer, Serialize, Serializer};
32#[cfg(feature = "serde")]
33use serde_big_array::BigArray;
34
/// Super segment index.
///
/// Sequentially numbers super segments; arithmetic operators are derived, so indices can be
/// added/subtracted directly.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SuperSegmentIndex(u64);
61
62impl Step for SuperSegmentIndex {
63 #[inline]
64 fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
65 u64::steps_between(&start.0, &end.0)
66 }
67
68 #[inline]
69 fn forward_checked(start: Self, count: usize) -> Option<Self> {
70 u64::forward_checked(start.0, count).map(Self)
71 }
72
73 #[inline]
74 fn backward_checked(start: Self, count: usize) -> Option<Self> {
75 u64::backward_checked(start.0, count).map(Self)
76 }
77}
78
/// Wrap a raw `u64` into a super segment index
impl const From<u64> for SuperSegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}

/// Unwrap a super segment index into the underlying `u64`
impl const From<SuperSegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: SuperSegmentIndex) -> Self {
        value.0
    }
}
92
93impl SuperSegmentIndex {
94 pub const ZERO: Self = Self(0);
96 pub const ONE: Self = Self(1);
98
99 #[inline]
101 pub fn checked_sub(self, rhs: Self) -> Option<Self> {
102 self.0.checked_sub(rhs.0).map(Self)
103 }
104
105 #[inline]
108 pub const fn saturating_sub(self, rhs: Self) -> Self {
109 Self(self.0.saturating_sub(rhs.0))
110 }
111}
112
/// Super segment root: the Merkle root committing to all shard segment roots in a super segment
/// (computed in [`SuperSegment::new()`])
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SuperSegmentRoot([u8; SuperSegmentRoot::SIZE]);
118
119impl fmt::Debug for SuperSegmentRoot {
120 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
121 for byte in self.0 {
122 write!(f, "{byte:02x}")?;
123 }
124 Ok(())
125 }
126}
127
/// All-zero root by default
impl const Default for SuperSegmentRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}
134
// Helper for (de)serializing the root as a raw byte array in binary formats; `BigArray` is used
// because serde only has built-in support for arrays up to 32 elements
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootBinary(#[serde(with = "BigArray")] [u8; SuperSegmentRoot::SIZE]);

// Helper for (de)serializing the root as a hex string in human-readable formats
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootHex(#[serde(with = "hex")] [u8; SuperSegmentRoot::SIZE]);
144
#[cfg(feature = "serde")]
impl Serialize for SuperSegmentRoot {
    /// Serializes as a hex string for human-readable formats and as raw bytes otherwise
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match serializer.is_human_readable() {
            true => SuperSegmentRootHex(self.0).serialize(serializer),
            false => SuperSegmentRootBinary(self.0).serialize(serializer),
        }
    }
}
159
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SuperSegmentRoot {
    /// Deserializes from a hex string for human-readable formats and from raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            SuperSegmentRootHex::deserialize(deserializer)?.0
        } else {
            SuperSegmentRootBinary::deserialize(deserializer)?.0
        };
        Ok(Self(bytes))
    }
}
174
impl AsRef<[u8]> for SuperSegmentRoot {
    /// Borrow the root as raw bytes
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SuperSegmentRoot {
    /// Borrow the root as mutable raw bytes
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
188
impl SuperSegmentRoot {
    /// Size of super segment root in bytes.
    pub const SIZE: usize = 32;
    /// Max number of segments that can be committed to by one super segment root (`2^20 - 1`).
    ///
    /// NOTE: the literal `1048575` is repeated as a const generic argument in
    /// [`SuperSegment::new()`] and [`SuperSegment::proof_for_segment()`]; const assertions there
    /// keep the two in sync.
    pub const MAX_SEGMENTS: u32 = 2u32.pow(20) - 1;
}
197
/// Position of a segment (root) within a super segment
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentPosition(u32);
218
219impl From<SegmentPosition> for u64 {
220 #[inline]
221 fn from(original: SegmentPosition) -> Self {
222 Self::from(original.0)
223 }
224}
225
impl SegmentPosition {
    /// Segment position 0.
    pub const ZERO: Self = Self(0);
}
230
/// Shard segment root together with the coordinates identifying it within a super segment
#[derive(Debug, Clone, Copy, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ShardSegmentRootWithPosition {
    /// Index of the shard this segment root belongs to
    pub shard_index: ShardIndex,
    /// Position of this entry in the super segment's list of segment roots
    pub segment_position: SegmentPosition,
    /// Segment index local to the shard
    pub local_segment_index: LocalSegmentIndex,
    /// Segment root itself
    pub segment_root: SegmentRoot,
}
247
impl ShardSegmentRootWithPosition {
    /// Hash of the whole struct, used as a leaf of the super segment Merkle tree
    /// (see [`SuperSegment::new()`]).
    ///
    /// The struct always fits into a single blake3 block, which makes `single_block_hash`
    /// infallible here.
    #[inline(always)]
    pub fn hash(&self) -> [u8; OUT_LEN] {
        single_block_hash(self.as_bytes()).expect("Less than a single block worth of bytes; qed")
    }
}
255
/// Super segment header
///
/// Fields use [`Unaligned`] wrappers so the `#[repr(C)]` layout stays free of padding — the
/// header bytes are hashed directly (see [`SuperSegment::new()`]).
#[derive(Debug, Clone, Copy, Eq, PartialEq, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SuperSegmentHeader {
    /// Super segment index
    pub index: Unaligned<SuperSegmentIndex>,
    /// Super segment root (Merkle root over the contained shard segment roots)
    pub root: SuperSegmentRoot,
    /// Hash of the previous super segment header
    pub prev_super_segment_header_hash: Blake3Hash,
    /// Max global segment index covered so far (previous max + `num_segments`)
    pub max_segment_index: Unaligned<SegmentIndex>,
    // Presumably the beacon chain block number this super segment targets — TODO confirm against
    // caller semantics
    pub target_beacon_chain_block_number: Unaligned<BlockNumber>,
    /// Number of segment roots included in this super segment
    pub num_segments: u32,
}
277
/// Super segment: a header together with the segment roots it commits to
#[cfg(feature = "alloc")]
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub struct SuperSegment {
    /// Super segment header
    pub header: SuperSegmentHeader,
    /// Segment roots (with positions) covered by this super segment
    pub segment_roots: StdArc<[ShardSegmentRootWithPosition]>,
}
291
#[cfg(feature = "alloc")]
impl SuperSegment {
    /// Create a new super segment extending `previous_header` with `segment_roots`.
    ///
    /// Returns `None` if the number of segment roots does not fit into `u32` or exceeds
    /// [`SuperSegmentRoot::MAX_SEGMENTS`] (Merkle root computation fails).
    pub fn new(
        previous_header: &SuperSegmentHeader,
        target_beacon_chain_block_number: BlockNumber,
        segment_roots: StdArc<[ShardSegmentRootWithPosition]>,
    ) -> Option<Self> {
        let num_segments = u32::try_from(segment_roots.len()).ok()?;
        let max_segment_index = SegmentIndex::from(
            u64::from(previous_header.max_segment_index.as_inner()) + u64::from(num_segments),
        );

        // Keep the const generic literal below in sync with `SuperSegmentRoot::MAX_SEGMENTS`
        const {
            assert!(SuperSegmentRoot::MAX_SEGMENTS == 1048575);
        }
        let maybe_super_segment_root = UnbalancedMerkleTree::compute_root_only::<1048575, _, _>(
            segment_roots.iter().map(ShardSegmentRootWithPosition::hash),
        )?;

        Some(Self {
            header: SuperSegmentHeader {
                index: (previous_header.index.as_inner() + SuperSegmentIndex::ONE).into(),
                root: SuperSegmentRoot::from(maybe_super_segment_root),
                // The header always fits into a single blake3 chunk, hence infallible
                prev_super_segment_header_hash: Blake3Hash::from(
                    single_chunk_hash(previous_header.as_bytes())
                        .expect("Less than a single chunk worth of bytes; qed"),
                ),
                max_segment_index: max_segment_index.into(),
                target_beacon_chain_block_number: target_beacon_chain_block_number.into(),
                num_segments,
            },
            segment_roots,
        })
    }

    /// Generate a Merkle proof for the segment root at `segment_position`.
    ///
    /// Returns `None` if `segment_position` is out of range.
    pub fn proof_for_segment(&self, segment_position: SegmentPosition) -> Option<SegmentProof> {
        // Keep the const generic literal below in sync with `SuperSegmentRoot::MAX_SEGMENTS`
        const {
            assert!(SuperSegmentRoot::MAX_SEGMENTS == 1048575);
        }
        let mut segment_proof = SegmentProof::default();
        // Leaves must be hashed exactly like in `Self::new()` for the proof to verify against
        // the root computed there, so reuse the same `hash()` method rather than re-spelling it
        UnbalancedMerkleTree::compute_root_and_proof_in::<1048575, _, _>(
            self.segment_roots.iter().map(ShardSegmentRootWithPosition::hash),
            u32::from(segment_position) as usize,
            segment_proof.as_uninit_repr(),
        )?;

        Some(segment_proof)
    }
}
356
/// Segment index local to a shard (as opposed to the global [`SegmentIndex`])
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct LocalSegmentIndex(u64);
383
384impl Step for LocalSegmentIndex {
385 #[inline]
386 fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
387 u64::steps_between(&start.0, &end.0)
388 }
389
390 #[inline]
391 fn forward_checked(start: Self, count: usize) -> Option<Self> {
392 u64::forward_checked(start.0, count).map(Self)
393 }
394
395 #[inline]
396 fn backward_checked(start: Self, count: usize) -> Option<Self> {
397 u64::backward_checked(start.0, count).map(Self)
398 }
399}
400
/// Wrap a raw `u64` into a local segment index
impl const From<u64> for LocalSegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}

/// Unwrap a local segment index into the underlying `u64`
impl const From<LocalSegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: LocalSegmentIndex) -> Self {
        value.0
    }
}
414
415impl LocalSegmentIndex {
416 pub const ZERO: Self = Self(0);
418 pub const ONE: Self = Self(1);
420
421 #[inline]
423 pub fn checked_sub(self, rhs: Self) -> Option<Self> {
424 self.0.checked_sub(rhs.0).map(Self)
425 }
426
427 #[inline]
430 pub const fn saturating_sub(self, rhs: Self) -> Self {
431 Self(self.0.saturating_sub(rhs.0))
432 }
433}
434
/// Global segment index across the whole archived history
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);
461
462impl Step for SegmentIndex {
463 #[inline]
464 fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
465 u64::steps_between(&start.0, &end.0)
466 }
467
468 #[inline]
469 fn forward_checked(start: Self, count: usize) -> Option<Self> {
470 u64::forward_checked(start.0, count).map(Self)
471 }
472
473 #[inline]
474 fn backward_checked(start: Self, count: usize) -> Option<Self> {
475 u64::backward_checked(start.0, count).map(Self)
476 }
477}
478
/// Wrap a raw `u64` into a segment index
impl const From<u64> for SegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}

/// Unwrap a segment index into the underlying `u64`
impl const From<SegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: SegmentIndex) -> Self {
        value.0
    }
}
492
impl SegmentIndex {
    /// Segment index 0.
    pub const ZERO: Self = Self(0);
    /// Segment index 1.
    pub const ONE: Self = Self(1);

    /// First piece index contained in this segment.
    ///
    /// NOTE: multiplication is unchecked; overflows only for astronomically large indices.
    #[inline]
    pub const fn first_piece_index(&self) -> PieceIndex {
        PieceIndex::from(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
    }

    /// Last piece index contained in this segment (inclusive).
    #[inline]
    pub const fn last_piece_index(&self) -> PieceIndex {
        PieceIndex::from((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
    }

    /// All piece indexes that belong to this segment.
    #[inline]
    pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
        let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
        // The inclusive range contains exactly `NUM_PIECES` elements, matching the array length
        (self.first_piece_index()..=self.last_piece_index())
            .zip(&mut piece_indices)
            .for_each(|(input, output)| {
                *output = input;
            });

        piece_indices
    }

    /// Checked subtraction; returns `None` on underflow.
    #[inline]
    pub fn checked_sub(self, rhs: Self) -> Option<Self> {
        self.0.checked_sub(rhs.0).map(Self)
    }

    /// Saturating subtraction, clamped at zero.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self(self.0.saturating_sub(rhs.0))
    }
}
537
/// Root of a single archived segment, committed to by a [`SuperSegmentRoot`]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);
543
impl SegmentRoot {
    /// Check whether this segment root is valid against a super segment root.
    ///
    /// Reconstructs the Merkle leaf from the provided coordinates and this root, then verifies
    /// `segment_proof` against `super_segment_root` at `segment_position` in a tree of
    /// `num_segments` leaves.
    pub fn is_valid(
        &self,
        shard_index: ShardIndex,
        local_segment_index: LocalSegmentIndex,
        segment_position: SegmentPosition,
        segment_proof: &SegmentProof,
        num_segments: u32,
        super_segment_root: &SuperSegmentRoot,
    ) -> bool {
        // Leaf must match the construction used in `SuperSegment::new()`
        let shard_segment_root = ShardSegmentRootWithPosition {
            shard_index,
            segment_position,
            local_segment_index,
            segment_root: *self,
        };
        // `SegmentProof` is fixed-size with unused trailing entries zero-filled, so the proof is
        // cut at the first all-zero hash.
        // NOTE(review): this assumes a genuine inner proof node is never all zeros — vanishingly
        // unlikely with blake3, but worth confirming it cannot be attacker-forced.
        let segment_proof = segment_proof
            .split_once(|hash| hash == &[0; _])
            .map_or(segment_proof.as_slice(), |(before, _after)| before);
        UnbalancedMerkleTree::verify(
            super_segment_root,
            segment_proof,
            u64::from(segment_position),
            shard_segment_root.hash(),
            u64::from(num_segments),
        )
    }
}
575
576impl fmt::Debug for SegmentRoot {
577 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
578 for byte in self.0 {
579 write!(f, "{byte:02x}")?;
580 }
581 Ok(())
582 }
583}
584
// Helper for (de)serializing the root as a raw byte array in binary formats; `BigArray` is used
// because serde only has built-in support for arrays up to 32 elements
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);

// Helper for (de)serializing the root as a hex string in human-readable formats
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);
594
#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    /// Serializes as a hex string for human-readable formats and as raw bytes otherwise
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match serializer.is_human_readable() {
            true => SegmentRootHex(self.0).serialize(serializer),
            false => SegmentRootBinary(self.0).serialize(serializer),
        }
    }
}
609
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    /// Deserializes from a hex string for human-readable formats and from raw bytes otherwise
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            SegmentRootHex::deserialize(deserializer)?.0
        } else {
            SegmentRootBinary::deserialize(deserializer)?.0
        };
        Ok(Self(bytes))
    }
}
624
/// All-zero root by default
// NOTE(review): unlike `SuperSegmentRoot`, this is not an `impl const Default` — confirm whether
// the asymmetry is intentional
impl Default for SegmentRoot {
    #[inline(always)]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}
631
impl AsRef<[u8]> for SegmentRoot {
    /// Borrow the root as raw bytes
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SegmentRoot {
    /// Borrow the root as mutable raw bytes
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
645
impl SegmentRoot {
    /// Size of segment root in bytes.
    pub const SIZE: usize = 32;

    /// Convert from a slice of the underlying representation without copying.
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `SegmentRoot` is a `#[repr(C)]` newtype around `[u8; Self::SIZE]`, so the two
        // slice element types have identical layout
        unsafe { mem::transmute(value) }
    }

    /// Convert to a slice of the underlying representation without copying.
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `SegmentRoot` is a `#[repr(C)]` newtype around `[u8; Self::SIZE]`, so the two
        // slice element types have identical layout
        unsafe { mem::transmute(value) }
    }
}
664
/// Size of the blockchain history in segments.
///
/// Internally stored as the max [`SegmentIndex`] (history size minus one), see
/// [`Self::new()`] and [`Self::as_non_zero_u64()`].
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct HistorySize(SegmentIndex);
687
impl HistorySize {
    /// History size of one segment (stored as max segment index, hence `SegmentIndex::ZERO`).
    pub const ONE: Self = Self(SegmentIndex::ZERO);

    /// Create a new instance from a non-zero number of segments.
    #[inline(always)]
    pub const fn new(value: NonZeroU64) -> Self {
        // Stored as `value - 1`, i.e. the max segment index of a history of this size
        Self(SegmentIndex::from(value.get() - 1))
    }

    /// Max segment index of a history of this size.
    pub const fn as_segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// History size as a non-zero number of segments (inverse of [`Self::new()`]).
    pub const fn as_non_zero_u64(&self) -> NonZeroU64 {
        NonZeroU64::new(u64::from(self.0).saturating_add(1)).expect("Not zero; qed")
    }

    /// Size of history in pieces (number of segments times pieces per segment).
    #[inline(always)]
    pub const fn in_pieces(&self) -> NonZeroU64 {
        NonZeroU64::new(
            u64::from(self.0)
                .saturating_add(1)
                .saturating_mul(RecordedHistorySegment::NUM_PIECES as u64),
        )
        .expect("Not zero; qed")
    }

    /// Max segment index of a history of this size.
    #[inline(always)]
    pub fn segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// History size at which the sector expiration check happens for sectors created at this
    /// history size; `None` on overflow.
    // NOTE(review): semantics inferred from the name — confirm against sector expiration logic
    #[inline(always)]
    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
        self.as_non_zero_u64()
            .checked_add(min_sector_lifetime.as_non_zero_u64().get())
            .map(Self::new)
    }
}
735
/// Progress of an archived block.
///
/// Encoded as a single `u32`: `0` means the block was archived fully, any non-zero value is the
/// number of partially archived bytes (see [`Self::new_complete()`] / [`Self::new_partial()`]).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    // 0 = complete; non-zero = number of bytes archived so far
    bytes: u32,
}
746
impl Default for ArchivedBlockProgress {
    /// The block is assumed fully archived by default
    #[inline(always)]
    fn default() -> Self {
        Self::new_complete()
    }
}
755
impl ArchivedBlockProgress {
    /// Block was archived fully (represented by the `0` sentinel).
    #[inline(always)]
    pub const fn new_complete() -> Self {
        Self { bytes: 0 }
    }

    /// Block was archived partially, with the given non-zero number of bytes.
    #[inline(always)]
    pub const fn new_partial(new_partial: NonZeroU32) -> Self {
        Self {
            bytes: new_partial.get(),
        }
    }

    /// Number of partially archived bytes, or `None` if the block is complete.
    #[inline(always)]
    pub const fn partial(&self) -> Option<NonZeroU32> {
        NonZeroU32::new(self.bytes)
    }
}
777
/// Last block archived into a segment
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct LastArchivedBlock {
    /// Block number (wrapped in `Unaligned` to keep the `#[repr(C)]` layout padding-free)
    pub number: Unaligned<BlockNumber>,
    /// How much of the block was archived
    pub archived_progress: ArchivedBlockProgress,
}
790
impl LastArchivedBlock {
    /// Number of partially archived bytes of the block, or `None` if fully archived.
    #[inline(always)]
    pub fn partial_archived(&self) -> Option<NonZeroU32> {
        self.archived_progress.partial()
    }

    /// Mark the block as partially archived with the given number of bytes.
    #[inline(always)]
    pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
        self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
    }

    /// Mark the block as fully archived.
    #[inline(always)]
    pub fn set_complete(&mut self) {
        self.archived_progress = ArchivedBlockProgress::new_complete();
    }

    /// Block number.
    pub const fn number(&self) -> BlockNumber {
        self.number.as_inner()
    }
}
815
/// Segment header of an archived segment
///
/// Index field uses [`Unaligned`] so the `#[repr(C)]` layout stays padding-free — the header
/// bytes are hashed directly (see [`Self::hash()`]).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    /// Segment index local to the shard
    pub index: Unaligned<LocalSegmentIndex>,
    /// Segment root
    pub root: SegmentRoot,
    /// Hash of the previous segment's header
    pub prev_segment_header_hash: Blake3Hash,
    /// Last block archived into this segment
    pub last_archived_block: LastArchivedBlock,
}
832
impl SegmentHeader {
    /// Hash of the whole segment header.
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        // Compile-time guarantee that the header fits into a single blake3 chunk, which makes
        // `single_chunk_hash` infallible below
        const {
            assert!(size_of::<Self>() <= CHUNK_LEN);
        }
        Blake3Hash::new(
            single_chunk_hash(self.as_bytes())
                .expect("Less than a single chunk worth of bytes; qed"),
        )
    }
}
846
/// Recorded history segment: a fixed number of raw records prior to erasure coding.
///
/// This is a large value; prefer [`Self::new_boxed()`] over creating it on the stack.
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);

impl fmt::Debug for RecordedHistorySegment {
    // Contents are large and not human-meaningful, so only the type name is printed
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}
860
impl AsRef<[u8]> for RecordedHistorySegment {
    /// View the whole segment as a flat byte slice
    #[inline]
    fn as_ref(&self) -> &[u8] {
        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
    }
}

impl AsMut<[u8]> for RecordedHistorySegment {
    /// View the whole segment as a flat mutable byte slice
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        Record::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}
876
impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history.
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate applied to records during archiving (source, total).
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Number of pieces in one archived segment after erasure coding (`128 * 2 / 1 = 256`).
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of recorded history segment in bytes (raw records only, before erasure coding).
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Create a boxed instance directly on the heap, avoiding a large stack allocation.
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // SAFETY: a zeroed allocation is assumed to be a valid `RecordedHistorySegment` —
        // records appear to be plain byte arrays (see `Record::slice_to_repr` usage above);
        // TODO confirm `Record` has no bit-validity requirements beyond that
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}