#[cfg(feature = "alloc")]
mod cow_bytes;
#[cfg(feature = "alloc")]
mod flat_pieces;
#[cfg(feature = "alloc")]
mod piece;

#[cfg(feature = "alloc")]
pub use crate::pieces::flat_pieces::FlatPieces;
#[cfg(feature = "alloc")]
pub use crate::pieces::piece::Piece;
use crate::segments::{RecordedHistorySegment, SegmentIndex, SegmentRoot};
#[cfg(feature = "serde")]
use ::serde::{Deserialize, Deserializer, Serialize, Serializer};
use ab_io_type::trivial_type::TrivialType;
use ab_merkle_tree::balanced::BalancedMerkleTree;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use blake3::OUT_LEN;
use core::array::TryFromSliceError;
use core::hash::Hash;
use core::iter::Step;
#[cfg(feature = "alloc")]
use core::slice;
use core::{fmt, mem};
use derive_more::{
    Add, AddAssign, AsMut, AsRef, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul,
    MulAssign, Sub, SubAssign,
};
#[cfg(feature = "scale-codec")]
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Piece index
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct PieceIndex(u64);

impl Step for PieceIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}

impl PieceIndex {
    /// Size of piece index in bytes
    pub const SIZE: usize = size_of::<u64>();
    /// Piece index 0
    pub const ZERO: PieceIndex = PieceIndex(0);
    /// Piece index 1
    pub const ONE: PieceIndex = PieceIndex(1);

    /// Create new instance
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Create piece index from little-endian bytes
    #[inline]
    pub const fn from_bytes(bytes: [u8; Self::SIZE]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Convert piece index to little-endian bytes
    #[inline]
    pub const fn to_bytes(self) -> [u8; Self::SIZE] {
        self.0.to_le_bytes()
    }

    /// Segment index this piece index belongs to
    #[inline]
    pub const fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::new(self.0 / RecordedHistorySegment::NUM_PIECES as u64)
    }

    /// Position of this piece within its segment
    #[inline]
    pub fn position(&self) -> PiecePosition {
        PiecePosition::from((self.0 % RecordedHistorySegment::NUM_PIECES as u64) as u8)
    }
}

const {
    // `position()` casts the remainder to `u8`, which is only correct while a segment contains
    // exactly 256 pieces
    assert!(RecordedHistorySegment::NUM_PIECES == usize::from(u8::MAX) + 1);
}

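// Illustrative sketch (not part of the original module): how a `PieceIndex` maps onto a segment
// index and an in-segment position, plus the little-endian byte round-trip. The module name is
// hypothetical and the assertions assume `SegmentIndex` implements `Debug` and `PartialEq` and
// that `RecordedHistorySegment::NUM_PIECES == 256`, as asserted above.
#[cfg(test)]
mod piece_index_sketch_tests {
    use super::*;

    #[test]
    fn piece_index_segment_and_position() {
        // Piece 257 is the second piece (position 1) of the second segment (index 1)
        let piece_index = PieceIndex::new(257);
        assert_eq!(piece_index.segment_index(), SegmentIndex::new(1));
        assert_eq!(piece_index.position(), PiecePosition::from(1u8));

        // Little-endian byte round-trip
        let bytes = piece_index.to_bytes();
        assert_eq!(PieceIndex::from_bytes(bytes), piece_index);
    }
}
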
/// Position of a piece in a segment
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct PiecePosition(u8);

impl Step for PiecePosition {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u8::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u8::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u8::backward_checked(start.0, count).map(Self)
    }
}

impl From<PiecePosition> for u16 {
    #[inline]
    fn from(original: PiecePosition) -> Self {
        Self::from(original.0)
    }
}

impl From<PiecePosition> for u32 {
    #[inline]
    fn from(original: PiecePosition) -> Self {
        Self::from(original.0)
    }
}

impl From<PiecePosition> for u64 {
    #[inline]
    fn from(original: PiecePosition) -> Self {
        Self::from(original.0)
    }
}

impl From<PiecePosition> for usize {
    #[inline]
    fn from(original: PiecePosition) -> Self {
        usize::from(original.0)
    }
}

/// Piece offset within a sector
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct PieceOffset(u16);

impl Step for PieceOffset {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u16::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u16::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u16::backward_checked(start.0, count).map(Self)
    }
}

impl From<PieceOffset> for u32 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for u64 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for usize {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        usize::from(original.0)
    }
}

impl PieceOffset {
    /// Piece offset 0
    pub const ZERO: Self = Self(0);
    /// Piece offset 1
    pub const ONE: Self = Self(1);
    /// Size of piece offset in bytes
    pub const SIZE: usize = size_of::<u16>();

    /// Convert piece offset to little-endian bytes
    #[inline]
    pub const fn to_bytes(self) -> [u8; size_of::<u16>()] {
        self.0.to_le_bytes()
    }
}

/// Record chunk contained within a piece
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
#[repr(C)]
pub struct RecordChunk([u8; RecordChunk::SIZE]);

impl fmt::Debug for RecordChunk {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl RecordChunk {
    /// Size of record chunk in bytes
    pub const SIZE: usize = 32;

    /// Convenient conversion from a slice of record chunks to the underlying representation for
    /// efficiency purposes
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; RecordChunk::SIZE]] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a slice of the underlying representation to record chunks for
    /// efficiency purposes
    #[inline]
    pub fn slice_from_repr(value: &[[u8; RecordChunk::SIZE]]) -> &[Self] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a mutable slice of record chunks to the underlying
    /// representation for efficiency purposes
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; RecordChunk::SIZE]] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a mutable slice of the underlying representation to record
    /// chunks for efficiency purposes
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; RecordChunk::SIZE]]) -> &mut [Self] {
        // SAFETY: `RecordChunk` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

/// Record contained within a piece
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct Record([[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]);

impl fmt::Debug for Record {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0.as_flattened() {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl Default for Record {
    #[inline]
    fn default() -> Self {
        Self([Default::default(); Record::NUM_CHUNKS])
    }
}

impl AsRef<[u8]> for Record {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for Record {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl From<&Record> for &[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&Record> for &[u8; Record::SIZE] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as a flat byte array of
        // the same size
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; Record::SIZE]> for &Record {
    #[inline]
    fn from(value: &[u8; Record::SIZE]) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as a flat byte array of
        // the same size
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [u8; Record::SIZE] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as a flat byte array of
        // the same size
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; Record::SIZE]> for &mut Record {
    #[inline]
    fn from(value: &mut [u8; Record::SIZE]) -> Self {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as a flat byte array of
        // the same size
        unsafe { mem::transmute(value) }
    }
}

impl Record {
    /// Number of chunks within one record
    pub const NUM_CHUNKS: usize = 2_usize.pow(15);
    /// Number of s-buckets contained within one record (and by extension one sector), accounting
    /// for the erasure coding rate
    pub const NUM_S_BUCKETS: usize = Record::NUM_CHUNKS
        * RecordedHistorySegment::ERASURE_CODING_RATE.1
        / RecordedHistorySegment::ERASURE_CODING_RATE.0;
    /// Size of a record in bytes
    pub const SIZE: usize = RecordChunk::SIZE * Record::NUM_CHUNKS;

    /// Create a boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // SAFETY: Zeroed memory is a valid value of this type
        unsafe { Box::new_zeroed().assume_init() }
    }

    /// Create a vector of zeroed records without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_zero_vec(length: usize) -> Vec<Self> {
        let mut records = Vec::with_capacity(length);
        {
            let slice = records.spare_capacity_mut();
            // SAFETY: Same memory layout, just exposing the spare capacity as uninitialized bytes
            // of each record
            let slice = unsafe {
                slice::from_raw_parts_mut(
                    slice
                        .as_mut_ptr()
                        .cast::<[[mem::MaybeUninit<u8>; RecordChunk::SIZE]; Record::NUM_CHUNKS]>(),
                    length,
                )
            };
            for byte in slice.as_flattened_mut().as_flattened_mut() {
                byte.write(0);
            }
        }
        // SAFETY: All elements were just initialized with zeroes above
        unsafe {
            records.set_len(records.capacity());
        }

        records
    }

    /// Convenient conversion from a slice of records to the underlying representation for
    /// efficiency purposes
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a slice of the underlying representation to records for
    /// efficiency purposes
    #[inline]
    pub fn slice_from_repr(value: &[[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]]) -> &[Self] {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a mutable slice of records to the underlying representation for
    /// efficiency purposes
    #[inline]
    pub fn slice_mut_to_repr(
        value: &mut [Self],
    ) -> &mut [[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a mutable slice of the underlying representation to records for
    /// efficiency purposes
    #[inline]
    pub fn slice_mut_from_repr(
        value: &mut [[[u8; RecordChunk::SIZE]; Record::NUM_CHUNKS]],
    ) -> &mut [Self] {
        // SAFETY: `Record` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

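// Illustrative sketch (not part of the original module): allocating zero-initialized records
// directly on the heap. The module name is hypothetical; sizes follow from `Record::NUM_CHUNKS`
// and `RecordChunk::SIZE` above, and the tests are gated on the `alloc` feature.
#[cfg(all(test, feature = "alloc"))]
mod record_alloc_sketch_tests {
    use super::*;

    #[test]
    fn zeroed_record_allocation() {
        // `new_boxed()` avoids constructing the 1 MiB record on the stack
        let record = Record::new_boxed();
        assert_eq!(record.as_flattened().len(), Record::SIZE);
        assert!(record.as_flattened().iter().all(|&byte| byte == 0));

        // `new_zero_vec()` zeroes records in place in the vector's buffer
        let records = Record::new_zero_vec(3);
        assert_eq!(records.len(), 3);
        assert!(records[0].as_flattened().iter().all(|&byte| byte == 0));
    }
}
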
/// Record root contained within a piece
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct RecordRoot([u8; RecordRoot::SIZE]);

impl fmt::Debug for RecordRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordRootBinary(#[serde(with = "BigArray")] [u8; RecordRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordRootHex(#[serde(with = "hex")] [u8; RecordRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordRootHex(self.0).serialize(serializer)
        } else {
            RecordRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordRootHex::deserialize(deserializer)?.0
        } else {
            RecordRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordRoot {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordRoot> for &[u8; RecordRoot::SIZE] {
    #[inline]
    fn from(value: &RecordRoot) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordRoot::SIZE]> for &RecordRoot {
    #[inline]
    fn from(value: &[u8; RecordRoot::SIZE]) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordRoot> for &mut [u8; RecordRoot::SIZE] {
    #[inline]
    fn from(value: &mut RecordRoot) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordRoot::SIZE]> for &mut RecordRoot {
    #[inline]
    fn from(value: &mut [u8; RecordRoot::SIZE]) -> Self {
        // SAFETY: `RecordRoot` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl RecordRoot {
    /// Size of record root in bytes
    pub const SIZE: usize = 32;

    /// Validate that this record root belongs to the segment with `segment_root` at piece
    /// `position`, using the provided `record_proof`
    pub fn is_valid(
        &self,
        segment_root: &SegmentRoot,
        record_proof: &RecordProof,
        position: PiecePosition,
    ) -> bool {
        BalancedMerkleTree::<{ RecordedHistorySegment::NUM_PIECES }>::verify(
            segment_root,
            record_proof,
            usize::from(position),
            self.0,
        )
    }
}

/// Record chunks root contained within a piece
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
pub struct RecordChunksRoot([u8; RecordChunksRoot::SIZE]);

impl fmt::Debug for RecordChunksRoot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordChunksRootBinary(#[serde(with = "BigArray")] [u8; RecordChunksRoot::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordChunksRootHex(#[serde(with = "hex")] [u8; RecordChunksRoot::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordChunksRoot {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordChunksRootHex(self.0).serialize(serializer)
        } else {
            RecordChunksRootBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordChunksRoot {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordChunksRootHex::deserialize(deserializer)?.0
        } else {
            RecordChunksRootBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordChunksRoot {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordChunksRoot {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordChunksRoot {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordChunksRoot {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordChunksRoot> for &[u8; RecordChunksRoot::SIZE] {
    #[inline]
    fn from(value: &RecordChunksRoot) -> Self {
        // SAFETY: Single-field struct with the same memory layout as the inner byte array
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordChunksRoot::SIZE]> for &RecordChunksRoot {
    #[inline]
    fn from(value: &[u8; RecordChunksRoot::SIZE]) -> Self {
        // SAFETY: Single-field struct with the same memory layout as the inner byte array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordChunksRoot> for &mut [u8; RecordChunksRoot::SIZE] {
    #[inline]
    fn from(value: &mut RecordChunksRoot) -> Self {
        // SAFETY: Single-field struct with the same memory layout as the inner byte array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordChunksRoot::SIZE]> for &mut RecordChunksRoot {
    #[inline]
    fn from(value: &mut [u8; RecordChunksRoot::SIZE]) -> Self {
        // SAFETY: Single-field struct with the same memory layout as the inner byte array
        unsafe { mem::transmute(value) }
    }
}

impl RecordChunksRoot {
    /// Size of record chunks root in bytes
    pub const SIZE: usize = 32;
}

/// Record proof contained within a piece
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct RecordProof([[u8; OUT_LEN]; RecordProof::NUM_HASHES]);

impl fmt::Debug for RecordProof {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[")?;
        for hash in self.0 {
            for byte in hash {
                write!(f, "{byte:02x}")?;
            }
            write!(f, ", ")?;
        }
        write!(f, "]")?;
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofBinary([[u8; OUT_LEN]; RecordProof::NUM_HASHES]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofHexHash(#[serde(with = "hex")] [u8; OUT_LEN]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordProofHex([RecordProofHexHash; RecordProof::NUM_HASHES]);

#[cfg(feature = "serde")]
impl Serialize for RecordProof {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordProofHex(unsafe {
                // SAFETY: `RecordProofHexHash` is a single-field wrapper around `[u8; OUT_LEN]`
                // with the same memory layout
                mem::transmute::<
                    [[u8; OUT_LEN]; RecordProof::NUM_HASHES],
                    [RecordProofHexHash; RecordProof::NUM_HASHES],
                >(self.0)
            })
            .serialize(serializer)
        } else {
            RecordProofBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordProof {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            unsafe {
                // SAFETY: `RecordProofHexHash` is a single-field wrapper around `[u8; OUT_LEN]`
                // with the same memory layout
                mem::transmute::<
                    [RecordProofHexHash; RecordProof::NUM_HASHES],
                    [[u8; OUT_LEN]; RecordProof::NUM_HASHES],
                >(RecordProofHex::deserialize(deserializer)?.0)
            }
        } else {
            RecordProofBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordProof {
    #[inline]
    fn default() -> Self {
        Self([[0; OUT_LEN]; RecordProof::NUM_HASHES])
    }
}

impl AsRef<[u8]> for RecordProof {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for RecordProof {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl From<&RecordProof> for &[u8; RecordProof::SIZE] {
    #[inline]
    fn from(value: &RecordProof) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and has the same memory layout as a flat byte
        // array of the same size
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordProof::SIZE]> for &RecordProof {
    #[inline]
    fn from(value: &[u8; RecordProof::SIZE]) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and has the same memory layout as a flat byte
        // array of the same size
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordProof> for &mut [u8; RecordProof::SIZE] {
    #[inline]
    fn from(value: &mut RecordProof) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and has the same memory layout as a flat byte
        // array of the same size
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordProof::SIZE]> for &mut RecordProof {
    #[inline]
    fn from(value: &mut [u8; RecordProof::SIZE]) -> Self {
        // SAFETY: `RecordProof` is `#[repr(C)]` and has the same memory layout as a flat byte
        // array of the same size
        unsafe { mem::transmute(value) }
    }
}

impl RecordProof {
    /// Size of record proof in bytes
    pub const SIZE: usize = OUT_LEN * Self::NUM_HASHES;
    const NUM_HASHES: usize = RecordedHistorySegment::NUM_PIECES.ilog2() as usize;
}

/// A piece of archival history.
///
/// Internally a piece consists of a record followed by the record root, the parity chunks root
/// and the record proof that ties the record to a particular segment root.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Deref, DerefMut, AsRef, AsMut)]
#[repr(C)]
pub struct PieceArray([u8; PieceArray::SIZE]);

impl fmt::Debug for PieceArray {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for byte in self.0 {
            write!(f, "{byte:02x}")?;
        }
        Ok(())
    }
}

impl Default for PieceArray {
    #[inline]
    fn default() -> Self {
        Self([0u8; Self::SIZE])
    }
}

impl AsRef<[u8]> for PieceArray {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for PieceArray {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&PieceArray> for &[u8; PieceArray::SIZE] {
    #[inline]
    fn from(value: &PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; PieceArray::SIZE]> for &PieceArray {
    #[inline]
    fn from(value: &[u8; PieceArray::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut PieceArray> for &mut [u8; PieceArray::SIZE] {
    #[inline]
    fn from(value: &mut PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; PieceArray::SIZE]> for &mut PieceArray {
    #[inline]
    fn from(value: &mut [u8; PieceArray::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

impl PieceArray {
    /// Size of a piece in bytes
    pub const SIZE: usize =
        Record::SIZE + RecordRoot::SIZE + RecordChunksRoot::SIZE + RecordProof::SIZE;

    /// Create a boxed value without hitting stack overflow
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // SAFETY: Zeroed memory is a valid value of this type
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }

    /// Validate the proof embedded within the piece against the segment root
    pub fn is_valid(&self, segment_root: &SegmentRoot, position: PiecePosition) -> bool {
        let (record, &record_root, parity_chunks_root, record_proof) = self.split();

        let source_record_merkle_tree_root = BalancedMerkleTree::compute_root_only(record);
        let record_merkle_tree_root = BalancedMerkleTree::compute_root_only(&[
            source_record_merkle_tree_root,
            **parity_chunks_root,
        ]);

        if record_merkle_tree_root != *record_root {
            return false;
        }

        record_root.is_valid(segment_root, record_proof, position)
    }

    /// Split piece into its components
    #[inline]
    pub fn split(&self) -> (&Record, &RecordRoot, &RecordChunksRoot, &RecordProof) {
        let (record, extra) = self.0.split_at(Record::SIZE);
        let (root, extra) = extra.split_at(RecordRoot::SIZE);
        let (parity_chunks_root, proof) = extra.split_at(RecordChunksRoot::SIZE);

        let record = <&[u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let root = <&[u8; RecordRoot::SIZE]>::try_from(root)
            .expect("Slice of memory has correct length; qed");
        let parity_chunks_root = <&[u8; RecordChunksRoot::SIZE]>::try_from(parity_chunks_root)
            .expect("Slice of memory has correct length; qed");
        let proof = <&[u8; RecordProof::SIZE]>::try_from(proof)
            .expect("Slice of memory has correct length; qed");

        (
            record.into(),
            root.into(),
            parity_chunks_root.into(),
            proof.into(),
        )
    }

    /// Split piece into its mutable components
    #[inline]
    pub fn split_mut(
        &mut self,
    ) -> (
        &mut Record,
        &mut RecordRoot,
        &mut RecordChunksRoot,
        &mut RecordProof,
    ) {
        let (record, extra) = self.0.split_at_mut(Record::SIZE);
        let (root, extra) = extra.split_at_mut(RecordRoot::SIZE);
        let (parity_chunks_root, proof) = extra.split_at_mut(RecordChunksRoot::SIZE);

        let record = <&mut [u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let root = <&mut [u8; RecordRoot::SIZE]>::try_from(root)
            .expect("Slice of memory has correct length; qed");
        let parity_chunks_root = <&mut [u8; RecordChunksRoot::SIZE]>::try_from(parity_chunks_root)
            .expect("Slice of memory has correct length; qed");
        let proof = <&mut [u8; RecordProof::SIZE]>::try_from(proof)
            .expect("Slice of memory has correct length; qed");

        (
            record.into(),
            root.into(),
            parity_chunks_root.into(),
            proof.into(),
        )
    }

    /// Record contained within the piece
    #[inline]
    pub fn record(&self) -> &Record {
        self.split().0
    }

    /// Mutable record contained within the piece
    #[inline]
    pub fn record_mut(&mut self) -> &mut Record {
        self.split_mut().0
    }

    /// Record root contained within the piece
    #[inline]
    pub fn root(&self) -> &RecordRoot {
        self.split().1
    }

    /// Mutable record root contained within the piece
    #[inline]
    pub fn root_mut(&mut self) -> &mut RecordRoot {
        self.split_mut().1
    }

    /// Parity chunks root contained within the piece
    #[inline]
    pub fn parity_chunks_root(&self) -> &RecordChunksRoot {
        self.split().2
    }

    /// Mutable parity chunks root contained within the piece
    #[inline]
    pub fn parity_chunks_root_mut(&mut self) -> &mut RecordChunksRoot {
        self.split_mut().2
    }

    /// Record proof contained within the piece
    #[inline]
    pub fn proof(&self) -> &RecordProof {
        self.split().3
    }

    /// Mutable record proof contained within the piece
    #[inline]
    pub fn proof_mut(&mut self) -> &mut RecordProof {
        self.split_mut().3
    }

    /// Convenient conversion from a slice of piece arrays to the underlying representation for
    /// efficiency purposes
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a slice of the underlying representation to piece arrays for
    /// efficiency purposes
    #[inline]
    pub fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a mutable slice of piece arrays to the underlying representation
    /// for efficiency purposes
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; Self::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from a mutable slice of the underlying representation to piece arrays
    /// for efficiency purposes
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; Self::SIZE]]) -> &mut [Self] {
        // SAFETY: `PieceArray` is `#[repr(C)]` and has the same memory layout as the inner array
        unsafe { mem::transmute(value) }
    }
}

#[cfg(feature = "alloc")]
impl From<Box<PieceArray>> for Vec<u8> {
    fn from(value: Box<PieceArray>) -> Self {
        let mut value = mem::ManuallyDrop::new(value);
        // SAFETY: The piece is a single contiguous `PieceArray::SIZE`-byte allocation made by the
        // global allocator, and `ManuallyDrop` above prevents it from being freed twice
        unsafe { Vec::from_raw_parts(value.as_mut_ptr(), PieceArray::SIZE, PieceArray::SIZE) }
    }
}
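
// Illustrative sketch (not part of the original module): the piece layout as exposed by `split()`,
// with sizes that follow directly from the constants defined above. The module name is
// hypothetical and the test is gated on the `alloc` feature because it uses `new_boxed()`.
#[cfg(all(test, feature = "alloc"))]
mod piece_array_layout_sketch_tests {
    use super::*;

    #[test]
    fn split_covers_the_whole_piece() {
        // A piece is its record, record root, parity chunks root and record proof, in that order
        assert_eq!(
            PieceArray::SIZE,
            Record::SIZE + RecordRoot::SIZE + RecordChunksRoot::SIZE + RecordProof::SIZE
        );

        let piece = PieceArray::new_boxed();
        let (record, root, parity_chunks_root, proof) = piece.split();
        assert_eq!(record.as_ref().len(), Record::SIZE);
        assert_eq!(root.as_ref().len(), RecordRoot::SIZE);
        assert_eq!(parity_chunks_root.as_ref().len(), RecordChunksRoot::SIZE);
        assert_eq!(proof.as_ref().len(), RecordProof::SIZE);
    }
}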