1#[cfg(feature = "alloc")]
4pub mod owned;
5
6use crate::block::align_to_and_ensure_zero_padding;
7#[cfg(feature = "alloc")]
8use crate::block::body::owned::{
9 GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
10 OwnedLeafShardBody,
11};
12use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::SegmentRoot;
16use crate::shard::ShardKind;
17use crate::transaction::Transaction;
18use ab_io_type::trivial_type::TrivialType;
19use ab_merkle_tree::balanced::BalancedMerkleTree;
20use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
21use core::iter::TrustedLen;
22use core::{fmt, slice};
23use derive_more::From;
24use yoke::Yokeable;
25
/// Generic block body.
///
/// Implemented by [`BeaconChainBody`], [`IntermediateShardBody`] and [`LeafShardBody`].
pub trait GenericBlockBody<'a>
where
    Self: Copy + Into<BlockBody<'a>> + fmt::Debug,
{
    /// Shard kind of the shard this body belongs to
    const SHARD_KIND: ShardKind;

    /// Owned version of this block body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute the Merkle root of the body
    fn root(&self) -> Blake3Hash;
}
47
48#[inline]
50pub fn compute_segments_root<Item, Iter>(segment_roots: Iter) -> Blake3Hash
51where
52 Item: AsRef<[u8]>,
53 Iter: IntoIterator<Item = Item>,
54{
55 const _: () = {
59 assert!(u32::MAX == 4294967295);
60 };
61 let root = UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
63 segment_roots.into_iter().map(|segment_root| {
64 blake3::hash(segment_root.as_ref())
67 }),
68 );
69
70 Blake3Hash::new(root.unwrap_or_default())
71}
72
/// Information about an intermediate shard block, as included in the beacon chain block body
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Block header
    pub header: IntermediateShardHeader<'a>,
    /// Proof tying the segment roots below to the header's body root
    /// (only present when there is at least one segment root, see `try_from_bytes`)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots produced by the intermediate shard itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Segment roots collected from the shard's child shards
    pub child_segment_roots: &'a [SegmentRoot],
}

impl IntermediateShardBlockInfo<'_> {
    /// Compute the Merkle root of this block info.
    ///
    /// Commits to the header root plus Merkle roots of own and child segment roots.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // One leaf for the header root and one per group of segment roots
        const MAX_N: usize = 3;
        let leaves: [_; MAX_N] = [
            ***self.header.root(),
            *compute_segments_root(self.own_segment_roots),
            *compute_segments_root(self.child_segment_roots),
        ];

        let root = UnbalancedMerkleTree::compute_root_only::<{ MAX_N as u64 }, _, _>(leaves)
            .expect("The list is not empty; qed");

        Blake3Hash::new(root)
    }
}

/// Collection of intermediate shard blocks of the beacon chain block body.
///
/// Stores an already-validated encoding; individual blocks are decoded lazily by
/// [`Self::iter()`].
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of blocks encoded in `bytes`
    num_blocks: usize,
    // Encoded blocks: per-block counts, then `u64`-aligned headers, proofs and segment roots
    bytes: &'a [u8],
}
111
112impl<'a> IntermediateShardBlocksInfo<'a> {
113 #[inline]
119 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
120 let num_blocks = bytes.split_off(..size_of::<u16>())?;
134 let num_blocks = usize::from(u16::from_le_bytes([num_blocks[1], num_blocks[2]]));
135
136 let bytes_start = bytes;
137
138 let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() + size_of::<u16>()))?;
139
140 let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
141
142 for _ in 0..num_blocks {
143 let num_own_segment_roots = usize::from(counts[0]);
144 let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
145 counts = &counts[3..];
146
147 (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
148
149 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
150
151 if num_own_segment_roots + num_child_segment_roots > 0 {
152 let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
153 }
154
155 let _segment_roots = remainder.split_off(
156 ..(num_own_segment_roots + num_child_segment_roots) * SegmentRoot::SIZE,
157 )?;
158 }
159
160 Some((
161 Self {
162 num_blocks,
163 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
164 },
165 remainder,
166 ))
167 }
168
    /// Iterator over intermediate shard blocks in the collection.
    ///
    /// Decoding relies on invariants already validated in [`Self::try_from_bytes()`], hence
    /// the unchecked splits and `expect`s below.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + Clone + 'a
    {
        // SAFETY: Length was validated in the constructor
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
        };

        (0..self.num_blocks).map(move |_| {
            // Per-block counts: `u8` own segment roots + `u16` child segment roots
            let num_own_segment_roots = usize::from(counts[0]);
            let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
            counts = &counts[3..];

            let header;
            (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            // The proof is only present when there is at least one segment root
            let segment_roots_proof = if num_own_segment_roots + num_child_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: Length was validated in the constructor
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: Slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: Length was validated in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of that size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            let child_segment_roots;
            // SAFETY: Length was validated in the constructor
            (child_segment_roots, remainder) = unsafe {
                remainder.split_at_unchecked(num_child_segment_roots * SegmentRoot::SIZE)
            };
            // SAFETY: Slice contains exactly `num_child_segment_roots` chunks of that size
            let child_segment_roots = unsafe {
                slice::from_raw_parts(
                    child_segment_roots
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>(),
                    num_child_segment_roots,
                )
            };
            let child_segment_roots = SegmentRoot::slice_from_repr(child_segment_roots);

            IntermediateShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
                child_segment_roots,
            }
        })
    }

    /// Number of blocks in the collection
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if the collection contains no blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }
259
260 #[inline]
264 pub fn segments_root(&self) -> Blake3Hash {
265 compute_segments_root(
266 self.iter()
267 .flat_map(|shard_block_info| {
268 [
269 shard_block_info.own_segment_roots,
270 shard_block_info.child_segment_roots,
271 ]
272 })
273 .flatten(),
274 )
275 }
276
277 #[inline]
281 pub fn headers_root(&self) -> Blake3Hash {
282 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
283 self.iter().map(|shard_block_info| {
285 blake3::hash(shard_block_info.header.root().as_ref())
290 }),
291 )
292 .unwrap_or_default();
293
294 Blake3Hash::new(root)
295 }
296
297 #[inline]
301 pub fn root(&self) -> Blake3Hash {
302 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
303 self.iter().map(|shard_block_info| shard_block_info.root()),
304 )
305 .unwrap_or_default();
306
307 Blake3Hash::new(root)
308 }
309}
310
/// Block body of the beacon chain
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    /// Segment roots produced by the beacon chain itself
    own_segment_roots: &'a [SegmentRoot],
    /// Intermediate shard blocks included in this block
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    /// Proof-of-time checkpoints included in this block (always at least one, see
    /// `try_from_bytes`)
    pot_checkpoints: &'a [PotCheckpoints],
}

impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: ShardKind = ShardKind::BeaconChain;

    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent method of the same name
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent method of the same name
        self.root()
    }
}
342
343impl<'a> BeaconChainBody<'a> {
344 #[inline]
350 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
351 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
359 let num_pot_checkpoints =
361 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
362
363 if num_pot_checkpoints == 0 {
364 return None;
365 }
366
367 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
368 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
369
370 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
371 let own_segment_roots = unsafe {
373 slice::from_raw_parts(
374 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
375 num_own_segment_roots,
376 )
377 };
378 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
379
380 let (intermediate_shard_blocks, remainder) =
381 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
382
383 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
384 let pot_checkpoints = unsafe {
386 slice::from_raw_parts(
387 pot_checkpoints
388 .as_ptr()
389 .cast::<[u8; PotCheckpoints::SIZE]>(),
390 num_pot_checkpoints,
391 )
392 };
393 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
394
395 let body = Self {
396 pot_checkpoints,
397 own_segment_roots,
398 intermediate_shard_blocks,
399 };
400
401 if !body.is_internally_consistent() {
402 return None;
403 }
404
405 Some((body, remainder))
406 }
407
    /// Check block body's internal consistency.
    ///
    /// Verifies that the segment roots attached to each intermediate shard block are committed
    /// to by that block's body root through the provided Merkle proof.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        self.intermediate_shard_blocks
            .iter()
            .all(|intermediate_shard_block| {
                // Blocks without segment roots carry no proof and nothing to verify
                let Some(&segment_roots_proof) = intermediate_shard_block.segment_roots_proof
                else {
                    return true;
                };

                // The combined root of own+child segment roots is leaf 0, with the proof as
                // its sibling, under the shard block's body root
                BalancedMerkleTree::<2>::verify(
                    &intermediate_shard_block.header.result.body_root,
                    &[segment_roots_proof],
                    0,
                    BalancedMerkleTree::compute_root_only(&[
                        *compute_segments_root(intermediate_shard_block.own_segment_roots),
                        *compute_segments_root(intermediate_shard_block.child_segment_roots),
                    ]),
                )
            })
    }
433
434 #[inline]
437 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
438 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
446 let num_pot_checkpoints =
448 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
449
450 if num_pot_checkpoints == 0 {
451 return None;
452 }
453
454 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
455 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
456
457 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
458 let own_segment_roots = unsafe {
460 slice::from_raw_parts(
461 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
462 num_own_segment_roots,
463 )
464 };
465 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
466
467 let (intermediate_shard_blocks, remainder) =
468 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
469
470 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
471 let pot_checkpoints = unsafe {
473 slice::from_raw_parts(
474 pot_checkpoints
475 .as_ptr()
476 .cast::<[u8; PotCheckpoints::SIZE]>(),
477 num_pot_checkpoints,
478 )
479 };
480 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
481
482 Some((
483 Self {
484 pot_checkpoints,
485 own_segment_roots,
486 intermediate_shard_blocks,
487 },
488 remainder,
489 ))
490 }
491
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBeaconChainBody {
        OwnedBeaconChainBody::new(
            self.own_segment_roots.iter().copied(),
            self.intermediate_shard_blocks.iter(),
            self.pot_checkpoints,
        )
        .expect("`self` is always a valid invariant; qed")
    }

    /// Segment roots produced by the beacon chain itself
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Intermediate shard blocks included in this block
    #[inline(always)]
    pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
        &self.intermediate_shard_blocks
    }

    /// Proof-of-time checkpoints included in this block
    #[inline(always)]
    pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
        self.pot_checkpoints
    }
522
523 #[inline]
525 pub fn root(&self) -> Blake3Hash {
526 let root = BalancedMerkleTree::compute_root_only(&[
527 *compute_segments_root(self.own_segment_roots),
528 *self.intermediate_shard_blocks.segments_root(),
529 *self.intermediate_shard_blocks.headers_root(),
530 blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
531 .into(),
532 ]);
533
534 Blake3Hash::new(root)
535 }
536}
537
/// Information about a leaf shard block, as included in the intermediate shard block body
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Block header
    pub header: LeafShardHeader<'a>,
    /// Proof tying the segment roots below to the header's body root
    /// (only present when there is at least one segment root, see `try_from_bytes`)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots produced by the leaf shard itself
    pub own_segment_roots: &'a [SegmentRoot],
}

/// Collection of leaf shard blocks of the intermediate shard block body.
///
/// Stores an already-validated encoding; individual blocks are decoded lazily by
/// [`Self::iter()`].
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of blocks encoded in `bytes`
    num_blocks: usize,
    // Encoded blocks: per-block counts, then `u64`-aligned headers, proofs and segment roots
    bytes: &'a [u8],
}
555
556impl<'a> LeafShardBlocksInfo<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the encoding is
    /// malformed.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of blocks is a little-endian `u16` prefix
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // Per-block counts: `u8` own segment roots
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Walk all blocks once to validate the encoding, so `iter()` can decode unchecked
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // The proof is only present when there is at least one segment root to prove
            if num_own_segment_roots > 0 {
                let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
            }

            let _own_segment_roots =
                remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Iterator over leaf shard blocks in the collection.
    ///
    /// Decoding relies on invariants already validated in [`Self::try_from_bytes()`], hence
    /// the unchecked splits and `expect`s below.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + Clone + 'a {
        // SAFETY: Length was validated in the constructor
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            // The proof is only present when there is at least one segment root
            let segment_roots_proof = if num_own_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: Length was validated in the constructor
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: Slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: Length was validated in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of that size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            LeafShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
            }
        })
    }

    /// Number of blocks in the collection
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if the collection contains no blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }
680
681 #[inline]
685 pub fn segments_root(&self) -> Blake3Hash {
686 compute_segments_root(
687 self.iter()
688 .flat_map(|shard_block_info| shard_block_info.own_segment_roots),
689 )
690 }
691
692 #[inline]
696 pub fn headers_root(&self) -> Blake3Hash {
697 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
698 self.iter().map(|shard_block_info| {
699 blake3::hash(shard_block_info.header.root().as_ref())
704 }),
705 )
706 .unwrap_or_default();
707
708 Blake3Hash::new(root)
709 }
710}
711
/// Block body of an intermediate shard
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    /// Segment roots produced by the intermediate shard itself
    own_segment_roots: &'a [SegmentRoot],
    /// Leaf shard blocks included in this block
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}

impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: ShardKind = ShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent method of the same name
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent method of the same name
        self.root()
    }
}
740
741impl<'a> IntermediateShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Checks internal consistency and returns an instance plus remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of own segment roots is a `u8` prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of that size
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            leaf_shard_blocks,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// Verifies that the segment roots attached to each leaf shard block are committed to by
    /// that block's body root through the provided Merkle proof.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        self.leaf_shard_blocks.iter().all(|leaf_shard_block| {
            // Blocks without segment roots carry no proof and nothing to verify
            let Some(&segment_roots_proof) = leaf_shard_block.segment_roots_proof else {
                return true;
            };

            BalancedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                &[segment_roots_proof],
                0,
                *compute_segments_root(leaf_shard_block.own_segment_roots),
            )
        })
    }

    /// Same as [`Self::try_from_bytes()`], but without checking internal consistency
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of that size
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }

    /// Segment roots produced by the intermediate shard itself
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Leaf shard blocks included in this block
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Proof for segment roots included in this body.
    ///
    /// This is the leaf shard headers root, which pairs with the segments subtree in
    /// [`Self::root()`] — presumably its Merkle sibling; confirm against the verifier side.
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.leaf_shard_blocks.headers_root()
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        OwnedIntermediateShardBody::new(
            self.own_segment_roots.iter().copied(),
            self.leaf_shard_blocks.iter(),
        )
        .expect("`self` is always a valid invariant; qed")
    }
862
863 #[inline]
865 pub fn root(&self) -> Blake3Hash {
866 let root = UnbalancedMerkleTree::compute_root_only::<3, _, _>([
867 *compute_segments_root(self.own_segment_roots),
868 *self.leaf_shard_blocks.segments_root(),
869 *self.leaf_shard_blocks.headers_root(),
870 ])
871 .expect("List is not empty; qed");
872
873 Blake3Hash::new(root)
874 }
875}
876
/// Collection of transactions stored in a block body.
///
/// Stores an already-validated encoding; transactions are decoded lazily by [`Self::iter()`].
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    // Number of transactions encoded in `bytes`
    num_transactions: usize,
    // Encoded transactions, each followed by zero padding up to `u128` alignment
    bytes: &'a [u8],
}
883
884impl<'a> Transactions<'a> {
885 #[inline]
891 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
892 let num_transactions = bytes.split_off(..size_of::<u32>())?;
900 let num_transactions = u32::from_le_bytes([
901 num_transactions[0],
902 num_transactions[1],
903 num_transactions[2],
904 num_transactions[3],
905 ]) as usize;
906
907 let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
908 let bytes_start = remainder;
909
910 for _ in 0..num_transactions {
911 (_, remainder) = Transaction::try_from_bytes(bytes)?;
912 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
913 }
914
915 Some((
916 Self {
917 num_transactions,
918 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
919 },
920 remainder,
921 ))
922 }
923
    /// Iterator over transactions in the collection.
    ///
    /// Decoding relies on invariants already validated in [`Self::try_from_bytes()`].
    #[inline]
    pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + Clone + 'a {
        let mut remainder = self.bytes;

        (0..self.num_transactions).map(move |_| {
            // SAFETY: A valid encoding was verified in the constructor
            let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };

            // Skip past the transaction and its zero padding up to `u128` alignment
            remainder = &remainder[transaction.encoded_size()..];
            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
                .expect("Already checked in constructor; qed");

            transaction
        })
    }

    /// Number of transactions in the collection
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_transactions
    }

    /// Returns `true` if the collection contains no transactions
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_transactions == 0
    }
952
953 #[inline]
957 pub fn root(&self) -> Blake3Hash {
958 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
959 self.iter().map(|transaction| {
960 blake3::hash(transaction.hash().as_ref())
964 }),
965 )
966 .unwrap_or_default();
967
968 Blake3Hash::new(root)
969 }
970}
971
/// Block body of a leaf shard
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    /// Segment roots produced by the leaf shard itself
    own_segment_roots: &'a [SegmentRoot],
    /// Transactions included in this block
    transactions: Transactions<'a>,
}

impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    const SHARD_KIND: ShardKind = ShardKind::LeafShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent method of the same name
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent method of the same name
        self.root()
    }
}
1000
1001impl<'a> LeafShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Checks internal consistency and returns an instance plus remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of own segment roots is a `u8` prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of that size
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// Leaf shard bodies have no cross-field invariants beyond what parsing already verified,
    /// so this is always `true`.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Same as [`Self::try_from_bytes()`], but without checking internal consistency
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of that size
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                transactions,
            },
            remainder,
        ))
    }

    /// Segment roots produced by the leaf shard itself
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Transactions included in this block
    #[inline(always)]
    pub fn transactions(&self) -> &Transactions<'a> {
        &self.transactions
    }

    /// Proof for segment roots included in this body.
    ///
    /// This is the transactions root, which pairs with the segments root in [`Self::root()`]
    /// — presumably its Merkle sibling; confirm against the verifier side.
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.transactions.root()
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedLeafShardBody {
        let mut builder = OwnedLeafShardBody::init(self.own_segment_roots.iter().copied())
            .expect("`self` is always a valid invariant; qed");
        for transaction in self.transactions.iter() {
            builder
                .add_transaction(transaction)
                .expect("`self` is always a valid invariant; qed");
        }

        builder.finish()
    }
1116
1117 #[inline]
1119 pub fn root(&self) -> Blake3Hash {
1120 let root = BalancedMerkleTree::compute_root_only(&[
1121 *compute_segments_root(self.own_segment_roots),
1122 *self.transactions.root(),
1123 ]);
1124
1125 Blake3Hash::new(root)
1126 }
1127}
1128
/// Block body that corresponds to a block of any shard kind
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Body of a beacon chain block
    BeaconChain(BeaconChainBody<'a>),
    /// Body of an intermediate shard block
    IntermediateShard(IntermediateShardBody<'a>),
    /// Body of a leaf shard block
    LeafShard(LeafShardBody<'a>),
}
1142
1143impl<'a> BlockBody<'a> {
1144 #[inline]
1151 pub fn try_from_bytes(bytes: &'a [u8], shard_kind: ShardKind) -> Option<(Self, &'a [u8])> {
1152 match shard_kind {
1153 ShardKind::BeaconChain => {
1154 let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1155 Some((Self::BeaconChain(body), remainder))
1156 }
1157 ShardKind::IntermediateShard => {
1158 let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1159 Some((Self::IntermediateShard(body), remainder))
1160 }
1161 ShardKind::LeafShard => {
1162 let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1163 Some((Self::LeafShard(body), remainder))
1164 }
1165 ShardKind::Phantom | ShardKind::Invalid => {
1166 None
1168 }
1169 }
1170 }
1171
    /// Check block body's internal consistency by delegating to the concrete body type
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        match self {
            Self::BeaconChain(body) => body.is_internally_consistent(),
            Self::IntermediateShard(body) => body.is_internally_consistent(),
            Self::LeafShard(body) => body.is_internally_consistent(),
        }
    }
1184
1185 #[inline]
1188 pub fn try_from_bytes_unchecked(
1189 bytes: &'a [u8],
1190 shard_kind: ShardKind,
1191 ) -> Option<(Self, &'a [u8])> {
1192 match shard_kind {
1193 ShardKind::BeaconChain => {
1194 let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1195 Some((Self::BeaconChain(body), remainder))
1196 }
1197 ShardKind::IntermediateShard => {
1198 let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1199 Some((Self::IntermediateShard(body), remainder))
1200 }
1201 ShardKind::LeafShard => {
1202 let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1203 Some((Self::LeafShard(body), remainder))
1204 }
1205 ShardKind::Phantom | ShardKind::Invalid => {
1206 None
1208 }
1209 }
1210 }
1211
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBlockBody {
        match self {
            Self::BeaconChain(body) => body.to_owned().into(),
            Self::IntermediateShard(body) => body.to_owned().into(),
            Self::LeafShard(body) => body.to_owned().into(),
        }
    }

    /// Compute the Merkle root of the body by delegating to the concrete body type
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        match self {
            Self::BeaconChain(body) => body.root(),
            Self::IntermediateShard(body) => body.root(),
            Self::LeafShard(body) => body.root(),
        }
    }
1235}