1#[cfg(feature = "alloc")]
4pub mod owned;
5
6use crate::block::align_to_and_ensure_zero_padding;
7#[cfg(feature = "alloc")]
8use crate::block::body::owned::{
9 GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
10 OwnedLeafShardBody,
11};
12use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::SegmentRoot;
16use crate::shard::RealShardKind;
17use crate::transaction::Transaction;
18use ab_blake3::single_block_hash;
19use ab_io_type::trivial_type::TrivialType;
20use ab_merkle_tree::balanced::BalancedMerkleTree;
21use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
22use core::iter::TrustedLen;
23use core::{fmt, slice};
24use derive_more::From;
25use yoke::Yokeable;
26
/// Generic block body, implemented by the body types of all shard kinds
/// ([`BeaconChainBody`], [`IntermediateShardBody`] and [`LeafShardBody`]).
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind this block body belongs to
    const SHARD_KIND: RealShardKind;

    /// Owned (heap-allocated) counterpart of this body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Convert this borrowed body into its owned counterpart
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute the Merkle root of this block body
    fn root(&self) -> Blake3Hash;
}
48
49#[inline]
51pub fn compute_segments_root<'a, Iter>(segment_roots: Iter) -> Blake3Hash
52where
53 Iter: IntoIterator<Item = &'a SegmentRoot>,
54{
55 #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
59 #[expect(clippy::eq_op, reason = "Intentional documentation")]
60 const _: () = {
61 assert!(u32::MAX == 4294967295);
62 };
63 let root = UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
65 segment_roots.into_iter().map(|segment_root| {
66 single_block_hash(segment_root.as_ref())
69 .expect("Less than a single block worth of bytes; qed")
70 }),
71 );
72
73 Blake3Hash::new(root.unwrap_or_default())
74}
75
/// Information about a single intermediate shard block as carried inside a beacon chain body
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Header of the intermediate shard block
    pub header: IntermediateShardHeader<'a>,
    /// Merkle proof tying the segment roots below to the header's body root; only present when
    /// there is at least one own or child segment root (see `try_from_bytes()` of the
    /// containing collection)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots of the intermediate shard itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Segment roots of child (leaf) shards
    pub child_segment_roots: &'a [SegmentRoot],
}
88
89impl IntermediateShardBlockInfo<'_> {
90 #[inline]
92 pub fn root(&self) -> Blake3Hash {
93 const MAX_N: usize = 3;
95 let leaves: [_; MAX_N] = [
96 ***self.header.root(),
97 *compute_segments_root(self.own_segment_roots),
98 *compute_segments_root(self.child_segment_roots),
99 ];
100
101 let root = UnbalancedMerkleTree::compute_root_only::<{ MAX_N as u64 }, _, _>(leaves)
102 .expect("The list is not empty; qed");
103
104 Blake3Hash::new(root)
105 }
106}
107
/// A collection of intermediate shard blocks stored as a compact byte slice and parsed lazily
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    /// Number of blocks encoded in `bytes`
    num_blocks: usize,
    /// Byte representation whose layout was validated by `try_from_bytes()`; the unchecked
    /// parsing in `iter()` relies on that validation
    bytes: &'a [u8],
}
114
impl<'a> IntermediateShardBlocksInfo<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are
    /// malformed.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Little-endian `u16` prefix with the number of blocks
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        // Remember where the collection's payload starts so its exact length can be recorded
        // after the validation walk below
        let bytes_start = bytes;

        // Per-block counts: a `u8` number of own segment roots followed by a little-endian
        // `u16` number of child segment roots
        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() + size_of::<u16>()))?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Walk all blocks once to validate the layout; the unchecked parsing in `iter()`
        // relies on this walk having succeeded
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
            counts = &counts[3..];

            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // The proof is only present when there is at least one segment root to prove
            if num_own_segment_roots + num_child_segment_roots > 0 {
                let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
            }

            let _segment_roots = remainder.split_off(
                ..(num_own_segment_roots + num_child_segment_roots) * SegmentRoot::SIZE,
            )?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Iterate over the intermediate shard blocks of this collection
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + Clone + 'a
    {
        // SAFETY: Constructor verified that `bytes` contains all per-block counts
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
        };
        // NOTE(review): the constructor skips `u64` alignment padding between the counts and
        // the first header, while this method parses the first header straight away — confirm
        // both walk the same byte layout

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
            counts = &counts[3..];

            let header;
            (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segment_roots_proof = if num_own_segment_roots + num_child_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: Constructor verified the proof bytes are present for this block
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: The slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: Constructor verified these bytes are present
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Valid pointer and length; reinterpreting the byte slice as a slice of
            // fixed-size byte arrays
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            let child_segment_roots;
            // SAFETY: Constructor verified these bytes are present
            (child_segment_roots, remainder) = unsafe {
                remainder.split_at_unchecked(num_child_segment_roots * SegmentRoot::SIZE)
            };
            // SAFETY: Valid pointer and length; reinterpreting the byte slice as a slice of
            // fixed-size byte arrays
            let child_segment_roots = unsafe {
                slice::from_raw_parts(
                    child_segment_roots
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>(),
                    num_child_segment_roots,
                )
            };
            let child_segment_roots = SegmentRoot::slice_from_repr(child_segment_roots);

            IntermediateShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
                child_segment_roots,
            }
        })
    }

    /// Number of blocks in this collection
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if the collection contains no blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

    /// Compute a combined Merkle root over all own and child segment roots of all blocks
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        compute_segments_root(
            self.iter()
                .flat_map(|shard_block_info| {
                    [
                        shard_block_info.own_segment_roots,
                        shard_block_info.child_segment_roots,
                    ]
                })
                .flatten(),
        )
    }

    /// Compute a Merkle root over the header roots of all blocks; each leaf is the BLAKE3 hash
    /// of a header root. Falls back to the default root if there are no blocks.
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
            self.iter().map(|shard_block_info| {
                single_block_hash(shard_block_info.header.root().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }

    /// Compute a Merkle root over the roots of all blocks in this collection. Falls back to
    /// the default root if there are no blocks.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
            self.iter().map(|shard_block_info| shard_block_info.root()),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}
313
/// Body of a beacon chain block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    /// Segment roots of the beacon chain itself
    own_segment_roots: &'a [SegmentRoot],
    /// Intermediate shard blocks referenced by this body
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    /// Proof-of-time checkpoints
    pot_checkpoints: &'a [PotCheckpoints],
}
327
impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::BeaconChain;

    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent method of the same name
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent method of the same name
        self.root()
    }
}
345
346impl<'a> BeaconChainBody<'a> {
347 #[inline]
353 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
354 let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
362 let num_pot_checkpoints =
364 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
365
366 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
367 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
368
369 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
370 let own_segment_roots = unsafe {
372 slice::from_raw_parts(
373 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
374 num_own_segment_roots,
375 )
376 };
377 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
378
379 let (intermediate_shard_blocks, mut remainder) =
380 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
381
382 let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
383 let pot_checkpoints = unsafe {
385 slice::from_raw_parts(
386 pot_checkpoints
387 .as_ptr()
388 .cast::<[u8; PotCheckpoints::SIZE]>(),
389 num_pot_checkpoints,
390 )
391 };
392 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
393
394 let body = Self {
395 pot_checkpoints,
396 own_segment_roots,
397 intermediate_shard_blocks,
398 };
399
400 if !body.is_internally_consistent() {
401 return None;
402 }
403
404 Some((body, remainder))
405 }
406
407 #[inline]
412 pub fn is_internally_consistent(&self) -> bool {
413 self.intermediate_shard_blocks
414 .iter()
415 .all(|intermediate_shard_block| {
416 let Some(&segment_roots_proof) = intermediate_shard_block.segment_roots_proof
417 else {
418 return true;
419 };
420
421 BalancedMerkleTree::<2>::verify(
422 &intermediate_shard_block.header.result.body_root,
423 &[segment_roots_proof],
424 0,
425 BalancedMerkleTree::compute_root_only(&[
426 *compute_segments_root(intermediate_shard_block.own_segment_roots),
427 *compute_segments_root(intermediate_shard_block.child_segment_roots),
428 ]),
429 )
430 })
431 }
432
433 #[inline]
436 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
437 let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
445 let num_pot_checkpoints =
447 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
448
449 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
450 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
451
452 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
453 let own_segment_roots = unsafe {
455 slice::from_raw_parts(
456 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
457 num_own_segment_roots,
458 )
459 };
460 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
461
462 let (intermediate_shard_blocks, mut remainder) =
463 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
464
465 let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
466 let pot_checkpoints = unsafe {
468 slice::from_raw_parts(
469 pot_checkpoints
470 .as_ptr()
471 .cast::<[u8; PotCheckpoints::SIZE]>(),
472 num_pot_checkpoints,
473 )
474 };
475 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
476
477 Some((
478 Self {
479 pot_checkpoints,
480 own_segment_roots,
481 intermediate_shard_blocks,
482 },
483 remainder,
484 ))
485 }
486
487 #[cfg(feature = "alloc")]
489 #[inline(always)]
490 pub fn to_owned(self) -> OwnedBeaconChainBody {
491 OwnedBeaconChainBody::new(
492 self.own_segment_roots.iter().copied(),
493 self.intermediate_shard_blocks.iter(),
494 self.pot_checkpoints,
495 )
496 .expect("`self` is always a valid invariant; qed")
497 }
498
499 #[inline(always)]
501 pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
502 self.own_segment_roots
503 }
504
505 #[inline(always)]
507 pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
508 &self.intermediate_shard_blocks
509 }
510
511 #[inline(always)]
514 pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
515 self.pot_checkpoints
516 }
517
518 #[inline]
520 pub fn root(&self) -> Blake3Hash {
521 let root = BalancedMerkleTree::compute_root_only(&[
522 *compute_segments_root(self.own_segment_roots),
523 *self.intermediate_shard_blocks.segments_root(),
524 *self.intermediate_shard_blocks.headers_root(),
525 blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
526 .into(),
527 ]);
528
529 Blake3Hash::new(root)
530 }
531}
532
/// Information about a single leaf shard block as carried inside an intermediate shard body
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Header of the leaf shard block
    pub header: LeafShardHeader<'a>,
    /// Merkle proof tying the segment roots below to the header's body root; only present when
    /// there is at least one own segment root (see `try_from_bytes()` of the containing
    /// collection)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots of the leaf shard itself
    pub own_segment_roots: &'a [SegmentRoot],
}
543
/// A collection of leaf shard blocks stored as a compact byte slice and parsed lazily
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    /// Number of blocks encoded in `bytes`
    num_blocks: usize,
    /// Byte representation whose layout was validated by `try_from_bytes()`; the unchecked
    /// parsing in `iter()` relies on that validation
    bytes: &'a [u8],
}
550
impl<'a> LeafShardBlocksInfo<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are
    /// malformed.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Little-endian `u16` prefix with the number of blocks
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        // Remember where the collection's payload starts so its exact length can be recorded
        // after the validation walk below
        let bytes_start = bytes;

        // One `u8` per block with the number of own segment roots
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Walk all blocks once to validate the layout; the unchecked parsing in `iter()`
        // relies on this walk having succeeded
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // The proof is only present when there is at least one segment root to prove
            if num_own_segment_roots > 0 {
                let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
            }

            let _own_segment_roots =
                remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Iterate over the leaf shard blocks of this collection
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + Clone + 'a {
        // SAFETY: Constructor verified that `bytes` contains all per-block counts
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };
        // NOTE(review): the constructor skips `u64` alignment padding between the counts and
        // the first header, while this method parses the first header straight away — confirm
        // both walk the same byte layout

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segment_roots_proof = if num_own_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: Constructor verified the proof bytes are present for this block
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: The slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: Constructor verified these bytes are present
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Valid pointer and length; reinterpreting the byte slice as a slice of
            // fixed-size byte arrays
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            LeafShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
            }
        })
    }

    /// Number of blocks in this collection
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if the collection contains no blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

    /// Compute a combined Merkle root over the own segment roots of all blocks
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        compute_segments_root(
            self.iter()
                .flat_map(|shard_block_info| shard_block_info.own_segment_roots),
        )
    }

    /// Compute a Merkle root over the header roots of all blocks; each leaf is the BLAKE3 hash
    /// of a header root. Falls back to the default root if there are no blocks.
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
            self.iter().map(|shard_block_info| {
                single_block_hash(shard_block_info.header.root().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}
706
/// Body of an intermediate shard block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    /// Segment roots of the intermediate shard itself
    own_segment_roots: &'a [SegmentRoot],
    /// Leaf shard blocks referenced by this body
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}
717
impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent method of the same name
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent method of the same name
        self.root()
    }
}
735
736impl<'a> IntermediateShardBody<'a> {
737 #[inline]
743 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
744 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
750 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
751
752 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
753 let own_segment_roots = unsafe {
755 slice::from_raw_parts(
756 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
757 num_own_segment_roots,
758 )
759 };
760 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
761
762 let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;
763
764 let body = Self {
765 own_segment_roots,
766 leaf_shard_blocks,
767 };
768
769 if !body.is_internally_consistent() {
770 return None;
771 }
772
773 Some((body, remainder))
774 }
775
776 #[inline]
781 pub fn is_internally_consistent(&self) -> bool {
782 self.leaf_shard_blocks.iter().all(|leaf_shard_block| {
783 let Some(&segment_roots_proof) = leaf_shard_block.segment_roots_proof else {
784 return true;
785 };
786
787 BalancedMerkleTree::<2>::verify(
788 &leaf_shard_block.header.result.body_root,
789 &[segment_roots_proof],
790 0,
791 *compute_segments_root(leaf_shard_block.own_segment_roots),
792 )
793 })
794 }
795
796 #[inline]
799 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
800 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
806 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
807
808 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
809 let own_segment_roots = unsafe {
811 slice::from_raw_parts(
812 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
813 num_own_segment_roots,
814 )
815 };
816 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
817
818 let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;
819
820 Some((
821 Self {
822 own_segment_roots,
823 leaf_shard_blocks,
824 },
825 remainder,
826 ))
827 }
828
829 #[inline(always)]
831 pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
832 self.own_segment_roots
833 }
834
835 #[inline(always)]
837 pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
838 &self.leaf_shard_blocks
839 }
840
841 #[inline]
843 pub fn segment_roots_proof(&self) -> [u8; 32] {
844 *self.leaf_shard_blocks.headers_root()
845 }
846
847 #[cfg(feature = "alloc")]
849 #[inline(always)]
850 pub fn to_owned(self) -> OwnedIntermediateShardBody {
851 OwnedIntermediateShardBody::new(
852 self.own_segment_roots.iter().copied(),
853 self.leaf_shard_blocks.iter(),
854 )
855 .expect("`self` is always a valid invariant; qed")
856 }
857
858 #[inline]
860 pub fn root(&self) -> Blake3Hash {
861 let root = UnbalancedMerkleTree::compute_root_only::<3, _, _>([
862 *compute_segments_root(self.own_segment_roots),
863 *self.leaf_shard_blocks.segments_root(),
864 *self.leaf_shard_blocks.headers_root(),
865 ])
866 .expect("List is not empty; qed");
867
868 Blake3Hash::new(root)
869 }
870}
871
/// A collection of transactions stored as a compact byte slice and parsed lazily
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    /// Number of transactions encoded in `bytes`
    num_transactions: usize,
    /// Byte representation whose layout was validated by `try_from_bytes()`; the unchecked
    /// parsing in `iter()` relies on that validation
    bytes: &'a [u8],
}
878
879impl<'a> Transactions<'a> {
880 #[inline]
886 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
887 let num_transactions = bytes.split_off(..size_of::<u32>())?;
895 let num_transactions = u32::from_le_bytes([
896 num_transactions[0],
897 num_transactions[1],
898 num_transactions[2],
899 num_transactions[3],
900 ]) as usize;
901
902 let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
903 let bytes_start = remainder;
904
905 for _ in 0..num_transactions {
906 (_, remainder) = Transaction::try_from_bytes(bytes)?;
907 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
908 }
909
910 Some((
911 Self {
912 num_transactions,
913 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
914 },
915 remainder,
916 ))
917 }
918
919 #[inline]
921 pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + Clone + 'a {
922 let mut remainder = self.bytes;
923
924 (0..self.num_transactions).map(move |_| {
925 let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };
927
928 remainder = &remainder[transaction.encoded_size()..];
929 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
930 .expect("Already checked in constructor; qed");
931
932 transaction
933 })
934 }
935
936 #[inline(always)]
938 pub const fn len(&self) -> usize {
939 self.num_transactions
940 }
941
942 #[inline(always)]
944 pub const fn is_empty(&self) -> bool {
945 self.num_transactions == 0
946 }
947
948 #[inline]
952 pub fn root(&self) -> Blake3Hash {
953 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
954 self.iter().map(|transaction| {
955 single_block_hash(transaction.hash().as_ref())
959 .expect("Less than a single block worth of bytes; qed")
960 }),
961 )
962 .unwrap_or_default();
963
964 Blake3Hash::new(root)
965 }
966}
967
/// Body of a leaf shard block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    /// Segment roots of the leaf shard itself
    own_segment_roots: &'a [SegmentRoot],
    /// Transactions contained in this body
    transactions: Transactions<'a>,
}
978
impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::LeafShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent method of the same name
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent method of the same name
        self.root()
    }
}
996
997impl<'a> LeafShardBody<'a> {
998 #[inline]
1004 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1005 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
1011 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
1012
1013 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1014 let own_segment_roots = unsafe {
1016 slice::from_raw_parts(
1017 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1018 num_own_segment_roots,
1019 )
1020 };
1021 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1022
1023 let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;
1024
1025 let body = Self {
1026 own_segment_roots,
1027 transactions,
1028 };
1029
1030 if !body.is_internally_consistent() {
1031 return None;
1032 }
1033
1034 Some((body, remainder))
1035 }
1036
1037 #[inline]
1042 pub fn is_internally_consistent(&self) -> bool {
1043 true
1045 }
1046
1047 #[inline]
1050 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1051 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
1057 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
1058
1059 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1060 let own_segment_roots = unsafe {
1062 slice::from_raw_parts(
1063 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1064 num_own_segment_roots,
1065 )
1066 };
1067 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1068
1069 let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;
1070
1071 Some((
1072 Self {
1073 own_segment_roots,
1074 transactions,
1075 },
1076 remainder,
1077 ))
1078 }
1079
1080 #[inline(always)]
1082 pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
1083 self.own_segment_roots
1084 }
1085
1086 #[inline(always)]
1088 pub fn transactions(&self) -> &Transactions<'a> {
1089 &self.transactions
1090 }
1091
1092 #[inline]
1094 pub fn segment_roots_proof(&self) -> [u8; 32] {
1095 *self.transactions.root()
1096 }
1097
1098 #[cfg(feature = "alloc")]
1100 #[inline(always)]
1101 pub fn to_owned(self) -> OwnedLeafShardBody {
1102 let mut builder = OwnedLeafShardBody::init(self.own_segment_roots.iter().copied())
1103 .expect("`self` is always a valid invariant; qed");
1104 for transaction in self.transactions.iter() {
1105 builder
1106 .add_transaction(transaction)
1107 .expect("`self` is always a valid invariant; qed");
1108 }
1109
1110 builder.finish()
1111 }
1112
1113 #[inline]
1115 pub fn root(&self) -> Blake3Hash {
1116 let root = BalancedMerkleTree::compute_root_only(&[
1117 *compute_segments_root(self.own_segment_roots),
1118 *self.transactions.root(),
1119 ]);
1120
1121 Blake3Hash::new(root)
1122 }
1123}
1124
/// Block body of any shard kind
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Body of a beacon chain block
    BeaconChain(BeaconChainBody<'a>),
    /// Body of an intermediate shard block
    IntermediateShard(IntermediateShardBody<'a>),
    /// Body of a leaf shard block
    LeafShard(LeafShardBody<'a>),
}
1138
1139impl<'a> BlockBody<'a> {
1140 #[inline]
1147 pub fn try_from_bytes(bytes: &'a [u8], shard_kind: RealShardKind) -> Option<(Self, &'a [u8])> {
1148 match shard_kind {
1149 RealShardKind::BeaconChain => {
1150 let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1151 Some((Self::BeaconChain(body), remainder))
1152 }
1153 RealShardKind::IntermediateShard => {
1154 let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1155 Some((Self::IntermediateShard(body), remainder))
1156 }
1157 RealShardKind::LeafShard => {
1158 let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1159 Some((Self::LeafShard(body), remainder))
1160 }
1161 }
1162 }
1163
1164 #[inline]
1169 pub fn is_internally_consistent(&self) -> bool {
1170 match self {
1171 Self::BeaconChain(body) => body.is_internally_consistent(),
1172 Self::IntermediateShard(body) => body.is_internally_consistent(),
1173 Self::LeafShard(body) => body.is_internally_consistent(),
1174 }
1175 }
1176
1177 #[inline]
1180 pub fn try_from_bytes_unchecked(
1181 bytes: &'a [u8],
1182 shard_kind: RealShardKind,
1183 ) -> Option<(Self, &'a [u8])> {
1184 match shard_kind {
1185 RealShardKind::BeaconChain => {
1186 let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1187 Some((Self::BeaconChain(body), remainder))
1188 }
1189 RealShardKind::IntermediateShard => {
1190 let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1191 Some((Self::IntermediateShard(body), remainder))
1192 }
1193 RealShardKind::LeafShard => {
1194 let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1195 Some((Self::LeafShard(body), remainder))
1196 }
1197 }
1198 }
1199
1200 #[cfg(feature = "alloc")]
1202 #[inline(always)]
1203 pub fn to_owned(self) -> OwnedBlockBody {
1204 match self {
1205 Self::BeaconChain(body) => body.to_owned().into(),
1206 Self::IntermediateShard(body) => body.to_owned().into(),
1207 Self::LeafShard(body) => body.to_owned().into(),
1208 }
1209 }
1210
1211 #[inline]
1216 pub fn root(&self) -> Blake3Hash {
1217 match self {
1218 Self::BeaconChain(body) => body.root(),
1219 Self::IntermediateShard(body) => body.root(),
1220 Self::LeafShard(body) => body.root(),
1221 }
1222 }
1223}