1#[cfg(feature = "alloc")]
4pub mod owned;
5
6use crate::block::align_to_and_ensure_zero_padding;
7#[cfg(feature = "alloc")]
8use crate::block::body::owned::{
9 GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
10 OwnedLeafShardBody,
11};
12use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::SegmentRoot;
16use crate::shard::ShardKind;
17use crate::transaction::Transaction;
18use ab_blake3::single_block_hash;
19use ab_io_type::trivial_type::TrivialType;
20use ab_merkle_tree::balanced::BalancedMerkleTree;
21use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
22use core::iter::TrustedLen;
23use core::{fmt, slice};
24use derive_more::From;
25use yoke::Yokeable;
26
/// Generic block body
///
/// Implemented by each shard-kind-specific body type so generic code can work
/// with any of them.
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind this block body belongs to
    const SHARD_KIND: ShardKind;

    /// Owned version of this block body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Convert this block body into its owned version
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute the root of this block body
    fn root(&self) -> Blake3Hash;
}
48
49#[inline]
51pub fn compute_segments_root<'a, Iter>(segment_roots: Iter) -> Blake3Hash
52where
53 Iter: IntoIterator<Item = &'a SegmentRoot>,
54{
55 #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
59 #[expect(clippy::eq_op, reason = "Intentional documentation")]
60 const _: () = {
61 assert!(u32::MAX == 4294967295);
62 };
63 let root = UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
65 segment_roots.into_iter().map(|segment_root| {
66 single_block_hash(segment_root.as_ref())
69 .expect("Less than a single block worth of bytes; qed")
70 }),
71 );
72
73 Blake3Hash::new(root.unwrap_or_default())
74}
75
/// Information about a single intermediate shard block
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Block header
    pub header: IntermediateShardHeader<'a>,
    /// Merkle proof tying the segment roots below to the header's body root;
    /// only present when there is at least one segment root (see
    /// `IntermediateShardBlocksInfo::try_from_bytes`)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots of the intermediate shard itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Segment roots of child shards
    pub child_segment_roots: &'a [SegmentRoot],
}
88
89impl IntermediateShardBlockInfo<'_> {
90 #[inline]
92 pub fn root(&self) -> Blake3Hash {
93 const MAX_N: usize = 3;
95 let leaves: [_; MAX_N] = [
96 ***self.header.root(),
97 *compute_segments_root(self.own_segment_roots),
98 *compute_segments_root(self.child_segment_roots),
99 ];
100
101 let root = UnbalancedMerkleTree::compute_root_only::<{ MAX_N as u64 }, _, _>(leaves)
102 .expect("The list is not empty; qed");
103
104 Blake3Hash::new(root)
105 }
106}
107
/// Information about intermediate shard blocks stored in a block body
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of encoded blocks in `bytes`
    num_blocks: usize,
    // Encoded blocks: per-block segment root counts, followed by the blocks
    // themselves (layout validated by `try_from_bytes()`)
    bytes: &'a [u8],
}
114
115impl<'a> IntermediateShardBlocksInfo<'a> {
116 #[inline]
122 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
123 let num_blocks = bytes.split_off(..size_of::<u16>())?;
137 let num_blocks = usize::from(u16::from_le_bytes([num_blocks[1], num_blocks[2]]));
138
139 let bytes_start = bytes;
140
141 let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() + size_of::<u16>()))?;
142
143 let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
144
145 for _ in 0..num_blocks {
146 let num_own_segment_roots = usize::from(counts[0]);
147 let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
148 counts = &counts[3..];
149
150 (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
151
152 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
153
154 if num_own_segment_roots + num_child_segment_roots > 0 {
155 let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
156 }
157
158 let _segment_roots = remainder.split_off(
159 ..(num_own_segment_roots + num_child_segment_roots) * SegmentRoot::SIZE,
160 )?;
161 }
162
163 Some((
164 Self {
165 num_blocks,
166 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
167 },
168 remainder,
169 ))
170 }
171
    /// Iterator over intermediate shard blocks in the collection.
    ///
    /// Relies on `try_from_bytes()` having validated the layout of
    /// `self.bytes`, hence the unchecked splits below.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + Clone + 'a
    {
        // SAFETY: the constructor verified that `bytes` starts with
        // `num_blocks` (u8 + u16) count entries
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
        };

        // NOTE(review): the constructor consumes zero padding to `u64`
        // alignment right after the counts before parsing the first header,
        // while this iterator parses the first header immediately — confirm
        // the two agree on the layout
        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
            counts = &counts[3..];

            let header;
            (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            // The proof is only present when there are segment roots to prove
            let segment_roots_proof = if num_own_segment_roots + num_child_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: length was verified in the constructor
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: the slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: length was verified in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: pointer and length describe the same bytes,
            // reinterpreted as fixed-size chunks
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            let child_segment_roots;
            // SAFETY: length was verified in the constructor
            (child_segment_roots, remainder) = unsafe {
                remainder.split_at_unchecked(num_child_segment_roots * SegmentRoot::SIZE)
            };
            // SAFETY: pointer and length describe the same bytes,
            // reinterpreted as fixed-size chunks
            let child_segment_roots = unsafe {
                slice::from_raw_parts(
                    child_segment_roots
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>(),
                    num_child_segment_roots,
                )
            };
            let child_segment_roots = SegmentRoot::slice_from_repr(child_segment_roots);

            IntermediateShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
                child_segment_roots,
            }
        })
    }
250
    /// Number of intermediate shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no intermediate shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }
262
263 #[inline]
267 pub fn segments_root(&self) -> Blake3Hash {
268 compute_segments_root(
269 self.iter()
270 .flat_map(|shard_block_info| {
271 [
272 shard_block_info.own_segment_roots,
273 shard_block_info.child_segment_roots,
274 ]
275 })
276 .flatten(),
277 )
278 }
279
280 #[inline]
284 pub fn headers_root(&self) -> Blake3Hash {
285 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
286 self.iter().map(|shard_block_info| {
288 single_block_hash(shard_block_info.header.root().as_ref())
292 .expect("Less than a single block worth of bytes; qed")
293 }),
294 )
295 .unwrap_or_default();
296
297 Blake3Hash::new(root)
298 }
299
300 #[inline]
304 pub fn root(&self) -> Blake3Hash {
305 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
306 self.iter().map(|shard_block_info| shard_block_info.root()),
307 )
308 .unwrap_or_default();
309
310 Blake3Hash::new(root)
311 }
312}
313
/// Body of a beacon chain block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    // Own segment roots of the beacon chain
    own_segment_roots: &'a [SegmentRoot],
    // Information about included intermediate shard blocks
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    // Proof-of-time checkpoints (at least one; parsing rejects zero)
    pot_checkpoints: &'a [PotCheckpoints],
}
327
328impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
329 const SHARD_KIND: ShardKind = ShardKind::BeaconChain;
330
331 #[cfg(feature = "alloc")]
332 type Owned = OwnedBeaconChainBody;
333
334 #[cfg(feature = "alloc")]
335 #[inline(always)]
336 fn to_owned(self) -> Self::Owned {
337 self.to_owned()
338 }
339
340 #[inline(always)]
341 fn root(&self) -> Blake3Hash {
342 self.root()
343 }
344}
345
346impl<'a> BeaconChainBody<'a> {
347 #[inline]
353 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
354 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
362 let num_pot_checkpoints =
364 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
365
366 if num_pot_checkpoints == 0 {
367 return None;
368 }
369
370 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
371 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
372
373 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
374 let own_segment_roots = unsafe {
376 slice::from_raw_parts(
377 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
378 num_own_segment_roots,
379 )
380 };
381 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
382
383 let (intermediate_shard_blocks, remainder) =
384 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
385
386 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
387 let pot_checkpoints = unsafe {
389 slice::from_raw_parts(
390 pot_checkpoints
391 .as_ptr()
392 .cast::<[u8; PotCheckpoints::SIZE]>(),
393 num_pot_checkpoints,
394 )
395 };
396 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
397
398 let body = Self {
399 pot_checkpoints,
400 own_segment_roots,
401 intermediate_shard_blocks,
402 };
403
404 if !body.is_internally_consistent() {
405 return None;
406 }
407
408 Some((body, remainder))
409 }
410
    /// Check whether the body is internally consistent.
    ///
    /// For each intermediate shard block that carries segment roots, verifies
    /// the Merkle proof tying those segment roots to the block header's body
    /// root.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        self.intermediate_shard_blocks
            .iter()
            .all(|intermediate_shard_block| {
                // Blocks without segment roots carry no proof and have nothing
                // to verify
                let Some(&segment_roots_proof) = intermediate_shard_block.segment_roots_proof
                else {
                    return true;
                };

                BalancedMerkleTree::<2>::verify(
                    &intermediate_shard_block.header.result.body_root,
                    &[segment_roots_proof],
                    0,
                    // The leaf is the combined root of own and child segment
                    // roots
                    BalancedMerkleTree::compute_root_only(&[
                        *compute_segments_root(intermediate_shard_block.own_segment_roots),
                        *compute_segments_root(intermediate_shard_block.child_segment_roots),
                    ]),
                )
            })
    }
436
437 #[inline]
440 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
441 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
449 let num_pot_checkpoints =
451 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
452
453 if num_pot_checkpoints == 0 {
454 return None;
455 }
456
457 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
458 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
459
460 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
461 let own_segment_roots = unsafe {
463 slice::from_raw_parts(
464 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
465 num_own_segment_roots,
466 )
467 };
468 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
469
470 let (intermediate_shard_blocks, remainder) =
471 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
472
473 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
474 let pot_checkpoints = unsafe {
476 slice::from_raw_parts(
477 pot_checkpoints
478 .as_ptr()
479 .cast::<[u8; PotCheckpoints::SIZE]>(),
480 num_pot_checkpoints,
481 )
482 };
483 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
484
485 Some((
486 Self {
487 pot_checkpoints,
488 own_segment_roots,
489 intermediate_shard_blocks,
490 },
491 remainder,
492 ))
493 }
494
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBeaconChainBody {
        OwnedBeaconChainBody::new(
            self.own_segment_roots.iter().copied(),
            self.intermediate_shard_blocks.iter(),
            self.pot_checkpoints,
        )
        .expect("`self` is always a valid invariant; qed")
    }

    /// Own segment roots of the beacon chain
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Information about included intermediate shard blocks
    #[inline(always)]
    pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
        &self.intermediate_shard_blocks
    }

    /// Proof-of-time checkpoints
    #[inline(always)]
    pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
        self.pot_checkpoints
    }
525
526 #[inline]
528 pub fn root(&self) -> Blake3Hash {
529 let root = BalancedMerkleTree::compute_root_only(&[
530 *compute_segments_root(self.own_segment_roots),
531 *self.intermediate_shard_blocks.segments_root(),
532 *self.intermediate_shard_blocks.headers_root(),
533 blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
534 .into(),
535 ]);
536
537 Blake3Hash::new(root)
538 }
539}
540
/// Information about a single leaf shard block
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Block header
    pub header: LeafShardHeader<'a>,
    /// Merkle proof tying the segment roots below to the header's body root;
    /// only present when there is at least one segment root (see
    /// `LeafShardBlocksInfo::try_from_bytes`)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots of the leaf shard itself
    pub own_segment_roots: &'a [SegmentRoot],
}
551
/// Information about leaf shard blocks stored in a block body
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of encoded blocks in `bytes`
    num_blocks: usize,
    // Encoded blocks: per-block segment root counts, followed by the blocks
    // themselves (layout validated by `try_from_bytes()`)
    bytes: &'a [u8],
}
558
559impl<'a> LeafShardBlocksInfo<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the
    /// encoding is invalid.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of blocks is a little-endian `u16` prefix
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // Each block is described by a `u8` count of own segment roots
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Walk all blocks once to validate the encoding and find its length
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // The proof is only present when there are segment roots to prove
            if num_own_segment_roots > 0 {
                let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
            }

            let _own_segment_roots =
                remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }
611
    /// Iterator over leaf shard blocks in the collection.
    ///
    /// Relies on `try_from_bytes()` having validated the layout of
    /// `self.bytes`, hence the unchecked splits below.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + Clone + 'a {
        // SAFETY: the constructor verified that `bytes` starts with
        // `num_blocks` one-byte count entries
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        // NOTE(review): the constructor consumes zero padding to `u64`
        // alignment right after the counts before parsing the first header,
        // while this iterator parses the first header immediately — confirm
        // the two agree on the layout
        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            // The proof is only present when there are segment roots to prove
            let segment_roots_proof = if num_own_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: length was verified in the constructor
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: the slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: length was verified in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: pointer and length describe the same bytes,
            // reinterpreted as fixed-size chunks
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            LeafShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
            }
        })
    }
671
    /// Number of leaf shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no leaf shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }
683
684 #[inline]
688 pub fn segments_root(&self) -> Blake3Hash {
689 compute_segments_root(
690 self.iter()
691 .flat_map(|shard_block_info| shard_block_info.own_segment_roots),
692 )
693 }
694
695 #[inline]
699 pub fn headers_root(&self) -> Blake3Hash {
700 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
701 self.iter().map(|shard_block_info| {
702 single_block_hash(shard_block_info.header.root().as_ref())
706 .expect("Less than a single block worth of bytes; qed")
707 }),
708 )
709 .unwrap_or_default();
710
711 Blake3Hash::new(root)
712 }
713}
714
/// Body of an intermediate shard block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    // Own segment roots of the intermediate shard
    own_segment_roots: &'a [SegmentRoot],
    // Information about included leaf shard blocks
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}
725
726impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
727 const SHARD_KIND: ShardKind = ShardKind::IntermediateShard;
728
729 #[cfg(feature = "alloc")]
730 type Owned = OwnedIntermediateShardBody;
731
732 #[cfg(feature = "alloc")]
733 #[inline(always)]
734 fn to_owned(self) -> Self::Owned {
735 self.to_owned()
736 }
737
738 #[inline(always)]
739 fn root(&self) -> Blake3Hash {
740 self.root()
741 }
742}
743
744impl<'a> IntermediateShardBody<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the
    /// encoding is invalid or the body is not internally consistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of own segment roots is a single-byte prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: pointer and length describe the same bytes, reinterpreted as
        // fixed-size chunks
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            leaf_shard_blocks,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }
783
    /// Check whether the body is internally consistent.
    ///
    /// For each leaf shard block that carries segment roots, verifies the
    /// Merkle proof tying those segment roots to the block header's body root.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        self.leaf_shard_blocks.iter().all(|leaf_shard_block| {
            // Blocks without segment roots carry no proof and have nothing to
            // verify
            let Some(&segment_roots_proof) = leaf_shard_block.segment_roots_proof else {
                return true;
            };

            BalancedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                &[segment_roots_proof],
                0,
                *compute_segments_root(leaf_shard_block.own_segment_roots),
            )
        })
    }
803
    /// Same as [`Self::try_from_bytes`], but skips the internal consistency
    /// check ([`Self::is_internally_consistent`])
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of own segment roots is a single-byte prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: pointer and length describe the same bytes, reinterpreted as
        // fixed-size chunks
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }
836
    /// Own segment roots of the intermediate shard
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Information about included leaf shard blocks
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Merkle proof for segment roots of this body.
    ///
    /// Returns the headers root of leaf shard blocks — presumably the sibling
    /// node needed to verify segment roots against [`Self::root`]; see
    /// `BeaconChainBody::is_internally_consistent` for the matching
    /// verification. TODO confirm against the unbalanced tree layout.
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.leaf_shard_blocks.headers_root()
    }
854
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        OwnedIntermediateShardBody::new(
            self.own_segment_roots.iter().copied(),
            self.leaf_shard_blocks.iter(),
        )
        .expect("`self` is always a valid invariant; qed")
    }
865
866 #[inline]
868 pub fn root(&self) -> Blake3Hash {
869 let root = UnbalancedMerkleTree::compute_root_only::<3, _, _>([
870 *compute_segments_root(self.own_segment_roots),
871 *self.leaf_shard_blocks.segments_root(),
872 *self.leaf_shard_blocks.headers_root(),
873 ])
874 .expect("List is not empty; qed");
875
876 Blake3Hash::new(root)
877 }
878}
879
/// Transactions stored in a block body
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    // Number of encoded transactions in `bytes`
    num_transactions: usize,
    // Encoded transactions, each followed by zero padding to `u128` alignment
    // (layout validated by `try_from_bytes()`)
    bytes: &'a [u8],
}
886
887impl<'a> Transactions<'a> {
888 #[inline]
894 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
895 let num_transactions = bytes.split_off(..size_of::<u32>())?;
903 let num_transactions = u32::from_le_bytes([
904 num_transactions[0],
905 num_transactions[1],
906 num_transactions[2],
907 num_transactions[3],
908 ]) as usize;
909
910 let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
911 let bytes_start = remainder;
912
913 for _ in 0..num_transactions {
914 (_, remainder) = Transaction::try_from_bytes(bytes)?;
915 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
916 }
917
918 Some((
919 Self {
920 num_transactions,
921 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
922 },
923 remainder,
924 ))
925 }
926
    /// Iterator over transactions in the collection.
    ///
    /// Relies on `try_from_bytes()` having validated the layout of
    /// `self.bytes`, hence the unchecked parsing below.
    #[inline]
    pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + Clone + 'a {
        let mut remainder = self.bytes;

        (0..self.num_transactions).map(move |_| {
            // SAFETY: the constructor verified that `bytes` contains exactly
            // `num_transactions` correctly encoded transactions
            let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };

            // Advance past this transaction and the zero padding that follows
            remainder = &remainder[transaction.encoded_size()..];
            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
                .expect("Already checked in constructor; qed");

            transaction
        })
    }
943
    /// Number of transactions
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_transactions
    }

    /// Returns `true` if there are no transactions
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_transactions == 0
    }
955
956 #[inline]
960 pub fn root(&self) -> Blake3Hash {
961 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
962 self.iter().map(|transaction| {
963 single_block_hash(transaction.hash().as_ref())
967 .expect("Less than a single block worth of bytes; qed")
968 }),
969 )
970 .unwrap_or_default();
971
972 Blake3Hash::new(root)
973 }
974}
975
/// Body of a leaf shard block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    // Own segment roots of the leaf shard
    own_segment_roots: &'a [SegmentRoot],
    // Transactions of this block
    transactions: Transactions<'a>,
}
986
987impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
988 const SHARD_KIND: ShardKind = ShardKind::LeafShard;
989
990 #[cfg(feature = "alloc")]
991 type Owned = OwnedLeafShardBody;
992
993 #[cfg(feature = "alloc")]
994 #[inline(always)]
995 fn to_owned(self) -> Self::Owned {
996 self.to_owned()
997 }
998
999 #[inline(always)]
1000 fn root(&self) -> Blake3Hash {
1001 self.root()
1002 }
1003}
1004
1005impl<'a> LeafShardBody<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the
    /// encoding is invalid or the body is not internally consistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of own segment roots is a single-byte prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: pointer and length describe the same bytes, reinterpreted as
        // fixed-size chunks
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }
1044
    /// Check whether the body is internally consistent.
    ///
    /// A leaf shard body has no internal invariants beyond what parsing
    /// already validates, so this always returns `true`.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }
1054
    /// Same as [`Self::try_from_bytes`], but skips the internal consistency
    /// check ([`Self::is_internally_consistent`])
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of own segment roots is a single-byte prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: pointer and length describe the same bytes, reinterpreted as
        // fixed-size chunks
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                transactions,
            },
            remainder,
        ))
    }
1087
    /// Own segment roots of the leaf shard
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Transactions of this block body
    #[inline(always)]
    pub fn transactions(&self) -> &Transactions<'a> {
        &self.transactions
    }

    /// Merkle proof for segment roots of this body.
    ///
    /// Returns the transactions root — presumably the sibling leaf needed to
    /// verify segment roots against [`Self::root`]; see
    /// `IntermediateShardBody::is_internally_consistent` for the matching
    /// verification. TODO confirm against the balanced tree layout.
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.transactions.root()
    }
1105
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedLeafShardBody {
        let mut builder = OwnedLeafShardBody::init(self.own_segment_roots.iter().copied())
            .expect("`self` is always a valid invariant; qed");
        // Add transactions one by one, preserving their order
        for transaction in self.transactions.iter() {
            builder
                .add_transaction(transaction)
                .expect("`self` is always a valid invariant; qed");
        }

        builder.finish()
    }
1120
1121 #[inline]
1123 pub fn root(&self) -> Blake3Hash {
1124 let root = BalancedMerkleTree::compute_root_only(&[
1125 *compute_segments_root(self.own_segment_roots),
1126 *self.transactions.root(),
1127 ]);
1128
1129 Blake3Hash::new(root)
1130 }
1131}
1132
/// Block body, one variant per shard kind
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Body of a beacon chain block
    BeaconChain(BeaconChainBody<'a>),
    /// Body of an intermediate shard block
    IntermediateShard(IntermediateShardBody<'a>),
    /// Body of a leaf shard block
    LeafShard(LeafShardBody<'a>),
}
1146
1147impl<'a> BlockBody<'a> {
1148 #[inline]
1155 pub fn try_from_bytes(bytes: &'a [u8], shard_kind: ShardKind) -> Option<(Self, &'a [u8])> {
1156 match shard_kind {
1157 ShardKind::BeaconChain => {
1158 let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1159 Some((Self::BeaconChain(body), remainder))
1160 }
1161 ShardKind::IntermediateShard => {
1162 let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1163 Some((Self::IntermediateShard(body), remainder))
1164 }
1165 ShardKind::LeafShard => {
1166 let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1167 Some((Self::LeafShard(body), remainder))
1168 }
1169 ShardKind::Phantom | ShardKind::Invalid => {
1170 None
1172 }
1173 }
1174 }
1175
    /// Check whether the body is internally consistent, delegating to the
    /// wrapped body's check
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        match self {
            Self::BeaconChain(body) => body.is_internally_consistent(),
            Self::IntermediateShard(body) => body.is_internally_consistent(),
            Self::LeafShard(body) => body.is_internally_consistent(),
        }
    }
1188
1189 #[inline]
1192 pub fn try_from_bytes_unchecked(
1193 bytes: &'a [u8],
1194 shard_kind: ShardKind,
1195 ) -> Option<(Self, &'a [u8])> {
1196 match shard_kind {
1197 ShardKind::BeaconChain => {
1198 let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1199 Some((Self::BeaconChain(body), remainder))
1200 }
1201 ShardKind::IntermediateShard => {
1202 let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1203 Some((Self::IntermediateShard(body), remainder))
1204 }
1205 ShardKind::LeafShard => {
1206 let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1207 Some((Self::LeafShard(body), remainder))
1208 }
1209 ShardKind::Phantom | ShardKind::Invalid => {
1210 None
1212 }
1213 }
1214 }
1215
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBlockBody {
        match self {
            Self::BeaconChain(body) => body.to_owned().into(),
            Self::IntermediateShard(body) => body.to_owned().into(),
            Self::LeafShard(body) => body.to_owned().into(),
        }
    }
1226
    /// Compute the root of this block body, delegating to the wrapped body
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        match self {
            Self::BeaconChain(body) => body.root(),
            Self::IntermediateShard(body) => body.root(),
            Self::LeafShard(body) => body.root(),
        }
    }
1239}