1#[cfg(feature = "alloc")]
4pub mod owned;
5
6use crate::block::align_to_and_ensure_zero_padding;
7#[cfg(feature = "alloc")]
8use crate::block::body::owned::{
9 GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
10 OwnedLeafShardBody,
11};
12use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::SegmentRoot;
16use crate::shard::ShardKind;
17use crate::transaction::Transaction;
18use ab_blake3::single_block_hash;
19use ab_io_type::trivial_type::TrivialType;
20use ab_merkle_tree::balanced::BalancedMerkleTree;
21use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
22use core::iter::TrustedLen;
23use core::{fmt, slice};
24use derive_more::From;
25use yoke::Yokeable;
26
/// Generic block body, implemented by the body types of all shard kinds.
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind this block body belongs to
    const SHARD_KIND: ShardKind;

    /// Owned counterpart of this block body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Convert this body into its owned counterpart
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute the root of this block body
    fn root(&self) -> Blake3Hash;
}
48
49#[inline]
51pub fn compute_segments_root<'a, Iter>(segment_roots: Iter) -> Blake3Hash
52where
53 Iter: IntoIterator<Item = &'a SegmentRoot>,
54{
55 const _: () = {
59 assert!(u32::MAX == 4294967295);
60 };
61 let root = UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
63 segment_roots.into_iter().map(|segment_root| {
64 single_block_hash(segment_root.as_ref())
67 .expect("Less than a single block worth of bytes; qed")
68 }),
69 );
70
71 Blake3Hash::new(root.unwrap_or_default())
72}
73
/// Information about a single intermediate shard block, borrowed from a parent body's bytes.
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Block header of the intermediate shard block
    pub header: IntermediateShardHeader<'a>,
    /// Proof for segment roots, present only when there is at least one segment root
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots owned by this shard block
    pub own_segment_roots: &'a [SegmentRoot],
    /// Segment roots of child shards
    pub child_segment_roots: &'a [SegmentRoot],
}
86
impl IntermediateShardBlockInfo<'_> {
    /// Compute the root of this shard block info.
    ///
    /// The root commits to the header root and the roots of own/child segment roots.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // Number of leaves in the Merkle tree below
        const MAX_N: usize = 3;
        let leaves: [_; MAX_N] = [
            // NOTE(review): the extra derefs reduce the value returned by `header.root()`
            // down to the raw `[u8; 32]` — confirm against `IntermediateShardHeader::root()`
            ***self.header.root(),
            *compute_segments_root(self.own_segment_roots),
            *compute_segments_root(self.child_segment_roots),
        ];

        let root = UnbalancedMerkleTree::compute_root_only::<{ MAX_N as u64 }, _, _>(leaves)
            .expect("The list is not empty; qed");

        Blake3Hash::new(root)
    }
}
105
/// Collection of intermediate shard blocks, parsed lazily from validated bytes.
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of blocks contained in `bytes`
    num_blocks: usize,
    // Serialized blocks (counts prefix + padded headers and segment roots),
    // validated by `try_from_bytes()`
    bytes: &'a [u8],
}
112
113impl<'a> IntermediateShardBlocksInfo<'a> {
114 #[inline]
120 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
121 let num_blocks = bytes.split_off(..size_of::<u16>())?;
135 let num_blocks = usize::from(u16::from_le_bytes([num_blocks[1], num_blocks[2]]));
136
137 let bytes_start = bytes;
138
139 let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() + size_of::<u16>()))?;
140
141 let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
142
143 for _ in 0..num_blocks {
144 let num_own_segment_roots = usize::from(counts[0]);
145 let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
146 counts = &counts[3..];
147
148 (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
149
150 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
151
152 if num_own_segment_roots + num_child_segment_roots > 0 {
153 let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
154 }
155
156 let _segment_roots = remainder.split_off(
157 ..(num_own_segment_roots + num_child_segment_roots) * SegmentRoot::SIZE,
158 )?;
159 }
160
161 Some((
162 Self {
163 num_blocks,
164 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
165 },
166 remainder,
167 ))
168 }
169
170 #[inline]
172 pub fn iter(
173 &self,
174 ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + Clone + 'a
175 {
176 let (mut counts, mut remainder) = unsafe {
178 self.bytes
179 .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
180 };
181
182 (0..self.num_blocks).map(move |_| {
183 let num_own_segment_roots = usize::from(counts[0]);
184 let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
185 counts = &counts[3..];
186
187 let header;
189 (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
190 .expect("Already checked in constructor; qed");
191
192 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
193 .expect("Already checked in constructor; qed");
194
195 let segment_roots_proof = if num_own_segment_roots + num_child_segment_roots > 0 {
196 let segment_roots_proof;
197 (segment_roots_proof, remainder) =
199 unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
200 Some(unsafe {
202 segment_roots_proof
203 .as_ptr()
204 .cast::<[u8; SegmentRoot::SIZE]>()
205 .as_ref_unchecked()
206 })
207 } else {
208 None
209 };
210
211 let own_segment_roots;
212 (own_segment_roots, remainder) =
214 unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
215 let own_segment_roots = unsafe {
217 slice::from_raw_parts(
218 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
219 num_own_segment_roots,
220 )
221 };
222 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
223
224 let child_segment_roots;
225 (child_segment_roots, remainder) = unsafe {
227 remainder.split_at_unchecked(num_child_segment_roots * SegmentRoot::SIZE)
228 };
229 let child_segment_roots = unsafe {
231 slice::from_raw_parts(
232 child_segment_roots
233 .as_ptr()
234 .cast::<[u8; SegmentRoot::SIZE]>(),
235 num_child_segment_roots,
236 )
237 };
238 let child_segment_roots = SegmentRoot::slice_from_repr(child_segment_roots);
239
240 IntermediateShardBlockInfo {
241 header,
242 segment_roots_proof,
243 own_segment_roots,
244 child_segment_roots,
245 }
246 })
247 }
248
249 #[inline(always)]
251 pub const fn len(&self) -> usize {
252 self.num_blocks
253 }
254
255 #[inline(always)]
257 pub const fn is_empty(&self) -> bool {
258 self.num_blocks == 0
259 }
260
261 #[inline]
265 pub fn segments_root(&self) -> Blake3Hash {
266 compute_segments_root(
267 self.iter()
268 .flat_map(|shard_block_info| {
269 [
270 shard_block_info.own_segment_roots,
271 shard_block_info.child_segment_roots,
272 ]
273 })
274 .flatten(),
275 )
276 }
277
278 #[inline]
282 pub fn headers_root(&self) -> Blake3Hash {
283 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
284 self.iter().map(|shard_block_info| {
286 single_block_hash(shard_block_info.header.root().as_ref())
290 .expect("Less than a single block worth of bytes; qed")
291 }),
292 )
293 .unwrap_or_default();
294
295 Blake3Hash::new(root)
296 }
297
298 #[inline]
302 pub fn root(&self) -> Blake3Hash {
303 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
304 self.iter().map(|shard_block_info| shard_block_info.root()),
305 )
306 .unwrap_or_default();
307
308 Blake3Hash::new(root)
309 }
310}
311
/// Beacon chain block body, borrowed from validated bytes.
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    // Segment roots owned by the beacon chain itself
    own_segment_roots: &'a [SegmentRoot],
    // Intermediate shard blocks included in this body
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    // Proof-of-time checkpoints included in this body
    pot_checkpoints: &'a [PotCheckpoints],
}
325
impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: ShardKind = ShardKind::BeaconChain;

    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent `to_owned()` method
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root()` method
        self.root()
    }
}
343
344impl<'a> BeaconChainBody<'a> {
345 #[inline]
351 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
352 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
360 let num_pot_checkpoints =
362 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
363
364 if num_pot_checkpoints == 0 {
365 return None;
366 }
367
368 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
369 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
370
371 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
372 let own_segment_roots = unsafe {
374 slice::from_raw_parts(
375 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
376 num_own_segment_roots,
377 )
378 };
379 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
380
381 let (intermediate_shard_blocks, remainder) =
382 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
383
384 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
385 let pot_checkpoints = unsafe {
387 slice::from_raw_parts(
388 pot_checkpoints
389 .as_ptr()
390 .cast::<[u8; PotCheckpoints::SIZE]>(),
391 num_pot_checkpoints,
392 )
393 };
394 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
395
396 let body = Self {
397 pot_checkpoints,
398 own_segment_roots,
399 intermediate_shard_blocks,
400 };
401
402 if !body.is_internally_consistent() {
403 return None;
404 }
405
406 Some((body, remainder))
407 }
408
409 #[inline]
414 pub fn is_internally_consistent(&self) -> bool {
415 self.intermediate_shard_blocks
416 .iter()
417 .all(|intermediate_shard_block| {
418 let Some(&segment_roots_proof) = intermediate_shard_block.segment_roots_proof
419 else {
420 return true;
421 };
422
423 BalancedMerkleTree::<2>::verify(
424 &intermediate_shard_block.header.result.body_root,
425 &[segment_roots_proof],
426 0,
427 BalancedMerkleTree::compute_root_only(&[
428 *compute_segments_root(intermediate_shard_block.own_segment_roots),
429 *compute_segments_root(intermediate_shard_block.child_segment_roots),
430 ]),
431 )
432 })
433 }
434
435 #[inline]
438 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
439 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
447 let num_pot_checkpoints =
449 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
450
451 if num_pot_checkpoints == 0 {
452 return None;
453 }
454
455 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
456 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
457
458 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
459 let own_segment_roots = unsafe {
461 slice::from_raw_parts(
462 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
463 num_own_segment_roots,
464 )
465 };
466 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
467
468 let (intermediate_shard_blocks, remainder) =
469 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
470
471 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
472 let pot_checkpoints = unsafe {
474 slice::from_raw_parts(
475 pot_checkpoints
476 .as_ptr()
477 .cast::<[u8; PotCheckpoints::SIZE]>(),
478 num_pot_checkpoints,
479 )
480 };
481 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
482
483 Some((
484 Self {
485 pot_checkpoints,
486 own_segment_roots,
487 intermediate_shard_blocks,
488 },
489 remainder,
490 ))
491 }
492
493 #[cfg(feature = "alloc")]
495 #[inline(always)]
496 pub fn to_owned(self) -> OwnedBeaconChainBody {
497 OwnedBeaconChainBody::new(
498 self.own_segment_roots.iter().copied(),
499 self.intermediate_shard_blocks.iter(),
500 self.pot_checkpoints,
501 )
502 .expect("`self` is always a valid invariant; qed")
503 }
504
505 #[inline(always)]
507 pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
508 self.own_segment_roots
509 }
510
511 #[inline(always)]
513 pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
514 &self.intermediate_shard_blocks
515 }
516
517 #[inline(always)]
520 pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
521 self.pot_checkpoints
522 }
523
524 #[inline]
526 pub fn root(&self) -> Blake3Hash {
527 let root = BalancedMerkleTree::compute_root_only(&[
528 *compute_segments_root(self.own_segment_roots),
529 *self.intermediate_shard_blocks.segments_root(),
530 *self.intermediate_shard_blocks.headers_root(),
531 blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
532 .into(),
533 ]);
534
535 Blake3Hash::new(root)
536 }
537}
538
/// Information about a single leaf shard block, borrowed from a parent body's bytes.
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Block header of the leaf shard block
    pub header: LeafShardHeader<'a>,
    /// Proof for segment roots, present only when there is at least one segment root
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots owned by this shard block
    pub own_segment_roots: &'a [SegmentRoot],
}
549
/// Collection of leaf shard blocks, parsed lazily from validated bytes.
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of blocks contained in `bytes`
    num_blocks: usize,
    // Serialized blocks (counts prefix + padded headers and segment roots),
    // validated by `try_from_bytes()`
    bytes: &'a [u8],
}
556
impl<'a> LeafShardBlocksInfo<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Validates the whole collection up front so that `iter()` can skip checks later.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        // Little-endian `u16` number of blocks
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // Per-block counts: 1 byte of own segment roots each
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            // Header contents are validated here and re-parsed lazily in `iter()`
            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // Segment roots proof is only present when there is at least one segment root
            if num_own_segment_roots > 0 {
                let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
            }

            let _own_segment_roots =
                remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        }

        Some((
            Self {
                num_blocks,
                // Everything consumed since `bytes_start` belongs to this collection
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Iterate over leaf shard blocks of this collection.
    ///
    /// Contents were validated by the constructor, hence the unchecked splits below.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + Clone + 'a {
        // SAFETY: Sizes were validated in the constructor
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segment_roots_proof = if num_own_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: Size was validated in the constructor
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: Slice is exactly `SegmentRoot::SIZE` bytes long
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: Size was validated in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of
            // `SegmentRoot::SIZE` bytes
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            LeafShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
            }
        })
    }

    /// Number of leaf shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no leaf shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

    /// Compute the combined Merkle root over own segment roots of all blocks
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        compute_segments_root(
            self.iter()
                .flat_map(|shard_block_info| shard_block_info.own_segment_roots),
        )
    }

    /// Compute the Merkle root over the header roots of all blocks
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
            self.iter().map(|shard_block_info| {
                single_block_hash(shard_block_info.header.root().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}
712
/// Intermediate shard block body, borrowed from validated bytes.
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    // Segment roots owned by this shard
    own_segment_roots: &'a [SegmentRoot],
    // Leaf shard blocks included in this body
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}
723
impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: ShardKind = ShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent `to_owned()` method
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root()` method
        self.root()
    }
}
741
impl<'a> IntermediateShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Also checks internal consistency (segment roots proofs of leaf shard blocks).
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of
        // `SegmentRoot::SIZE` bytes
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            leaf_shard_blocks,
        };

        // Reject bodies whose segment roots proofs fail verification
        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check that for every leaf shard block the claimed segment roots verify against
    /// that block's body root via the included proof.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        self.leaf_shard_blocks.iter().all(|leaf_shard_block| {
            // Blocks without segment roots carry no proof and are trivially consistent
            let Some(&segment_roots_proof) = leaf_shard_block.segment_roots_proof else {
                return true;
            };

            BalancedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                &[segment_roots_proof],
                0,
                *compute_segments_root(leaf_shard_block.own_segment_roots),
            )
        })
    }

    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as `try_from_bytes()`, except `is_internally_consistent()` is not called.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of
        // `SegmentRoot::SIZE` bytes
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }

    /// Segment roots owned by this shard
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Leaf shard blocks contained in this body
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Proof for segment roots against this body's root.
    ///
    /// NOTE(review): this is the headers root of leaf shard blocks — presumably the
    /// sibling subtree hash in the tree built by `root()` below; confirm against the
    /// corresponding verification code
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.leaf_shard_blocks.headers_root()
    }

    /// Convert into the owned counterpart
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        OwnedIntermediateShardBody::new(
            self.own_segment_roots.iter().copied(),
            self.leaf_shard_blocks.iter(),
        )
        .expect("`self` is always a valid invariant; qed")
    }

    /// Compute the root of this body.
    ///
    /// Commits to own segment roots, leaf shard segment roots and leaf shard header roots.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<3, _, _>([
            *compute_segments_root(self.own_segment_roots),
            *self.leaf_shard_blocks.segments_root(),
            *self.leaf_shard_blocks.headers_root(),
        ])
        .expect("List is not empty; qed");

        Blake3Hash::new(root)
    }
}
877
/// Collection of transactions, parsed lazily from validated bytes.
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    // Number of transactions contained in `bytes`
    num_transactions: usize,
    // Serialized transactions (with alignment padding), validated by `try_from_bytes()`
    bytes: &'a [u8],
}
884
885impl<'a> Transactions<'a> {
886 #[inline]
892 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
893 let num_transactions = bytes.split_off(..size_of::<u32>())?;
901 let num_transactions = u32::from_le_bytes([
902 num_transactions[0],
903 num_transactions[1],
904 num_transactions[2],
905 num_transactions[3],
906 ]) as usize;
907
908 let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
909 let bytes_start = remainder;
910
911 for _ in 0..num_transactions {
912 (_, remainder) = Transaction::try_from_bytes(bytes)?;
913 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
914 }
915
916 Some((
917 Self {
918 num_transactions,
919 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
920 },
921 remainder,
922 ))
923 }
924
925 #[inline]
927 pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + Clone + 'a {
928 let mut remainder = self.bytes;
929
930 (0..self.num_transactions).map(move |_| {
931 let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };
933
934 remainder = &remainder[transaction.encoded_size()..];
935 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
936 .expect("Already checked in constructor; qed");
937
938 transaction
939 })
940 }
941
942 #[inline(always)]
944 pub const fn len(&self) -> usize {
945 self.num_transactions
946 }
947
948 #[inline(always)]
950 pub const fn is_empty(&self) -> bool {
951 self.num_transactions == 0
952 }
953
954 #[inline]
958 pub fn root(&self) -> Blake3Hash {
959 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 + 1 }, _, _>(
960 self.iter().map(|transaction| {
961 single_block_hash(transaction.hash().as_ref())
965 .expect("Less than a single block worth of bytes; qed")
966 }),
967 )
968 .unwrap_or_default();
969
970 Blake3Hash::new(root)
971 }
972}
973
/// Leaf shard block body, borrowed from validated bytes.
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    // Segment roots owned by this shard
    own_segment_roots: &'a [SegmentRoot],
    // Transactions included in this body
    transactions: Transactions<'a>,
}
984
impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    const SHARD_KIND: ShardKind = ShardKind::LeafShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent `to_owned()` method
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root()` method
        self.root()
    }
}
1002
impl<'a> LeafShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of
        // `SegmentRoot::SIZE` bytes
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check internal consistency.
    ///
    /// Currently there are no invariants beyond what `try_from_bytes()` already
    /// validates, so this always succeeds.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as `try_from_bytes()`, except `is_internally_consistent()` is not called.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: Slice contains exactly `num_own_segment_roots` chunks of
        // `SegmentRoot::SIZE` bytes
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                transactions,
            },
            remainder,
        ))
    }

    /// Segment roots owned by this shard
    #[inline(always)]
    pub fn own_segment_roots(&self) -> &'a [SegmentRoot] {
        self.own_segment_roots
    }

    /// Transactions contained in this body
    #[inline(always)]
    pub fn transactions(&self) -> &Transactions<'a> {
        &self.transactions
    }

    /// Proof for segment roots against this body's root.
    ///
    /// This is the transactions root — the sibling leaf of the segments root in the
    /// two-leaf tree computed by `root()` below.
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.transactions.root()
    }

    /// Convert into the owned counterpart
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedLeafShardBody {
        let mut builder = OwnedLeafShardBody::init(self.own_segment_roots.iter().copied())
            .expect("`self` is always a valid invariant; qed");
        for transaction in self.transactions.iter() {
            builder
                .add_transaction(transaction)
                .expect("`self` is always a valid invariant; qed");
        }

        builder.finish()
    }

    /// Compute the root of this body.
    ///
    /// Commits to own segment roots and the transactions root.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            *compute_segments_root(self.own_segment_roots),
            *self.transactions.root(),
        ]);

        Blake3Hash::new(root)
    }
}
1130
/// Block body of any shard kind.
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Body of a beacon chain block
    BeaconChain(BeaconChainBody<'a>),
    /// Body of an intermediate shard block
    IntermediateShard(IntermediateShardBody<'a>),
    /// Body of a leaf shard block
    LeafShard(LeafShardBody<'a>),
}
1144
1145impl<'a> BlockBody<'a> {
1146 #[inline]
1153 pub fn try_from_bytes(bytes: &'a [u8], shard_kind: ShardKind) -> Option<(Self, &'a [u8])> {
1154 match shard_kind {
1155 ShardKind::BeaconChain => {
1156 let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1157 Some((Self::BeaconChain(body), remainder))
1158 }
1159 ShardKind::IntermediateShard => {
1160 let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1161 Some((Self::IntermediateShard(body), remainder))
1162 }
1163 ShardKind::LeafShard => {
1164 let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1165 Some((Self::LeafShard(body), remainder))
1166 }
1167 ShardKind::Phantom | ShardKind::Invalid => {
1168 None
1170 }
1171 }
1172 }
1173
1174 #[inline]
1179 pub fn is_internally_consistent(&self) -> bool {
1180 match self {
1181 Self::BeaconChain(body) => body.is_internally_consistent(),
1182 Self::IntermediateShard(body) => body.is_internally_consistent(),
1183 Self::LeafShard(body) => body.is_internally_consistent(),
1184 }
1185 }
1186
1187 #[inline]
1190 pub fn try_from_bytes_unchecked(
1191 bytes: &'a [u8],
1192 shard_kind: ShardKind,
1193 ) -> Option<(Self, &'a [u8])> {
1194 match shard_kind {
1195 ShardKind::BeaconChain => {
1196 let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1197 Some((Self::BeaconChain(body), remainder))
1198 }
1199 ShardKind::IntermediateShard => {
1200 let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1201 Some((Self::IntermediateShard(body), remainder))
1202 }
1203 ShardKind::LeafShard => {
1204 let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1205 Some((Self::LeafShard(body), remainder))
1206 }
1207 ShardKind::Phantom | ShardKind::Invalid => {
1208 None
1210 }
1211 }
1212 }
1213
1214 #[cfg(feature = "alloc")]
1216 #[inline(always)]
1217 pub fn to_owned(self) -> OwnedBlockBody {
1218 match self {
1219 Self::BeaconChain(body) => body.to_owned().into(),
1220 Self::IntermediateShard(body) => body.to_owned().into(),
1221 Self::LeafShard(body) => body.to_owned().into(),
1222 }
1223 }
1224
1225 #[inline]
1230 pub fn root(&self) -> Blake3Hash {
1231 match self {
1232 Self::BeaconChain(body) => body.root(),
1233 Self::IntermediateShard(body) => body.root(),
1234 Self::LeafShard(body) => body.root(),
1235 }
1236 }
1237}