1#[cfg(feature = "alloc")]
4pub mod owned;
5
6use crate::block::align_to_and_ensure_zero_padding;
7#[cfg(feature = "alloc")]
8use crate::block::body::owned::{
9 GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBeaconChainBodyError, OwnedBlockBody,
10 OwnedBlockBodyError, OwnedIntermediateShardBody, OwnedIntermediateShardBodyError,
11 OwnedLeafShardBody, OwnedLeafShardBodyError,
12};
13use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
14use crate::hashes::Blake3Hash;
15use crate::pot::PotCheckpoints;
16use crate::segments::SegmentRoot;
17use crate::shard::ShardKind;
18use crate::transaction::Transaction;
19use ab_io_type::trivial_type::TrivialType;
20use ab_merkle_tree::balanced_hashed::BalancedHashedMerkleTree;
21use ab_merkle_tree::unbalanced_hashed::UnbalancedHashedMerkleTree;
22use core::iter::TrustedLen;
23use core::{fmt, slice};
24use derive_more::From;
25
/// Generic block body, implemented by all concrete body kinds as well as the [`BlockBody`] enum
/// that unifies them.
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug,
{
    /// Owned version of this block body (only available with the `alloc` feature)
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Convert this body into its owned version, `None` if the conversion fails
    /// (only available with the `alloc` feature)
    #[cfg(feature = "alloc")]
    fn try_to_owned(self) -> Option<Self::Owned>;

    /// Compute the Merkle root of this block body
    fn root(&self) -> Blake3Hash;
}
44
45#[inline]
47pub fn compute_segments_root<Item, Iter>(segment_roots: Iter) -> Blake3Hash
48where
49 Item: AsRef<[u8]>,
50 Iter: IntoIterator<Item = Item>,
51{
52 const _: () = {
56 assert!(u32::MAX == 4294967295);
57 };
58 let root = UnbalancedHashedMerkleTree::compute_root_only::<4294967295, _, _>(
60 segment_roots.into_iter().map(|segment_root| {
61 blake3::hash(segment_root.as_ref())
64 }),
65 );
66
67 Blake3Hash::new(root.unwrap_or_default())
68}
69
/// Information about a single intermediate shard block, borrowed from an encoded body.
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Header of the intermediate shard block
    pub header: IntermediateShardHeader<'a>,
    /// Merkle proof tying the segment roots below to the header's body root; `None` when the
    /// block carries no segment roots (see the parsing logic in
    /// `IntermediateShardBlocksInfo::try_from_bytes`)
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots produced by the shard itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Segment roots aggregated from child shards
    pub child_segment_roots: &'a [SegmentRoot],
}
82
83impl IntermediateShardBlockInfo<'_> {
84 #[inline]
86 pub fn root(&self) -> Blake3Hash {
87 const MAX_N: usize = 3;
89 let leaves: [_; MAX_N] = [
90 **self.header.root(),
91 *compute_segments_root(self.own_segment_roots),
92 *compute_segments_root(self.child_segment_roots),
93 ];
94
95 let root = UnbalancedHashedMerkleTree::compute_root_only::<MAX_N, _, _>(leaves)
96 .expect("The list is not empty; qed");
97
98 Blake3Hash::new(root)
99 }
100}
101
/// A lazily-decoded collection of intermediate shard block infos.
///
/// Stores the raw encoded bytes; entries are decoded on demand by `iter()` after the constructor
/// has validated the whole encoding once.
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of blocks encoded in `bytes` (parsed from the `u16` length prefix)
    num_blocks: usize,
    // Encoded blocks, starting at the per-block counts table (length prefix already stripped)
    bytes: &'a [u8],
}
108
109impl<'a> IntermediateShardBlocksInfo<'a> {
110 #[inline]
116 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
117 let num_blocks = bytes.split_off(..size_of::<u16>())?;
131 let num_blocks = usize::from(u16::from_le_bytes([num_blocks[1], num_blocks[2]]));
132
133 let bytes_start = bytes;
134
135 let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() + size_of::<u16>()))?;
136
137 let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
138
139 for _ in 0..num_blocks {
140 let num_own_segment_roots = usize::from(counts[0]);
141 let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
142 counts = &counts[3..];
143
144 (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
145
146 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
147
148 if num_own_segment_roots + num_child_segment_roots > 0 {
149 let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
150 }
151
152 let _segment_roots = remainder.split_off(
153 ..(num_own_segment_roots + num_child_segment_roots) * SegmentRoot::SIZE,
154 )?;
155 }
156
157 Some((
158 Self {
159 num_blocks,
160 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
161 },
162 remainder,
163 ))
164 }
165
166 #[inline]
168 pub fn iter(
169 &self,
170 ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + Clone + 'a
171 {
172 let (mut counts, mut remainder) = unsafe {
174 self.bytes
175 .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
176 };
177
178 (0..self.num_blocks).map(move |_| {
179 let num_own_segment_roots = usize::from(counts[0]);
180 let num_child_segment_roots = usize::from(u16::from_le_bytes([counts[1], counts[2]]));
181 counts = &counts[3..];
182
183 let header;
185 (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
186 .expect("Already checked in constructor; qed");
187
188 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
189 .expect("Already checked in constructor; qed");
190
191 let segment_roots_proof = if num_own_segment_roots + num_child_segment_roots > 0 {
192 let segment_roots_proof;
193 (segment_roots_proof, remainder) =
195 unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
196 Some(unsafe {
198 segment_roots_proof
199 .as_ptr()
200 .cast::<[u8; SegmentRoot::SIZE]>()
201 .as_ref_unchecked()
202 })
203 } else {
204 None
205 };
206
207 let own_segment_roots;
208 (own_segment_roots, remainder) =
210 unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
211 let own_segment_roots = unsafe {
213 slice::from_raw_parts(
214 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
215 num_own_segment_roots,
216 )
217 };
218 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
219
220 let child_segment_roots;
221 (child_segment_roots, remainder) = unsafe {
223 remainder.split_at_unchecked(num_child_segment_roots * SegmentRoot::SIZE)
224 };
225 let child_segment_roots = unsafe {
227 slice::from_raw_parts(
228 child_segment_roots
229 .as_ptr()
230 .cast::<[u8; SegmentRoot::SIZE]>(),
231 num_child_segment_roots,
232 )
233 };
234 let child_segment_roots = SegmentRoot::slice_from_repr(child_segment_roots);
235
236 IntermediateShardBlockInfo {
237 header,
238 segment_roots_proof,
239 own_segment_roots,
240 child_segment_roots,
241 }
242 })
243 }
244
245 #[inline(always)]
247 pub const fn len(&self) -> usize {
248 self.num_blocks
249 }
250
251 #[inline(always)]
253 pub const fn is_empty(&self) -> bool {
254 self.num_blocks == 0
255 }
256
257 #[inline]
261 pub fn segments_root(&self) -> Blake3Hash {
262 compute_segments_root(
263 self.iter()
264 .flat_map(|shard_block_info| {
265 [
266 shard_block_info.own_segment_roots,
267 shard_block_info.child_segment_roots,
268 ]
269 })
270 .flatten(),
271 )
272 }
273
274 #[inline]
278 pub fn headers_root(&self) -> Blake3Hash {
279 let root =
280 UnbalancedHashedMerkleTree::compute_root_only::<{ u16::MAX as usize + 1 }, _, _>(
281 self.iter().map(|shard_block_info| {
283 blake3::hash(shard_block_info.header.root().as_ref())
288 }),
289 )
290 .unwrap_or_default();
291
292 Blake3Hash::new(root)
293 }
294
295 #[inline]
299 pub fn root(&self) -> Blake3Hash {
300 let root =
301 UnbalancedHashedMerkleTree::compute_root_only::<{ u16::MAX as usize + 1 }, _, _>(
302 self.iter().map(|shard_block_info| shard_block_info.root()),
303 )
304 .unwrap_or_default();
305
306 Blake3Hash::new(root)
307 }
308}
309
/// Body of a beacon chain block.
#[derive(Debug, Copy, Clone)]
pub struct BeaconChainBody<'a> {
    /// Segment roots produced by the beacon chain itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Intermediate shard blocks included in this block
    pub intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    /// Proof-of-time checkpoints; parsing rejects bodies without at least one checkpoint
    pub pot_checkpoints: &'a [PotCheckpoints],
}
321
impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn try_to_owned(self) -> Option<Self::Owned> {
        // Delegates to the inherent `to_owned`, discarding the error detail
        self.to_owned().ok()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root` implementation
        self.root()
    }
}
337
338impl<'a> BeaconChainBody<'a> {
339 #[inline]
345 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
346 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
354 let num_pot_checkpoints =
356 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
357
358 if num_pot_checkpoints == 0 {
359 return None;
360 }
361
362 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
363 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
364
365 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
366 let own_segment_roots = unsafe {
368 slice::from_raw_parts(
369 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
370 num_own_segment_roots,
371 )
372 };
373 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
374
375 let (intermediate_shard_blocks, remainder) =
376 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
377
378 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
379 let pot_checkpoints = unsafe {
381 slice::from_raw_parts(
382 pot_checkpoints
383 .as_ptr()
384 .cast::<[u8; PotCheckpoints::SIZE]>(),
385 num_pot_checkpoints,
386 )
387 };
388 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
389
390 let body = Self {
391 pot_checkpoints,
392 own_segment_roots,
393 intermediate_shard_blocks,
394 };
395
396 if !body.is_internally_consistent() {
397 return None;
398 }
399
400 Some((body, remainder))
401 }
402
403 #[inline]
405 pub fn is_internally_consistent(&self) -> bool {
406 self.intermediate_shard_blocks
407 .iter()
408 .all(|intermediate_shard_block| {
409 let Some(&segment_roots_proof) = intermediate_shard_block.segment_roots_proof
410 else {
411 return true;
412 };
413
414 BalancedHashedMerkleTree::<2>::verify(
415 &intermediate_shard_block.header.result.body_root,
416 &[segment_roots_proof],
417 0,
418 BalancedHashedMerkleTree::compute_root_only(&[
419 *compute_segments_root(intermediate_shard_block.own_segment_roots),
420 *compute_segments_root(intermediate_shard_block.child_segment_roots),
421 ]),
422 )
423 })
424 }
425
426 #[inline]
429 pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
430 let num_pot_checkpoints = bytes.split_off(..size_of::<u16>())?;
438 let num_pot_checkpoints =
440 *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;
441
442 if num_pot_checkpoints == 0 {
443 return None;
444 }
445
446 let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
447 let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
448
449 let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
450 let own_segment_roots = unsafe {
452 slice::from_raw_parts(
453 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
454 num_own_segment_roots,
455 )
456 };
457 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
458
459 let (intermediate_shard_blocks, remainder) =
460 IntermediateShardBlocksInfo::try_from_bytes(bytes)?;
461
462 let pot_checkpoints = bytes.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
463 let pot_checkpoints = unsafe {
465 slice::from_raw_parts(
466 pot_checkpoints
467 .as_ptr()
468 .cast::<[u8; PotCheckpoints::SIZE]>(),
469 num_pot_checkpoints,
470 )
471 };
472 let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);
473
474 Some((
475 Self {
476 pot_checkpoints,
477 own_segment_roots,
478 intermediate_shard_blocks,
479 },
480 remainder,
481 ))
482 }
483
484 #[cfg(feature = "alloc")]
486 #[inline(always)]
487 pub fn to_owned(self) -> Result<OwnedBeaconChainBody, OwnedBeaconChainBodyError> {
488 OwnedBeaconChainBody::from_body(self)
489 }
490
491 #[inline]
493 pub fn root(&self) -> Blake3Hash {
494 let root = BalancedHashedMerkleTree::compute_root_only(&[
495 *compute_segments_root(self.own_segment_roots),
496 *self.intermediate_shard_blocks.segments_root(),
497 *self.intermediate_shard_blocks.headers_root(),
498 blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
499 .into(),
500 ]);
501
502 Blake3Hash::new(root)
503 }
504}
505
/// Information about a single leaf shard block, borrowed from an encoded body.
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Header of the leaf shard block
    pub header: LeafShardHeader<'a>,
    /// Merkle proof tying the segment roots below to the header's body root; `None` when the
    /// block carries no segment roots
    pub segment_roots_proof: Option<&'a [u8; 32]>,
    /// Segment roots produced by the shard itself
    pub own_segment_roots: &'a [SegmentRoot],
}
516
/// A lazily-decoded collection of leaf shard block infos.
///
/// Stores the raw encoded bytes; entries are decoded on demand by `iter()` after the constructor
/// has validated the whole encoding once.
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of blocks encoded in `bytes` (parsed from the `u16` length prefix)
    num_blocks: usize,
    // Encoded blocks, starting at the per-block counts table (length prefix already stripped)
    bytes: &'a [u8],
}
523
impl<'a> LeafShardBlocksInfo<'a> {
    /// Parse a collection of leaf shard block infos from `bytes`.
    ///
    /// Returns the parsed collection and the remaining (unconsumed) bytes, or `None` if the input
    /// is malformed (truncated, misaligned or with non-zero padding).
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Little-endian `u16` prefix containing the number of blocks
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // Per-block counts table: one `u8` own segment root count per block
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Walk every block once to validate the structure; `iter()` relies on this validation to
        // use unchecked splits later
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // A segment roots proof is only present when there is at least one segment root
            if num_own_segment_roots > 0 {
                let _segment_roots_proof = remainder.split_off(..SegmentRoot::SIZE)?;
            }

            let _own_segment_roots =
                remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        }

        Some((
            Self {
                num_blocks,
                // Everything between the counts table start and the first unconsumed byte
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Iterate over individual shard block infos.
    ///
    /// Decoding is infallible here because the constructor already validated every block.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + Clone + 'a {
        // SAFETY: the constructor verified that `bytes` starts with a counts table of exactly
        // this length
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segment_roots_proof = if num_own_segment_roots > 0 {
                let segment_roots_proof;
                // SAFETY: length was validated in the constructor
                (segment_roots_proof, remainder) =
                    unsafe { remainder.split_at_unchecked(SegmentRoot::SIZE) };
                // SAFETY: pointer corresponds to a slice of exactly `SegmentRoot::SIZE` bytes
                Some(unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; SegmentRoot::SIZE]>()
                        .as_ref_unchecked()
                })
            } else {
                None
            };

            let own_segment_roots;
            // SAFETY: length was validated in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: slice length is an exact multiple of `SegmentRoot::SIZE`
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            LeafShardBlockInfo {
                header,
                segment_roots_proof,
                own_segment_roots,
            }
        })
    }

    /// Number of leaf shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

    /// Compute the combined Merkle root over all own segment roots of all blocks
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        compute_segments_root(
            self.iter()
                .flat_map(|shard_block_info| shard_block_info.own_segment_roots),
        )
    }

    /// Compute the Merkle root over the hashes of all block header roots.
    ///
    /// Returns the default hash when there are no blocks.
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root =
            UnbalancedHashedMerkleTree::compute_root_only::<{ u16::MAX as usize + 1 }, _, _>(
                self.iter().map(|shard_block_info| {
                    blake3::hash(shard_block_info.header.root().as_ref())
                }),
            )
            .unwrap_or_default();

        Blake3Hash::new(root)
    }
}
680
/// A lazily-decoded collection of transactions.
///
/// Stores the raw encoded bytes; transactions are decoded on demand by `iter()` after the
/// constructor has validated the whole encoding once.
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    // Number of transactions encoded in `bytes` (parsed from the `u32` length prefix)
    num_transactions: usize,
    // Encoded transactions with `u128` alignment padding (length prefix already stripped)
    bytes: &'a [u8],
}
687
688impl<'a> Transactions<'a> {
689 #[inline]
695 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
696 let num_transactions = bytes.split_off(..size_of::<u32>())?;
704 let num_transactions = u32::from_le_bytes([
705 num_transactions[0],
706 num_transactions[1],
707 num_transactions[2],
708 num_transactions[3],
709 ]) as usize;
710
711 let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
712 let bytes_start = remainder;
713
714 for _ in 0..num_transactions {
715 (_, remainder) = Transaction::try_from_bytes(bytes)?;
716 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
717 }
718
719 Some((
720 Self {
721 num_transactions,
722 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
723 },
724 remainder,
725 ))
726 }
727
728 #[inline]
730 pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + Clone + 'a {
731 let mut remainder = self.bytes;
732
733 (0..self.num_transactions).map(move |_| {
734 let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };
736
737 remainder = &remainder[transaction.encoded_size()..];
738 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
739 .expect("Already checked in constructor; qed");
740
741 transaction
742 })
743 }
744
745 #[inline(always)]
747 pub const fn len(&self) -> usize {
748 self.num_transactions
749 }
750
751 #[inline(always)]
753 pub const fn is_empty(&self) -> bool {
754 self.num_transactions == 0
755 }
756
757 #[inline]
761 pub fn root(&self) -> Blake3Hash {
762 let root =
763 UnbalancedHashedMerkleTree::compute_root_only::<{ u16::MAX as usize + 1 }, _, _>(
764 self.iter().map(|transaction| {
765 blake3::hash(transaction.hash().as_ref())
769 }),
770 )
771 .unwrap_or_default();
772
773 Blake3Hash::new(root)
774 }
775}
776
/// Body of an intermediate shard block.
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBody<'a> {
    /// Segment roots produced by the shard itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Leaf shard blocks included in this block
    pub leaf_shard_blocks: LeafShardBlocksInfo<'a>,
    /// Transactions included in this block
    pub transactions: Transactions<'a>,
}
787
impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn try_to_owned(self) -> Option<Self::Owned> {
        // Delegates to the inherent `to_owned`, discarding the error detail
        self.to_owned().ok()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root` implementation
        self.root()
    }
}
803
impl<'a> IntermediateShardBody<'a> {
    /// Parse an intermediate shard body from `bytes`, validating internal consistency.
    ///
    /// Returns the body and remaining (unconsumed) bytes, or `None` if the input is malformed or
    /// the segment-root proofs do not match the leaf shard block headers.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // `u8` prefix with the number of own segment roots
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: the slice length was checked by `split_off` and is an exact multiple of
        // `SegmentRoot::SIZE`
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let (transactions, remainder) = Transactions::try_from_bytes(remainder)?;

        let body = Self {
            own_segment_roots,
            leaf_shard_blocks,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check that every leaf shard block's segment roots are committed to by its header.
    ///
    /// For each block carrying a proof, verifies it against the header's body root with the leaf
    /// being the root over that block's own segment roots. Blocks without a proof are trivially
    /// consistent.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        self.leaf_shard_blocks.iter().all(|leaf_shard_block| {
            let Some(&segment_roots_proof) = leaf_shard_block.segment_roots_proof else {
                return true;
            };

            BalancedHashedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                &[segment_roots_proof],
                0,
                *compute_segments_root(leaf_shard_block.own_segment_roots),
            )
        })
    }

    /// Like [`Self::try_from_bytes`], but skips the internal consistency check.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: the slice length was checked by `split_off` and is an exact multiple of
        // `SegmentRoot::SIZE`
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let (transactions, remainder) = Transactions::try_from_bytes(remainder)?;

        Some((
            Self {
                own_segment_roots,
                leaf_shard_blocks,
                transactions,
            },
            remainder,
        ))
    }

    /// Hash of the (headers root, transactions root) subtree of [`Self::root`]'s four-leaf tree,
    /// serving as the sibling proof for the segment-roots half
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        BalancedHashedMerkleTree::compute_root_only(&[
            *self.leaf_shard_blocks.headers_root(),
            *self.transactions.root(),
        ])
    }

    /// Convert to the owned representation (only available with the `alloc` feature)
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> Result<OwnedIntermediateShardBody, OwnedIntermediateShardBodyError> {
        OwnedIntermediateShardBody::from_body(self)
    }

    /// Compute the Merkle root of the body.
    ///
    /// The tree has four leaves: own segments root, leaf shard blocks segments root, leaf shard
    /// blocks headers root, and the transactions root.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedHashedMerkleTree::compute_root_only(&[
            *compute_segments_root(self.own_segment_roots),
            *self.leaf_shard_blocks.segments_root(),
            *self.leaf_shard_blocks.headers_root(),
            *self.transactions.root(),
        ]);

        Blake3Hash::new(root)
    }
}
933
/// Body of a leaf shard block.
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBody<'a> {
    /// Segment roots produced by the shard itself
    pub own_segment_roots: &'a [SegmentRoot],
    /// Transactions included in this block
    pub transactions: Transactions<'a>,
}
942
impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn try_to_owned(self) -> Option<Self::Owned> {
        // Delegates to the inherent `to_owned`, discarding the error detail
        self.to_owned().ok()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root` implementation
        self.root()
    }
}
958
impl<'a> LeafShardBody<'a> {
    /// Parse a leaf shard body from `bytes`.
    ///
    /// Returns the body and remaining (unconsumed) bytes, or `None` if the input is malformed.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // `u8` prefix with the number of own segment roots
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: the slice length was checked by `split_off` and is an exact multiple of
        // `SegmentRoot::SIZE`
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        let body = Self {
            own_segment_roots,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// A leaf shard body has no nested proofs to cross-check, so it is always consistent
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Like [`Self::try_from_bytes`], but skips the (trivial) internal consistency check.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
        // SAFETY: the slice length was checked by `split_off` and is an exact multiple of
        // `SegmentRoot::SIZE`
        let own_segment_roots = unsafe {
            slice::from_raw_parts(
                own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                num_own_segment_roots,
            )
        };
        let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segment_roots,
                transactions,
            },
            remainder,
        ))
    }

    /// The transactions root, which is the sibling of the segments root in [`Self::root`]'s
    /// two-leaf tree and therefore serves as its proof
    #[inline]
    pub fn segment_roots_proof(&self) -> [u8; 32] {
        *self.transactions.root()
    }

    /// Convert to the owned representation (only available with the `alloc` feature)
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> Result<OwnedLeafShardBody, OwnedLeafShardBodyError> {
        OwnedLeafShardBody::from_body(self)
    }

    /// Compute the Merkle root of the body.
    ///
    /// The tree has two leaves: own segments root and the transactions root.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedHashedMerkleTree::compute_root_only(&[
            *compute_segments_root(self.own_segment_roots),
            *self.transactions.root(),
        ]);

        Blake3Hash::new(root)
    }
}
1063
/// Block body of any shard kind.
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Body of a beacon chain block
    BeaconChain(BeaconChainBody<'a>),
    /// Body of an intermediate shard block
    IntermediateShard(IntermediateShardBody<'a>),
    /// Body of a leaf shard block
    LeafShard(LeafShardBody<'a>),
}
1077
impl<'a> GenericBlockBody<'a> for BlockBody<'a> {
    #[cfg(feature = "alloc")]
    type Owned = OwnedBlockBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn try_to_owned(self) -> Option<Self::Owned> {
        // Delegates to the inherent `to_owned`, discarding the error detail
        self.to_owned().ok()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root` implementation
        self.root()
    }
}
1093
1094impl<'a> BlockBody<'a> {
1095 #[inline]
1102 pub fn try_from_bytes(bytes: &'a [u8], shard_kind: ShardKind) -> Option<(Self, &'a [u8])> {
1103 match shard_kind {
1104 ShardKind::BeaconChain => {
1105 let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1106 Some((Self::BeaconChain(body), remainder))
1107 }
1108 ShardKind::IntermediateShard => {
1109 let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1110 Some((Self::IntermediateShard(body), remainder))
1111 }
1112 ShardKind::LeafShard => {
1113 let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1114 Some((Self::LeafShard(body), remainder))
1115 }
1116 ShardKind::Phantom | ShardKind::Invalid => {
1117 None
1119 }
1120 }
1121 }
1122
1123 #[inline]
1125 pub fn is_internally_consistent(&self) -> bool {
1126 match self {
1127 Self::BeaconChain(body) => body.is_internally_consistent(),
1128 Self::IntermediateShard(body) => body.is_internally_consistent(),
1129 Self::LeafShard(body) => body.is_internally_consistent(),
1130 }
1131 }
1132
1133 #[inline]
1136 pub fn try_from_bytes_unchecked(
1137 bytes: &'a [u8],
1138 shard_kind: ShardKind,
1139 ) -> Option<(Self, &'a [u8])> {
1140 match shard_kind {
1141 ShardKind::BeaconChain => {
1142 let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1143 Some((Self::BeaconChain(body), remainder))
1144 }
1145 ShardKind::IntermediateShard => {
1146 let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1147 Some((Self::IntermediateShard(body), remainder))
1148 }
1149 ShardKind::LeafShard => {
1150 let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1151 Some((Self::LeafShard(body), remainder))
1152 }
1153 ShardKind::Phantom | ShardKind::Invalid => {
1154 None
1156 }
1157 }
1158 }
1159
1160 #[cfg(feature = "alloc")]
1162 #[inline(always)]
1163 pub fn to_owned(self) -> Result<OwnedBlockBody, OwnedBlockBodyError> {
1164 OwnedBlockBody::from_body(self)
1165 }
1166
1167 #[inline]
1172 pub fn root(&self) -> Blake3Hash {
1173 match self {
1174 Self::BeaconChain(body) => body.root(),
1175 Self::IntermediateShard(body) => body.root(),
1176 Self::LeafShard(body) => body.root(),
1177 }
1178 }
1179}